Diffstat (limited to 'src')
-rw-r--r--src/actors/scala/actors/AbstractActor.scala1
-rw-r--r--src/actors/scala/actors/Actor.scala2
-rw-r--r--src/actors/scala/actors/ActorRef.scala3
-rw-r--r--src/actors/scala/actors/CanReply.scala1
-rw-r--r--src/actors/scala/actors/Channel.scala2
-rw-r--r--src/actors/scala/actors/DaemonActor.scala1
-rw-r--r--src/actors/scala/actors/Debug.scala1
-rw-r--r--src/actors/scala/actors/Future.scala4
-rw-r--r--src/actors/scala/actors/IScheduler.scala1
-rw-r--r--src/actors/scala/actors/InputChannel.scala1
-rw-r--r--src/actors/scala/actors/InternalActor.scala2
-rw-r--r--src/actors/scala/actors/InternalReplyReactor.scala1
-rw-r--r--src/actors/scala/actors/OutputChannel.scala1
-rw-r--r--src/actors/scala/actors/Reactor.scala1
-rw-r--r--src/actors/scala/actors/ReplyReactor.scala2
-rw-r--r--src/actors/scala/actors/Scheduler.scala1
-rw-r--r--src/actors/scala/actors/SchedulerAdapter.scala1
-rw-r--r--src/actors/scala/actors/UncaughtException.scala1
-rw-r--r--src/actors/scala/actors/package.scala1
-rw-r--r--src/actors/scala/actors/remote/JavaSerializer.scala1
-rw-r--r--src/actors/scala/actors/remote/RemoteActor.scala2
-rw-r--r--src/actors/scala/actors/remote/Serializer.scala1
-rw-r--r--src/actors/scala/actors/remote/Service.scala1
-rw-r--r--src/actors/scala/actors/remote/TcpService.scala2
-rw-r--r--src/actors/scala/actors/scheduler/ActorGC.scala1
-rw-r--r--src/actors/scala/actors/scheduler/DaemonScheduler.scala1
-rw-r--r--src/actors/scala/actors/scheduler/ExecutorScheduler.scala2
-rw-r--r--src/actors/scala/actors/scheduler/ForkJoinScheduler.scala3
-rw-r--r--src/actors/scala/actors/scheduler/ResizableThreadPoolScheduler.scala3
-rw-r--r--src/actors/scala/actors/scheduler/SingleThreadedScheduler.scala1
-rw-r--r--src/actors/scala/actors/scheduler/TerminationService.scala2
-rw-r--r--src/android-library/scala/reflect/ScalaBeanInfo.scala1
-rw-r--r--src/asm/scala/tools/asm/AnnotationVisitor.java66
-rw-r--r--src/asm/scala/tools/asm/AnnotationWriter.java58
-rw-r--r--src/asm/scala/tools/asm/Attribute.java193
-rw-r--r--src/asm/scala/tools/asm/ByteVector.java49
-rw-r--r--src/asm/scala/tools/asm/ClassReader.java2980
-rw-r--r--src/asm/scala/tools/asm/ClassVisitor.java233
-rw-r--r--src/asm/scala/tools/asm/ClassWriter.java499
-rw-r--r--src/asm/scala/tools/asm/Context.java110
-rw-r--r--src/asm/scala/tools/asm/FieldVisitor.java34
-rw-r--r--src/asm/scala/tools/asm/FieldWriter.java72
-rw-r--r--src/asm/scala/tools/asm/Frame.java1024
-rw-r--r--src/asm/scala/tools/asm/Handle.java48
-rw-r--r--src/asm/scala/tools/asm/Handler.java9
-rw-r--r--src/asm/scala/tools/asm/Item.java162
-rw-r--r--src/asm/scala/tools/asm/Label.java135
-rw-r--r--src/asm/scala/tools/asm/MethodVisitor.java516
-rw-r--r--src/asm/scala/tools/asm/MethodWriter.java1165
-rw-r--r--src/asm/scala/tools/asm/Type.java254
-rw-r--r--src/asm/scala/tools/asm/signature/SignatureReader.java181
-rw-r--r--src/asm/scala/tools/asm/signature/SignatureVisitor.java51
-rw-r--r--src/asm/scala/tools/asm/signature/SignatureWriter.java2
-rw-r--r--src/asm/scala/tools/asm/tree/AbstractInsnNode.java30
-rw-r--r--src/asm/scala/tools/asm/tree/AnnotationNode.java55
-rw-r--r--src/asm/scala/tools/asm/tree/ClassNode.java102
-rw-r--r--src/asm/scala/tools/asm/tree/FieldInsnNode.java34
-rw-r--r--src/asm/scala/tools/asm/tree/FieldNode.java104
-rw-r--r--src/asm/scala/tools/asm/tree/FrameNode.java121
-rw-r--r--src/asm/scala/tools/asm/tree/IincInsnNode.java8
-rw-r--r--src/asm/scala/tools/asm/tree/InnerClassNode.java42
-rw-r--r--src/asm/scala/tools/asm/tree/InsnList.java128
-rw-r--r--src/asm/scala/tools/asm/tree/InsnNode.java33
-rw-r--r--src/asm/scala/tools/asm/tree/IntInsnNode.java13
-rw-r--r--src/asm/scala/tools/asm/tree/InvokeDynamicInsnNode.java22
-rw-r--r--src/asm/scala/tools/asm/tree/JumpInsnNode.java25
-rw-r--r--src/asm/scala/tools/asm/tree/LabelNode.java2
-rw-r--r--src/asm/scala/tools/asm/tree/LdcInsnNode.java11
-rw-r--r--src/asm/scala/tools/asm/tree/LineNumberNode.java8
-rw-r--r--src/asm/scala/tools/asm/tree/LocalVariableNode.java45
-rw-r--r--src/asm/scala/tools/asm/tree/LookupSwitchInsnNode.java21
-rw-r--r--src/asm/scala/tools/asm/tree/MethodInsnNode.java38
-rw-r--r--src/asm/scala/tools/asm/tree/MethodNode.java236
-rw-r--r--src/asm/scala/tools/asm/tree/MultiANewArrayInsnNode.java10
-rw-r--r--src/asm/scala/tools/asm/tree/TableSwitchInsnNode.java30
-rw-r--r--src/asm/scala/tools/asm/tree/TryCatchBlockNode.java32
-rw-r--r--src/asm/scala/tools/asm/tree/TypeInsnNode.java19
-rw-r--r--src/asm/scala/tools/asm/tree/VarInsnNode.java21
-rw-r--r--src/asm/scala/tools/asm/tree/analysis/Analyzer.java160
-rw-r--r--src/asm/scala/tools/asm/tree/analysis/AnalyzerException.java11
-rw-r--r--src/asm/scala/tools/asm/tree/analysis/BasicInterpreter.java483
-rw-r--r--src/asm/scala/tools/asm/tree/analysis/BasicValue.java9
-rw-r--r--src/asm/scala/tools/asm/tree/analysis/BasicVerifier.java598
-rw-r--r--src/asm/scala/tools/asm/tree/analysis/Frame.java854
-rw-r--r--src/asm/scala/tools/asm/tree/analysis/Interpreter.java110
-rw-r--r--src/asm/scala/tools/asm/tree/analysis/SimpleVerifier.java119
-rw-r--r--src/asm/scala/tools/asm/tree/analysis/SourceInterpreter.java148
-rw-r--r--src/asm/scala/tools/asm/tree/analysis/SourceValue.java8
-rw-r--r--src/asm/scala/tools/asm/tree/analysis/Subroutine.java9
-rw-r--r--src/asm/scala/tools/asm/util/ASMifiable.java13
-rw-r--r--src/asm/scala/tools/asm/util/ASMifier.java485
-rw-r--r--src/asm/scala/tools/asm/util/CheckAnnotationAdapter.java28
-rw-r--r--src/asm/scala/tools/asm/util/CheckClassAdapter.java587
-rw-r--r--src/asm/scala/tools/asm/util/CheckFieldAdapter.java23
-rw-r--r--src/asm/scala/tools/asm/util/CheckMethodAdapter.java937
-rw-r--r--src/asm/scala/tools/asm/util/CheckSignatureAdapter.java57
-rw-r--r--src/asm/scala/tools/asm/util/Printer.java316
-rw-r--r--src/asm/scala/tools/asm/util/SignatureChecker.java47
-rw-r--r--src/asm/scala/tools/asm/util/Textifiable.java8
-rw-r--r--src/asm/scala/tools/asm/util/Textifier.java446
-rw-r--r--src/asm/scala/tools/asm/util/TraceAnnotationVisitor.java23
-rw-r--r--src/asm/scala/tools/asm/util/TraceClassVisitor.java159
-rw-r--r--src/asm/scala/tools/asm/util/TraceFieldVisitor.java10
-rw-r--r--src/asm/scala/tools/asm/util/TraceMethodVisitor.java89
-rw-r--r--src/asm/scala/tools/asm/util/TraceSignatureVisitor.java59
-rw-r--r--src/build/genprod.scala5
-rw-r--r--src/compiler/scala/reflect/macros/compiler/DefaultMacroCompiler.scala33
-rw-r--r--src/compiler/scala/reflect/macros/compiler/Errors.scala113
-rw-r--r--src/compiler/scala/reflect/macros/compiler/Resolvers.scala91
-rw-r--r--src/compiler/scala/reflect/macros/compiler/Validators.scala193
-rw-r--r--src/compiler/scala/reflect/macros/contexts/Aliases.scala (renamed from src/compiler/scala/reflect/macros/runtime/Aliases.scala)5
-rw-r--r--src/compiler/scala/reflect/macros/contexts/Context.scala29
-rw-r--r--src/compiler/scala/reflect/macros/contexts/Enclosures.scala36
-rw-r--r--src/compiler/scala/reflect/macros/contexts/Evals.scala (renamed from src/compiler/scala/reflect/macros/runtime/Evals.scala)4
-rw-r--r--src/compiler/scala/reflect/macros/contexts/ExprUtils.scala (renamed from src/compiler/scala/reflect/macros/runtime/ExprUtils.scala)3
-rw-r--r--src/compiler/scala/reflect/macros/contexts/FrontEnds.scala (renamed from src/compiler/scala/reflect/macros/runtime/FrontEnds.scala)4
-rw-r--r--src/compiler/scala/reflect/macros/contexts/Infrastructure.scala (renamed from src/compiler/scala/reflect/macros/runtime/Infrastructure.scala)2
-rw-r--r--src/compiler/scala/reflect/macros/contexts/Names.scala (renamed from src/compiler/scala/reflect/macros/runtime/Names.scala)15
-rw-r--r--src/compiler/scala/reflect/macros/contexts/Parsers.scala (renamed from src/compiler/scala/reflect/macros/runtime/Parsers.scala)2
-rw-r--r--src/compiler/scala/reflect/macros/contexts/Reifiers.scala (renamed from src/compiler/scala/reflect/macros/runtime/Reifiers.scala)8
-rw-r--r--src/compiler/scala/reflect/macros/contexts/Synthetics.scala66
-rw-r--r--src/compiler/scala/reflect/macros/contexts/Traces.scala (renamed from src/compiler/scala/reflect/macros/runtime/Traces.scala)2
-rw-r--r--src/compiler/scala/reflect/macros/contexts/Typers.scala (renamed from src/compiler/scala/reflect/macros/runtime/Typers.scala)8
-rw-r--r--src/compiler/scala/reflect/macros/runtime/Context.scala28
-rw-r--r--src/compiler/scala/reflect/macros/runtime/Enclosures.scala24
-rw-r--r--src/compiler/scala/reflect/macros/runtime/JavaReflectionRuntimes.scala31
-rw-r--r--src/compiler/scala/reflect/macros/runtime/MacroRuntimes.scala74
-rw-r--r--src/compiler/scala/reflect/macros/runtime/ScalaReflectionRuntimes.scala33
-rw-r--r--src/compiler/scala/reflect/macros/runtime/package.scala5
-rw-r--r--src/compiler/scala/reflect/macros/util/Helpers.scala71
-rw-r--r--src/compiler/scala/reflect/reify/Errors.scala6
-rw-r--r--src/compiler/scala/reflect/reify/Phases.scala5
-rw-r--r--src/compiler/scala/reflect/reify/Reifier.scala29
-rw-r--r--src/compiler/scala/reflect/reify/States.scala1
-rw-r--r--src/compiler/scala/reflect/reify/Taggers.scala5
-rw-r--r--src/compiler/scala/reflect/reify/codegen/GenAnnotationInfos.scala3
-rw-r--r--src/compiler/scala/reflect/reify/codegen/GenNames.scala5
-rw-r--r--src/compiler/scala/reflect/reify/codegen/GenPositions.scala3
-rw-r--r--src/compiler/scala/reflect/reify/codegen/GenSymbols.scala11
-rw-r--r--src/compiler/scala/reflect/reify/codegen/GenTrees.scala24
-rw-r--r--src/compiler/scala/reflect/reify/codegen/GenTypes.scala5
-rw-r--r--src/compiler/scala/reflect/reify/codegen/GenUtils.scala63
-rw-r--r--src/compiler/scala/reflect/reify/package.scala14
-rw-r--r--src/compiler/scala/reflect/reify/phases/Calculate.scala3
-rw-r--r--src/compiler/scala/reflect/reify/phases/Metalevels.scala23
-rw-r--r--src/compiler/scala/reflect/reify/phases/Reify.scala6
-rw-r--r--src/compiler/scala/reflect/reify/phases/Reshape.scala46
-rw-r--r--src/compiler/scala/reflect/reify/utils/Extractors.scala4
-rw-r--r--src/compiler/scala/reflect/reify/utils/NodePrinters.scala22
-rw-r--r--src/compiler/scala/reflect/reify/utils/SymbolTables.scala16
-rw-r--r--src/compiler/scala/tools/ant/Pack200Task.scala6
-rw-r--r--src/compiler/scala/tools/ant/Same.scala7
-rw-r--r--src/compiler/scala/tools/ant/ScalaTool.scala14
-rw-r--r--src/compiler/scala/tools/ant/Scalac.scala21
-rw-r--r--src/compiler/scala/tools/ant/antlib.xml2
-rw-r--r--src/compiler/scala/tools/ant/sabbus/Break.scala3
-rw-r--r--src/compiler/scala/tools/ant/sabbus/Make.scala3
-rw-r--r--src/compiler/scala/tools/ant/sabbus/ScalacFork.scala12
-rw-r--r--src/compiler/scala/tools/ant/sabbus/Settings.scala2
-rw-r--r--src/compiler/scala/tools/ant/sabbus/TaskArgs.scala2
-rw-r--r--src/compiler/scala/tools/ant/sabbus/Use.scala7
-rw-r--r--src/compiler/scala/tools/ant/templates/tool-unix.tmpl3
-rw-r--r--src/compiler/scala/tools/cmd/CommandLine.scala6
-rw-r--r--src/compiler/scala/tools/cmd/Demo.scala3
-rw-r--r--src/compiler/scala/tools/cmd/FromString.scala11
-rw-r--r--src/compiler/scala/tools/cmd/Interpolation.scala3
-rw-r--r--src/compiler/scala/tools/cmd/Reference.scala3
-rw-r--r--src/compiler/scala/tools/cmd/gen/AnyVals.scala31
-rw-r--r--src/compiler/scala/tools/cmd/gen/Codegen.scala2
-rw-r--r--src/compiler/scala/tools/cmd/gen/CodegenSpec.scala6
-rw-r--r--src/compiler/scala/tools/cmd/package.scala3
-rw-r--r--src/compiler/scala/tools/nsc/CompilationUnits.scala51
-rw-r--r--src/compiler/scala/tools/nsc/CompileClient.scala10
-rw-r--r--src/compiler/scala/tools/nsc/CompileServer.scala19
-rw-r--r--src/compiler/scala/tools/nsc/CompileSocket.scala16
-rw-r--r--src/compiler/scala/tools/nsc/CompilerCommand.scala14
-rw-r--r--src/compiler/scala/tools/nsc/CompilerRun.scala21
-rw-r--r--src/compiler/scala/tools/nsc/ConsoleWriter.scala4
-rw-r--r--src/compiler/scala/tools/nsc/Driver.scala13
-rw-r--r--src/compiler/scala/tools/nsc/EvalLoop.scala2
-rw-r--r--src/compiler/scala/tools/nsc/GenericRunnerCommand.scala2
-rw-r--r--src/compiler/scala/tools/nsc/GenericRunnerSettings.scala3
-rw-r--r--src/compiler/scala/tools/nsc/Global.scala546
-rw-r--r--src/compiler/scala/tools/nsc/Main.scala80
-rw-r--r--src/compiler/scala/tools/nsc/MainBench.scala16
-rw-r--r--src/compiler/scala/tools/nsc/MainTokenMetric.scala13
-rw-r--r--src/compiler/scala/tools/nsc/ObjectRunner.scala6
-rw-r--r--src/compiler/scala/tools/nsc/OfflineCompilerCommand.scala6
-rw-r--r--src/compiler/scala/tools/nsc/PhaseAssembly.scala16
-rw-r--r--src/compiler/scala/tools/nsc/Phases.scala3
-rw-r--r--src/compiler/scala/tools/nsc/Properties.scala4
-rw-r--r--src/compiler/scala/tools/nsc/ScriptRunner.scala33
-rw-r--r--src/compiler/scala/tools/nsc/SubComponent.scala4
-rwxr-xr-xsrc/compiler/scala/tools/nsc/ast/DocComments.scala27
-rw-r--r--src/compiler/scala/tools/nsc/ast/NodePrinters.scala28
-rw-r--r--src/compiler/scala/tools/nsc/ast/Positions.scala11
-rw-r--r--src/compiler/scala/tools/nsc/ast/Printers.scala97
-rw-r--r--src/compiler/scala/tools/nsc/ast/TreeBrowsers.scala30
-rw-r--r--src/compiler/scala/tools/nsc/ast/TreeDSL.scala93
-rw-r--r--src/compiler/scala/tools/nsc/ast/TreeGen.scala138
-rw-r--r--src/compiler/scala/tools/nsc/ast/TreeInfo.scala74
-rw-r--r--src/compiler/scala/tools/nsc/ast/Trees.scala59
-rwxr-xr-xsrc/compiler/scala/tools/nsc/ast/parser/MarkupParsers.scala68
-rw-r--r--src/compiler/scala/tools/nsc/ast/parser/Parsers.scala571
-rw-r--r--src/compiler/scala/tools/nsc/ast/parser/Scanners.scala290
-rwxr-xr-xsrc/compiler/scala/tools/nsc/ast/parser/SymbolicXMLBuilder.scala9
-rw-r--r--src/compiler/scala/tools/nsc/ast/parser/SyntaxAnalyzer.scala92
-rw-r--r--src/compiler/scala/tools/nsc/ast/parser/Tokens.scala38
-rw-r--r--src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala103
-rw-r--r--src/compiler/scala/tools/nsc/backend/JavaPlatform.scala15
-rw-r--r--src/compiler/scala/tools/nsc/backend/MSILPlatform.scala69
-rw-r--r--src/compiler/scala/tools/nsc/backend/ScalaPrimitives.scala32
-rw-r--r--src/compiler/scala/tools/nsc/backend/WorklistAlgorithm.scala5
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/BasicBlocks.scala141
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/ExceptionHandlers.scala23
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/GenICode.scala764
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/ICodeCheckers.scala94
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/ICodes.scala8
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/Linearizers.scala206
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/Members.scala95
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/Opcodes.scala137
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/Primitives.scala27
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/Printers.scala66
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/Repository.scala13
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/TypeKinds.scala73
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/TypeStacks.scala12
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/analysis/CopyPropagation.scala94
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/analysis/DataFlowAnalysis.scala22
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/analysis/Liveness.scala4
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/analysis/ReachingDefinitions.scala15
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala93
-rw-r--r--src/compiler/scala/tools/nsc/backend/jvm/BytecodeWriters.scala105
-rw-r--r--src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala780
-rw-r--r--src/compiler/scala/tools/nsc/backend/jvm/GenAndroid.scala62
-rw-r--r--src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala1921
-rw-r--r--src/compiler/scala/tools/nsc/backend/jvm/GenJVMASM.scala28
-rw-r--r--src/compiler/scala/tools/nsc/backend/jvm/GenJVMUtil.scala142
-rw-r--r--src/compiler/scala/tools/nsc/backend/msil/GenMSIL.scala2358
-rw-r--r--src/compiler/scala/tools/nsc/backend/opt/ClosureElimination.scala27
-rw-r--r--src/compiler/scala/tools/nsc/backend/opt/ConstantOptimization.scala620
-rw-r--r--src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala40
-rw-r--r--src/compiler/scala/tools/nsc/backend/opt/InlineExceptionHandlers.scala13
-rw-r--r--src/compiler/scala/tools/nsc/backend/opt/Inliners.scala82
-rw-r--r--src/compiler/scala/tools/nsc/dependencies/Changes.scala18
-rw-r--r--src/compiler/scala/tools/nsc/dependencies/DependencyAnalysis.scala254
-rw-r--r--src/compiler/scala/tools/nsc/dependencies/Files.scala177
-rw-r--r--src/compiler/scala/tools/nsc/interactive/BuildManager.scala93
-rw-r--r--src/compiler/scala/tools/nsc/interactive/RangePositions.scala285
-rw-r--r--src/compiler/scala/tools/nsc/interactive/RefinedBuildManager.scala355
-rw-r--r--src/compiler/scala/tools/nsc/interactive/SimpleBuildManager.scala103
-rw-r--r--src/compiler/scala/tools/nsc/interpreter/CodeHandlers.scala50
-rw-r--r--src/compiler/scala/tools/nsc/interpreter/ILoopInit.scala125
-rw-r--r--src/compiler/scala/tools/nsc/interpreter/package.scala49
-rw-r--r--src/compiler/scala/tools/nsc/io/Fileish.scala33
-rw-r--r--src/compiler/scala/tools/nsc/io/Jar.scala38
-rw-r--r--src/compiler/scala/tools/nsc/io/Lexer.scala16
-rw-r--r--src/compiler/scala/tools/nsc/io/MsilFile.scala18
-rw-r--r--src/compiler/scala/tools/nsc/io/Pickler.scala93
-rw-r--r--src/compiler/scala/tools/nsc/io/PrettyWriter.scala2
-rw-r--r--src/compiler/scala/tools/nsc/io/Replayer.scala2
-rw-r--r--src/compiler/scala/tools/nsc/io/Socket.scala5
-rw-r--r--src/compiler/scala/tools/nsc/io/SourceReader.scala7
-rw-r--r--src/compiler/scala/tools/nsc/io/package.scala17
-rw-r--r--src/compiler/scala/tools/nsc/javac/JavaParsers.scala205
-rw-r--r--src/compiler/scala/tools/nsc/javac/JavaScanners.scala288
-rw-r--r--src/compiler/scala/tools/nsc/javac/JavaTokens.scala6
-rw-r--r--src/compiler/scala/tools/nsc/matching/MatchSupport.scala138
-rw-r--r--src/compiler/scala/tools/nsc/matching/Matrix.scala259
-rw-r--r--src/compiler/scala/tools/nsc/matching/MatrixAdditions.scala193
-rw-r--r--src/compiler/scala/tools/nsc/matching/ParallelMatching.scala870
-rw-r--r--src/compiler/scala/tools/nsc/matching/PatternBindings.scala137
-rw-r--r--src/compiler/scala/tools/nsc/matching/Patterns.scala499
-rw-r--r--src/compiler/scala/tools/nsc/package.scala8
-rw-r--r--src/compiler/scala/tools/nsc/plugins/Plugin.scala128
-rw-r--r--src/compiler/scala/tools/nsc/plugins/PluginComponent.scala8
-rw-r--r--src/compiler/scala/tools/nsc/plugins/PluginDescription.scala51
-rw-r--r--src/compiler/scala/tools/nsc/plugins/Plugins.scala26
-rw-r--r--src/compiler/scala/tools/nsc/reporters/AbstractReporter.scala14
-rw-r--r--src/compiler/scala/tools/nsc/reporters/ConsoleReporter.scala12
-rw-r--r--src/compiler/scala/tools/nsc/reporters/Reporter.scala9
-rw-r--r--src/compiler/scala/tools/nsc/scratchpad/Mixer.scala3
-rw-r--r--src/compiler/scala/tools/nsc/scratchpad/SourceInserter.scala2
-rw-r--r--src/compiler/scala/tools/nsc/settings/AbsScalaSettings.scala3
-rw-r--r--src/compiler/scala/tools/nsc/settings/AbsSettings.scala13
-rw-r--r--src/compiler/scala/tools/nsc/settings/AdvancedScalaSettings.scala77
-rw-r--r--src/compiler/scala/tools/nsc/settings/AestheticSettings.scala39
-rw-r--r--src/compiler/scala/tools/nsc/settings/FscSettings.scala4
-rw-r--r--src/compiler/scala/tools/nsc/settings/MutableSettings.scala73
-rw-r--r--src/compiler/scala/tools/nsc/settings/ScalaSettings.scala39
-rw-r--r--src/compiler/scala/tools/nsc/settings/ScalaVersion.scala51
-rw-r--r--src/compiler/scala/tools/nsc/settings/StandardScalaSettings.scala9
-rw-r--r--src/compiler/scala/tools/nsc/settings/Warnings.scala25
-rw-r--r--src/compiler/scala/tools/nsc/symtab/BrowsingLoaders.scala3
-rw-r--r--src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala61
-rw-r--r--src/compiler/scala/tools/nsc/symtab/SymbolTrackers.scala10
-rw-r--r--src/compiler/scala/tools/nsc/symtab/classfile/AbstractFileReader.scala7
-rw-r--r--src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala677
-rw-r--r--src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala324
-rw-r--r--src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala178
-rw-r--r--src/compiler/scala/tools/nsc/symtab/clr/CLRTypes.scala137
-rw-r--r--src/compiler/scala/tools/nsc/symtab/clr/TypeParser.scala850
-rw-r--r--src/compiler/scala/tools/nsc/transform/AddInterfaces.scala26
-rw-r--r--src/compiler/scala/tools/nsc/transform/CleanUp.scala404
-rw-r--r--src/compiler/scala/tools/nsc/transform/Constructors.scala316
-rw-r--r--src/compiler/scala/tools/nsc/transform/Erasure.scala288
-rw-r--r--src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala126
-rw-r--r--src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala24
-rw-r--r--src/compiler/scala/tools/nsc/transform/Flatten.scala35
-rw-r--r--src/compiler/scala/tools/nsc/transform/InfoTransform.scala4
-rw-r--r--src/compiler/scala/tools/nsc/transform/InlineErasure.scala10
-rw-r--r--src/compiler/scala/tools/nsc/transform/LambdaLift.scala93
-rw-r--r--src/compiler/scala/tools/nsc/transform/LazyVals.scala24
-rw-r--r--src/compiler/scala/tools/nsc/transform/Mixin.scala267
-rw-r--r--src/compiler/scala/tools/nsc/transform/OverridingPairs.scala20
-rw-r--r--src/compiler/scala/tools/nsc/transform/PostErasure.scala60
-rw-r--r--src/compiler/scala/tools/nsc/transform/SampleTransform.scala5
-rw-r--r--src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala488
-rw-r--r--src/compiler/scala/tools/nsc/transform/TailCalls.scala256
-rw-r--r--src/compiler/scala/tools/nsc/transform/TypingTransformers.scala6
-rw-r--r--src/compiler/scala/tools/nsc/transform/UnCurry.scala263
-rw-r--r--src/compiler/scala/tools/nsc/transform/patmat/Logic.scala28
-rw-r--r--src/compiler/scala/tools/nsc/transform/patmat/MatchAnalysis.scala32
-rw-r--r--src/compiler/scala/tools/nsc/transform/patmat/MatchCodeGen.scala14
-rw-r--r--src/compiler/scala/tools/nsc/transform/patmat/MatchOptimization.scala27
-rw-r--r--src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala66
-rw-r--r--src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala44
-rw-r--r--src/compiler/scala/tools/nsc/transform/patmat/PatternMatching.scala21
-rw-r--r--src/compiler/scala/tools/nsc/transform/patmat/Solving.scala12
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Adaptations.scala6
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Analyzer.scala20
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/AnalyzerPlugins.scala18
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Checkable.scala49
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/ConstantFolder.scala8
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala343
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Contexts.scala1272
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/DestructureTypes.scala15
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Duplicators.scala96
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/EtaExpansion.scala20
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Implicits.scala292
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Infer.scala1468
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Macros.scala979
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala151
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Modes.scala140
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Namers.scala290
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala71
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/RefChecks.scala472
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/StdAttachments.scala149
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala72
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala166
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Tags.scala18
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala40
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala162
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/TypeStrings.scala (renamed from src/compiler/scala/tools/nsc/interpreter/TypeStrings.scala)61
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Typers.scala3373
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Unapplies.scala29
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Variances.scala94
-rw-r--r--src/compiler/scala/tools/nsc/util/ClassPath.scala90
-rw-r--r--src/compiler/scala/tools/nsc/util/CommandLineParser.scala5
-rwxr-xr-xsrc/compiler/scala/tools/nsc/util/DocStrings.scala2
-rw-r--r--src/compiler/scala/tools/nsc/util/Exceptional.scala2
-rw-r--r--src/compiler/scala/tools/nsc/util/FreshNameCreator.scala5
-rw-r--r--src/compiler/scala/tools/nsc/util/JavaCharArrayReader.scala66
-rw-r--r--src/compiler/scala/tools/nsc/util/MsilClassPath.scala169
-rw-r--r--src/compiler/scala/tools/nsc/util/ShowPickled.scala24
-rw-r--r--src/compiler/scala/tools/nsc/util/SimpleTracer.scala3
-rw-r--r--src/compiler/scala/tools/nsc/util/WorkScheduler.scala6
-rw-r--r--src/compiler/scala/tools/nsc/util/package.scala45
-rw-r--r--src/compiler/scala/tools/reflect/FastTrack.scala49
-rw-r--r--src/compiler/scala/tools/reflect/FrontEnd.scala2
-rw-r--r--src/compiler/scala/tools/reflect/MacroImplementations.scala11
-rw-r--r--src/compiler/scala/tools/reflect/ReflectMain.scala3
-rw-r--r--src/compiler/scala/tools/reflect/StdTags.scala5
-rw-r--r--src/compiler/scala/tools/reflect/ToolBox.scala2
-rw-r--r--src/compiler/scala/tools/reflect/ToolBoxFactory.scala109
-rw-r--r--src/compiler/scala/tools/reflect/WrappedProperties.scala1
-rw-r--r--src/compiler/scala/tools/reflect/package.scala5
-rw-r--r--src/compiler/scala/tools/util/Javap.scala157
-rw-r--r--src/compiler/scala/tools/util/PathResolver.scala182
-rw-r--r--src/compiler/scala/tools/util/SocketServer.scala7
-rw-r--r--src/compiler/scala/tools/util/VerifyClass.scala2
-rw-r--r--src/continuations/library/scala/util/continuations/ControlContext.scala4
-rw-r--r--src/continuations/library/scala/util/continuations/package.scala8
-rw-r--r--src/continuations/plugin/scala/tools/selectivecps/CPSAnnotationChecker.scala67
-rw-r--r--src/continuations/plugin/scala/tools/selectivecps/CPSUtils.scala7
-rw-r--r--src/continuations/plugin/scala/tools/selectivecps/SelectiveANFTransform.scala22
-rw-r--r--src/continuations/plugin/scala/tools/selectivecps/SelectiveCPSPlugin.scala5
-rw-r--r--src/continuations/plugin/scala/tools/selectivecps/SelectiveCPSTransform.scala27
-rw-r--r--src/detach/library/scala/remoting/Channel.scala190
-rw-r--r--src/detach/library/scala/remoting/Debug.scala27
-rw-r--r--src/detach/library/scala/remoting/ServerChannel.scala68
-rw-r--r--src/detach/library/scala/remoting/detach.scala49
-rw-r--r--src/detach/library/scala/runtime/RemoteRef.scala182
-rw-r--r--src/detach/library/scala/runtime/remoting/Debug.scala85
-rw-r--r--src/detach/library/scala/runtime/remoting/RegistryDelegate.scala192
-rw-r--r--src/detach/library/scala/runtime/remoting/RemoteBooleanRef.scala51
-rw-r--r--src/detach/library/scala/runtime/remoting/RemoteByteRef.scala51
-rw-r--r--src/detach/library/scala/runtime/remoting/RemoteCharRef.scala51
-rw-r--r--src/detach/library/scala/runtime/remoting/RemoteDoubleRef.scala50
-rw-r--r--src/detach/library/scala/runtime/remoting/RemoteFloatRef.scala50
-rw-r--r--src/detach/library/scala/runtime/remoting/RemoteGC.scala66
-rw-r--r--src/detach/library/scala/runtime/remoting/RemoteIntRef.scala51
-rw-r--r--src/detach/library/scala/runtime/remoting/RemoteLongRef.scala51
-rw-r--r--src/detach/library/scala/runtime/remoting/RemoteObjectRef.scala51
-rw-r--r--src/detach/library/scala/runtime/remoting/RemoteShortRef.scala50
-rw-r--r--src/detach/plugin/scala/tools/detach/Detach.scala1190
-rw-r--r--src/detach/plugin/scala/tools/detach/DetachPlugin.scala41
-rw-r--r--src/detach/plugin/scalac-plugin.xml4
-rw-r--r--src/eclipse/README.md4
-rw-r--r--src/eclipse/fjbg/.classpath7
-rw-r--r--src/eclipse/interactive/.classpath10
-rw-r--r--src/eclipse/interactive/.project35
-rw-r--r--src/eclipse/partest/.classpath2
-rw-r--r--src/eclipse/repl/.classpath11
-rw-r--r--src/eclipse/repl/.project35
-rw-r--r--src/eclipse/scala-compiler/.classpath2
-rw-r--r--src/eclipse/scaladoc/.classpath13
-rw-r--r--src/eclipse/scaladoc/.project (renamed from src/eclipse/fjbg/.project)15
-rw-r--r--src/eclipse/scalap/.classpath1
-rw-r--r--src/fjbg/ch/epfl/lamp/fjbg/FJBGContext.java195
-rw-r--r--src/fjbg/ch/epfl/lamp/fjbg/JAccessFlags.java35
-rw-r--r--src/fjbg/ch/epfl/lamp/fjbg/JArrayType.java62
-rw-r--r--src/fjbg/ch/epfl/lamp/fjbg/JAttribute.java84
-rw-r--r--src/fjbg/ch/epfl/lamp/fjbg/JAttributeFactory.java101
-rw-r--r--src/fjbg/ch/epfl/lamp/fjbg/JClass.java420
-rw-r--r--src/fjbg/ch/epfl/lamp/fjbg/JCode.java1308
-rw-r--r--src/fjbg/ch/epfl/lamp/fjbg/JCodeAttribute.java125
-rw-r--r--src/fjbg/ch/epfl/lamp/fjbg/JCodeIterator.java377
-rw-r--r--src/fjbg/ch/epfl/lamp/fjbg/JConstantPool.java771
-rw-r--r--src/fjbg/ch/epfl/lamp/fjbg/JConstantValueAttribute.java69
-rw-r--r--src/fjbg/ch/epfl/lamp/fjbg/JEnclosingMethodAttribute.java83
-rw-r--r--src/fjbg/ch/epfl/lamp/fjbg/JExceptionsAttribute.java90
-rw-r--r--src/fjbg/ch/epfl/lamp/fjbg/JExtendedCode.java667
-rw-r--r--src/fjbg/ch/epfl/lamp/fjbg/JField.java62
-rw-r--r--src/fjbg/ch/epfl/lamp/fjbg/JFieldOrMethod.java138
-rw-r--r--src/fjbg/ch/epfl/lamp/fjbg/JInnerClassesAttribute.java201
-rw-r--r--src/fjbg/ch/epfl/lamp/fjbg/JLabel.java30
-rw-r--r--src/fjbg/ch/epfl/lamp/fjbg/JLineNumberTableAttribute.java121
-rw-r--r--src/fjbg/ch/epfl/lamp/fjbg/JLocalVariable.java42
-rw-r--r--src/fjbg/ch/epfl/lamp/fjbg/JLocalVariableTableAttribute.java167
-rw-r--r--src/fjbg/ch/epfl/lamp/fjbg/JMember.java109
-rw-r--r--src/fjbg/ch/epfl/lamp/fjbg/JMethod.java199
-rw-r--r--src/fjbg/ch/epfl/lamp/fjbg/JMethodType.java87
-rw-r--r--src/fjbg/ch/epfl/lamp/fjbg/JObjectType.java65
-rw-r--r--src/fjbg/ch/epfl/lamp/fjbg/JOpcode.java1267
-rw-r--r--src/fjbg/ch/epfl/lamp/fjbg/JOtherAttribute.java77
-rw-r--r--src/fjbg/ch/epfl/lamp/fjbg/JReferenceType.java19
-rw-r--r--src/fjbg/ch/epfl/lamp/fjbg/JSourceFileAttribute.java69
-rw-r--r--src/fjbg/ch/epfl/lamp/fjbg/JStackMapTableAttribute.java282
-rw-r--r--src/fjbg/ch/epfl/lamp/fjbg/JType.java316
-rw-r--r--src/fjbg/ch/epfl/lamp/fjbg/Main.java131
-rw-r--r--src/fjbg/ch/epfl/lamp/util/ByteArray.java145
-rw-r--r--src/intellij/compiler.iml.SAMPLE2
-rw-r--r--src/intellij/continuations-library.iml.SAMPLE (renamed from src/intellij/msil.iml.SAMPLE)5
-rw-r--r--src/intellij/continuations-plugin.iml.SAMPLE25
-rw-r--r--src/intellij/fjbg.iml.SAMPLE12
-rw-r--r--src/intellij/interactive.iml.SAMPLE25
-rw-r--r--src/intellij/repl.iml.SAMPLE25
-rw-r--r--src/intellij/scala-lang.ipr.SAMPLE8
-rw-r--r--src/intellij/scaladoc.iml.SAMPLE24
-rw-r--r--src/intellij/test.iml.SAMPLE2
-rw-r--r--src/interactive/scala/tools/nsc/interactive/CompilerControl.scala (renamed from src/compiler/scala/tools/nsc/interactive/CompilerControl.scala)38
-rw-r--r--src/interactive/scala/tools/nsc/interactive/ContextTrees.scala (renamed from src/compiler/scala/tools/nsc/interactive/ContextTrees.scala)1
-rw-r--r--src/interactive/scala/tools/nsc/interactive/Global.scala (renamed from src/compiler/scala/tools/nsc/interactive/Global.scala)219
-rw-r--r--src/interactive/scala/tools/nsc/interactive/InteractiveReporter.scala (renamed from src/compiler/scala/tools/nsc/interactive/InteractiveReporter.scala)0
-rw-r--r--src/interactive/scala/tools/nsc/interactive/Main.scala34
-rw-r--r--src/interactive/scala/tools/nsc/interactive/Picklers.scala (renamed from src/compiler/scala/tools/nsc/interactive/Picklers.scala)10
-rw-r--r--src/interactive/scala/tools/nsc/interactive/PresentationCompilerThread.scala (renamed from src/compiler/scala/tools/nsc/interactive/PresentationCompilerThread.scala)0
-rw-r--r--src/interactive/scala/tools/nsc/interactive/REPL.scala (renamed from src/compiler/scala/tools/nsc/interactive/REPL.scala)27
-rw-r--r--src/interactive/scala/tools/nsc/interactive/RangePositions.scala14
-rw-r--r--src/interactive/scala/tools/nsc/interactive/Response.scala (renamed from src/compiler/scala/tools/nsc/interactive/Response.scala)0
-rw-r--r--src/interactive/scala/tools/nsc/interactive/RichCompilationUnits.scala (renamed from src/compiler/scala/tools/nsc/interactive/RichCompilationUnits.scala)0
-rw-r--r--src/interactive/scala/tools/nsc/interactive/ScratchPadMaker.scala (renamed from src/compiler/scala/tools/nsc/interactive/ScratchPadMaker.scala)7
-rw-r--r--src/interactive/scala/tools/nsc/interactive/tests/InteractiveTest.scala (renamed from src/compiler/scala/tools/nsc/interactive/tests/InteractiveTest.scala)22
-rw-r--r--src/interactive/scala/tools/nsc/interactive/tests/InteractiveTestSettings.scala (renamed from src/compiler/scala/tools/nsc/interactive/tests/InteractiveTestSettings.scala)7
-rw-r--r--src/interactive/scala/tools/nsc/interactive/tests/Tester.scala (renamed from src/compiler/scala/tools/nsc/interactive/tests/Tester.scala)11
-rw-r--r--src/interactive/scala/tools/nsc/interactive/tests/core/AskCommand.scala (renamed from src/compiler/scala/tools/nsc/interactive/tests/core/AskCommand.scala)0
-rw-r--r--src/interactive/scala/tools/nsc/interactive/tests/core/CoreTestDefs.scala (renamed from src/compiler/scala/tools/nsc/interactive/tests/core/CoreTestDefs.scala)17
-rw-r--r--src/interactive/scala/tools/nsc/interactive/tests/core/PresentationCompilerInstance.scala (renamed from src/compiler/scala/tools/nsc/interactive/tests/core/PresentationCompilerInstance.scala)12
-rw-r--r--src/interactive/scala/tools/nsc/interactive/tests/core/PresentationCompilerRequestsWorkingMode.scala (renamed from src/compiler/scala/tools/nsc/interactive/tests/core/PresentationCompilerRequestsWorkingMode.scala)0
-rw-r--r--src/interactive/scala/tools/nsc/interactive/tests/core/PresentationCompilerTestDef.scala (renamed from src/compiler/scala/tools/nsc/interactive/tests/core/PresentationCompilerTestDef.scala)3
-rw-r--r--src/interactive/scala/tools/nsc/interactive/tests/core/Reporter.scala (renamed from src/compiler/scala/tools/nsc/interactive/tests/core/Reporter.scala)0
-rw-r--r--src/interactive/scala/tools/nsc/interactive/tests/core/SourcesCollector.scala (renamed from src/compiler/scala/tools/nsc/interactive/tests/core/SourcesCollector.scala)6
-rw-r--r--src/interactive/scala/tools/nsc/interactive/tests/core/TestMarker.scala (renamed from src/compiler/scala/tools/nsc/interactive/tests/core/TestMarker.scala)2
-rw-r--r--src/interactive/scala/tools/nsc/interactive/tests/core/TestResources.scala (renamed from src/compiler/scala/tools/nsc/interactive/tests/core/TestResources.scala)0
-rw-r--r--src/interactive/scala/tools/nsc/interactive/tests/core/TestSettings.scala (renamed from src/compiler/scala/tools/nsc/interactive/tests/core/TestSettings.scala)0
-rw-r--r--src/jline/build.sbt10
-rwxr-xr-xsrc/jline/manual-test.sh5
-rw-r--r--src/jline/project/build.properties1
-rw-r--r--src/jline/project/plugins.sbt3
-rw-r--r--src/jline/project/plugins/build.sbt5
-rw-r--r--src/jline/src/main/java/scala/tools/jline/console/ConsoleReader.java2
-rw-r--r--src/library-aux/scala/Any.scala2
-rw-r--r--src/library-aux/scala/AnyRef.scala2
-rw-r--r--src/library-aux/scala/Nothing.scala2
-rw-r--r--src/library-aux/scala/Null.scala2
-rw-r--r--src/library/scala/AnyVal.scala2
-rw-r--r--src/library/scala/Application.scala79
-rw-r--r--src/library/scala/Array.scala14
-rw-r--r--src/library/scala/Boolean.scala6
-rw-r--r--src/library/scala/Byte.scala6
-rw-r--r--src/library/scala/Char.scala6
-rw-r--r--src/library/scala/Console.scala344
-rw-r--r--src/library/scala/Double.scala6
-rw-r--r--src/library/scala/Enumeration.scala19
-rw-r--r--src/library/scala/Float.scala6
-rw-r--r--src/library/scala/Function0.scala4
-rw-r--r--src/library/scala/Function1.scala2
-rw-r--r--src/library/scala/Function10.scala2
-rw-r--r--src/library/scala/Function11.scala2
-rw-r--r--src/library/scala/Function12.scala2
-rw-r--r--src/library/scala/Function13.scala2
-rw-r--r--src/library/scala/Function14.scala2
-rw-r--r--src/library/scala/Function15.scala2
-rw-r--r--src/library/scala/Function16.scala2
-rw-r--r--src/library/scala/Function17.scala2
-rw-r--r--src/library/scala/Function18.scala2
-rw-r--r--src/library/scala/Function19.scala2
-rw-r--r--src/library/scala/Function2.scala2
-rw-r--r--src/library/scala/Function20.scala2
-rw-r--r--src/library/scala/Function21.scala2
-rw-r--r--src/library/scala/Function22.scala2
-rw-r--r--src/library/scala/Function3.scala2
-rw-r--r--src/library/scala/Function4.scala2
-rw-r--r--src/library/scala/Function5.scala2
-rw-r--r--src/library/scala/Function6.scala2
-rw-r--r--src/library/scala/Function7.scala2
-rw-r--r--src/library/scala/Function8.scala2
-rw-r--r--src/library/scala/Function9.scala2
-rw-r--r--src/library/scala/Int.scala6
-rw-r--r--src/library/scala/Long.scala6
-rw-r--r--src/library/scala/LowPriorityImplicits.scala95
-rw-r--r--src/library/scala/NotNull.scala2
-rw-r--r--src/library/scala/Option.scala11
-rw-r--r--src/library/scala/Predef.scala271
-rw-r--r--src/library/scala/Product.scala2
-rw-r--r--src/library/scala/Product1.scala2
-rw-r--r--src/library/scala/Product10.scala2
-rw-r--r--src/library/scala/Product11.scala2
-rw-r--r--src/library/scala/Product12.scala2
-rw-r--r--src/library/scala/Product13.scala2
-rw-r--r--src/library/scala/Product14.scala2
-rw-r--r--src/library/scala/Product15.scala2
-rw-r--r--src/library/scala/Product16.scala2
-rw-r--r--src/library/scala/Product17.scala2
-rw-r--r--src/library/scala/Product18.scala2
-rw-r--r--src/library/scala/Product19.scala2
-rw-r--r--src/library/scala/Product2.scala2
-rw-r--r--src/library/scala/Product20.scala2
-rw-r--r--src/library/scala/Product21.scala2
-rw-r--r--src/library/scala/Product22.scala2
-rw-r--r--src/library/scala/Product3.scala2
-rw-r--r--src/library/scala/Product4.scala2
-rw-r--r--src/library/scala/Product5.scala2
-rw-r--r--src/library/scala/Product6.scala2
-rw-r--r--src/library/scala/Product7.scala2
-rw-r--r--src/library/scala/Product8.scala2
-rw-r--r--src/library/scala/Product9.scala2
-rw-r--r--src/library/scala/SerialVersionUID.scala2
-rw-r--r--src/library/scala/Short.scala6
-rw-r--r--src/library/scala/Specializable.scala2
-rw-r--r--src/library/scala/StringContext.scala2
-rw-r--r--src/library/scala/Tuple1.scala3
-rw-r--r--src/library/scala/Tuple10.scala3
-rw-r--r--src/library/scala/Tuple11.scala3
-rw-r--r--src/library/scala/Tuple12.scala3
-rw-r--r--src/library/scala/Tuple13.scala3
-rw-r--r--src/library/scala/Tuple14.scala3
-rw-r--r--src/library/scala/Tuple15.scala3
-rw-r--r--src/library/scala/Tuple16.scala3
-rw-r--r--src/library/scala/Tuple17.scala3
-rw-r--r--src/library/scala/Tuple18.scala3
-rw-r--r--src/library/scala/Tuple19.scala3
-rw-r--r--src/library/scala/Tuple2.scala3
-rw-r--r--src/library/scala/Tuple20.scala3
-rw-r--r--src/library/scala/Tuple21.scala3
-rw-r--r--src/library/scala/Tuple22.scala3
-rw-r--r--src/library/scala/Tuple3.scala3
-rw-r--r--src/library/scala/Tuple4.scala3
-rw-r--r--src/library/scala/Tuple5.scala3
-rw-r--r--src/library/scala/Tuple6.scala3
-rw-r--r--src/library/scala/Tuple7.scala3
-rw-r--r--src/library/scala/Tuple8.scala3
-rw-r--r--src/library/scala/Tuple9.scala3
-rw-r--r--src/library/scala/UninitializedFieldError.scala6
-rw-r--r--src/library/scala/Unit.scala2
-rw-r--r--src/library/scala/annotation/serializable.scala15
-rw-r--r--src/library/scala/annotation/target/package.scala29
-rw-r--r--src/library/scala/beans/ScalaBeanInfo.scala6
-rw-r--r--src/library/scala/collection/BitSet.scala3
-rw-r--r--src/library/scala/collection/BitSetLike.scala30
-rw-r--r--src/library/scala/collection/BufferedIterator.scala3
-rw-r--r--src/library/scala/collection/CustomParallelizable.scala3
-rw-r--r--src/library/scala/collection/DefaultMap.scala13
-rw-r--r--src/library/scala/collection/GenIterable.scala3
-rw-r--r--src/library/scala/collection/GenIterableLike.scala5
-rw-r--r--src/library/scala/collection/GenIterableView.scala10
-rw-r--r--src/library/scala/collection/GenIterableViewLike.scala10
-rw-r--r--src/library/scala/collection/GenMap.scala3
-rw-r--r--src/library/scala/collection/GenMapLike.scala3
-rw-r--r--src/library/scala/collection/GenSeq.scala3
-rw-r--r--src/library/scala/collection/GenSeqLike.scala3
-rw-r--r--src/library/scala/collection/GenSeqView.scala10
-rw-r--r--src/library/scala/collection/GenSeqViewLike.scala3
-rw-r--r--src/library/scala/collection/GenSet.scala3
-rw-r--r--src/library/scala/collection/GenSetLike.scala3
-rw-r--r--src/library/scala/collection/GenTraversable.scala3
-rw-r--r--src/library/scala/collection/GenTraversableLike.scala5
-rw-r--r--src/library/scala/collection/GenTraversableOnce.scala21
-rw-r--r--src/library/scala/collection/GenTraversableView.scala10
-rw-r--r--src/library/scala/collection/GenTraversableViewLike.scala5
-rw-r--r--src/library/scala/collection/IndexedSeq.scala10
-rw-r--r--src/library/scala/collection/IndexedSeqLike.scala9
-rwxr-xr-xsrc/library/scala/collection/IndexedSeqOptimized.scala20
-rw-r--r--src/library/scala/collection/Iterable.scala4
-rw-r--r--src/library/scala/collection/IterableLike.scala48
-rw-r--r--src/library/scala/collection/IterableProxy.scala5
-rw-r--r--src/library/scala/collection/IterableProxyLike.scala3
-rw-r--r--src/library/scala/collection/IterableView.scala3
-rw-r--r--src/library/scala/collection/IterableViewLike.scala4
-rw-r--r--src/library/scala/collection/Iterator.scala77
-rw-r--r--src/library/scala/collection/JavaConversions.scala86
-rwxr-xr-xsrc/library/scala/collection/JavaConverters.scala53
-rw-r--r--src/library/scala/collection/LinearSeq.scala3
-rw-r--r--src/library/scala/collection/LinearSeqLike.scala11
-rwxr-xr-xsrc/library/scala/collection/LinearSeqOptimized.scala19
-rw-r--r--src/library/scala/collection/Map.scala3
-rw-r--r--src/library/scala/collection/MapLike.scala8
-rw-r--r--src/library/scala/collection/MapProxy.scala3
-rw-r--r--src/library/scala/collection/MapProxyLike.scala5
-rw-r--r--src/library/scala/collection/Parallel.scala3
-rw-r--r--src/library/scala/collection/Parallelizable.scala5
-rw-r--r--src/library/scala/collection/Searching.scala118
-rw-r--r--src/library/scala/collection/Seq.scala3
-rw-r--r--src/library/scala/collection/SeqExtractors.scala3
-rw-r--r--src/library/scala/collection/SeqLike.scala64
-rw-r--r--src/library/scala/collection/SeqProxy.scala3
-rw-r--r--src/library/scala/collection/SeqProxyLike.scala5
-rw-r--r--src/library/scala/collection/SeqView.scala3
-rw-r--r--src/library/scala/collection/SeqViewLike.scala4
-rw-r--r--src/library/scala/collection/Sequentializable.scala.disabled10
-rw-r--r--src/library/scala/collection/Set.scala4
-rw-r--r--src/library/scala/collection/SetLike.scala12
-rw-r--r--src/library/scala/collection/SetProxy.scala3
-rw-r--r--src/library/scala/collection/SetProxyLike.scala6
-rw-r--r--src/library/scala/collection/SortedMap.scala7
-rw-r--r--src/library/scala/collection/SortedMapLike.scala34
-rw-r--r--src/library/scala/collection/SortedSet.scala3
-rw-r--r--src/library/scala/collection/SortedSetLike.scala13
-rw-r--r--src/library/scala/collection/Traversable.scala7
-rw-r--r--src/library/scala/collection/TraversableLike.scala28
-rw-r--r--src/library/scala/collection/TraversableOnce.scala32
-rw-r--r--src/library/scala/collection/TraversableProxy.scala3
-rw-r--r--src/library/scala/collection/TraversableProxyLike.scala3
-rw-r--r--src/library/scala/collection/TraversableView.scala4
-rw-r--r--src/library/scala/collection/TraversableViewLike.scala8
-rw-r--r--src/library/scala/collection/concurrent/BasicNode.java11
-rw-r--r--src/library/scala/collection/concurrent/CNodeBase.java18
-rw-r--r--src/library/scala/collection/concurrent/Gen.java9
-rw-r--r--src/library/scala/collection/concurrent/INodeBase.java20
-rw-r--r--src/library/scala/collection/concurrent/MainNode.java21
-rw-r--r--src/library/scala/collection/concurrent/Map.scala3
-rw-r--r--src/library/scala/collection/concurrent/TrieMap.scala25
-rw-r--r--src/library/scala/collection/convert/DecorateAsJava.scala25
-rw-r--r--src/library/scala/collection/convert/DecorateAsScala.scala22
-rw-r--r--src/library/scala/collection/convert/Decorators.scala5
-rw-r--r--src/library/scala/collection/convert/WrapAsJava.scala24
-rw-r--r--src/library/scala/collection/convert/WrapAsScala.scala47
-rw-r--r--src/library/scala/collection/convert/Wrappers.scala64
-rw-r--r--src/library/scala/collection/convert/package.scala3
-rw-r--r--src/library/scala/collection/generic/BitOperations.scala3
-rw-r--r--src/library/scala/collection/generic/BitSetFactory.scala3
-rw-r--r--src/library/scala/collection/generic/CanBuildFrom.scala3
-rw-r--r--src/library/scala/collection/generic/CanCombineFrom.scala3
-rw-r--r--src/library/scala/collection/generic/ClassTagTraversableFactory.scala3
-rw-r--r--src/library/scala/collection/generic/Clearable.scala3
-rwxr-xr-xsrc/library/scala/collection/generic/FilterMonadic.scala5
-rw-r--r--src/library/scala/collection/generic/GenMapFactory.scala5
-rw-r--r--src/library/scala/collection/generic/GenSeqFactory.scala3
-rw-r--r--src/library/scala/collection/generic/GenSetFactory.scala3
-rw-r--r--src/library/scala/collection/generic/GenTraversableFactory.scala22
-rw-r--r--src/library/scala/collection/generic/GenericClassTagCompanion.scala9
-rw-r--r--src/library/scala/collection/generic/GenericClassTagTraversableTemplate.scala3
-rw-r--r--src/library/scala/collection/generic/GenericCompanion.scala9
-rw-r--r--src/library/scala/collection/generic/GenericOrderedCompanion.scala9
-rw-r--r--src/library/scala/collection/generic/GenericOrderedTraversableTemplate.scala3
-rw-r--r--src/library/scala/collection/generic/GenericParCompanion.scala4
-rw-r--r--src/library/scala/collection/generic/GenericParTemplate.scala4
-rw-r--r--src/library/scala/collection/generic/GenericSeqCompanion.scala3
-rw-r--r--src/library/scala/collection/generic/GenericSetTemplate.scala3
-rw-r--r--src/library/scala/collection/generic/GenericTraversableTemplate.scala67
-rw-r--r--src/library/scala/collection/generic/Growable.scala20
-rwxr-xr-xsrc/library/scala/collection/generic/HasNewBuilder.scala3
-rw-r--r--src/library/scala/collection/generic/HasNewCombiner.scala4
-rw-r--r--src/library/scala/collection/generic/ImmutableMapFactory.scala3
-rw-r--r--src/library/scala/collection/generic/ImmutableSetFactory.scala3
-rw-r--r--src/library/scala/collection/generic/ImmutableSortedMapFactory.scala3
-rw-r--r--src/library/scala/collection/generic/ImmutableSortedSetFactory.scala3
-rw-r--r--src/library/scala/collection/generic/IndexedSeqFactory.scala (renamed from src/library/scala/ScalaObject.scala)16
-rw-r--r--src/library/scala/collection/generic/IsSeqLike.scala58
-rw-r--r--src/library/scala/collection/generic/IsTraversableLike.scala3
-rw-r--r--src/library/scala/collection/generic/IsTraversableOnce.scala3
-rw-r--r--src/library/scala/collection/generic/IterableForwarder.scala7
-rw-r--r--src/library/scala/collection/generic/MapFactory.scala3
-rw-r--r--src/library/scala/collection/generic/MutableMapFactory.scala3
-rw-r--r--src/library/scala/collection/generic/MutableSetFactory.scala3
-rw-r--r--src/library/scala/collection/generic/MutableSortedSetFactory.scala3
-rw-r--r--src/library/scala/collection/generic/OrderedTraversableFactory.scala3
-rw-r--r--src/library/scala/collection/generic/ParFactory.scala4
-rw-r--r--src/library/scala/collection/generic/ParMapFactory.scala4
-rw-r--r--src/library/scala/collection/generic/ParSetFactory.scala4
-rw-r--r--src/library/scala/collection/generic/SeqFactory.scala3
-rw-r--r--src/library/scala/collection/generic/SeqForwarder.scala6
-rw-r--r--src/library/scala/collection/generic/SetFactory.scala3
-rw-r--r--src/library/scala/collection/generic/Shrinkable.scala3
-rw-r--r--src/library/scala/collection/generic/Signalling.scala10
-rw-r--r--src/library/scala/collection/generic/Sizing.scala4
-rw-r--r--src/library/scala/collection/generic/SliceInterval.scala3
-rw-r--r--src/library/scala/collection/generic/Sorted.scala35
-rw-r--r--src/library/scala/collection/generic/SortedMapFactory.scala5
-rw-r--r--src/library/scala/collection/generic/SortedSetFactory.scala7
-rw-r--r--src/library/scala/collection/generic/Subtractable.scala3
-rw-r--r--src/library/scala/collection/generic/TraversableFactory.scala3
-rw-r--r--src/library/scala/collection/generic/TraversableForwarder.scala4
-rw-r--r--src/library/scala/collection/generic/package.scala3
-rw-r--r--src/library/scala/collection/immutable/BitSet.scala12
-rwxr-xr-xsrc/library/scala/collection/immutable/DefaultMap.scala11
-rw-r--r--src/library/scala/collection/immutable/GenIterable.scala.disabled37
-rw-r--r--src/library/scala/collection/immutable/GenMap.scala.disabled36
-rw-r--r--src/library/scala/collection/immutable/GenSeq.scala.disabled49
-rw-r--r--src/library/scala/collection/immutable/GenSet.scala.disabled43
-rw-r--r--src/library/scala/collection/immutable/GenTraversable.scala.disabled41
-rw-r--r--src/library/scala/collection/immutable/HashMap.scala12
-rw-r--r--src/library/scala/collection/immutable/HashSet.scala4
-rw-r--r--src/library/scala/collection/immutable/IndexedSeq.scala8
-rw-r--r--src/library/scala/collection/immutable/IntMap.scala9
-rw-r--r--src/library/scala/collection/immutable/Iterable.scala3
-rw-r--r--src/library/scala/collection/immutable/LinearSeq.scala3
-rw-r--r--src/library/scala/collection/immutable/List.scala287
-rw-r--r--src/library/scala/collection/immutable/ListMap.scala42
-rw-r--r--src/library/scala/collection/immutable/ListSet.scala13
-rw-r--r--src/library/scala/collection/immutable/LongMap.scala22
-rw-r--r--src/library/scala/collection/immutable/Map.scala3
-rw-r--r--src/library/scala/collection/immutable/MapLike.scala5
-rw-r--r--src/library/scala/collection/immutable/MapProxy.scala3
-rw-r--r--src/library/scala/collection/immutable/NumericRange.scala43
-rw-r--r--src/library/scala/collection/immutable/PagedSeq.scala7
-rw-r--r--src/library/scala/collection/immutable/Queue.scala3
-rw-r--r--src/library/scala/collection/immutable/Range.scala32
-rw-r--r--src/library/scala/collection/immutable/RedBlack.scala293
-rw-r--r--src/library/scala/collection/immutable/RedBlackTree.scala215
-rw-r--r--src/library/scala/collection/immutable/Seq.scala3
-rw-r--r--src/library/scala/collection/immutable/Set.scala3
-rw-r--r--src/library/scala/collection/immutable/SetProxy.scala3
-rw-r--r--src/library/scala/collection/immutable/SortedMap.scala13
-rw-r--r--src/library/scala/collection/immutable/SortedSet.scala3
-rw-r--r--src/library/scala/collection/immutable/Stack.scala3
-rw-r--r--src/library/scala/collection/immutable/Stream.scala58
-rw-r--r--src/library/scala/collection/immutable/StreamView.scala3
-rw-r--r--src/library/scala/collection/immutable/StreamViewLike.scala3
-rw-r--r--src/library/scala/collection/immutable/StringLike.scala17
-rw-r--r--src/library/scala/collection/immutable/StringOps.scala5
-rw-r--r--src/library/scala/collection/immutable/Traversable.scala3
-rw-r--r--src/library/scala/collection/immutable/TreeMap.scala16
-rw-r--r--src/library/scala/collection/immutable/TreeSet.scala13
-rw-r--r--src/library/scala/collection/immutable/TrieIterator.scala10
-rw-r--r--src/library/scala/collection/immutable/Vector.scala58
-rw-r--r--src/library/scala/collection/immutable/WrappedString.scala3
-rw-r--r--src/library/scala/collection/immutable/package.scala93
-rw-r--r--src/library/scala/collection/mutable/AVLTree.scala18
-rw-r--r--src/library/scala/collection/mutable/ArrayBuffer.scala3
-rw-r--r--src/library/scala/collection/mutable/ArrayBuilder.scala6
-rw-r--r--src/library/scala/collection/mutable/ArrayLike.scala6
-rw-r--r--src/library/scala/collection/mutable/ArrayOps.scala16
-rw-r--r--src/library/scala/collection/mutable/ArraySeq.scala5
-rw-r--r--src/library/scala/collection/mutable/ArrayStack.scala4
-rw-r--r--src/library/scala/collection/mutable/BitSet.scala59
-rw-r--r--src/library/scala/collection/mutable/Buffer.scala3
-rw-r--r--src/library/scala/collection/mutable/BufferLike.scala7
-rw-r--r--src/library/scala/collection/mutable/BufferProxy.scala8
-rw-r--r--src/library/scala/collection/mutable/Builder.scala2
-rw-r--r--src/library/scala/collection/mutable/Cloneable.scala3
-rw-r--r--src/library/scala/collection/mutable/ConcurrentMap.scala90
-rw-r--r--src/library/scala/collection/mutable/DefaultEntry.scala3
-rw-r--r--src/library/scala/collection/mutable/DefaultMapModel.scala3
-rw-r--r--src/library/scala/collection/mutable/DoubleLinkedList.scala5
-rw-r--r--src/library/scala/collection/mutable/DoubleLinkedListLike.scala3
-rw-r--r--src/library/scala/collection/mutable/FlatHashTable.scala128
-rw-r--r--src/library/scala/collection/mutable/GenIterable.scala.disabled37
-rw-r--r--src/library/scala/collection/mutable/GenMap.scala.disabled40
-rw-r--r--src/library/scala/collection/mutable/GenSeq.scala.disabled44
-rw-r--r--src/library/scala/collection/mutable/GenSet.scala.disabled46
-rw-r--r--src/library/scala/collection/mutable/GenTraversable.scala.disabled38
-rw-r--r--src/library/scala/collection/mutable/GrowingBuilder.scala3
-rw-r--r--src/library/scala/collection/mutable/HashEntry.scala3
-rw-r--r--src/library/scala/collection/mutable/HashMap.scala15
-rw-r--r--src/library/scala/collection/mutable/HashSet.scala23
-rw-r--r--src/library/scala/collection/mutable/HashTable.scala23
-rw-r--r--src/library/scala/collection/mutable/History.scala11
-rw-r--r--src/library/scala/collection/mutable/ImmutableMapAdaptor.scala3
-rw-r--r--src/library/scala/collection/mutable/ImmutableSetAdaptor.scala3
-rw-r--r--src/library/scala/collection/mutable/IndexedSeq.scala3
-rw-r--r--src/library/scala/collection/mutable/IndexedSeqLike.scala6
-rwxr-xr-xsrc/library/scala/collection/mutable/IndexedSeqOptimized.scala6
-rw-r--r--src/library/scala/collection/mutable/IndexedSeqView.scala5
-rw-r--r--src/library/scala/collection/mutable/Iterable.scala3
-rw-r--r--src/library/scala/collection/mutable/LazyBuilder.scala3
-rw-r--r--src/library/scala/collection/mutable/LinearSeq.scala3
-rw-r--r--src/library/scala/collection/mutable/LinkedEntry.scala3
-rw-r--r--src/library/scala/collection/mutable/LinkedHashMap.scala9
-rw-r--r--src/library/scala/collection/mutable/LinkedHashSet.scala5
-rw-r--r--src/library/scala/collection/mutable/LinkedList.scala3
-rw-r--r--src/library/scala/collection/mutable/LinkedListLike.scala8
-rw-r--r--src/library/scala/collection/mutable/ListBuffer.scala57
-rw-r--r--src/library/scala/collection/mutable/ListMap.scala3
-rw-r--r--src/library/scala/collection/mutable/Map.scala5
-rw-r--r--src/library/scala/collection/mutable/MapBuilder.scala3
-rw-r--r--src/library/scala/collection/mutable/MapLike.scala9
-rw-r--r--src/library/scala/collection/mutable/MapProxy.scala3
-rw-r--r--src/library/scala/collection/mutable/MultiMap.scala3
-rw-r--r--src/library/scala/collection/mutable/MutableList.scala8
-rw-r--r--src/library/scala/collection/mutable/ObservableBuffer.scala5
-rw-r--r--src/library/scala/collection/mutable/ObservableMap.scala5
-rw-r--r--src/library/scala/collection/mutable/ObservableSet.scala5
-rw-r--r--src/library/scala/collection/mutable/OpenHashMap.scala40
-rw-r--r--src/library/scala/collection/mutable/PriorityQueue.scala17
-rw-r--r--src/library/scala/collection/mutable/PriorityQueueProxy.scala15
-rw-r--r--src/library/scala/collection/mutable/Publisher.scala5
-rw-r--r--src/library/scala/collection/mutable/Queue.scala12
-rw-r--r--src/library/scala/collection/mutable/QueueProxy.scala7
-rw-r--r--src/library/scala/collection/mutable/RevertibleHistory.scala7
-rw-r--r--src/library/scala/collection/mutable/Seq.scala3
-rw-r--r--src/library/scala/collection/mutable/SeqLike.scala4
-rw-r--r--src/library/scala/collection/mutable/Set.scala3
-rw-r--r--src/library/scala/collection/mutable/SetBuilder.scala6
-rw-r--r--src/library/scala/collection/mutable/SetLike.scala7
-rw-r--r--src/library/scala/collection/mutable/SetProxy.scala3
-rw-r--r--src/library/scala/collection/mutable/SortedSet.scala3
-rw-r--r--src/library/scala/collection/mutable/Stack.scala3
-rw-r--r--src/library/scala/collection/mutable/StackProxy.scala7
-rw-r--r--src/library/scala/collection/mutable/StringBuilder.scala11
-rw-r--r--src/library/scala/collection/mutable/Subscriber.scala3
-rw-r--r--src/library/scala/collection/mutable/SynchronizedBuffer.scala5
-rw-r--r--src/library/scala/collection/mutable/SynchronizedMap.scala5
-rw-r--r--src/library/scala/collection/mutable/SynchronizedPriorityQueue.scala15
-rw-r--r--src/library/scala/collection/mutable/SynchronizedQueue.scala9
-rw-r--r--src/library/scala/collection/mutable/SynchronizedSet.scala7
-rw-r--r--src/library/scala/collection/mutable/SynchronizedStack.scala7
-rw-r--r--src/library/scala/collection/mutable/Traversable.scala3
-rw-r--r--src/library/scala/collection/mutable/TreeSet.scala113
-rw-r--r--src/library/scala/collection/mutable/Undoable.scala3
-rw-r--r--src/library/scala/collection/mutable/UnrolledBuffer.scala7
-rw-r--r--src/library/scala/collection/mutable/WeakHashMap.scala3
-rw-r--r--src/library/scala/collection/mutable/WrappedArray.scala3
-rw-r--r--src/library/scala/collection/mutable/WrappedArrayBuilder.scala4
-rw-r--r--src/library/scala/collection/parallel/Combiner.scala17
-rw-r--r--src/library/scala/collection/parallel/ParIterable.scala4
-rw-r--r--src/library/scala/collection/parallel/ParIterableLike.scala124
-rw-r--r--src/library/scala/collection/parallel/ParIterableView.scala3
-rw-r--r--src/library/scala/collection/parallel/ParIterableViewLike.scala10
-rw-r--r--src/library/scala/collection/parallel/ParMap.scala3
-rw-r--r--src/library/scala/collection/parallel/ParMapLike.scala7
-rw-r--r--src/library/scala/collection/parallel/ParSeq.scala32
-rw-r--r--src/library/scala/collection/parallel/ParSeqLike.scala19
-rw-r--r--src/library/scala/collection/parallel/ParSeqView.scala6
-rw-r--r--src/library/scala/collection/parallel/ParSeqViewLike.scala11
-rw-r--r--src/library/scala/collection/parallel/ParSet.scala56
-rw-r--r--src/library/scala/collection/parallel/ParSetLike.scala11
-rw-r--r--src/library/scala/collection/parallel/PreciseSplitter.scala3
-rw-r--r--src/library/scala/collection/parallel/RemainsIterator.scala135
-rw-r--r--src/library/scala/collection/parallel/Splitter.scala5
-rw-r--r--src/library/scala/collection/parallel/TaskSupport.scala27
-rw-r--r--src/library/scala/collection/parallel/Tasks.scala102
-rw-r--r--src/library/scala/collection/parallel/immutable/ParHashMap.scala5
-rw-r--r--src/library/scala/collection/parallel/immutable/ParHashSet.scala5
-rw-r--r--src/library/scala/collection/parallel/immutable/ParIterable.scala30
-rw-r--r--src/library/scala/collection/parallel/immutable/ParMap.scala3
-rw-r--r--src/library/scala/collection/parallel/immutable/ParNumericRange.scala.disabled128
-rw-r--r--src/library/scala/collection/parallel/immutable/ParRange.scala10
-rw-r--r--src/library/scala/collection/parallel/immutable/ParSeq.scala6
-rw-r--r--src/library/scala/collection/parallel/immutable/ParSet.scala4
-rw-r--r--src/library/scala/collection/parallel/immutable/ParVector.scala3
-rw-r--r--src/library/scala/collection/parallel/immutable/package.scala3
-rw-r--r--src/library/scala/collection/parallel/mutable/LazyCombiner.scala3
-rw-r--r--src/library/scala/collection/parallel/mutable/ParArray.scala61
-rw-r--r--src/library/scala/collection/parallel/mutable/ParFlatHashTable.scala15
-rw-r--r--src/library/scala/collection/parallel/mutable/ParHashMap.scala31
-rw-r--r--src/library/scala/collection/parallel/mutable/ParHashSet.scala80
-rw-r--r--src/library/scala/collection/parallel/mutable/ParHashTable.scala5
-rw-r--r--src/library/scala/collection/parallel/mutable/ParIterable.scala30
-rw-r--r--src/library/scala/collection/parallel/mutable/ParMap.scala40
-rw-r--r--src/library/scala/collection/parallel/mutable/ParMapLike.scala6
-rw-r--r--src/library/scala/collection/parallel/mutable/ParSeq.scala26
-rw-r--r--src/library/scala/collection/parallel/mutable/ParSet.scala12
-rw-r--r--src/library/scala/collection/parallel/mutable/ParSetLike.scala9
-rw-r--r--src/library/scala/collection/parallel/mutable/ParTrieMap.scala5
-rw-r--r--src/library/scala/collection/parallel/mutable/ResizableParArrayCombiner.scala5
-rw-r--r--src/library/scala/collection/parallel/mutable/UnrolledParArrayCombiner.scala8
-rw-r--r--src/library/scala/collection/parallel/mutable/package.scala3
-rw-r--r--src/library/scala/collection/parallel/package.scala11
-rw-r--r--src/library/scala/collection/script/Location.scala3
-rw-r--r--src/library/scala/collection/script/Message.scala5
-rw-r--r--src/library/scala/collection/script/Scriptable.scala3
-rw-r--r--src/library/scala/compat/Platform.scala5
-rw-r--r--src/library/scala/concurrent/BatchingExecutor.scala117
-rw-r--r--src/library/scala/concurrent/ExecutionContext.scala4
-rw-r--r--src/library/scala/concurrent/Future.scala160
-rw-r--r--src/library/scala/concurrent/FutureTaskRunner.scala6
-rw-r--r--src/library/scala/concurrent/JavaConversions.scala32
-rw-r--r--src/library/scala/concurrent/ManagedBlocker.scala2
-rw-r--r--src/library/scala/concurrent/Promise.scala6
-rw-r--r--src/library/scala/concurrent/SyncVar.scala8
-rw-r--r--src/library/scala/concurrent/TaskRunner.scala2
-rw-r--r--src/library/scala/concurrent/TaskRunners.scala36
-rw-r--r--src/library/scala/concurrent/ThreadPoolRunner.scala2
-rw-r--r--src/library/scala/concurrent/ThreadRunner.scala60
-rw-r--r--src/library/scala/concurrent/duration/Duration.scala9
-rw-r--r--src/library/scala/concurrent/duration/package.scala14
-rw-r--r--src/library/scala/concurrent/impl/ExecutionContextImpl.scala57
-rw-r--r--src/library/scala/concurrent/impl/Future.scala2
-rw-r--r--src/library/scala/concurrent/impl/Promise.scala6
-rw-r--r--src/library/scala/concurrent/ops.scala73
-rw-r--r--src/library/scala/concurrent/package.scala4
-rw-r--r--src/library/scala/deprecatedInheritance.scala3
-rw-r--r--src/library/scala/io/AnsiColor.scala53
-rw-r--r--src/library/scala/io/BufferedSource.scala4
-rw-r--r--src/library/scala/io/BytePickle.scala318
-rw-r--r--src/library/scala/io/Codec.scala59
-rw-r--r--src/library/scala/io/Position.scala11
-rw-r--r--src/library/scala/io/ReadStdin.scala228
-rw-r--r--src/library/scala/io/Source.scala15
-rw-r--r--src/library/scala/io/UTF8Codec.scala32
-rw-r--r--src/library/scala/math/BigDecimal.scala29
-rw-r--r--src/library/scala/math/BigInt.scala20
-rw-r--r--src/library/scala/math/Equiv.scala3
-rw-r--r--src/library/scala/math/Fractional.scala3
-rw-r--r--src/library/scala/math/Integral.scala3
-rw-r--r--src/library/scala/math/Numeric.scala21
-rw-r--r--src/library/scala/math/Ordered.scala3
-rw-r--r--src/library/scala/math/Ordering.scala8
-rw-r--r--src/library/scala/math/PartialOrdering.scala3
-rw-r--r--src/library/scala/math/PartiallyOrdered.scala3
-rw-r--r--src/library/scala/math/ScalaNumber.java2
-rw-r--r--src/library/scala/math/ScalaNumericConversions.scala19
-rw-r--r--src/library/scala/math/package.scala36
-rw-r--r--src/library/scala/package.scala13
-rw-r--r--src/library/scala/parallel/Future.scala39
-rw-r--r--src/library/scala/parallel/package.scala.disabled178
-rw-r--r--src/library/scala/ref/SoftReference.scala3
-rw-r--r--src/library/scala/reflect/ClassManifestDeprecatedApis.scala3
-rw-r--r--src/library/scala/reflect/Manifest.scala3
-rwxr-xr-xsrc/library/scala/reflect/NameTransformer.scala21
-rw-r--r--src/library/scala/reflect/NoManifest.scala3
-rw-r--r--src/library/scala/reflect/OptManifest.scala3
-rw-r--r--src/library/scala/reflect/package.scala16
-rw-r--r--src/library/scala/runtime/AbstractFunction0.scala2
-rw-r--r--src/library/scala/runtime/AbstractFunction1.scala2
-rw-r--r--src/library/scala/runtime/AbstractFunction10.scala2
-rw-r--r--src/library/scala/runtime/AbstractFunction11.scala2
-rw-r--r--src/library/scala/runtime/AbstractFunction12.scala2
-rw-r--r--src/library/scala/runtime/AbstractFunction13.scala2
-rw-r--r--src/library/scala/runtime/AbstractFunction14.scala2
-rw-r--r--src/library/scala/runtime/AbstractFunction15.scala2
-rw-r--r--src/library/scala/runtime/AbstractFunction16.scala2
-rw-r--r--src/library/scala/runtime/AbstractFunction17.scala2
-rw-r--r--src/library/scala/runtime/AbstractFunction18.scala2
-rw-r--r--src/library/scala/runtime/AbstractFunction19.scala2
-rw-r--r--src/library/scala/runtime/AbstractFunction2.scala2
-rw-r--r--src/library/scala/runtime/AbstractFunction20.scala2
-rw-r--r--src/library/scala/runtime/AbstractFunction21.scala2
-rw-r--r--src/library/scala/runtime/AbstractFunction22.scala2
-rw-r--r--src/library/scala/runtime/AbstractFunction3.scala2
-rw-r--r--src/library/scala/runtime/AbstractFunction4.scala2
-rw-r--r--src/library/scala/runtime/AbstractFunction5.scala2
-rw-r--r--src/library/scala/runtime/AbstractFunction6.scala2
-rw-r--r--src/library/scala/runtime/AbstractFunction7.scala2
-rw-r--r--src/library/scala/runtime/AbstractFunction8.scala2
-rw-r--r--src/library/scala/runtime/AbstractFunction9.scala2
-rw-r--r--src/library/scala/runtime/AbstractPartialFunction.scala5
-rw-r--r--src/library/scala/runtime/BooleanRef.java3
-rw-r--r--src/library/scala/runtime/Boxed.scala4
-rw-r--r--src/library/scala/runtime/ByteRef.java3
-rw-r--r--src/library/scala/runtime/CharRef.java3
-rw-r--r--src/library/scala/runtime/DoubleRef.java3
-rw-r--r--src/library/scala/runtime/FloatRef.java3
-rw-r--r--src/library/scala/runtime/IntRef.java3
-rw-r--r--src/library/scala/runtime/LongRef.java3
-rw-r--r--src/library/scala/runtime/MethodCache.scala4
-rw-r--r--src/library/scala/runtime/NonLocalReturnControl.scala4
-rw-r--r--src/library/scala/runtime/Nothing$.scala4
-rw-r--r--src/library/scala/runtime/Null$.scala9
-rw-r--r--src/library/scala/runtime/ObjectRef.java3
-rw-r--r--src/library/scala/runtime/RichBoolean.scala4
-rw-r--r--src/library/scala/runtime/RichByte.scala4
-rw-r--r--src/library/scala/runtime/RichChar.scala4
-rw-r--r--src/library/scala/runtime/RichException.scala4
-rw-r--r--src/library/scala/runtime/RichFloat.scala8
-rw-r--r--src/library/scala/runtime/RichInt.scala4
-rw-r--r--src/library/scala/runtime/RichLong.scala4
-rw-r--r--src/library/scala/runtime/RichShort.scala4
-rw-r--r--src/library/scala/runtime/ScalaNumberProxy.scala7
-rw-r--r--src/library/scala/runtime/ScalaRunTime.scala26
-rw-r--r--src/library/scala/runtime/SeqCharSequence.scala3
-rw-r--r--src/library/scala/runtime/ShortRef.java3
-rw-r--r--src/library/scala/runtime/StringAdd.scala5
-rw-r--r--src/library/scala/runtime/StringFormat.scala5
-rw-r--r--src/library/scala/runtime/Tuple2Zipped.scala30
-rw-r--r--src/library/scala/runtime/Tuple3Zipped.scala30
-rwxr-xr-xsrc/library/scala/runtime/VolatileBooleanRef.java3
-rwxr-xr-xsrc/library/scala/runtime/VolatileByteRef.java3
-rwxr-xr-xsrc/library/scala/runtime/VolatileCharRef.java3
-rwxr-xr-xsrc/library/scala/runtime/VolatileDoubleRef.java3
-rwxr-xr-xsrc/library/scala/runtime/VolatileFloatRef.java3
-rwxr-xr-xsrc/library/scala/runtime/VolatileIntRef.java3
-rwxr-xr-xsrc/library/scala/runtime/VolatileLongRef.java3
-rwxr-xr-xsrc/library/scala/runtime/VolatileObjectRef.java3
-rwxr-xr-xsrc/library/scala/runtime/VolatileShortRef.java3
-rw-r--r--src/library/scala/runtime/WorksheetSupport.scala2
-rw-r--r--src/library/scala/sys/BooleanProp.scala3
-rw-r--r--src/library/scala/sys/PropImpl.scala3
-rw-r--r--src/library/scala/sys/ShutdownHookThread.scala3
-rw-r--r--src/library/scala/sys/SystemProperties.scala4
-rw-r--r--src/library/scala/sys/process/BasicIO.scala33
-rw-r--r--src/library/scala/sys/process/Process.scala3
-rw-r--r--src/library/scala/sys/process/ProcessBuilder.scala49
-rw-r--r--src/library/scala/sys/process/ProcessBuilderImpl.scala33
-rw-r--r--src/library/scala/sys/process/ProcessIO.scala3
-rw-r--r--src/library/scala/sys/process/ProcessImpl.scala19
-rw-r--r--src/library/scala/sys/process/ProcessLogger.scala3
-rw-r--r--src/library/scala/sys/process/package.scala2
-rw-r--r--src/library/scala/testing/Benchmark.scala114
-rw-r--r--src/library/scala/testing/Show.scala75
-rw-r--r--src/library/scala/text/Document.scala2
-rw-r--r--src/library/scala/throws.scala2
-rw-r--r--src/library/scala/util/DynamicVariable.scala3
-rw-r--r--src/library/scala/util/Either.scala9
-rw-r--r--src/library/scala/util/Marshal.scala50
-rw-r--r--src/library/scala/util/MurmurHash.scala8
-rw-r--r--src/library/scala/util/Properties.scala5
-rw-r--r--src/library/scala/util/Random.scala7
-rw-r--r--src/library/scala/util/Try.scala5
-rw-r--r--src/library/scala/util/control/Breaks.scala3
-rw-r--r--src/library/scala/util/control/ControlThrowable.scala3
-rw-r--r--src/library/scala/util/control/Exception.scala3
-rw-r--r--src/library/scala/util/control/NonFatal.scala7
-rw-r--r--src/library/scala/util/control/TailCalls.scala3
-rw-r--r--src/library/scala/util/grammar/HedgeRHS.scala26
-rw-r--r--src/library/scala/util/grammar/TreeRHS.scala22
-rw-r--r--src/library/scala/util/hashing/ByteswapHashing.scala13
-rw-r--r--src/library/scala/util/hashing/Hashing.scala3
-rw-r--r--src/library/scala/util/hashing/MurmurHash3.scala5
-rw-r--r--src/library/scala/util/hashing/package.scala9
-rw-r--r--src/library/scala/util/logging/ConsoleLogger.scala26
-rw-r--r--src/library/scala/util/logging/Logged.scala33
-rw-r--r--src/library/scala/util/matching/Regex.scala115
-rw-r--r--src/library/scala/util/parsing/ast/AbstractSyntax.scala32
-rw-r--r--src/library/scala/util/parsing/ast/Binders.scala347
-rw-r--r--src/library/scala/util/parsing/combinator/ImplicitConversions.scala3
-rw-r--r--src/library/scala/util/parsing/combinator/JavaTokenParsers.scala3
-rw-r--r--src/library/scala/util/parsing/combinator/PackratParsers.scala4
-rw-r--r--src/library/scala/util/parsing/combinator/Parsers.scala11
-rw-r--r--src/library/scala/util/parsing/combinator/RegexParsers.scala3
-rw-r--r--src/library/scala/util/parsing/combinator/lexical/Lexical.scala3
-rw-r--r--src/library/scala/util/parsing/combinator/lexical/Scanners.scala6
-rw-r--r--src/library/scala/util/parsing/combinator/lexical/StdLexical.scala5
-rw-r--r--src/library/scala/util/parsing/combinator/syntactical/StandardTokenParsers.scala3
-rw-r--r--src/library/scala/util/parsing/combinator/syntactical/StdTokenParsers.scala3
-rw-r--r--src/library/scala/util/parsing/combinator/syntactical/TokenParsers.scala3
-rw-r--r--src/library/scala/util/parsing/combinator/testing/RegexTest.scala27
-rw-r--r--src/library/scala/util/parsing/combinator/testing/Tester.scala45
-rw-r--r--src/library/scala/util/parsing/combinator/token/StdTokens.scala3
-rw-r--r--src/library/scala/util/parsing/combinator/token/Tokens.scala3
-rw-r--r--src/library/scala/util/parsing/input/CharArrayReader.scala3
-rw-r--r--src/library/scala/util/parsing/input/CharSequenceReader.scala3
-rw-r--r--src/library/scala/util/parsing/input/NoPosition.scala3
-rw-r--r--src/library/scala/util/parsing/input/OffsetPosition.scala5
-rw-r--r--src/library/scala/util/parsing/input/PagedSeqReader.scala3
-rw-r--r--src/library/scala/util/parsing/input/Position.scala7
-rw-r--r--src/library/scala/util/parsing/input/Positional.scala3
-rw-r--r--src/library/scala/util/parsing/input/Reader.scala3
-rw-r--r--src/library/scala/util/parsing/input/StreamReader.scala5
-rw-r--r--src/library/scala/util/parsing/json/JSON.scala7
-rw-r--r--src/library/scala/util/parsing/json/Lexer.scala5
-rw-r--r--src/library/scala/util/parsing/json/Parser.scala11
-rw-r--r--src/library/scala/xml/Atom.scala3
-rw-r--r--src/library/scala/xml/Attribute.scala5
-rw-r--r--src/library/scala/xml/Comment.scala3
-rw-r--r--src/library/scala/xml/Document.scala3
-rwxr-xr-xsrc/library/scala/xml/Elem.scala5
-rw-r--r--src/library/scala/xml/EntityRef.scala3
-rw-r--r--src/library/scala/xml/Equality.scala7
-rw-r--r--src/library/scala/xml/Group.scala5
-rw-r--r--src/library/scala/xml/MalformedAttributeException.scala3
-rw-r--r--src/library/scala/xml/MetaData.scala3
-rw-r--r--src/library/scala/xml/NamespaceBinding.scala25
-rwxr-xr-xsrc/library/scala/xml/Node.scala15
-rw-r--r--src/library/scala/xml/NodeBuffer.scala3
-rw-r--r--src/library/scala/xml/NodeSeq.scala8
-rw-r--r--src/library/scala/xml/Null.scala3
-rw-r--r--src/library/scala/xml/PCData.scala3
-rw-r--r--src/library/scala/xml/PrefixedAttribute.scala3
-rwxr-xr-xsrc/library/scala/xml/PrettyPrinter.scala8
-rw-r--r--src/library/scala/xml/ProcInstr.scala3
-rw-r--r--src/library/scala/xml/QNode.scala3
-rw-r--r--src/library/scala/xml/SpecialNode.scala3
-rw-r--r--src/library/scala/xml/Text.scala3
-rw-r--r--src/library/scala/xml/TextBuffer.scala3
-rw-r--r--src/library/scala/xml/TopScope.scala3
-rw-r--r--src/library/scala/xml/TypeSymbol.scala3
-rw-r--r--src/library/scala/xml/Unparsed.scala3
-rw-r--r--src/library/scala/xml/UnprefixedAttribute.scala3
-rwxr-xr-xsrc/library/scala/xml/Utility.scala25
-rwxr-xr-xsrc/library/scala/xml/XML.scala5
-rw-r--r--src/library/scala/xml/Xhtml.scala3
-rw-r--r--src/library/scala/xml/dtd/ContentModel.scala8
-rw-r--r--src/library/scala/xml/dtd/ContentModelParser.scala77
-rw-r--r--src/library/scala/xml/dtd/DTD.scala3
-rw-r--r--src/library/scala/xml/dtd/Decl.scala5
-rw-r--r--src/library/scala/xml/dtd/DocType.scala15
-rw-r--r--src/library/scala/xml/dtd/ElementValidator.scala19
-rw-r--r--src/library/scala/xml/dtd/ExternalID.scala17
-rw-r--r--src/library/scala/xml/dtd/Scanner.scala33
-rw-r--r--src/library/scala/xml/dtd/Tokens.scala3
-rw-r--r--src/library/scala/xml/dtd/ValidationException.scala5
-rw-r--r--src/library/scala/xml/dtd/impl/Base.scala (renamed from src/library/scala/util/regexp/Base.scala)5
-rw-r--r--src/library/scala/xml/dtd/impl/BaseBerrySethi.scala (renamed from src/library/scala/util/automata/BaseBerrySethi.scala)6
-rw-r--r--src/library/scala/xml/dtd/impl/DetWordAutom.scala (renamed from src/library/scala/util/automata/DetWordAutom.scala)5
-rw-r--r--src/library/scala/xml/dtd/impl/Inclusion.scala (renamed from src/library/scala/util/automata/Inclusion.scala)5
-rw-r--r--src/library/scala/xml/dtd/impl/NondetWordAutom.scala (renamed from src/library/scala/util/automata/NondetWordAutom.scala)11
-rw-r--r--src/library/scala/xml/dtd/impl/PointedHedgeExp.scala (renamed from src/library/scala/util/regexp/PointedHedgeExp.scala)5
-rw-r--r--src/library/scala/xml/dtd/impl/SubsetConstruction.scala (renamed from src/library/scala/util/automata/SubsetConstruction.scala)7
-rw-r--r--src/library/scala/xml/dtd/impl/SyntaxError.scala (renamed from src/library/scala/util/regexp/SyntaxError.scala)5
-rw-r--r--src/library/scala/xml/dtd/impl/WordBerrySethi.scala (renamed from src/library/scala/util/automata/WordBerrySethi.scala)10
-rw-r--r--src/library/scala/xml/dtd/impl/WordExp.scala (renamed from src/library/scala/util/regexp/WordExp.scala)5
-rwxr-xr-xsrc/library/scala/xml/factory/Binder.scala5
-rw-r--r--src/library/scala/xml/factory/LoggedNodeFactory.scala23
-rw-r--r--src/library/scala/xml/factory/NodeFactory.scala3
-rw-r--r--src/library/scala/xml/factory/XMLLoader.scala7
-rw-r--r--src/library/scala/xml/include/CircularIncludeException.scala3
-rw-r--r--src/library/scala/xml/include/UnavailableResourceException.scala3
-rw-r--r--src/library/scala/xml/include/XIncludeException.scala3
-rw-r--r--src/library/scala/xml/include/sax/EncodingHeuristics.scala5
-rw-r--r--src/library/scala/xml/include/sax/Main.scala82
-rw-r--r--src/library/scala/xml/include/sax/XIncludeFilter.scala21
-rw-r--r--src/library/scala/xml/include/sax/XIncluder.scala35
-rwxr-xr-xsrc/library/scala/xml/parsing/ConstructingHandler.scala3
-rw-r--r--src/library/scala/xml/parsing/ConstructingParser.scala3
-rwxr-xr-xsrc/library/scala/xml/parsing/DefaultMarkupHandler.scala3
-rw-r--r--src/library/scala/xml/parsing/ExternalSources.scala3
-rw-r--r--src/library/scala/xml/parsing/FactoryAdapter.scala13
-rw-r--r--src/library/scala/xml/parsing/FatalError.scala3
-rwxr-xr-xsrc/library/scala/xml/parsing/MarkupHandler.scala11
-rwxr-xr-xsrc/library/scala/xml/parsing/MarkupParser.scala227
-rw-r--r--src/library/scala/xml/parsing/MarkupParserCommon.scala34
-rw-r--r--src/library/scala/xml/parsing/NoBindingFactoryAdapter.scala3
-rw-r--r--src/library/scala/xml/parsing/TokenTests.scala3
-rw-r--r--src/library/scala/xml/parsing/ValidatingMarkupHandler.scala27
-rw-r--r--src/library/scala/xml/parsing/XhtmlEntities.scala3
-rw-r--r--src/library/scala/xml/parsing/XhtmlParser.scala5
-rw-r--r--src/library/scala/xml/persistent/CachedFileStorage.scala24
-rw-r--r--src/library/scala/xml/persistent/Index.scala3
-rw-r--r--src/library/scala/xml/persistent/SetStorage.scala9
-rw-r--r--src/library/scala/xml/pull/XMLEvent.scala3
-rwxr-xr-xsrc/library/scala/xml/pull/XMLEventReader.scala9
-rw-r--r--src/library/scala/xml/pull/package.scala3
-rw-r--r--src/library/scala/xml/transform/BasicTransformer.scala5
-rw-r--r--src/library/scala/xml/transform/RewriteRule.scala7
-rw-r--r--src/library/scala/xml/transform/RuleTransformer.scala5
-rw-r--r--src/manual/scala/man1/scalac.scala24
-rw-r--r--src/manual/scala/tools/docutil/resources/index.html4
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/Assembly.java253
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/AssemblyName.java96
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/Attribute.java654
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/BindingFlags.java169
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/CallingConventions.java75
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/ConstructedType.java48
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/ConstructorInfo.java54
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/CustomAttributeProvider.java82
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/CustomModifier.java45
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/EventAttributes.java32
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/EventInfo.java58
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/FieldAttributes.java119
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/FieldInfo.java141
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/GenericParamAndConstraints.java40
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/HasCustomModifiers.java9
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/ICustomAttributeProvider.java57
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/MemberInfo.java47
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/MemberTypes.java81
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/MethodAttributes.java158
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/MethodBase.java198
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/MethodImplAttributes.java116
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/MethodInfo.java69
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/Module.java155
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/PEAssembly.java69
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/PEFile.java941
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/PEModule.java456
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/PEType.java419
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/ParameterAttributes.java72
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/ParameterInfo.java76
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/PrimitiveType.java62
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/PropertyAttributes.java45
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/PropertyInfo.java104
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/Type.java1142
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/TypeAttributes.java190
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/Version.java71
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/emit/AssemblyBuilder.scala125
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/emit/ConstructorBuilder.scala64
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/emit/FieldBuilder.scala60
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/emit/ICustomAttributeSetter.scala18
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/emit/ILGenerator.scala539
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/emit/ILPrinterVisitor.scala861
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/emit/Label.scala148
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/emit/LocalBuilder.scala44
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/emit/MethodBuilder.scala70
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/emit/ModuleBuilder.scala136
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/emit/MultipleFilesILPrinterVisitor.scala137
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/emit/OpCode.scala1948
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/emit/OpCodes.scala1205
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/emit/ParameterBuilder.scala44
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/emit/SingleFileILPrinterVisitor.scala93
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/emit/TypeBuilder.scala261
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/emit/Visitable.scala24
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/emit/Visitor.scala58
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/tests/CustomAttributesTest.java31
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/tests/JavaTypeTest.java18
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/tests/MembersTest.java100
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/tests/TableDump.java311
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/tests/Test.java92
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/util/PECustomMod.java23
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/util/PESection.java57
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/util/PEStream.java199
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/util/Signature.java129
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/util/Table.java1859
-rw-r--r--src/partest/README1
-rw-r--r--src/partest/scala/tools/partest/CompilerTest.scala5
-rw-r--r--src/partest/scala/tools/partest/DirectTest.scala10
-rw-r--r--src/partest/scala/tools/partest/IcodeTest.scala4
-rw-r--r--src/partest/scala/tools/partest/JavapTest.scala26
-rw-r--r--src/partest/scala/tools/partest/PartestDefaults.scala15
-rw-r--r--src/partest/scala/tools/partest/PartestTask.scala351
-rw-r--r--src/partest/scala/tools/partest/SecurityTest.scala13
-rw-r--r--src/partest/scala/tools/partest/TestKinds.scala67
-rw-r--r--src/partest/scala/tools/partest/TestState.scala65
-rw-r--r--src/partest/scala/tools/partest/TestUtil.scala10
-rw-r--r--src/partest/scala/tools/partest/instrumented/Instrumentation.scala1
-rw-r--r--src/partest/scala/tools/partest/nest/AntRunner.scala25
-rw-r--r--src/partest/scala/tools/partest/nest/CompileManager.scala164
-rw-r--r--src/partest/scala/tools/partest/nest/ConsoleFileManager.scala58
-rw-r--r--src/partest/scala/tools/partest/nest/ConsoleRunner.scala337
-rw-r--r--src/partest/scala/tools/partest/nest/DirectCompiler.scala105
-rw-r--r--src/partest/scala/tools/partest/nest/DirectRunner.scala75
-rw-r--r--src/partest/scala/tools/partest/nest/FileManager.scala57
-rw-r--r--src/partest/scala/tools/partest/nest/NestUI.scala135
-rw-r--r--src/partest/scala/tools/partest/nest/PathSettings.scala18
-rw-r--r--src/partest/scala/tools/partest/nest/ReflectiveRunner.scala34
-rw-r--r--src/partest/scala/tools/partest/nest/Runner.scala900
-rw-r--r--src/partest/scala/tools/partest/nest/RunnerManager.scala862
-rw-r--r--src/partest/scala/tools/partest/nest/RunnerUtils.scala29
-rw-r--r--src/partest/scala/tools/partest/nest/SBTRunner.scala29
-rw-r--r--src/partest/scala/tools/partest/nest/StreamCapture.scala53
-rw-r--r--src/partest/scala/tools/partest/nest/TestFile.scala81
-rw-r--r--src/partest/scala/tools/partest/package.scala179
-rw-r--r--src/partest/scala/tools/partest/utils/PrintMgr.scala52
-rw-r--r--src/reflect/scala/reflect/api/Annotations.scala34
-rw-r--r--src/reflect/scala/reflect/api/BuildUtils.scala5
-rw-r--r--src/reflect/scala/reflect/api/Constants.scala9
-rw-r--r--src/reflect/scala/reflect/api/Exprs.scala5
-rw-r--r--src/reflect/scala/reflect/api/FlagSets.scala9
-rw-r--r--src/reflect/scala/reflect/api/ImplicitTags.scala117
-rw-r--r--src/reflect/scala/reflect/api/Importers.scala3
-rw-r--r--src/reflect/scala/reflect/api/JavaMirrors.scala3
-rw-r--r--src/reflect/scala/reflect/api/JavaUniverse.scala3
-rw-r--r--src/reflect/scala/reflect/api/Mirror.scala9
-rw-r--r--src/reflect/scala/reflect/api/Mirrors.scala13
-rw-r--r--src/reflect/scala/reflect/api/Names.scala55
-rw-r--r--src/reflect/scala/reflect/api/Position.scala3
-rw-r--r--src/reflect/scala/reflect/api/Positions.scala10
-rw-r--r--src/reflect/scala/reflect/api/Printers.scala33
-rw-r--r--src/reflect/scala/reflect/api/Scopes.scala17
-rw-r--r--src/reflect/scala/reflect/api/StandardDefinitions.scala3
-rw-r--r--src/reflect/scala/reflect/api/StandardNames.scala8
-rw-r--r--src/reflect/scala/reflect/api/Symbols.scala53
-rw-r--r--src/reflect/scala/reflect/api/TagInterop.scala3
-rw-r--r--src/reflect/scala/reflect/api/TreeCreator.scala3
-rw-r--r--src/reflect/scala/reflect/api/Trees.scala395
-rw-r--r--src/reflect/scala/reflect/api/TypeCreator.scala3
-rw-r--r--src/reflect/scala/reflect/api/Types.scala114
-rw-r--r--src/reflect/scala/reflect/api/Universe.scala8
-rw-r--r--src/reflect/scala/reflect/api/package.scala7
-rw-r--r--src/reflect/scala/reflect/internal/AnnotationCheckers.scala11
-rw-r--r--src/reflect/scala/reflect/internal/AnnotationInfos.scala47
-rw-r--r--src/reflect/scala/reflect/internal/BaseTypeSeqs.scala23
-rw-r--r--src/reflect/scala/reflect/internal/BuildUtils.scala7
-rw-r--r--src/reflect/scala/reflect/internal/CapturedVariables.scala5
-rw-r--r--src/reflect/scala/reflect/internal/Chars.scala3
-rw-r--r--src/reflect/scala/reflect/internal/ClassfileConstants.scala17
-rw-r--r--src/reflect/scala/reflect/internal/Constants.scala29
-rw-r--r--src/reflect/scala/reflect/internal/Definitions.scala462
-rw-r--r--src/reflect/scala/reflect/internal/ExistentialsAndSkolems.scala4
-rw-r--r--src/reflect/scala/reflect/internal/FatalError.scala3
-rw-r--r--src/reflect/scala/reflect/internal/FlagSets.scala3
-rw-r--r--src/reflect/scala/reflect/internal/Flags.scala39
-rw-r--r--src/reflect/scala/reflect/internal/HasFlags.scala8
-rw-r--r--src/reflect/scala/reflect/internal/Importers.scala668
-rw-r--r--src/reflect/scala/reflect/internal/InfoTransformers.scala5
-rw-r--r--src/reflect/scala/reflect/internal/JMethodOrConstructor.scala47
-rw-r--r--src/reflect/scala/reflect/internal/JavaAccFlags.scala84
-rw-r--r--src/reflect/scala/reflect/internal/Kinds.scala193
-rw-r--r--src/reflect/scala/reflect/internal/Mirrors.scala37
-rw-r--r--src/reflect/scala/reflect/internal/MissingRequirementError.scala3
-rw-r--r--src/reflect/scala/reflect/internal/Mode.scala142
-rw-r--r--src/reflect/scala/reflect/internal/Names.scala156
-rw-r--r--src/reflect/scala/reflect/internal/Phase.scala3
-rw-r--r--src/reflect/scala/reflect/internal/Positions.scala278
-rw-r--r--src/reflect/scala/reflect/internal/Printers.scala92
-rw-r--r--src/reflect/scala/reflect/internal/PrivateWithin.scala27
-rw-r--r--src/reflect/scala/reflect/internal/Required.scala10
-rw-r--r--src/reflect/scala/reflect/internal/Scopes.scala176
-rw-r--r--src/reflect/scala/reflect/internal/StdAttachments.scala16
-rw-r--r--src/reflect/scala/reflect/internal/StdCreators.scala3
-rw-r--r--src/reflect/scala/reflect/internal/StdNames.scala361
-rw-r--r--src/reflect/scala/reflect/internal/SymbolTable.scala81
-rw-r--r--src/reflect/scala/reflect/internal/Symbols.scala842
-rw-r--r--src/reflect/scala/reflect/internal/TreeGen.scala81
-rw-r--r--src/reflect/scala/reflect/internal/TreeInfo.scala292
-rw-r--r--src/reflect/scala/reflect/internal/Trees.scala161
-rw-r--r--src/reflect/scala/reflect/internal/TypeDebugging.scala6
-rw-r--r--src/reflect/scala/reflect/internal/Types.scala4005
-rw-r--r--src/reflect/scala/reflect/internal/Variance.scala91
-rw-r--r--src/reflect/scala/reflect/internal/Variances.scala197
-rw-r--r--src/reflect/scala/reflect/internal/annotations/compileTimeOnly.scala3
-rw-r--r--src/reflect/scala/reflect/internal/pickling/ByteCodecs.scala11
-rw-r--r--src/reflect/scala/reflect/internal/pickling/PickleBuffer.scala31
-rw-r--r--src/reflect/scala/reflect/internal/pickling/PickleFormat.scala8
-rw-r--r--src/reflect/scala/reflect/internal/pickling/UnPickler.scala48
-rw-r--r--src/reflect/scala/reflect/internal/settings/AbsSettings.scala3
-rw-r--r--src/reflect/scala/reflect/internal/settings/MutableSettings.scala13
-rw-r--r--src/reflect/scala/reflect/internal/tpe/CommonOwners.scala51
-rw-r--r--src/reflect/scala/reflect/internal/tpe/GlbLubs.scala613
-rw-r--r--src/reflect/scala/reflect/internal/tpe/TypeComparers.scala581
-rw-r--r--src/reflect/scala/reflect/internal/tpe/TypeConstraints.scala283
-rw-r--r--src/reflect/scala/reflect/internal/tpe/TypeMaps.scala1164
-rw-r--r--src/reflect/scala/reflect/internal/tpe/TypeToStrings.scala30
-rw-r--r--src/reflect/scala/reflect/internal/transform/Erasure.scala60
-rw-r--r--src/reflect/scala/reflect/internal/transform/RefChecks.scala5
-rw-r--r--src/reflect/scala/reflect/internal/transform/Transforms.scala3
-rw-r--r--src/reflect/scala/reflect/internal/transform/UnCurry.scala13
-rw-r--r--src/reflect/scala/reflect/internal/util/AbstractFileClassLoader.scala (renamed from src/compiler/scala/tools/nsc/interpreter/AbstractFileClassLoader.scala)55
-rw-r--r--src/reflect/scala/reflect/internal/util/Collections.scala22
-rw-r--r--src/reflect/scala/reflect/internal/util/HashSet.scala6
-rw-r--r--src/reflect/scala/reflect/internal/util/Origins.scala5
-rw-r--r--src/reflect/scala/reflect/internal/util/Position.scala96
-rw-r--r--src/reflect/scala/reflect/internal/util/RangePosition.scala50
-rw-r--r--src/reflect/scala/reflect/internal/util/ScalaClassLoader.scala (renamed from src/compiler/scala/tools/nsc/util/ScalaClassLoader.scala)54
-rw-r--r--src/reflect/scala/reflect/internal/util/Set.scala5
-rw-r--r--src/reflect/scala/reflect/internal/util/SourceFile.scala9
-rw-r--r--src/reflect/scala/reflect/internal/util/Statistics.scala10
-rw-r--r--src/reflect/scala/reflect/internal/util/StringOps.scala43
-rw-r--r--src/reflect/scala/reflect/internal/util/StripMarginInterpolator.scala5
-rw-r--r--src/reflect/scala/reflect/internal/util/TableDef.scala13
-rw-r--r--src/reflect/scala/reflect/internal/util/ThreeValues.scala3
-rw-r--r--src/reflect/scala/reflect/internal/util/TraceSymbolActivity.scala43
-rw-r--r--src/reflect/scala/reflect/internal/util/TriState.scala26
-rw-r--r--src/reflect/scala/reflect/internal/util/WeakHashSet.scala6
-rw-r--r--src/reflect/scala/reflect/internal/util/package.scala32
-rw-r--r--src/reflect/scala/reflect/io/AbstractFile.scala93
-rw-r--r--src/reflect/scala/reflect/io/Directory.scala18
-rw-r--r--src/reflect/scala/reflect/io/File.scala95
-rw-r--r--src/reflect/scala/reflect/io/FileOperationException.scala3
-rw-r--r--src/reflect/scala/reflect/io/IOStats.scala32
-rw-r--r--src/reflect/scala/reflect/io/NoAbstractFile.scala9
-rw-r--r--src/reflect/scala/reflect/io/Path.scala88
-rw-r--r--src/reflect/scala/reflect/io/PlainFile.scala33
-rw-r--r--src/reflect/scala/reflect/io/Streamable.scala22
-rw-r--r--src/reflect/scala/reflect/io/VirtualDirectory.scala20
-rw-r--r--src/reflect/scala/reflect/io/VirtualFile.scala43
-rw-r--r--src/reflect/scala/reflect/io/ZipArchive.scala94
-rw-r--r--src/reflect/scala/reflect/macros/Aliases.scala3
-rw-r--r--src/reflect/scala/reflect/macros/Attachments.scala5
-rw-r--r--src/reflect/scala/reflect/macros/Context.scala8
-rw-r--r--src/reflect/scala/reflect/macros/Enclosures.scala72
-rw-r--r--src/reflect/scala/reflect/macros/Evals.scala5
-rw-r--r--src/reflect/scala/reflect/macros/ExprUtils.scala3
-rw-r--r--src/reflect/scala/reflect/macros/FrontEnds.scala5
-rw-r--r--src/reflect/scala/reflect/macros/Infrastructure.scala5
-rw-r--r--src/reflect/scala/reflect/macros/Macro.scala39
-rw-r--r--src/reflect/scala/reflect/macros/Names.scala17
-rw-r--r--src/reflect/scala/reflect/macros/Parsers.scala5
-rw-r--r--src/reflect/scala/reflect/macros/Reifiers.scala29
-rw-r--r--src/reflect/scala/reflect/macros/Synthetics.scala107
-rw-r--r--src/reflect/scala/reflect/macros/TreeBuilder.scala22
-rw-r--r--src/reflect/scala/reflect/macros/Typers.scala20
-rw-r--r--src/reflect/scala/reflect/macros/Universe.scala7
-rw-r--r--src/reflect/scala/reflect/macros/package.scala3
-rw-r--r--src/reflect/scala/reflect/runtime/JavaMirrors.scala358
-rw-r--r--src/reflect/scala/reflect/runtime/JavaUniverse.scala20
-rw-r--r--src/reflect/scala/reflect/runtime/ReflectSetup.scala3
-rw-r--r--src/reflect/scala/reflect/runtime/ReflectionUtils.scala8
-rw-r--r--src/reflect/scala/reflect/runtime/Settings.scala8
-rw-r--r--src/reflect/scala/reflect/runtime/SymbolLoaders.scala15
-rw-r--r--src/reflect/scala/reflect/runtime/SymbolTable.scala7
-rw-r--r--src/reflect/scala/reflect/runtime/SynchronizedOps.scala3
-rw-r--r--src/reflect/scala/reflect/runtime/SynchronizedSymbols.scala9
-rw-r--r--src/reflect/scala/reflect/runtime/SynchronizedTypes.scala3
-rw-r--r--src/reflect/scala/reflect/runtime/TwoWayCache.scala3
-rw-r--r--src/reflect/scala/reflect/runtime/package.scala7
-rw-r--r--src/repl/scala/tools/nsc/Interpreter.scala (renamed from src/compiler/scala/tools/nsc/Interpreter.scala)0
-rw-r--r--src/repl/scala/tools/nsc/InterpreterLoop.scala (renamed from src/compiler/scala/tools/nsc/InterpreterLoop.scala)0
-rw-r--r--src/repl/scala/tools/nsc/MainGenericRunner.scala (renamed from src/compiler/scala/tools/nsc/MainGenericRunner.scala)12
-rw-r--r--src/repl/scala/tools/nsc/interpreter/AbstractFileClassLoader.scala7
-rw-r--r--src/repl/scala/tools/nsc/interpreter/AbstractOrMissingHandler.scala (renamed from src/compiler/scala/tools/nsc/interpreter/AbstractOrMissingHandler.scala)0
-rw-r--r--src/repl/scala/tools/nsc/interpreter/ByteCode.scala (renamed from src/compiler/scala/tools/nsc/interpreter/ByteCode.scala)31
-rw-r--r--src/repl/scala/tools/nsc/interpreter/CommandLine.scala (renamed from src/compiler/scala/tools/nsc/interpreter/CommandLine.scala)1
-rw-r--r--src/repl/scala/tools/nsc/interpreter/Completion.scala (renamed from src/compiler/scala/tools/nsc/interpreter/Completion.scala)2
-rw-r--r--src/repl/scala/tools/nsc/interpreter/CompletionAware.scala (renamed from src/compiler/scala/tools/nsc/interpreter/CompletionAware.scala)30
-rw-r--r--src/repl/scala/tools/nsc/interpreter/CompletionOutput.scala (renamed from src/compiler/scala/tools/nsc/interpreter/CompletionOutput.scala)3
-rw-r--r--src/repl/scala/tools/nsc/interpreter/ConsoleReaderHelper.scala (renamed from src/compiler/scala/tools/nsc/interpreter/ConsoleReaderHelper.scala)7
-rw-r--r--src/repl/scala/tools/nsc/interpreter/Delimited.scala (renamed from src/compiler/scala/tools/nsc/interpreter/Delimited.scala)3
-rw-r--r--src/repl/scala/tools/nsc/interpreter/ExprTyper.scala (renamed from src/compiler/scala/tools/nsc/interpreter/ExprTyper.scala)32
-rw-r--r--src/repl/scala/tools/nsc/interpreter/Formatting.scala (renamed from src/compiler/scala/tools/nsc/interpreter/Formatting.scala)0
-rw-r--r--src/repl/scala/tools/nsc/interpreter/IBindings.java45
-rw-r--r--src/repl/scala/tools/nsc/interpreter/ILoop.scala (renamed from src/compiler/scala/tools/nsc/interpreter/ILoop.scala)405
-rw-r--r--src/repl/scala/tools/nsc/interpreter/IMain.scala (renamed from src/compiler/scala/tools/nsc/interpreter/IMain.scala)733
-rw-r--r--src/repl/scala/tools/nsc/interpreter/ISettings.scala (renamed from src/compiler/scala/tools/nsc/interpreter/ISettings.scala)11
-rw-r--r--src/repl/scala/tools/nsc/interpreter/Imports.scala (renamed from src/compiler/scala/tools/nsc/interpreter/Imports.scala)80
-rw-r--r--src/repl/scala/tools/nsc/interpreter/InteractiveReader.scala (renamed from src/compiler/scala/tools/nsc/interpreter/InteractiveReader.scala)10
-rw-r--r--src/repl/scala/tools/nsc/interpreter/JLineCompletion.scala (renamed from src/compiler/scala/tools/nsc/interpreter/JLineCompletion.scala)38
-rw-r--r--src/repl/scala/tools/nsc/interpreter/JLineReader.scala (renamed from src/compiler/scala/tools/nsc/interpreter/JLineReader.scala)12
-rw-r--r--src/repl/scala/tools/nsc/interpreter/JavapClass.scala692
-rw-r--r--src/repl/scala/tools/nsc/interpreter/Logger.scala (renamed from src/compiler/scala/tools/nsc/interpreter/Logger.scala)4
-rw-r--r--src/repl/scala/tools/nsc/interpreter/LoopCommands.scala (renamed from src/compiler/scala/tools/nsc/interpreter/LoopCommands.scala)31
-rw-r--r--src/repl/scala/tools/nsc/interpreter/MemberHandlers.scala (renamed from src/compiler/scala/tools/nsc/interpreter/MemberHandlers.scala)102
-rw-r--r--src/repl/scala/tools/nsc/interpreter/NamedParam.scala (renamed from src/compiler/scala/tools/nsc/interpreter/NamedParam.scala)5
-rw-r--r--src/repl/scala/tools/nsc/interpreter/Naming.scala (renamed from src/compiler/scala/tools/nsc/interpreter/Naming.scala)16
-rw-r--r--src/repl/scala/tools/nsc/interpreter/Parsed.scala (renamed from src/compiler/scala/tools/nsc/interpreter/Parsed.scala)9
-rw-r--r--src/repl/scala/tools/nsc/interpreter/Pasted.scala (renamed from src/compiler/scala/tools/nsc/interpreter/Pasted.scala)0
-rw-r--r--src/repl/scala/tools/nsc/interpreter/Phased.scala (renamed from src/compiler/scala/tools/nsc/interpreter/Phased.scala)23
-rw-r--r--src/repl/scala/tools/nsc/interpreter/Power.scala (renamed from src/compiler/scala/tools/nsc/interpreter/Power.scala)136
-rw-r--r--src/repl/scala/tools/nsc/interpreter/ReplConfig.scala (renamed from src/compiler/scala/tools/nsc/interpreter/ReplConfig.scala)12
-rw-r--r--src/repl/scala/tools/nsc/interpreter/ReplDir.scala48
-rw-r--r--src/repl/scala/tools/nsc/interpreter/ReplGlobal.scala (renamed from src/compiler/scala/tools/nsc/interpreter/ReplGlobal.scala)11
-rw-r--r--src/repl/scala/tools/nsc/interpreter/ReplProps.scala (renamed from src/compiler/scala/tools/nsc/interpreter/ReplProps.scala)4
-rw-r--r--src/repl/scala/tools/nsc/interpreter/ReplReporter.scala (renamed from src/compiler/scala/tools/nsc/interpreter/ReplReporter.scala)0
-rw-r--r--src/repl/scala/tools/nsc/interpreter/ReplStrings.scala (renamed from src/compiler/scala/tools/nsc/interpreter/ReplStrings.scala)3
-rw-r--r--src/repl/scala/tools/nsc/interpreter/ReplVals.scala (renamed from src/compiler/scala/tools/nsc/interpreter/ReplVals.scala)1
-rw-r--r--src/repl/scala/tools/nsc/interpreter/Results.scala (renamed from src/compiler/scala/tools/nsc/interpreter/Results.scala)0
-rw-r--r--src/repl/scala/tools/nsc/interpreter/RichClass.scala (renamed from src/compiler/scala/tools/nsc/interpreter/RichClass.scala)5
-rw-r--r--src/repl/scala/tools/nsc/interpreter/SimpleReader.scala (renamed from src/compiler/scala/tools/nsc/interpreter/SimpleReader.scala)8
-rw-r--r--src/repl/scala/tools/nsc/interpreter/StdReplTags.scala15
-rw-r--r--src/repl/scala/tools/nsc/interpreter/package.scala199
-rw-r--r--src/repl/scala/tools/nsc/interpreter/session/FileBackedHistory.scala (renamed from src/compiler/scala/tools/nsc/interpreter/session/FileBackedHistory.scala)0
-rw-r--r--src/repl/scala/tools/nsc/interpreter/session/History.scala (renamed from src/compiler/scala/tools/nsc/interpreter/session/History.scala)6
-rw-r--r--src/repl/scala/tools/nsc/interpreter/session/JLineHistory.scala (renamed from src/compiler/scala/tools/nsc/interpreter/session/JLineHistory.scala)0
-rw-r--r--src/repl/scala/tools/nsc/interpreter/session/SimpleHistory.scala (renamed from src/compiler/scala/tools/nsc/interpreter/session/SimpleHistory.scala)6
-rw-r--r--src/repl/scala/tools/nsc/interpreter/session/package.scala (renamed from src/compiler/scala/tools/nsc/interpreter/session/package.scala)0
-rw-r--r--src/scalacheck/org/scalacheck/Arbitrary.scala83
-rw-r--r--src/scalacheck/org/scalacheck/Arg.scala2
-rw-r--r--src/scalacheck/org/scalacheck/Commands.scala6
-rw-r--r--src/scalacheck/org/scalacheck/ConsoleReporter.scala2
-rw-r--r--src/scalacheck/org/scalacheck/Gen.scala41
-rw-r--r--src/scalacheck/org/scalacheck/Pretty.scala4
-rw-r--r--src/scalacheck/org/scalacheck/Prop.scala81
-rw-r--r--src/scalacheck/org/scalacheck/Properties.scala30
-rw-r--r--src/scalacheck/org/scalacheck/ScalaCheckFramework.scala92
-rw-r--r--src/scalacheck/org/scalacheck/Shrink.scala2
-rw-r--r--src/scalacheck/org/scalacheck/Test.scala207
-rw-r--r--src/scalacheck/org/scalacheck/util/Buildable.scala5
-rw-r--r--src/scalacheck/org/scalacheck/util/CmdLineParser.scala4
-rw-r--r--src/scalacheck/org/scalacheck/util/FreqMap.scala2
-rw-r--r--src/scalacheck/org/scalacheck/util/StdRand.scala2
-rw-r--r--src/scaladoc/scala/tools/ant/Scaladoc.scala (renamed from src/compiler/scala/tools/ant/Scaladoc.scala)4
-rw-r--r--src/scaladoc/scala/tools/nsc/ScalaDoc.scala (renamed from src/compiler/scala/tools/nsc/ScalaDoc.scala)11
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/DocFactory.scala (renamed from src/compiler/scala/tools/nsc/doc/DocFactory.scala)22
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/DocParser.scala (renamed from src/compiler/scala/tools/nsc/doc/DocParser.scala)9
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/Index.scala (renamed from src/compiler/scala/tools/nsc/doc/Index.scala)0
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/ScaladocAnalyzer.scala263
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/ScaladocGlobal.scala49
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/Settings.scala (renamed from src/compiler/scala/tools/nsc/doc/Settings.scala)30
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/Uncompilable.scala (renamed from src/compiler/scala/tools/nsc/doc/Uncompilable.scala)6
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/Universe.scala (renamed from src/compiler/scala/tools/nsc/doc/Universe.scala)0
-rwxr-xr-xsrc/scaladoc/scala/tools/nsc/doc/base/CommentFactoryBase.scala (renamed from src/compiler/scala/tools/nsc/doc/base/CommentFactoryBase.scala)70
-rwxr-xr-xsrc/scaladoc/scala/tools/nsc/doc/base/LinkTo.scala (renamed from src/compiler/scala/tools/nsc/doc/base/LinkTo.scala)2
-rwxr-xr-xsrc/scaladoc/scala/tools/nsc/doc/base/MemberLookupBase.scala (renamed from src/compiler/scala/tools/nsc/doc/base/MemberLookupBase.scala)18
-rwxr-xr-xsrc/scaladoc/scala/tools/nsc/doc/base/comment/Body.scala (renamed from src/compiler/scala/tools/nsc/doc/base/comment/Body.scala)2
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/base/comment/Comment.scala (renamed from src/compiler/scala/tools/nsc/doc/base/comment/Comment.scala)3
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/doclet/Generator.scala (renamed from src/compiler/scala/tools/nsc/doc/doclet/Generator.scala)2
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/doclet/Indexer.scala (renamed from src/compiler/scala/tools/nsc/doc/doclet/Indexer.scala)0
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/doclet/Universer.scala (renamed from src/compiler/scala/tools/nsc/doc/doclet/Universer.scala)0
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/Doclet.scala (renamed from src/compiler/scala/tools/nsc/doc/html/Doclet.scala)2
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/HtmlFactory.scala (renamed from src/compiler/scala/tools/nsc/doc/html/HtmlFactory.scala)4
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/HtmlPage.scala (renamed from src/compiler/scala/tools/nsc/doc/html/HtmlPage.scala)10
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/Page.scala (renamed from src/compiler/scala/tools/nsc/doc/html/Page.scala)13
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/SyntaxHigh.scala (renamed from src/compiler/scala/tools/nsc/doc/html/SyntaxHigh.scala)39
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/page/Index.scala (renamed from src/compiler/scala/tools/nsc/doc/html/page/Index.scala)21
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/page/IndexScript.scala (renamed from src/compiler/scala/tools/nsc/doc/html/page/IndexScript.scala)1
-rwxr-xr-xsrc/scaladoc/scala/tools/nsc/doc/html/page/ReferenceIndex.scala (renamed from src/compiler/scala/tools/nsc/doc/html/page/ReferenceIndex.scala)5
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/page/Source.scala (renamed from src/compiler/scala/tools/nsc/doc/html/page/Source.scala)3
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/page/Template.scala (renamed from src/compiler/scala/tools/nsc/doc/html/page/Template.scala)95
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/page/diagram/DiagramGenerator.scala (renamed from src/compiler/scala/tools/nsc/doc/html/page/diagram/DiagramGenerator.scala)0
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/page/diagram/DiagramStats.scala (renamed from src/compiler/scala/tools/nsc/doc/html/page/diagram/DiagramStats.scala)4
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/page/diagram/DotDiagramGenerator.scala (renamed from src/compiler/scala/tools/nsc/doc/html/page/diagram/DotDiagramGenerator.scala)23
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/page/diagram/DotRunner.scala (renamed from src/compiler/scala/tools/nsc/doc/html/page/diagram/DotRunner.scala)9
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/arrow-down.png (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/arrow-down.png)bin6232 -> 6232 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/arrow-right.png (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/arrow-right.png)bin6220 -> 6220 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/class.png (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/class.png)bin3357 -> 3357 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/class_big.png (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/class_big.png)bin7516 -> 7516 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/class_diagram.png (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/class_diagram.png)bin3910 -> 3910 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/class_to_object_big.png (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/class_to_object_big.png)bin9006 -> 9006 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/constructorsbg.gif (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/constructorsbg.gif)bin1206 -> 1206 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/conversionbg.gif (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/conversionbg.gif)bin167 -> 167 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/defbg-blue.gif (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/defbg-blue.gif)bin1544 -> 1544 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/defbg-green.gif (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/defbg-green.gif)bin1341 -> 1341 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/diagrams.css (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/diagrams.css)0
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/diagrams.js (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/diagrams.js)0
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/filter_box_left.png (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/filter_box_left.png)bin1692 -> 1692 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/filter_box_left.psd (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/filter_box_left.psd)bin30823 -> 30823 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/filter_box_left2.gif (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/filter_box_left2.gif)bin1462 -> 1462 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/filter_box_right.png (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/filter_box_right.png)bin1803 -> 1803 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/filter_box_right.psd (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/filter_box_right.psd)bin31295 -> 31295 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/filterbg.gif (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/filterbg.gif)bin1324 -> 1324 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/filterboxbarbg.gif (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/filterboxbarbg.gif)bin1104 -> 1104 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/filterboxbarbg.png (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/filterboxbarbg.png)bin965 -> 965 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/filterboxbg.gif (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/filterboxbg.gif)bin1366 -> 1366 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/fullcommenttopbg.gif (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/fullcommenttopbg.gif)bin1115 -> 1115 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/index.css (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/index.css)0
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/index.js (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/index.js)0
-rwxr-xr-xsrc/scaladoc/scala/tools/nsc/doc/html/resource/lib/jquery-ui.js (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/jquery-ui.js)0
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/jquery.js (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/jquery.js)0
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/jquery.layout.js (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/jquery.layout.js)0
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/modernizr.custom.js (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/modernizr.custom.js)0
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/navigation-li-a.png (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/navigation-li-a.png)bin1198 -> 1198 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/navigation-li.png (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/navigation-li.png)bin2441 -> 2441 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/object.png (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/object.png)bin3356 -> 3356 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/object_big.png (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/object_big.png)bin7653 -> 7653 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/object_diagram.png (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/object_diagram.png)bin3903 -> 3903 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/object_to_class_big.png (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/object_to_class_big.png)bin9158 -> 9158 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/object_to_trait_big.png (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/object_to_trait_big.png)bin9200 -> 9200 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/object_to_type_big.png (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/object_to_type_big.png)bin9158 -> 9158 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/ownderbg2.gif (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/ownderbg2.gif)bin1145 -> 1145 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/ownerbg.gif (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/ownerbg.gif)bin1118 -> 1118 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/ownerbg2.gif (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/ownerbg2.gif)bin1145 -> 1145 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/package.png (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/package.png)bin3335 -> 3335 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/package_big.png (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/package_big.png)bin7312 -> 7312 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/packagesbg.gif (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/packagesbg.gif)bin1201 -> 1201 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/raphael-min.js (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/raphael-min.js)0
-rwxr-xr-xsrc/scaladoc/scala/tools/nsc/doc/html/resource/lib/ref-index.css (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/ref-index.css)0
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/remove.png (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/remove.png)bin3186 -> 3186 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/remove.psd (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/remove.psd)bin28904 -> 28904 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/scheduler.js (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/scheduler.js)0
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/selected-implicits.png (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/selected-implicits.png)bin1150 -> 1150 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/selected-right-implicits.png (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/selected-right-implicits.png)bin646 -> 646 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/selected-right.png (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/selected-right.png)bin1380 -> 1380 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/selected.png (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/selected.png)bin1864 -> 1864 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/selected2-right.png (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/selected2-right.png)bin1434 -> 1434 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/selected2.png (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/selected2.png)bin1965 -> 1965 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/signaturebg.gif (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/signaturebg.gif)bin1214 -> 1214 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/signaturebg2.gif (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/signaturebg2.gif)bin1209 -> 1209 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/template.css (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/template.css)0
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/template.js (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/template.js)0
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/tools.tooltip.js (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/tools.tooltip.js)0
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/trait.png (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/trait.png)bin3374 -> 3374 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/trait_big.png (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/trait_big.png)bin7410 -> 7410 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/trait_diagram.png (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/trait_diagram.png)bin3882 -> 3882 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/trait_to_object_big.png (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/trait_to_object_big.png)bin8967 -> 8967 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/type.png (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/type.png)bin1445 -> 1445 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/type_big.png (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/type_big.png)bin4236 -> 4236 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/type_diagram.png (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/type_diagram.png)bin1841 -> 1841 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/type_tags.ai (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/type_tags.ai)0
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/type_to_object_big.png (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/type_to_object_big.png)bin4969 -> 4969 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/typebg.gif (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/typebg.gif)bin1206 -> 1206 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/unselected.png (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/unselected.png)bin1879 -> 1879 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/valuemembersbg.gif (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/valuemembersbg.gif)bin1206 -> 1206 bytes
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/versions.txt (renamed from src/compiler/scala/tools/nsc/doc/html/resource/lib/versions.txt)0
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/model/CommentFactory.scala (renamed from src/compiler/scala/tools/nsc/doc/model/CommentFactory.scala)42
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/model/Entity.scala (renamed from src/compiler/scala/tools/nsc/doc/model/Entity.scala)34
-rwxr-xr-xsrc/scaladoc/scala/tools/nsc/doc/model/IndexModelFactory.scala (renamed from src/compiler/scala/tools/nsc/doc/model/IndexModelFactory.scala)5
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/model/MemberLookup.scala (renamed from src/compiler/scala/tools/nsc/doc/model/MemberLookup.scala)7
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/model/ModelFactory.scala (renamed from src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala)266
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/model/ModelFactoryImplicitSupport.scala (renamed from src/compiler/scala/tools/nsc/doc/model/ModelFactoryImplicitSupport.scala)142
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/model/ModelFactoryTypeSupport.scala (renamed from src/compiler/scala/tools/nsc/doc/model/ModelFactoryTypeSupport.scala)15
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/model/TreeEntity.scala (renamed from src/compiler/scala/tools/nsc/doc/model/TreeEntity.scala)0
-rwxr-xr-xsrc/scaladoc/scala/tools/nsc/doc/model/TreeFactory.scala (renamed from src/compiler/scala/tools/nsc/doc/model/TreeFactory.scala)4
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/model/TypeEntity.scala (renamed from src/compiler/scala/tools/nsc/doc/model/TypeEntity.scala)0
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/model/ValueArgument.scala (renamed from src/compiler/scala/tools/nsc/doc/model/ValueArgument.scala)0
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/model/Visibility.scala (renamed from src/compiler/scala/tools/nsc/doc/model/Visibility.scala)0
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/model/diagram/Diagram.scala (renamed from src/compiler/scala/tools/nsc/doc/model/diagram/Diagram.scala)11
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/model/diagram/DiagramDirectiveParser.scala (renamed from src/compiler/scala/tools/nsc/doc/model/diagram/DiagramDirectiveParser.scala)12
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/model/diagram/DiagramFactory.scala (renamed from src/compiler/scala/tools/nsc/doc/model/diagram/DiagramFactory.scala)3
-rw-r--r--src/scaladoc/scala/tools/partest/ScaladocModelTest.scala (renamed from src/partest/scala/tools/partest/ScaladocModelTest.scala)12
-rw-r--r--src/scalap/decoder.properties2
-rw-r--r--src/scalap/scala/tools/scalap/Arguments.scala2
-rw-r--r--src/scalap/scala/tools/scalap/ByteArrayReader.scala3
-rw-r--r--src/scalap/scala/tools/scalap/Classfile.scala10
-rw-r--r--src/scalap/scala/tools/scalap/CodeWriter.scala7
-rw-r--r--src/scalap/scala/tools/scalap/JavaWriter.scala9
-rw-r--r--src/scalap/scala/tools/scalap/Main.scala3
-rw-r--r--src/scalap/scala/tools/scalap/MetaParser.scala3
-rw-r--r--src/scalap/scala/tools/scalap/scalax/rules/Rule.scala2
-rw-r--r--src/scalap/scala/tools/scalap/scalax/rules/scalasig/ScalaSig.scala52
-rw-r--r--src/scalap/scala/tools/scalap/scalax/rules/scalasig/ScalaSigPrinter.scala9
-rw-r--r--src/scalap/scala/tools/scalap/scalax/rules/scalasig/Type.scala1
-rw-r--r--src/swing/scala/swing/Action.scala7
-rw-r--r--src/swing/scala/swing/Button.scala3
-rw-r--r--src/swing/scala/swing/ButtonGroup.scala4
-rw-r--r--src/swing/scala/swing/ColorChooser.scala45
-rw-r--r--src/swing/scala/swing/ComboBox.scala3
-rw-r--r--src/swing/scala/swing/Component.scala4
-rw-r--r--src/swing/scala/swing/EditorPane.scala3
-rw-r--r--src/swing/scala/swing/Font.scala.disabled70
-rw-r--r--src/swing/scala/swing/FormattedTextField.scala4
-rw-r--r--src/swing/scala/swing/GridBagPanel.scala1
-rw-r--r--src/swing/scala/swing/ListView.scala2
-rw-r--r--src/swing/scala/swing/MainFrame.scala4
-rw-r--r--src/swing/scala/swing/Orientable.scala5
-rw-r--r--src/swing/scala/swing/Oriented.scala1
-rw-r--r--src/swing/scala/swing/PasswordField.scala4
-rw-r--r--src/swing/scala/swing/PopupMenu.scala65
-rw-r--r--src/swing/scala/swing/ProgressBar.scala4
-rw-r--r--src/swing/scala/swing/Reactions.scala2
-rw-r--r--src/swing/scala/swing/Slider.scala2
-rw-r--r--src/swing/scala/swing/SplitPane.scala3
-rw-r--r--src/swing/scala/swing/Swing.scala1
-rw-r--r--src/swing/scala/swing/SwingWorker.scala23
-rw-r--r--src/swing/scala/swing/TabbedPane.scala3
-rw-r--r--src/swing/scala/swing/TextArea.scala4
-rw-r--r--src/swing/scala/swing/TextComponent.scala3
-rw-r--r--src/swing/scala/swing/ToggleButton.scala3
-rw-r--r--src/swing/scala/swing/Window.scala3
-rw-r--r--src/swing/scala/swing/event/ColorChanged.scala (renamed from src/swing/scala/swing/SwingActor.scala)5
-rw-r--r--src/swing/scala/swing/event/PopupMenuEvent.scala (renamed from src/library/scala/SpecializableCompanion.scala)16
-rw-r--r--src/swing/scala/swing/package.scala3
1630 files changed, 35571 insertions, 72905 deletions
diff --git a/src/actors/scala/actors/AbstractActor.scala b/src/actors/scala/actors/AbstractActor.scala
index 5a4e0d9804..3c6299aab4 100644
--- a/src/actors/scala/actors/AbstractActor.scala
+++ b/src/actors/scala/actors/AbstractActor.scala
@@ -15,6 +15,7 @@ import scala.language.higherKinds
*
* @define actor actor
*/
+@deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0")
trait AbstractActor extends OutputChannel[Any] with CanReply[Any, Any] {
type Future[+R] <: scala.actors.Future[R]
diff --git a/src/actors/scala/actors/Actor.scala b/src/actors/scala/actors/Actor.scala
index 61124b3e85..75160fa18f 100644
--- a/src/actors/scala/actors/Actor.scala
+++ b/src/actors/scala/actors/Actor.scala
@@ -18,6 +18,7 @@ import scala.language.implicitConversions
*
* @author Philipp Haller
*/
+@deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0")
object Actor extends Combinators {
/** State of an actor.
@@ -398,6 +399,7 @@ object Actor extends Combinators {
* @define channel actor's mailbox
*/
@SerialVersionUID(-781154067877019505L)
+@deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0")
trait Actor extends InternalActor with ReplyReactor {
override def start(): Actor = synchronized {
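The @deprecated messages added throughout scala.actors point users at the akka.actor package and the Actors Migration Guide. As a rough illustration of what that move looks like, here is a minimal sketch, assuming the akka-actor artifact (classic API) is on the classpath; the actor and system names are made up for the example and this is not the guide's prescribed recipe.

// Deprecated scala.actors style:
import scala.actors.Actor
class OldPing extends Actor {
  def act(): Unit = loop {
    react {
      case "ping" => sender ! "pong"
    }
  }
}

// Rough akka.actor (classic) counterpart:
import akka.actor.{Actor => AkkaActor, ActorSystem, Props}
class NewPing extends AkkaActor {
  def receive = {
    case "ping" => sender() ! "pong"
  }
}

object PingMigrationSketch extends App {
  val system = ActorSystem("ping-demo")             // illustrative system name
  val ping   = system.actorOf(Props[NewPing], "ping")
  ping ! "ping"                                     // fire-and-forget send, as with scala.actors' !
}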
diff --git a/src/actors/scala/actors/ActorRef.scala b/src/actors/scala/actors/ActorRef.scala
index 5c1790669b..0da167aede 100644
--- a/src/actors/scala/actors/ActorRef.scala
+++ b/src/actors/scala/actors/ActorRef.scala
@@ -45,8 +45,9 @@ trait ActorRef {
* This is what is used to complete a Future that is returned from an ask/? call,
* when it times out.
*/
+@deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0")
class AskTimeoutException(message: String, cause: Throwable) extends TimeoutException {
def this(message: String) = this(message, null: Throwable)
}
-
+@deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0")
object PoisonPill
diff --git a/src/actors/scala/actors/CanReply.scala b/src/actors/scala/actors/CanReply.scala
index 3d264777a0..3f2c53f423 100644
--- a/src/actors/scala/actors/CanReply.scala
+++ b/src/actors/scala/actors/CanReply.scala
@@ -17,6 +17,7 @@ import scala.language.higherKinds
*
* @define actor `CanReply`
*/
+@deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0")
trait CanReply[-T, +R] {
type Future[+P] <: () => P
diff --git a/src/actors/scala/actors/Channel.scala b/src/actors/scala/actors/Channel.scala
index 9669ffbc17..ddf7b329c8 100644
--- a/src/actors/scala/actors/Channel.scala
+++ b/src/actors/scala/actors/Channel.scala
@@ -23,6 +23,7 @@ import scala.concurrent.SyncVar
*
* @author Philipp Haller
*/
+@deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0")
case class ! [a](ch: Channel[a], msg: a)
/**
@@ -34,6 +35,7 @@ case class ! [a](ch: Channel[a], msg: a)
* @define actor channel
* @define channel channel
*/
+@deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0")
class Channel[Msg](val receiver: InternalActor) extends InputChannel[Msg] with OutputChannel[Msg] with CanReply[Msg, Any] {
type Future[+P] = scala.actors.Future[P]
diff --git a/src/actors/scala/actors/DaemonActor.scala b/src/actors/scala/actors/DaemonActor.scala
index ffe8b75c27..04a4b4a40c 100644
--- a/src/actors/scala/actors/DaemonActor.scala
+++ b/src/actors/scala/actors/DaemonActor.scala
@@ -18,6 +18,7 @@ import scheduler.DaemonScheduler
*
* @author Erik Engbrecht
*/
+@deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0")
trait DaemonActor extends Actor {
override def scheduler: IScheduler = DaemonScheduler
}
diff --git a/src/actors/scala/actors/Debug.scala b/src/actors/scala/actors/Debug.scala
index cc51dfdbae..31ef53bdbe 100644
--- a/src/actors/scala/actors/Debug.scala
+++ b/src/actors/scala/actors/Debug.scala
@@ -14,6 +14,7 @@ package scala.actors
*
* @author Philipp Haller
*/
+@deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0")
object Debug extends Logger("") {}
private[actors] class Logger(tag: String) {
diff --git a/src/actors/scala/actors/Future.scala b/src/actors/scala/actors/Future.scala
index 3037f82141..9d123cb2d5 100644
--- a/src/actors/scala/actors/Future.scala
+++ b/src/actors/scala/actors/Future.scala
@@ -21,6 +21,7 @@ import scala.concurrent.SyncVar
*
* @author Philipp Haller
*/
+@deprecated("Use the scala.concurrent.Future instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0")
abstract class Future[+T] extends Responder[T] with Function0[T] {
@volatile
@@ -107,6 +108,7 @@ private class FutureActor[T](fun: SyncVar[T] => Unit, channel: Channel[T]) exten
*
* @author Philipp Haller
*/
+@deprecated("Use the object scala.concurrent.Future instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0")
object Futures {
/** Arranges for the asynchronous execution of `body`,
@@ -174,7 +176,7 @@ object Futures {
* or timeout + `System.currentTimeMillis()` is negative.
*/
def awaitAll(timeout: Long, fts: Future[Any]*): List[Option[Any]] = {
- var resultsMap: scala.collection.mutable.Map[Int, Option[Any]] = new scala.collection.mutable.HashMap[Int, Option[Any]]
+ val resultsMap: scala.collection.mutable.Map[Int, Option[Any]] = new scala.collection.mutable.HashMap[Int, Option[Any]]
var cnt = 0
val mappedFts = fts.map(ft =>
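The new deprecation text on Future and Futures points at scala.concurrent.Future. A minimal sketch of the suggested replacement for a Futures.awaitAll-style call, using only the standard library; the five-second timeout and the example futures are arbitrary values for illustration.

import scala.concurrent.{Await, Future}
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.duration._

object AwaitAllSketch extends App {
  val fts = List(Future(1), Future(2), Future(3))
  // Future.sequence collapses List[Future[Int]] into Future[List[Int]];
  // Await.result plays the role of the old awaitAll timeout parameter.
  val results = Await.result(Future.sequence(fts), 5.seconds)
  println(results)   // List(1, 2, 3)
}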
diff --git a/src/actors/scala/actors/IScheduler.scala b/src/actors/scala/actors/IScheduler.scala
index 35c2d32590..9d61d48561 100644
--- a/src/actors/scala/actors/IScheduler.scala
+++ b/src/actors/scala/actors/IScheduler.scala
@@ -17,6 +17,7 @@ package scala.actors
*
* @author Philipp Haller
*/
+@deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0")
trait IScheduler {
/** Submits a closure for execution.
diff --git a/src/actors/scala/actors/InputChannel.scala b/src/actors/scala/actors/InputChannel.scala
index 3d7dd7d49b..d2dd6d24df 100644
--- a/src/actors/scala/actors/InputChannel.scala
+++ b/src/actors/scala/actors/InputChannel.scala
@@ -16,6 +16,7 @@ package scala.actors
*
* @define channel `InputChannel`
*/
+@deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0")
trait InputChannel[+Msg] {
/**
diff --git a/src/actors/scala/actors/InternalActor.scala b/src/actors/scala/actors/InternalActor.scala
index ed9e25c1e6..5045ea56e8 100644
--- a/src/actors/scala/actors/InternalActor.scala
+++ b/src/actors/scala/actors/InternalActor.scala
@@ -524,6 +524,7 @@ private[actors] trait InternalActor extends AbstractActor with InternalReplyReac
*
* @author Philipp Haller
*/
+@deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0")
case object TIMEOUT
/**
@@ -534,6 +535,7 @@ case object TIMEOUT
* @param from the actor that terminated
* @param reason the reason that caused the actor to terminate
*/
+@deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0")
case class Exit(from: AbstractActor, reason: AnyRef)
/**
diff --git a/src/actors/scala/actors/InternalReplyReactor.scala b/src/actors/scala/actors/InternalReplyReactor.scala
index 38295138d4..c744984fd8 100644
--- a/src/actors/scala/actors/InternalReplyReactor.scala
+++ b/src/actors/scala/actors/InternalReplyReactor.scala
@@ -12,6 +12,7 @@ import java.util.{TimerTask}
*
* @define actor `ReplyReactor`
*/
+@deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0")
trait InternalReplyReactor extends Reactor[Any] with ReactorCanReply {
/* A list of the current senders. The head of the list is
diff --git a/src/actors/scala/actors/OutputChannel.scala b/src/actors/scala/actors/OutputChannel.scala
index fd87f813a0..f0f475e123 100644
--- a/src/actors/scala/actors/OutputChannel.scala
+++ b/src/actors/scala/actors/OutputChannel.scala
@@ -15,6 +15,7 @@ package scala.actors
*
* @define actor `OutputChannel`
*/
+@deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0")
trait OutputChannel[-Msg] {
/**
diff --git a/src/actors/scala/actors/Reactor.scala b/src/actors/scala/actors/Reactor.scala
index f025f6bc29..aa985b3a17 100644
--- a/src/actors/scala/actors/Reactor.scala
+++ b/src/actors/scala/actors/Reactor.scala
@@ -52,6 +52,7 @@ private[actors] object Reactor {
*
* @define actor reactor
*/
+@deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0")
trait Reactor[Msg >: Null] extends OutputChannel[Msg] with Combinators {
/* The $actor's mailbox. */
diff --git a/src/actors/scala/actors/ReplyReactor.scala b/src/actors/scala/actors/ReplyReactor.scala
index a2051d4354..01e6da000f 100644
--- a/src/actors/scala/actors/ReplyReactor.scala
+++ b/src/actors/scala/actors/ReplyReactor.scala
@@ -7,7 +7,7 @@
\* */
package scala.actors
-@deprecated("Scala Actors are being removed from the standard library. Please refer to the migration guide.", "2.10")
+@deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0")
trait ReplyReactor extends InternalReplyReactor {
protected[actors] def sender: OutputChannel[Any] = super.internalSender
}
diff --git a/src/actors/scala/actors/Scheduler.scala b/src/actors/scala/actors/Scheduler.scala
index dd6c110ed3..5b5b4a946d 100644
--- a/src/actors/scala/actors/Scheduler.scala
+++ b/src/actors/scala/actors/Scheduler.scala
@@ -18,6 +18,7 @@ import scheduler.{DelegatingScheduler, ForkJoinScheduler, ResizableThreadPoolSch
*
* @author Philipp Haller
*/
+@deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0")
object Scheduler extends DelegatingScheduler {
Debug.info("initializing "+this+"...")
diff --git a/src/actors/scala/actors/SchedulerAdapter.scala b/src/actors/scala/actors/SchedulerAdapter.scala
index fb28b3f93a..b8e66dd6cc 100644
--- a/src/actors/scala/actors/SchedulerAdapter.scala
+++ b/src/actors/scala/actors/SchedulerAdapter.scala
@@ -18,6 +18,7 @@ package scala.actors
*
* @author Philipp Haller
*/
+@deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0")
trait SchedulerAdapter extends IScheduler {
/** Submits a <code>Runnable</code> for execution.
diff --git a/src/actors/scala/actors/UncaughtException.scala b/src/actors/scala/actors/UncaughtException.scala
index f225987ddc..02b916a3b5 100644
--- a/src/actors/scala/actors/UncaughtException.scala
+++ b/src/actors/scala/actors/UncaughtException.scala
@@ -20,6 +20,7 @@ package scala.actors
* @author Philipp Haller
* @author Erik Engbrecht
*/
+@deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0")
case class UncaughtException(actor: InternalActor,
message: Option[Any],
sender: Option[OutputChannel[Any]],
diff --git a/src/actors/scala/actors/package.scala b/src/actors/scala/actors/package.scala
index d176487e03..ae960860cf 100644
--- a/src/actors/scala/actors/package.scala
+++ b/src/actors/scala/actors/package.scala
@@ -14,6 +14,7 @@ package scala
* A starting point for using the actors library would be [[scala.actors.Reactor]],
* [[scala.actors.ReplyReactor]], or [[scala.actors.Actor]] or their companion objects.
*
+ * @note As of release 2.10.1, replaced by <code>akka.actor</code> package. For migration of existing actors refer to the Actors Migration Guide.
*/
package object actors {
diff --git a/src/actors/scala/actors/remote/JavaSerializer.scala b/src/actors/scala/actors/remote/JavaSerializer.scala
index 6e9f4a7c51..7549bbf429 100644
--- a/src/actors/scala/actors/remote/JavaSerializer.scala
+++ b/src/actors/scala/actors/remote/JavaSerializer.scala
@@ -39,6 +39,7 @@ extends ObjectInputStream(in) {
/**
* @author Philipp Haller
*/
+@deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0")
class JavaSerializer(serv: Service, cl: ClassLoader) extends Serializer(serv) {
def serialize(o: AnyRef): Array[Byte] = {
val bos = new ByteArrayOutputStream()
diff --git a/src/actors/scala/actors/remote/RemoteActor.scala b/src/actors/scala/actors/remote/RemoteActor.scala
index f1644c27ba..799076a01f 100644
--- a/src/actors/scala/actors/remote/RemoteActor.scala
+++ b/src/actors/scala/actors/remote/RemoteActor.scala
@@ -38,6 +38,7 @@ package remote
*
* @author Philipp Haller
*/
+@deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0")
object RemoteActor {
private val kernels = new scala.collection.mutable.HashMap[InternalActor, NetKernel]
@@ -127,4 +128,5 @@ object RemoteActor {
*
* @author Philipp Haller
*/
+@deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0")
case class Node(address: String, port: Int)
diff --git a/src/actors/scala/actors/remote/Serializer.scala b/src/actors/scala/actors/remote/Serializer.scala
index e39b01fe24..7be4aa6583 100644
--- a/src/actors/scala/actors/remote/Serializer.scala
+++ b/src/actors/scala/actors/remote/Serializer.scala
@@ -16,6 +16,7 @@ import java.lang.ClassNotFoundException
import java.io.{DataInputStream, DataOutputStream, EOFException, IOException}
+@deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0")
abstract class Serializer(val service: Service) {
def serialize(o: AnyRef): Array[Byte]
def deserialize(a: Array[Byte]): AnyRef
diff --git a/src/actors/scala/actors/remote/Service.scala b/src/actors/scala/actors/remote/Service.scala
index 4584cc308b..d102df1970 100644
--- a/src/actors/scala/actors/remote/Service.scala
+++ b/src/actors/scala/actors/remote/Service.scala
@@ -14,6 +14,7 @@ package remote
* @version 0.9.10
* @author Philipp Haller
*/
+@deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0")
trait Service {
val kernel = new NetKernel(this)
val serializer: Serializer
diff --git a/src/actors/scala/actors/remote/TcpService.scala b/src/actors/scala/actors/remote/TcpService.scala
index 028dd3a083..ad78ff784c 100644
--- a/src/actors/scala/actors/remote/TcpService.scala
+++ b/src/actors/scala/actors/remote/TcpService.scala
@@ -24,6 +24,7 @@ import scala.util.Random
* @version 0.9.9
* @author Philipp Haller
*/
+@deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0")
object TcpService {
private val random = new Random
private val ports = new mutable.HashMap[Int, TcpService]
@@ -84,6 +85,7 @@ object TcpService {
* @version 0.9.10
* @author Philipp Haller
*/
+@deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0")
class TcpService(port: Int, cl: ClassLoader) extends Thread with Service {
val serializer: JavaSerializer = new JavaSerializer(this, cl)
diff --git a/src/actors/scala/actors/scheduler/ActorGC.scala b/src/actors/scala/actors/scheduler/ActorGC.scala
index 6d9a9458ba..a27799d132 100644
--- a/src/actors/scala/actors/scheduler/ActorGC.scala
+++ b/src/actors/scala/actors/scheduler/ActorGC.scala
@@ -23,6 +23,7 @@ import scala.collection.mutable
* (e.g. act method finishes, exit explicitly called, an exception is thrown),
* the ActorGC is informed via the `terminated` method.
*/
+@deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0")
trait ActorGC extends TerminationMonitor {
self: IScheduler =>
diff --git a/src/actors/scala/actors/scheduler/DaemonScheduler.scala b/src/actors/scala/actors/scheduler/DaemonScheduler.scala
index a2d6941ec1..b21a1aa3e6 100644
--- a/src/actors/scala/actors/scheduler/DaemonScheduler.scala
+++ b/src/actors/scala/actors/scheduler/DaemonScheduler.scala
@@ -14,6 +14,7 @@ package scheduler
*
* @author Erik Engbrecht
*/
+@deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0")
object DaemonScheduler extends DelegatingScheduler {
protected def makeNewScheduler(): IScheduler = {
diff --git a/src/actors/scala/actors/scheduler/ExecutorScheduler.scala b/src/actors/scala/actors/scheduler/ExecutorScheduler.scala
index a1d5666a24..4d3ebc3c04 100644
--- a/src/actors/scala/actors/scheduler/ExecutorScheduler.scala
+++ b/src/actors/scala/actors/scheduler/ExecutorScheduler.scala
@@ -19,6 +19,7 @@ import scala.concurrent.ThreadPoolRunner
*
* @author Philipp Haller
*/
+@deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0")
object ExecutorScheduler {
private def start(sched: ExecutorScheduler): ExecutorScheduler = {
@@ -58,6 +59,7 @@ object ExecutorScheduler {
*
* @author Philipp Haller
*/
+@deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0")
trait ExecutorScheduler extends Thread
with IScheduler with TerminationService
with ThreadPoolRunner {
diff --git a/src/actors/scala/actors/scheduler/ForkJoinScheduler.scala b/src/actors/scala/actors/scheduler/ForkJoinScheduler.scala
index ce67ffd037..75a98db6c8 100644
--- a/src/actors/scala/actors/scheduler/ForkJoinScheduler.scala
+++ b/src/actors/scala/actors/scheduler/ForkJoinScheduler.scala
@@ -9,6 +9,7 @@ import scala.concurrent.forkjoin._
*
* @author Philipp Haller
*/
+@deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0")
class ForkJoinScheduler(val initCoreSize: Int, val maxSize: Int, daemon: Boolean, fair: Boolean)
extends Runnable with IScheduler with TerminationMonitor {
@@ -62,7 +63,7 @@ class ForkJoinScheduler(val initCoreSize: Int, val maxSize: Int, daemon: Boolean
while (true) {
this.synchronized {
try {
- wait(CHECK_FREQ)
+ wait(CHECK_FREQ.toLong)
} catch {
case _: InterruptedException =>
}
diff --git a/src/actors/scala/actors/scheduler/ResizableThreadPoolScheduler.scala b/src/actors/scala/actors/scheduler/ResizableThreadPoolScheduler.scala
index f370d45094..342579db6c 100644
--- a/src/actors/scala/actors/scheduler/ResizableThreadPoolScheduler.scala
+++ b/src/actors/scala/actors/scheduler/ResizableThreadPoolScheduler.scala
@@ -22,6 +22,7 @@ import scala.concurrent.ManagedBlocker
*
* @author Philipp Haller
*/
+@deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0")
class ResizableThreadPoolScheduler(protected val terminate: Boolean,
protected val daemon: Boolean)
extends Thread with IScheduler with TerminationMonitor {
@@ -102,7 +103,7 @@ class ResizableThreadPoolScheduler(protected val terminate: Boolean,
while (true) {
this.synchronized {
try {
- wait(CHECK_FREQ)
+ wait(CHECK_FREQ.toLong)
} catch {
case _: InterruptedException =>
}
diff --git a/src/actors/scala/actors/scheduler/SingleThreadedScheduler.scala b/src/actors/scala/actors/scheduler/SingleThreadedScheduler.scala
index 04d1d2c5c1..03b235fe74 100644
--- a/src/actors/scala/actors/scheduler/SingleThreadedScheduler.scala
+++ b/src/actors/scala/actors/scheduler/SingleThreadedScheduler.scala
@@ -17,6 +17,7 @@ import scala.collection.mutable
*
* @author Philipp Haller
*/
+@deprecated("Use the akka.actor package instead. For migration from the scala.actors package refer to the Actors Migration Guide.", "2.11.0")
class SingleThreadedScheduler extends IScheduler {
private val tasks = new mutable.Queue[Runnable]
diff --git a/src/actors/scala/actors/scheduler/TerminationService.scala b/src/actors/scala/actors/scheduler/TerminationService.scala
index 280c8f4131..ed1805ee1e 100644
--- a/src/actors/scala/actors/scheduler/TerminationService.scala
+++ b/src/actors/scala/actors/scheduler/TerminationService.scala
@@ -39,7 +39,7 @@ private[scheduler] trait TerminationService extends TerminationMonitor {
while (true) {
this.synchronized {
try {
- wait(CHECK_FREQ)
+ wait(CHECK_FREQ.toLong)
} catch {
case _: InterruptedException =>
}
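The three wait(CHECK_FREQ) call sites above now widen the Int argument explicitly, since java.lang.Object.wait takes a long. A tiny self-contained sketch of the same pattern; the 100 ms value is illustrative.

object WaitSketch extends App {
  private val lock = new AnyRef
  private val CHECK_FREQ: Int = 100   // illustrative check frequency in milliseconds
  lock.synchronized {
    // Object.wait(long) would also accept a plain Int via implicit widening;
    // .toLong just spells that conversion out, as in the hunks above.
    lock.wait(CHECK_FREQ.toLong)      // returns after the timeout since nothing calls notify
  }
}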
diff --git a/src/android-library/scala/reflect/ScalaBeanInfo.scala b/src/android-library/scala/reflect/ScalaBeanInfo.scala
deleted file mode 100644
index 05c7bced71..0000000000
--- a/src/android-library/scala/reflect/ScalaBeanInfo.scala
+++ /dev/null
@@ -1 +0,0 @@
-/* ScalaBeanInfo does not exist for the Android target */
diff --git a/src/asm/scala/tools/asm/AnnotationVisitor.java b/src/asm/scala/tools/asm/AnnotationVisitor.java
index b96e730a73..c806ca71e8 100644
--- a/src/asm/scala/tools/asm/AnnotationVisitor.java
+++ b/src/asm/scala/tools/asm/AnnotationVisitor.java
@@ -54,8 +54,9 @@ public abstract class AnnotationVisitor {
/**
* Constructs a new {@link AnnotationVisitor}.
*
- * @param api the ASM API version implemented by this visitor. Must be one
- * of {@link Opcodes#ASM4}.
+ * @param api
+ * the ASM API version implemented by this visitor. Must be one
+ * of {@link Opcodes#ASM4}.
*/
public AnnotationVisitor(final int api) {
this(api, null);
@@ -64,15 +65,17 @@ public abstract class AnnotationVisitor {
/**
* Constructs a new {@link AnnotationVisitor}.
*
- * @param api the ASM API version implemented by this visitor. Must be one
- * of {@link Opcodes#ASM4}.
- * @param av the annotation visitor to which this visitor must delegate
- * method calls. May be null.
+ * @param api
+ * the ASM API version implemented by this visitor. Must be one
+ * of {@link Opcodes#ASM4}.
+ * @param av
+ * the annotation visitor to which this visitor must delegate
+ * method calls. May be null.
*/
public AnnotationVisitor(final int api, final AnnotationVisitor av) {
- /*if (api != Opcodes.ASM4) {
+ if (api != Opcodes.ASM4) {
throw new IllegalArgumentException();
- }*/
+ }
this.api = api;
this.av = av;
}
@@ -80,14 +83,17 @@ public abstract class AnnotationVisitor {
/**
* Visits a primitive value of the annotation.
*
- * @param name the value name.
- * @param value the actual value, whose type must be {@link Byte},
- * {@link Boolean}, {@link Character}, {@link Short}, {@link Integer}
- * , {@link Long}, {@link Float}, {@link Double}, {@link String} or
- * {@link Type} or OBJECT or ARRAY sort. This value can also be an
- * array of byte, boolean, short, char, int, long, float or double
- * values (this is equivalent to using {@link #visitArray visitArray}
- * and visiting each array element in turn, but is more convenient).
+ * @param name
+ * the value name.
+ * @param value
+ * the actual value, whose type must be {@link Byte},
+ * {@link Boolean}, {@link Character}, {@link Short},
+ * {@link Integer} , {@link Long}, {@link Float}, {@link Double},
+ * {@link String} or {@link Type} or OBJECT or ARRAY sort. This
+ * value can also be an array of byte, boolean, short, char, int,
+ * long, float or double values (this is equivalent to using
+ * {@link #visitArray visitArray} and visiting each array element
+ * in turn, but is more convenient).
*/
public void visit(String name, Object value) {
if (av != null) {
@@ -98,9 +104,12 @@ public abstract class AnnotationVisitor {
/**
* Visits an enumeration value of the annotation.
*
- * @param name the value name.
- * @param desc the class descriptor of the enumeration class.
- * @param value the actual enumeration value.
+ * @param name
+ * the value name.
+ * @param desc
+ * the class descriptor of the enumeration class.
+ * @param value
+ * the actual enumeration value.
*/
public void visitEnum(String name, String desc, String value) {
if (av != null) {
@@ -111,12 +120,14 @@ public abstract class AnnotationVisitor {
/**
* Visits a nested annotation value of the annotation.
*
- * @param name the value name.
- * @param desc the class descriptor of the nested annotation class.
+ * @param name
+ * the value name.
+ * @param desc
+ * the class descriptor of the nested annotation class.
* @return a visitor to visit the actual nested annotation value, or
- * <tt>null</tt> if this visitor is not interested in visiting
- * this nested annotation. <i>The nested annotation value must be
- * fully visited before calling other methods on this annotation
+ * <tt>null</tt> if this visitor is not interested in visiting this
+ * nested annotation. <i>The nested annotation value must be fully
+ * visited before calling other methods on this annotation
* visitor</i>.
*/
public AnnotationVisitor visitAnnotation(String name, String desc) {
@@ -132,10 +143,11 @@ public abstract class AnnotationVisitor {
* can be passed as value to {@link #visit visit}. This is what
* {@link ClassReader} does.
*
- * @param name the value name.
+ * @param name
+ * the value name.
* @return a visitor to visit the actual array value elements, or
- * <tt>null</tt> if this visitor is not interested in visiting
- * these values. The 'name' parameters passed to the methods of this
+ * <tt>null</tt> if this visitor is not interested in visiting these
+ * values. The 'name' parameters passed to the methods of this
* visitor are ignored. <i>All the array values must be visited
* before calling other methods on this annotation visitor</i>.
*/
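Besides reindenting the Javadoc, the AnnotationVisitor hunk above re-enables the ASM API version check in the constructor. A minimal sketch of a visitor that satisfies the restored check by passing Opcodes.ASM4, written in Scala against the compiler's bundled scala.tools.asm fork; the class name and println body are illustrative.

import scala.tools.asm.{AnnotationVisitor, Opcodes}

// Prints every primitive annotation value it is asked to visit.
class PrintingAnnotationVisitor extends AnnotationVisitor(Opcodes.ASM4) {
  override def visit(name: String, value: Object): Unit = {
    println(s"$name = $value")
    super.visit(name, value)   // delegates further if a chained visitor was supplied
  }
}
// With the check uncommented, passing anything other than Opcodes.ASM4 to the
// superclass constructor now throws IllegalArgumentException instead of being
// silently accepted.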
diff --git a/src/asm/scala/tools/asm/AnnotationWriter.java b/src/asm/scala/tools/asm/AnnotationWriter.java
index e530780249..8eb5b2ef48 100644
--- a/src/asm/scala/tools/asm/AnnotationWriter.java
+++ b/src/asm/scala/tools/asm/AnnotationWriter.java
@@ -90,20 +90,20 @@ final class AnnotationWriter extends AnnotationVisitor {
/**
* Constructs a new {@link AnnotationWriter}.
*
- * @param cw the class writer to which this annotation must be added.
- * @param named <tt>true<tt> if values are named, <tt>false</tt> otherwise.
- * @param bv where the annotation values must be stored.
- * @param parent where the number of annotation values must be stored.
- * @param offset where in <tt>parent</tt> the number of annotation values must
- * be stored.
+ * @param cw
+ * the class writer to which this annotation must be added.
+ * @param named
+ * <tt>true<tt> if values are named, <tt>false</tt> otherwise.
+ * @param bv
+ * where the annotation values must be stored.
+ * @param parent
+ * where the number of annotation values must be stored.
+ * @param offset
+ * where in <tt>parent</tt> the number of annotation values must
+ * be stored.
*/
- AnnotationWriter(
- final ClassWriter cw,
- final boolean named,
- final ByteVector bv,
- final ByteVector parent,
- final int offset)
- {
+ AnnotationWriter(final ClassWriter cw, final boolean named,
+ final ByteVector bv, final ByteVector parent, final int offset) {
super(Opcodes.ASM4);
this.cw = cw;
this.named = named;
@@ -190,11 +190,8 @@ final class AnnotationWriter extends AnnotationVisitor {
}
@Override
- public void visitEnum(
- final String name,
- final String desc,
- final String value)
- {
+ public void visitEnum(final String name, final String desc,
+ final String value) {
++size;
if (named) {
bv.putShort(cw.newUTF8(name));
@@ -203,10 +200,8 @@ final class AnnotationWriter extends AnnotationVisitor {
}
@Override
- public AnnotationVisitor visitAnnotation(
- final String name,
- final String desc)
- {
+ public AnnotationVisitor visitAnnotation(final String name,
+ final String desc) {
++size;
if (named) {
bv.putShort(cw.newUTF8(name));
@@ -259,7 +254,8 @@ final class AnnotationWriter extends AnnotationVisitor {
* Puts the annotations of this annotation writer list into the given byte
* vector.
*
- * @param out where the annotations must be put.
+ * @param out
+ * where the annotations must be put.
*/
void put(final ByteVector out) {
int n = 0;
@@ -286,15 +282,15 @@ final class AnnotationWriter extends AnnotationVisitor {
/**
* Puts the given annotation lists into the given byte vector.
*
- * @param panns an array of annotation writer lists.
- * @param off index of the first annotation to be written.
- * @param out where the annotations must be put.
+ * @param panns
+ * an array of annotation writer lists.
+ * @param off
+ * index of the first annotation to be written.
+ * @param out
+ * where the annotations must be put.
*/
- static void put(
- final AnnotationWriter[] panns,
- final int off,
- final ByteVector out)
- {
+ static void put(final AnnotationWriter[] panns, final int off,
+ final ByteVector out) {
int size = 1 + 2 * (panns.length - off);
for (int i = off; i < panns.length; ++i) {
size += panns[i] == null ? 0 : panns[i].getSize();
diff --git a/src/asm/scala/tools/asm/Attribute.java b/src/asm/scala/tools/asm/Attribute.java
index 408f21ce1e..ac40a758a2 100644
--- a/src/asm/scala/tools/asm/Attribute.java
+++ b/src/asm/scala/tools/asm/Attribute.java
@@ -55,7 +55,8 @@ public class Attribute {
/**
* Constructs a new empty attribute.
*
- * @param type the type of the attribute.
+ * @param type
+ * the type of the attribute.
*/
protected Attribute(final String type) {
this.type = type;
@@ -91,39 +92,39 @@ public class Attribute {
}
/**
- * Reads a {@link #type type} attribute. This method must return a <i>new</i>
- * {@link Attribute} object, of type {@link #type type}, corresponding to
- * the <tt>len</tt> bytes starting at the given offset, in the given class
- * reader.
+ * Reads a {@link #type type} attribute. This method must return a
+ * <i>new</i> {@link Attribute} object, of type {@link #type type},
+ * corresponding to the <tt>len</tt> bytes starting at the given offset, in
+ * the given class reader.
*
- * @param cr the class that contains the attribute to be read.
- * @param off index of the first byte of the attribute's content in {@link
- * ClassReader#b cr.b}. The 6 attribute header bytes, containing the
- * type and the length of the attribute, are not taken into account
- * here.
- * @param len the length of the attribute's content.
- * @param buf buffer to be used to call
- * {@link ClassReader#readUTF8 readUTF8},
- * {@link ClassReader#readClass(int,char[]) readClass} or
- * {@link ClassReader#readConst readConst}.
- * @param codeOff index of the first byte of code's attribute content in
- * {@link ClassReader#b cr.b}, or -1 if the attribute to be read is
- * not a code attribute. The 6 attribute header bytes, containing the
- * type and the length of the attribute, are not taken into account
- * here.
- * @param labels the labels of the method's code, or <tt>null</tt> if the
- * attribute to be read is not a code attribute.
+ * @param cr
+ * the class that contains the attribute to be read.
+ * @param off
+ * index of the first byte of the attribute's content in
+ * {@link ClassReader#b cr.b}. The 6 attribute header bytes,
+ * containing the type and the length of the attribute, are not
+ * taken into account here.
+ * @param len
+ * the length of the attribute's content.
+ * @param buf
+ * buffer to be used to call {@link ClassReader#readUTF8
+ * readUTF8}, {@link ClassReader#readClass(int,char[]) readClass}
+ * or {@link ClassReader#readConst readConst}.
+ * @param codeOff
+ * index of the first byte of code's attribute content in
+ * {@link ClassReader#b cr.b}, or -1 if the attribute to be read
+ * is not a code attribute. The 6 attribute header bytes,
+ * containing the type and the length of the attribute, are not
+ * taken into account here.
+ * @param labels
+ * the labels of the method's code, or <tt>null</tt> if the
+ * attribute to be read is not a code attribute.
* @return a <i>new</i> {@link Attribute} object corresponding to the given
* bytes.
*/
- protected Attribute read(
- final ClassReader cr,
- final int off,
- final int len,
- final char[] buf,
- final int codeOff,
- final Label[] labels)
- {
+ protected Attribute read(final ClassReader cr, final int off,
+ final int len, final char[] buf, final int codeOff,
+ final Label[] labels) {
Attribute attr = new Attribute(type);
attr.value = new byte[len];
System.arraycopy(cr.b, off, attr.value, 0, len);
@@ -133,30 +134,30 @@ public class Attribute {
/**
* Returns the byte array form of this attribute.
*
- * @param cw the class to which this attribute must be added. This parameter
- * can be used to add to the constant pool of this class the items
- * that corresponds to this attribute.
- * @param code the bytecode of the method corresponding to this code
- * attribute, or <tt>null</tt> if this attribute is not a code
- * attributes.
- * @param len the length of the bytecode of the method corresponding to this
- * code attribute, or <tt>null</tt> if this attribute is not a code
- * attribute.
- * @param maxStack the maximum stack size of the method corresponding to
- * this code attribute, or -1 if this attribute is not a code
- * attribute.
- * @param maxLocals the maximum number of local variables of the method
- * corresponding to this code attribute, or -1 if this attribute is
- * not a code attribute.
+ * @param cw
+ * the class to which this attribute must be added. This
+ * parameter can be used to add to the constant pool of this
+ * class the items that corresponds to this attribute.
+ * @param code
+ * the bytecode of the method corresponding to this code
+ * attribute, or <tt>null</tt> if this attribute is not a code
+ * attributes.
+ * @param len
+ * the length of the bytecode of the method corresponding to this
+ * code attribute, or <tt>null</tt> if this attribute is not a
+ * code attribute.
+ * @param maxStack
+ * the maximum stack size of the method corresponding to this
+ * code attribute, or -1 if this attribute is not a code
+ * attribute.
+ * @param maxLocals
+ * the maximum number of local variables of the method
+ * corresponding to this code attribute, or -1 if this attribute
+ * is not a code attribute.
* @return the byte array form of this attribute.
*/
- protected ByteVector write(
- final ClassWriter cw,
- final byte[] code,
- final int len,
- final int maxStack,
- final int maxLocals)
- {
+ protected ByteVector write(final ClassWriter cw, final byte[] code,
+ final int len, final int maxStack, final int maxLocals) {
ByteVector v = new ByteVector();
v.data = value;
v.length = value.length;
@@ -181,30 +182,30 @@ public class Attribute {
/**
* Returns the size of all the attributes in this attribute list.
*
- * @param cw the class writer to be used to convert the attributes into byte
- * arrays, with the {@link #write write} method.
- * @param code the bytecode of the method corresponding to these code
- * attributes, or <tt>null</tt> if these attributes are not code
- * attributes.
- * @param len the length of the bytecode of the method corresponding to
- * these code attributes, or <tt>null</tt> if these attributes are
- * not code attributes.
- * @param maxStack the maximum stack size of the method corresponding to
- * these code attributes, or -1 if these attributes are not code
- * attributes.
- * @param maxLocals the maximum number of local variables of the method
- * corresponding to these code attributes, or -1 if these attributes
- * are not code attributes.
+ * @param cw
+ * the class writer to be used to convert the attributes into
+ * byte arrays, with the {@link #write write} method.
+ * @param code
+ * the bytecode of the method corresponding to these code
+ * attributes, or <tt>null</tt> if these attributes are not code
+ * attributes.
+ * @param len
+ * the length of the bytecode of the method corresponding to
+ * these code attributes, or <tt>null</tt> if these attributes
+ * are not code attributes.
+ * @param maxStack
+ * the maximum stack size of the method corresponding to these
+ * code attributes, or -1 if these attributes are not code
+ * attributes.
+ * @param maxLocals
+ * the maximum number of local variables of the method
+ * corresponding to these code attributes, or -1 if these
+ * attributes are not code attributes.
* @return the size of all the attributes in this attribute list. This size
* includes the size of the attribute headers.
*/
- final int getSize(
- final ClassWriter cw,
- final byte[] code,
- final int len,
- final int maxStack,
- final int maxLocals)
- {
+ final int getSize(final ClassWriter cw, final byte[] code, final int len,
+ final int maxStack, final int maxLocals) {
Attribute attr = this;
int size = 0;
while (attr != null) {
@@ -219,30 +220,30 @@ public class Attribute {
* Writes all the attributes of this attribute list in the given byte
* vector.
*
- * @param cw the class writer to be used to convert the attributes into byte
- * arrays, with the {@link #write write} method.
- * @param code the bytecode of the method corresponding to these code
- * attributes, or <tt>null</tt> if these attributes are not code
- * attributes.
- * @param len the length of the bytecode of the method corresponding to
- * these code attributes, or <tt>null</tt> if these attributes are
- * not code attributes.
- * @param maxStack the maximum stack size of the method corresponding to
- * these code attributes, or -1 if these attributes are not code
- * attributes.
- * @param maxLocals the maximum number of local variables of the method
- * corresponding to these code attributes, or -1 if these attributes
- * are not code attributes.
- * @param out where the attributes must be written.
+ * @param cw
+ * the class writer to be used to convert the attributes into
+ * byte arrays, with the {@link #write write} method.
+ * @param code
+ * the bytecode of the method corresponding to these code
+ * attributes, or <tt>null</tt> if these attributes are not code
+ * attributes.
+ * @param len
+ * the length of the bytecode of the method corresponding to
+ * these code attributes, or <tt>null</tt> if these attributes
+ * are not code attributes.
+ * @param maxStack
+ * the maximum stack size of the method corresponding to these
+ * code attributes, or -1 if these attributes are not code
+ * attributes.
+ * @param maxLocals
+ * the maximum number of local variables of the method
+ * corresponding to these code attributes, or -1 if these
+ * attributes are not code attributes.
+ * @param out
+ * where the attributes must be written.
*/
- final void put(
- final ClassWriter cw,
- final byte[] code,
- final int len,
- final int maxStack,
- final int maxLocals,
- final ByteVector out)
- {
+ final void put(final ClassWriter cw, final byte[] code, final int len,
+ final int maxStack, final int maxLocals, final ByteVector out) {
Attribute attr = this;
while (attr != null) {
ByteVector b = attr.write(cw, code, len, maxStack, maxLocals);
diff --git a/src/asm/scala/tools/asm/ByteVector.java b/src/asm/scala/tools/asm/ByteVector.java
index 5081f0184b..2bc63eb384 100644
--- a/src/asm/scala/tools/asm/ByteVector.java
+++ b/src/asm/scala/tools/asm/ByteVector.java
@@ -59,7 +59,8 @@ public class ByteVector {
* Constructs a new {@link ByteVector ByteVector} with the given initial
* size.
*
- * @param initialSize the initial size of the byte vector to be constructed.
+ * @param initialSize
+ * the initial size of the byte vector to be constructed.
*/
public ByteVector(final int initialSize) {
data = new byte[initialSize];
@@ -69,7 +70,8 @@ public class ByteVector {
* Puts a byte into this byte vector. The byte vector is automatically
* enlarged if necessary.
*
- * @param b a byte.
+ * @param b
+ * a byte.
* @return this byte vector.
*/
public ByteVector putByte(final int b) {
@@ -86,8 +88,10 @@ public class ByteVector {
* Puts two bytes into this byte vector. The byte vector is automatically
* enlarged if necessary.
*
- * @param b1 a byte.
- * @param b2 another byte.
+ * @param b1
+ * a byte.
+ * @param b2
+ * another byte.
* @return this byte vector.
*/
ByteVector put11(final int b1, final int b2) {
@@ -106,7 +110,8 @@ public class ByteVector {
* Puts a short into this byte vector. The byte vector is automatically
* enlarged if necessary.
*
- * @param s a short.
+ * @param s
+ * a short.
* @return this byte vector.
*/
public ByteVector putShort(final int s) {
@@ -125,8 +130,10 @@ public class ByteVector {
* Puts a byte and a short into this byte vector. The byte vector is
* automatically enlarged if necessary.
*
- * @param b a byte.
- * @param s a short.
+ * @param b
+ * a byte.
+ * @param s
+ * a short.
* @return this byte vector.
*/
ByteVector put12(final int b, final int s) {
@@ -146,7 +153,8 @@ public class ByteVector {
* Puts an int into this byte vector. The byte vector is automatically
* enlarged if necessary.
*
- * @param i an int.
+ * @param i
+ * an int.
* @return this byte vector.
*/
public ByteVector putInt(final int i) {
@@ -167,7 +175,8 @@ public class ByteVector {
* Puts a long into this byte vector. The byte vector is automatically
* enlarged if necessary.
*
- * @param l a long.
+ * @param l
+ * a long.
* @return this byte vector.
*/
public ByteVector putLong(final long l) {
@@ -194,7 +203,8 @@ public class ByteVector {
* Puts an UTF8 string into this byte vector. The byte vector is
* automatically enlarged if necessary.
*
- * @param s a String.
+ * @param s
+ * a String.
* @return this byte vector.
*/
public ByteVector putUTF8(final String s) {
@@ -259,14 +269,16 @@ public class ByteVector {
* Puts an array of bytes into this byte vector. The byte vector is
* automatically enlarged if necessary.
*
- * @param b an array of bytes. May be <tt>null</tt> to put <tt>len</tt>
- * null bytes into this byte vector.
- * @param off index of the fist byte of b that must be copied.
- * @param len number of bytes of b that must be copied.
+ * @param b
+ * an array of bytes. May be <tt>null</tt> to put <tt>len</tt>
+ * null bytes into this byte vector.
+ * @param off
+ * index of the fist byte of b that must be copied.
+ * @param len
+ * number of bytes of b that must be copied.
* @return this byte vector.
*/
- public ByteVector putByteArray(final byte[] b, final int off, final int len)
- {
+ public ByteVector putByteArray(final byte[] b, final int off, final int len) {
if (length + len > data.length) {
enlarge(len);
}
@@ -280,8 +292,9 @@ public class ByteVector {
/**
* Enlarge this byte vector so that it can receive n more bytes.
*
- * @param size number of additional bytes that this byte vector should be
- * able to receive.
+ * @param size
+ * number of additional bytes that this byte vector should be
+ * able to receive.
*/
private void enlarge(final int size) {
int length1 = 2 * data.length;
diff --git a/src/asm/scala/tools/asm/ClassReader.java b/src/asm/scala/tools/asm/ClassReader.java
index f3287d41ae..cc655c1b62 100644
--- a/src/asm/scala/tools/asm/ClassReader.java
+++ b/src/asm/scala/tools/asm/ClassReader.java
@@ -112,9 +112,8 @@ public class ClassReader {
public final byte[] b;
/**
- * The start index of each constant pool item in {@link #b b}, plus one.
- * The one byte offset skips the constant pool item tag that indicates its
- * type.
+ * The start index of each constant pool item in {@link #b b}, plus one. The
+ * one byte offset skips the constant pool item tag that indicates its type.
*/
private final int[] items;
@@ -147,7 +146,8 @@ public class ClassReader {
/**
* Constructs a new {@link ClassReader} object.
*
- * @param b the bytecode of the class to be read.
+ * @param b
+ * the bytecode of the class to be read.
*/
public ClassReader(final byte[] b) {
this(b, 0, b.length);
@@ -156,14 +156,17 @@ public class ClassReader {
/**
* Constructs a new {@link ClassReader} object.
*
- * @param b the bytecode of the class to be read.
- * @param off the start offset of the class data.
- * @param len the length of the class data.
+ * @param b
+ * the bytecode of the class to be read.
+ * @param off
+ * the start offset of the class data.
+ * @param len
+ * the length of the class data.
*/
public ClassReader(final byte[] b, final int off, final int len) {
this.b = b;
// checks the class version
- if (readShort(6) > Opcodes.V1_7) {
+ if (readShort(off + 6) > Opcodes.V1_7) {
throw new IllegalArgumentException();
}
// parses the constant pool
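The readShort(off + 6) change above fixes the class-version check for the ClassReader(byte[], off, len) constructor when the class bytes do not start at index 0 of the array. A small sketch of the case it repairs; the file path and padding size are made up.

import java.nio.file.{Files, Paths}
import scala.tools.asm.ClassReader

object OffsetReadSketch extends App {
  val raw     = Files.readAllBytes(Paths.get("Foo.class"))   // illustrative path
  val padding = 16                                           // illustrative leading bytes
  val buf     = new Array[Byte](padding + raw.length)
  System.arraycopy(raw, 0, buf, padding, raw.length)
  // Before the fix, the version check read the short at absolute index 6,
  // i.e. inside the padding; with off + 6 it reads the real class file version.
  val cr = new ClassReader(buf, padding, raw.length)
  println(cr.getClassName)
}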
@@ -176,35 +179,35 @@ public class ClassReader {
items[i] = index + 1;
int size;
switch (b[index]) {
- case ClassWriter.FIELD:
- case ClassWriter.METH:
- case ClassWriter.IMETH:
- case ClassWriter.INT:
- case ClassWriter.FLOAT:
- case ClassWriter.NAME_TYPE:
- case ClassWriter.INDY:
- size = 5;
- break;
- case ClassWriter.LONG:
- case ClassWriter.DOUBLE:
- size = 9;
- ++i;
- break;
- case ClassWriter.UTF8:
- size = 3 + readUnsignedShort(index + 1);
- if (size > max) {
- max = size;
- }
- break;
- case ClassWriter.HANDLE:
- size = 4;
- break;
- // case ClassWriter.CLASS:
- // case ClassWriter.STR:
- // case ClassWriter.MTYPE
- default:
- size = 3;
- break;
+ case ClassWriter.FIELD:
+ case ClassWriter.METH:
+ case ClassWriter.IMETH:
+ case ClassWriter.INT:
+ case ClassWriter.FLOAT:
+ case ClassWriter.NAME_TYPE:
+ case ClassWriter.INDY:
+ size = 5;
+ break;
+ case ClassWriter.LONG:
+ case ClassWriter.DOUBLE:
+ size = 9;
+ ++i;
+ break;
+ case ClassWriter.UTF8:
+ size = 3 + readUnsignedShort(index + 1);
+ if (size > max) {
+ max = size;
+ }
+ break;
+ case ClassWriter.HANDLE:
+ size = 4;
+ break;
+ // case ClassWriter.CLASS:
+ // case ClassWriter.STR:
+ // case ClassWriter.MTYPE
+ default:
+ size = 3;
+ break;
}
index += size;
}
@@ -249,8 +252,7 @@ public class ClassReader {
* @see ClassVisitor#visit(int, int, String, String, String, String[])
*/
public String getSuperName() {
- int n = items[readUnsignedShort(header + 4)];
- return n == 0 ? null : readUTF8(n, new char[maxStringLength]);
+ return readClass(header + 4, new char[maxStringLength]);
}
/**
@@ -280,7 +282,8 @@ public class ClassReader {
* Copies the constant pool data into the given {@link ClassWriter}. Should
* be called before the {@link #accept(ClassVisitor,int)} method.
*
- * @param classWriter the {@link ClassWriter} to copy constant pool into.
+ * @param classWriter
+ * the {@link ClassWriter} to copy constant pool into.
*/
void copyPool(final ClassWriter classWriter) {
char[] buf = new char[maxStringLength];
@@ -292,82 +295,63 @@ public class ClassReader {
Item item = new Item(i);
int nameType;
switch (tag) {
- case ClassWriter.FIELD:
- case ClassWriter.METH:
- case ClassWriter.IMETH:
- nameType = items[readUnsignedShort(index + 2)];
- item.set(tag,
- readClass(index, buf),
- readUTF8(nameType, buf),
- readUTF8(nameType + 2, buf));
- break;
-
- case ClassWriter.INT:
- item.set(readInt(index));
- break;
-
- case ClassWriter.FLOAT:
- item.set(Float.intBitsToFloat(readInt(index)));
- break;
-
- case ClassWriter.NAME_TYPE:
- item.set(tag,
- readUTF8(index, buf),
- readUTF8(index + 2, buf),
- null);
- break;
-
- case ClassWriter.LONG:
- item.set(readLong(index));
- ++i;
- break;
-
- case ClassWriter.DOUBLE:
- item.set(Double.longBitsToDouble(readLong(index)));
- ++i;
- break;
-
- case ClassWriter.UTF8: {
- String s = strings[i];
- if (s == null) {
- index = items[i];
- s = strings[i] = readUTF(index + 2,
- readUnsignedShort(index),
- buf);
- }
- item.set(tag, s, null, null);
+ case ClassWriter.FIELD:
+ case ClassWriter.METH:
+ case ClassWriter.IMETH:
+ nameType = items[readUnsignedShort(index + 2)];
+ item.set(tag, readClass(index, buf), readUTF8(nameType, buf),
+ readUTF8(nameType + 2, buf));
+ break;
+ case ClassWriter.INT:
+ item.set(readInt(index));
+ break;
+ case ClassWriter.FLOAT:
+ item.set(Float.intBitsToFloat(readInt(index)));
+ break;
+ case ClassWriter.NAME_TYPE:
+ item.set(tag, readUTF8(index, buf), readUTF8(index + 2, buf),
+ null);
+ break;
+ case ClassWriter.LONG:
+ item.set(readLong(index));
+ ++i;
+ break;
+ case ClassWriter.DOUBLE:
+ item.set(Double.longBitsToDouble(readLong(index)));
+ ++i;
+ break;
+ case ClassWriter.UTF8: {
+ String s = strings[i];
+ if (s == null) {
+ index = items[i];
+ s = strings[i] = readUTF(index + 2,
+ readUnsignedShort(index), buf);
}
- break;
-
- case ClassWriter.HANDLE: {
- int fieldOrMethodRef = items[readUnsignedShort(index + 1)];
- nameType = items[readUnsignedShort(fieldOrMethodRef + 2)];
- item.set(ClassWriter.HANDLE_BASE + readByte(index),
- readClass(fieldOrMethodRef, buf),
- readUTF8(nameType, buf),
- readUTF8(nameType + 2, buf));
-
+ item.set(tag, s, null, null);
+ break;
+ }
+ case ClassWriter.HANDLE: {
+ int fieldOrMethodRef = items[readUnsignedShort(index + 1)];
+ nameType = items[readUnsignedShort(fieldOrMethodRef + 2)];
+ item.set(ClassWriter.HANDLE_BASE + readByte(index),
+ readClass(fieldOrMethodRef, buf),
+ readUTF8(nameType, buf), readUTF8(nameType + 2, buf));
+ break;
+ }
+ case ClassWriter.INDY:
+ if (classWriter.bootstrapMethods == null) {
+ copyBootstrapMethods(classWriter, items2, buf);
}
- break;
-
-
- case ClassWriter.INDY:
- if (classWriter.bootstrapMethods == null) {
- copyBootstrapMethods(classWriter, items2, buf);
- }
- nameType = items[readUnsignedShort(index + 2)];
- item.set(readUTF8(nameType, buf),
- readUTF8(nameType + 2, buf),
- readUnsignedShort(index));
- break;
-
-
- // case ClassWriter.STR:
- // case ClassWriter.CLASS:
- // case ClassWriter.MTYPE
- default:
- item.set(tag, readUTF8(index, buf), null, null);
- break;
+ nameType = items[readUnsignedShort(index + 2)];
+ item.set(readUTF8(nameType, buf), readUTF8(nameType + 2, buf),
+ readUnsignedShort(index));
+ break;
+ // case ClassWriter.STR:
+ // case ClassWriter.CLASS:
+ // case ClassWriter.MTYPE
+ default:
+ item.set(tag, readUTF8(index, buf), null, null);
+ break;
}
int index2 = item.hashCode % items2.length;
@@ -382,77 +366,59 @@ public class ClassReader {
classWriter.index = ll;
}
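
A rough usage sketch of the constant-pool copy path that copyPool above serves (class name CopyPoolSketch is invented for illustration; imports assume the repackaged scala.tools.asm namespace, with upstream ASM it would be org.objectweb.asm). Handing the ClassReader to the ClassWriter constructor is what typically triggers this copy:

    import scala.tools.asm.ClassReader;
    import scala.tools.asm.ClassWriter;

    public class CopyPoolSketch {
        // Rewrites a class while reusing its constant pool, so unchanged
        // method bodies can later be copied verbatim by the writer.
        public static byte[] rewrite(byte[] classFile) {
            ClassReader cr = new ClassReader(classFile);
            // The (ClassReader, flags) constructor copies the reader's
            // constant pool into the writer.
            ClassWriter cw = new ClassWriter(cr, 0);
            cr.accept(cw, 0); // no adapter in between: fast copy paths apply
            return cw.toByteArray();
        }
    }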
- private void copyBootstrapMethods(ClassWriter classWriter, Item[] items2, char[] buf) {
- int i, j, k, u, v;
-
- // skip class header
- v = header;
- v += 8 + (readUnsignedShort(v + 6) << 1);
-
- // skips fields and methods
- i = readUnsignedShort(v);
- v += 2;
- for (; i > 0; --i) {
- j = readUnsignedShort(v + 6);
- v += 8;
- for (; j > 0; --j) {
- v += 6 + readInt(v + 2);
+ /**
+ * Copies the bootstrap method data into the given {@link ClassWriter}.
+ * Should be called before the {@link #accept(ClassVisitor,int)} method.
+ *
+ * @param classWriter
+ * the {@link ClassWriter} to copy bootstrap methods into.
+ */
+ private void copyBootstrapMethods(final ClassWriter classWriter,
+ final Item[] items, final char[] c) {
+ // finds the "BootstrapMethods" attribute
+ int u = getAttributes();
+ boolean found = false;
+ for (int i = readUnsignedShort(u); i > 0; --i) {
+ String attrName = readUTF8(u + 2, c);
+ if ("BootstrapMethods".equals(attrName)) {
+ found = true;
+ break;
}
+ u += 6 + readInt(u + 4);
}
- i = readUnsignedShort(v);
- v += 2;
- for (; i > 0; --i) {
- j = readUnsignedShort(v + 6);
- v += 8;
- for (; j > 0; --j) {
- v += 6 + readInt(v + 2);
- }
+ if (!found) {
+ return;
}
-
- // read class attributes
- i = readUnsignedShort(v);
- v += 2;
- for (; i > 0; --i) {
- String attrName = readUTF8(v, buf);
- int size = readInt(v + 2);
- if ("BootstrapMethods".equals(attrName)) {
- int boostrapMethodCount = readUnsignedShort(v + 6);
- int x = v + 8;
- for (j = 0; j < boostrapMethodCount; j++) {
- int hashCode = readConst(readUnsignedShort(x), buf).hashCode();
- k = readUnsignedShort(x + 2);
- u = x + 4;
- for(; k > 0; --k) {
- hashCode ^= readConst(readUnsignedShort(u), buf).hashCode();
- u += 2;
- }
- Item item = new Item(j);
- item.set(x - v - 8, hashCode & 0x7FFFFFFF);
-
- int index2 = item.hashCode % items2.length;
- item.next = items2[index2];
- items2[index2] = item;
-
- x = u;
- }
-
- classWriter.bootstrapMethodsCount = boostrapMethodCount;
- ByteVector bootstrapMethods = new ByteVector(size + 62);
- bootstrapMethods.putByteArray(b, v + 8, size - 2);
- classWriter.bootstrapMethods = bootstrapMethods;
- return;
+ // copies the bootstrap methods in the class writer
+ int bootstrapMethodCount = readUnsignedShort(u + 8);
+ for (int j = 0, v = u + 10; j < bootstrapMethodCount; j++) {
+ int position = v - u - 10;
+ int hashCode = readConst(readUnsignedShort(v), c).hashCode();
+ for (int k = readUnsignedShort(v + 2); k > 0; --k) {
+ hashCode ^= readConst(readUnsignedShort(v + 4), c).hashCode();
+ v += 2;
}
- v += 6 + size;
+ v += 4;
+ Item item = new Item(j);
+ item.set(position, hashCode & 0x7FFFFFFF);
+ int index = item.hashCode % items.length;
+ item.next = items[index];
+ items[index] = item;
}
-
- // we are in trouble !!!
+ int attrSize = readInt(u + 4);
+ ByteVector bootstrapMethods = new ByteVector(attrSize + 62);
+ bootstrapMethods.putByteArray(b, u + 10, attrSize - 2);
+ classWriter.bootstrapMethodsCount = bootstrapMethodCount;
+ classWriter.bootstrapMethods = bootstrapMethods;
}
/**
* Constructs a new {@link ClassReader} object.
*
- * @param is an input stream from which to read the class.
- * @throws IOException if a problem occurs during reading.
+ * @param is
+ * an input stream from which to read the class.
+ * @throws IOException
+ * if a problem occurs during reading.
*/
public ClassReader(final InputStream is) throws IOException {
this(readClass(is, false));
@@ -461,25 +427,30 @@ public class ClassReader {
/**
* Constructs a new {@link ClassReader} object.
*
- * @param name the binary qualified name of the class to be read.
- * @throws IOException if an exception occurs during reading.
+ * @param name
+ * the binary qualified name of the class to be read.
+ * @throws IOException
+ * if an exception occurs during reading.
*/
public ClassReader(final String name) throws IOException {
- this(readClass(ClassLoader.getSystemResourceAsStream(name.replace('.', '/')
- + ".class"), true));
+ this(readClass(
+ ClassLoader.getSystemResourceAsStream(name.replace('.', '/')
+ + ".class"), true));
}
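
A minimal sketch of the two constructors touched above (class name ReaderConstruction is invented; imports assume the repackaged scala.tools.asm namespace). Both paths end up in the same byte[]-based reader; the String form resolves the class on the system class path, the stream form reads caller-supplied bytes:

    import java.io.IOException;
    import java.io.InputStream;
    import scala.tools.asm.ClassReader;

    public class ReaderConstruction {
        // Loads e.g. "java/lang/Runnable.class" via the system class loader.
        static ClassReader byName() throws IOException {
            return new ClassReader("java.lang.Runnable");
        }
        // Reads whatever bytes the caller supplies.
        static ClassReader byStream(InputStream in) throws IOException {
            return new ClassReader(in);
        }
    }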
/**
* Reads the bytecode of a class.
*
- * @param is an input stream from which to read the class.
- * @param close true to close the input stream after reading.
+ * @param is
+ * an input stream from which to read the class.
+ * @param close
+ * true to close the input stream after reading.
* @return the bytecode read from the given input stream.
- * @throws IOException if a problem occurs during reading.
+ * @throws IOException
+ * if a problem occurs during reading.
*/
private static byte[] readClass(final InputStream is, boolean close)
- throws IOException
- {
+ throws IOException {
if (is == null) {
throw new IOException("Class not found");
}
@@ -520,14 +491,16 @@ public class ClassReader {
// ------------------------------------------------------------------------
/**
- * Makes the given visitor visit the Java class of this {@link ClassReader}.
- * This class is the one specified in the constructor (see
+ * Makes the given visitor visit the Java class of this {@link ClassReader}.
+ * This class is the one specified in the constructor (see
* {@link #ClassReader(byte[]) ClassReader}).
*
- * @param classVisitor the visitor that must visit this class.
- * @param flags option flags that can be used to modify the default behavior
- * of this class. See {@link #SKIP_DEBUG}, {@link #EXPAND_FRAMES},
- * {@link #SKIP_FRAMES}, {@link #SKIP_CODE}.
+ * @param classVisitor
+ * the visitor that must visit this class.
+ * @param flags
+ * option flags that can be used to modify the default behavior
+ * of this class. See {@link #SKIP_DEBUG}, {@link #EXPAND_FRAMES},
+ * {@link #SKIP_FRAMES}, {@link #SKIP_CODE}.
*/
public void accept(final ClassVisitor classVisitor, final int flags) {
accept(classVisitor, new Attribute[0], flags);
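
A short sketch of driving this entry point (class name AcceptSketch is invented; imports assume the repackaged scala.tools.asm namespace). The flags combine the constants referenced in the Javadoc above:

    import scala.tools.asm.ClassReader;
    import scala.tools.asm.ClassVisitor;
    import scala.tools.asm.Opcodes;

    public class AcceptSketch {
        // Drives a no-op visitor over a class, skipping debug info and frames.
        static void scan(byte[] classFile) {
            ClassReader cr = new ClassReader(classFile);
            cr.accept(new ClassVisitor(Opcodes.ASM4) { /* override as needed */ },
                    ClassReader.SKIP_DEBUG | ClassReader.SKIP_FRAMES);
        }
    }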
@@ -538,1117 +511,923 @@ public class ClassReader {
* This class is the one specified in the constructor (see
* {@link #ClassReader(byte[]) ClassReader}).
*
- * @param classVisitor the visitor that must visit this class.
- * @param attrs prototypes of the attributes that must be parsed during the
- * visit of the class. Any attribute whose type is not equal to the
- * type of one the prototypes will not be parsed: its byte array
- * value will be passed unchanged to the ClassWriter. <i>This may
- * corrupt it if this value contains references to the constant pool,
- * or has syntactic or semantic links with a class element that has
- * been transformed by a class adapter between the reader and the
- * writer</i>.
- * @param flags option flags that can be used to modify the default behavior
- * of this class. See {@link #SKIP_DEBUG}, {@link #EXPAND_FRAMES},
- * {@link #SKIP_FRAMES}, {@link #SKIP_CODE}.
+ * @param classVisitor
+ * the visitor that must visit this class.
+ * @param attrs
+ * prototypes of the attributes that must be parsed during the
+ * visit of the class. Any attribute whose type is not equal to
+ * the type of one of the prototypes will not be parsed: its byte
+ * array value will be passed unchanged to the ClassWriter.
+ * <i>This may corrupt it if this value contains references to
+ * the constant pool, or has syntactic or semantic links with a
+ * class element that has been transformed by a class adapter
+ * between the reader and the writer</i>.
+ * @param flags
+ * option flags that can be used to modify the default behavior
+ * of this class. See {@link #SKIP_DEBUG}, {@link #EXPAND_FRAMES},
+ * {@link #SKIP_FRAMES}, {@link #SKIP_CODE}.
*/
- public void accept(
- final ClassVisitor classVisitor,
- final Attribute[] attrs,
- final int flags)
- {
- byte[] b = this.b; // the bytecode array
+ public void accept(final ClassVisitor classVisitor,
+ final Attribute[] attrs, final int flags) {
+ int u = header; // current offset in the class file
char[] c = new char[maxStringLength]; // buffer used to read strings
- int i, j, k; // loop variables
- int u, v, w; // indexes in b
- Attribute attr;
-
- int access;
- String name;
- String desc;
- String attrName;
- String signature;
- int anns = 0;
- int ianns = 0;
- Attribute cattrs = null;
-
- // visits the header
- u = header;
- access = readUnsignedShort(u);
- name = readClass(u + 2, c);
- v = items[readUnsignedShort(u + 4)];
- String superClassName = v == 0 ? null : readUTF8(v, c);
- String[] implementedItfs = new String[readUnsignedShort(u + 6)];
- w = 0;
+
+ Context context = new Context();
+ context.attrs = attrs;
+ context.flags = flags;
+ context.buffer = c;
+
+ // reads the class declaration
+ int access = readUnsignedShort(u);
+ String name = readClass(u + 2, c);
+ String superClass = readClass(u + 4, c);
+ String[] interfaces = new String[readUnsignedShort(u + 6)];
u += 8;
- for (i = 0; i < implementedItfs.length; ++i) {
- implementedItfs[i] = readClass(u, c);
+ for (int i = 0; i < interfaces.length; ++i) {
+ interfaces[i] = readClass(u, c);
u += 2;
}
- boolean skipCode = (flags & SKIP_CODE) != 0;
- boolean skipDebug = (flags & SKIP_DEBUG) != 0;
- boolean unzip = (flags & EXPAND_FRAMES) != 0;
-
- // skips fields and methods
- v = u;
- i = readUnsignedShort(v);
- v += 2;
- for (; i > 0; --i) {
- j = readUnsignedShort(v + 6);
- v += 8;
- for (; j > 0; --j) {
- v += 6 + readInt(v + 2);
- }
- }
- i = readUnsignedShort(v);
- v += 2;
- for (; i > 0; --i) {
- j = readUnsignedShort(v + 6);
- v += 8;
- for (; j > 0; --j) {
- v += 6 + readInt(v + 2);
- }
- }
- // reads the class's attributes
- signature = null;
+ // reads the class attributes
+ String signature = null;
String sourceFile = null;
String sourceDebug = null;
String enclosingOwner = null;
String enclosingName = null;
String enclosingDesc = null;
- int[] bootstrapMethods = null; // start indexed of the bsms
+ int anns = 0;
+ int ianns = 0;
+ int innerClasses = 0;
+ Attribute attributes = null;
- i = readUnsignedShort(v);
- v += 2;
- for (; i > 0; --i) {
- attrName = readUTF8(v, c);
+ u = getAttributes();
+ for (int i = readUnsignedShort(u); i > 0; --i) {
+ String attrName = readUTF8(u + 2, c);
// tests are sorted in decreasing frequency order
// (based on frequencies observed on typical classes)
if ("SourceFile".equals(attrName)) {
- sourceFile = readUTF8(v + 6, c);
+ sourceFile = readUTF8(u + 8, c);
} else if ("InnerClasses".equals(attrName)) {
- w = v + 6;
+ innerClasses = u + 8;
} else if ("EnclosingMethod".equals(attrName)) {
- enclosingOwner = readClass(v + 6, c);
- int item = readUnsignedShort(v + 8);
+ enclosingOwner = readClass(u + 8, c);
+ int item = readUnsignedShort(u + 10);
if (item != 0) {
enclosingName = readUTF8(items[item], c);
enclosingDesc = readUTF8(items[item] + 2, c);
}
} else if (SIGNATURES && "Signature".equals(attrName)) {
- signature = readUTF8(v + 6, c);
- } else if (ANNOTATIONS && "RuntimeVisibleAnnotations".equals(attrName)) {
- anns = v + 6;
+ signature = readUTF8(u + 8, c);
+ } else if (ANNOTATIONS
+ && "RuntimeVisibleAnnotations".equals(attrName)) {
+ anns = u + 8;
} else if ("Deprecated".equals(attrName)) {
access |= Opcodes.ACC_DEPRECATED;
} else if ("Synthetic".equals(attrName)) {
- access |= Opcodes.ACC_SYNTHETIC | ClassWriter.ACC_SYNTHETIC_ATTRIBUTE;
+ access |= Opcodes.ACC_SYNTHETIC
+ | ClassWriter.ACC_SYNTHETIC_ATTRIBUTE;
} else if ("SourceDebugExtension".equals(attrName)) {
- int len = readInt(v + 2);
- sourceDebug = readUTF(v + 6, len, new char[len]);
- } else if (ANNOTATIONS && "RuntimeInvisibleAnnotations".equals(attrName)) {
- ianns = v + 6;
+ int len = readInt(u + 4);
+ sourceDebug = readUTF(u + 8, len, new char[len]);
+ } else if (ANNOTATIONS
+ && "RuntimeInvisibleAnnotations".equals(attrName)) {
+ ianns = u + 8;
} else if ("BootstrapMethods".equals(attrName)) {
- int boostrapMethodCount = readUnsignedShort(v + 6);
- bootstrapMethods = new int[boostrapMethodCount];
- int x = v + 8;
- for (j = 0; j < boostrapMethodCount; j++) {
- bootstrapMethods[j] = x;
- x += 2 + readUnsignedShort(x + 2) << 1;
+ int[] bootstrapMethods = new int[readUnsignedShort(u + 8)];
+ for (int j = 0, v = u + 10; j < bootstrapMethods.length; j++) {
+ bootstrapMethods[j] = v;
+ v += 2 + readUnsignedShort(v + 2) << 1;
}
+ context.bootstrapMethods = bootstrapMethods;
} else {
- attr = readAttribute(attrs,
- attrName,
- v + 6,
- readInt(v + 2),
- c,
- -1,
- null);
+ Attribute attr = readAttribute(attrs, attrName, u + 8,
+ readInt(u + 4), c, -1, null);
if (attr != null) {
- attr.next = cattrs;
- cattrs = attr;
+ attr.next = attributes;
+ attributes = attr;
}
}
- v += 6 + readInt(v + 2);
+ u += 6 + readInt(u + 4);
}
- // calls the visit method
- classVisitor.visit(readInt(4),
- access,
- name,
- signature,
- superClassName,
- implementedItfs);
-
- // calls the visitSource method
- if (!skipDebug && (sourceFile != null || sourceDebug != null)) {
+
+ // visits the class declaration
+ classVisitor.visit(readInt(items[1] - 7), access, name, signature,
+ superClass, interfaces);
+
+ // visits the source and debug info
+ if ((flags & SKIP_DEBUG) == 0
+ && (sourceFile != null || sourceDebug != null)) {
classVisitor.visitSource(sourceFile, sourceDebug);
}
- // calls the visitOuterClass method
+ // visits the outer class
if (enclosingOwner != null) {
- classVisitor.visitOuterClass(enclosingOwner,
- enclosingName,
+ classVisitor.visitOuterClass(enclosingOwner, enclosingName,
enclosingDesc);
}
// visits the class annotations
- if (ANNOTATIONS) {
- for (i = 1; i >= 0; --i) {
- v = i == 0 ? ianns : anns;
- if (v != 0) {
- j = readUnsignedShort(v);
- v += 2;
- for (; j > 0; --j) {
- v = readAnnotationValues(v + 2,
- c,
- true,
- classVisitor.visitAnnotation(readUTF8(v, c), i != 0));
- }
- }
+ if (ANNOTATIONS && anns != 0) {
+ for (int i = readUnsignedShort(anns), v = anns + 2; i > 0; --i) {
+ v = readAnnotationValues(v + 2, c, true,
+ classVisitor.visitAnnotation(readUTF8(v, c), true));
+ }
+ }
+ if (ANNOTATIONS && ianns != 0) {
+ for (int i = readUnsignedShort(ianns), v = ianns + 2; i > 0; --i) {
+ v = readAnnotationValues(v + 2, c, true,
+ classVisitor.visitAnnotation(readUTF8(v, c), false));
}
}
- // visits the class attributes
- while (cattrs != null) {
- attr = cattrs.next;
- cattrs.next = null;
- classVisitor.visitAttribute(cattrs);
- cattrs = attr;
+ // visits the attributes
+ while (attributes != null) {
+ Attribute attr = attributes.next;
+ attributes.next = null;
+ classVisitor.visitAttribute(attributes);
+ attributes = attr;
}
- // calls the visitInnerClass method
- if (w != 0) {
- i = readUnsignedShort(w);
- w += 2;
- for (; i > 0; --i) {
- classVisitor.visitInnerClass(readUnsignedShort(w) == 0
- ? null
- : readClass(w, c), readUnsignedShort(w + 2) == 0
- ? null
- : readClass(w + 2, c), readUnsignedShort(w + 4) == 0
- ? null
- : readUTF8(w + 4, c), readUnsignedShort(w + 6));
- w += 8;
+ // visits the inner classes
+ if (innerClasses != 0) {
+ int v = innerClasses + 2;
+ for (int i = readUnsignedShort(innerClasses); i > 0; --i) {
+ classVisitor.visitInnerClass(readClass(v, c),
+ readClass(v + 2, c), readUTF8(v + 4, c),
+ readUnsignedShort(v + 6));
+ v += 8;
}
}
- // visits the fields
- i = readUnsignedShort(u);
+ // visits the fields and methods
+ u = header + 10 + 2 * interfaces.length;
+ for (int i = readUnsignedShort(u - 2); i > 0; --i) {
+ u = readField(classVisitor, context, u);
+ }
u += 2;
- for (; i > 0; --i) {
- access = readUnsignedShort(u);
- name = readUTF8(u + 2, c);
- desc = readUTF8(u + 4, c);
- // visits the field's attributes and looks for a ConstantValue
- // attribute
- int fieldValueItem = 0;
- signature = null;
- anns = 0;
- ianns = 0;
- cattrs = null;
-
- j = readUnsignedShort(u + 6);
- u += 8;
- for (; j > 0; --j) {
- attrName = readUTF8(u, c);
- // tests are sorted in decreasing frequency order
- // (based on frequencies observed on typical classes)
- if ("ConstantValue".equals(attrName)) {
- fieldValueItem = readUnsignedShort(u + 6);
- } else if (SIGNATURES && "Signature".equals(attrName)) {
- signature = readUTF8(u + 6, c);
- } else if ("Deprecated".equals(attrName)) {
- access |= Opcodes.ACC_DEPRECATED;
- } else if ("Synthetic".equals(attrName)) {
- access |= Opcodes.ACC_SYNTHETIC | ClassWriter.ACC_SYNTHETIC_ATTRIBUTE;
- } else if (ANNOTATIONS && "RuntimeVisibleAnnotations".equals(attrName)) {
- anns = u + 6;
- } else if (ANNOTATIONS && "RuntimeInvisibleAnnotations".equals(attrName)) {
- ianns = u + 6;
- } else {
- attr = readAttribute(attrs,
- attrName,
- u + 6,
- readInt(u + 2),
- c,
- -1,
- null);
- if (attr != null) {
- attr.next = cattrs;
- cattrs = attr;
- }
+ for (int i = readUnsignedShort(u - 2); i > 0; --i) {
+ u = readMethod(classVisitor, context, u);
+ }
+
+ // visits the end of the class
+ classVisitor.visitEnd();
+ }
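
The restructured accept above still emits the usual ASM event order (visit, visitSource, visitOuterClass, annotations, attributes, inner classes, then fields and methods, then visitEnd); it only delegates the per-member parsing to readField/readMethod. A sketch of a visitor on the receiving end (class name TracingVisitor is invented; imports assume the repackaged scala.tools.asm namespace):

    import scala.tools.asm.ClassVisitor;
    import scala.tools.asm.FieldVisitor;
    import scala.tools.asm.MethodVisitor;
    import scala.tools.asm.Opcodes;

    public class TracingVisitor extends ClassVisitor {
        public TracingVisitor() { super(Opcodes.ASM4); }
        @Override
        public void visit(int version, int access, String name, String signature,
                String superName, String[] interfaces) {
            System.out.println("class " + name + " extends " + superName);
        }
        @Override
        public FieldVisitor visitField(int access, String name, String desc,
                String signature, Object value) {
            System.out.println("  field " + name + " " + desc);
            return null; // null makes readField skip annotations and attributes
        }
        @Override
        public MethodVisitor visitMethod(int access, String name, String desc,
                String signature, String[] exceptions) {
            System.out.println("  method " + name + desc);
            return null; // null makes readMethod skip the method body
        }
    }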
+
+ /**
+ * Reads a field and makes the given visitor visit it.
+ *
+ * @param classVisitor
+ * the visitor that must visit the field.
+ * @param context
+ * information about the class being parsed.
+ * @param u
+ * the start offset of the field in the class file.
+ * @return the offset of the first byte following the field in the class.
+ */
+ private int readField(final ClassVisitor classVisitor,
+ final Context context, int u) {
+ // reads the field declaration
+ char[] c = context.buffer;
+ int access = readUnsignedShort(u);
+ String name = readUTF8(u + 2, c);
+ String desc = readUTF8(u + 4, c);
+ u += 6;
+
+ // reads the field attributes
+ String signature = null;
+ int anns = 0;
+ int ianns = 0;
+ Object value = null;
+ Attribute attributes = null;
+
+ for (int i = readUnsignedShort(u); i > 0; --i) {
+ String attrName = readUTF8(u + 2, c);
+ // tests are sorted in decreasing frequency order
+ // (based on frequencies observed on typical classes)
+ if ("ConstantValue".equals(attrName)) {
+ int item = readUnsignedShort(u + 8);
+ value = item == 0 ? null : readConst(item, c);
+ } else if (SIGNATURES && "Signature".equals(attrName)) {
+ signature = readUTF8(u + 8, c);
+ } else if ("Deprecated".equals(attrName)) {
+ access |= Opcodes.ACC_DEPRECATED;
+ } else if ("Synthetic".equals(attrName)) {
+ access |= Opcodes.ACC_SYNTHETIC
+ | ClassWriter.ACC_SYNTHETIC_ATTRIBUTE;
+ } else if (ANNOTATIONS
+ && "RuntimeVisibleAnnotations".equals(attrName)) {
+ anns = u + 8;
+ } else if (ANNOTATIONS
+ && "RuntimeInvisibleAnnotations".equals(attrName)) {
+ ianns = u + 8;
+ } else {
+ Attribute attr = readAttribute(context.attrs, attrName, u + 8,
+ readInt(u + 4), c, -1, null);
+ if (attr != null) {
+ attr.next = attributes;
+ attributes = attr;
}
- u += 6 + readInt(u + 2);
}
- // visits the field
- FieldVisitor fv = classVisitor.visitField(access,
- name,
- desc,
- signature,
- fieldValueItem == 0 ? null : readConst(fieldValueItem, c));
- // visits the field annotations and attributes
- if (fv != null) {
- if (ANNOTATIONS) {
- for (j = 1; j >= 0; --j) {
- v = j == 0 ? ianns : anns;
- if (v != 0) {
- k = readUnsignedShort(v);
- v += 2;
- for (; k > 0; --k) {
- v = readAnnotationValues(v + 2,
- c,
- true,
- fv.visitAnnotation(readUTF8(v, c), j != 0));
- }
- }
- }
- }
- while (cattrs != null) {
- attr = cattrs.next;
- cattrs.next = null;
- fv.visitAttribute(cattrs);
- cattrs = attr;
- }
- fv.visitEnd();
+ u += 6 + readInt(u + 4);
+ }
+ u += 2;
+
+ // visits the field declaration
+ FieldVisitor fv = classVisitor.visitField(access, name, desc,
+ signature, value);
+ if (fv == null) {
+ return u;
+ }
+
+ // visits the field annotations
+ if (ANNOTATIONS && anns != 0) {
+ for (int i = readUnsignedShort(anns), v = anns + 2; i > 0; --i) {
+ v = readAnnotationValues(v + 2, c, true,
+ fv.visitAnnotation(readUTF8(v, c), true));
+ }
+ }
+ if (ANNOTATIONS && ianns != 0) {
+ for (int i = readUnsignedShort(ianns), v = ianns + 2; i > 0; --i) {
+ v = readAnnotationValues(v + 2, c, true,
+ fv.visitAnnotation(readUTF8(v, c), false));
}
}
- // visits the methods
- i = readUnsignedShort(u);
- u += 2;
- for (; i > 0; --i) {
- int u0 = u + 6;
- access = readUnsignedShort(u);
- name = readUTF8(u + 2, c);
- desc = readUTF8(u + 4, c);
- signature = null;
- anns = 0;
- ianns = 0;
- int dann = 0;
- int mpanns = 0;
- int impanns = 0;
- cattrs = null;
- v = 0;
- w = 0;
-
- // looks for Code and Exceptions attributes
- j = readUnsignedShort(u + 6);
- u += 8;
- for (; j > 0; --j) {
- attrName = readUTF8(u, c);
- int attrSize = readInt(u + 2);
- u += 6;
- // tests are sorted in decreasing frequency order
- // (based on frequencies observed on typical classes)
- if ("Code".equals(attrName)) {
- if (!skipCode) {
- v = u;
- }
- } else if ("Exceptions".equals(attrName)) {
- w = u;
- } else if (SIGNATURES && "Signature".equals(attrName)) {
- signature = readUTF8(u, c);
- } else if ("Deprecated".equals(attrName)) {
- access |= Opcodes.ACC_DEPRECATED;
- } else if (ANNOTATIONS && "RuntimeVisibleAnnotations".equals(attrName)) {
- anns = u;
- } else if (ANNOTATIONS && "AnnotationDefault".equals(attrName)) {
- dann = u;
- } else if ("Synthetic".equals(attrName)) {
- access |= Opcodes.ACC_SYNTHETIC | ClassWriter.ACC_SYNTHETIC_ATTRIBUTE;
- } else if (ANNOTATIONS && "RuntimeInvisibleAnnotations".equals(attrName)) {
- ianns = u;
- } else if (ANNOTATIONS && "RuntimeVisibleParameterAnnotations".equals(attrName))
- {
- mpanns = u;
- } else if (ANNOTATIONS && "RuntimeInvisibleParameterAnnotations".equals(attrName))
- {
- impanns = u;
- } else {
- attr = readAttribute(attrs,
- attrName,
- u,
- attrSize,
- c,
- -1,
- null);
- if (attr != null) {
- attr.next = cattrs;
- cattrs = attr;
- }
+ // visits the field attributes
+ while (attributes != null) {
+ Attribute attr = attributes.next;
+ attributes.next = null;
+ fv.visitAttribute(attributes);
+ attributes = attr;
+ }
+
+ // visits the end of the field
+ fv.visitEnd();
+
+ return u;
+ }
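
readField only decodes the ConstantValue attribute into a boxed constant via readConst. A minimal sketch of what the visitor then receives, using a hypothetical "static final int ANSWER = 42;" field purely for illustration (class name ConstantValuePrinter is also invented; imports assume the repackaged scala.tools.asm namespace):

    import scala.tools.asm.ClassVisitor;
    import scala.tools.asm.FieldVisitor;
    import scala.tools.asm.Opcodes;

    public class ConstantValuePrinter extends ClassVisitor {
        public ConstantValuePrinter() { super(Opcodes.ASM4); }
        @Override
        public FieldVisitor visitField(int access, String name, String desc,
                String signature, Object value) {
            // For the hypothetical field above this prints "ANSWER I = 42":
            // the boxed Integer comes from readConst; value is null when the
            // field has no ConstantValue attribute.
            System.out.println(name + " " + desc + " = " + value);
            return null;
        }
    }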
+
+ /**
+ * Reads a method and makes the given visitor visit it.
+ *
+ * @param classVisitor
+ * the visitor that must visit the method.
+ * @param context
+ * information about the class being parsed.
+ * @param u
+ * the start offset of the method in the class file.
+ * @return the offset of the first byte following the method in the class.
+ */
+ private int readMethod(final ClassVisitor classVisitor,
+ final Context context, int u) {
+ // reads the method declaration
+ char[] c = context.buffer;
+ int access = readUnsignedShort(u);
+ String name = readUTF8(u + 2, c);
+ String desc = readUTF8(u + 4, c);
+ u += 6;
+
+ // reads the method attributes
+ int code = 0;
+ int exception = 0;
+ String[] exceptions = null;
+ String signature = null;
+ int anns = 0;
+ int ianns = 0;
+ int dann = 0;
+ int mpanns = 0;
+ int impanns = 0;
+ int firstAttribute = u;
+ Attribute attributes = null;
+
+ for (int i = readUnsignedShort(u); i > 0; --i) {
+ String attrName = readUTF8(u + 2, c);
+ // tests are sorted in decreasing frequency order
+ // (based on frequencies observed on typical classes)
+ if ("Code".equals(attrName)) {
+ if ((context.flags & SKIP_CODE) == 0) {
+ code = u + 8;
}
- u += attrSize;
- }
- // reads declared exceptions
- String[] exceptions;
- if (w == 0) {
- exceptions = null;
+ } else if ("Exceptions".equals(attrName)) {
+ exceptions = new String[readUnsignedShort(u + 8)];
+ exception = u + 10;
+ for (int j = 0; j < exceptions.length; ++j) {
+ exceptions[j] = readClass(exception, c);
+ exception += 2;
+ }
+ } else if (SIGNATURES && "Signature".equals(attrName)) {
+ signature = readUTF8(u + 8, c);
+ } else if ("Deprecated".equals(attrName)) {
+ access |= Opcodes.ACC_DEPRECATED;
+ } else if (ANNOTATIONS
+ && "RuntimeVisibleAnnotations".equals(attrName)) {
+ anns = u + 8;
+ } else if (ANNOTATIONS && "AnnotationDefault".equals(attrName)) {
+ dann = u + 8;
+ } else if ("Synthetic".equals(attrName)) {
+ access |= Opcodes.ACC_SYNTHETIC
+ | ClassWriter.ACC_SYNTHETIC_ATTRIBUTE;
+ } else if (ANNOTATIONS
+ && "RuntimeInvisibleAnnotations".equals(attrName)) {
+ ianns = u + 8;
+ } else if (ANNOTATIONS
+ && "RuntimeVisibleParameterAnnotations".equals(attrName)) {
+ mpanns = u + 8;
+ } else if (ANNOTATIONS
+ && "RuntimeInvisibleParameterAnnotations".equals(attrName)) {
+ impanns = u + 8;
} else {
- exceptions = new String[readUnsignedShort(w)];
- w += 2;
- for (j = 0; j < exceptions.length; ++j) {
- exceptions[j] = readClass(w, c);
- w += 2;
+ Attribute attr = readAttribute(context.attrs, attrName, u + 8,
+ readInt(u + 4), c, -1, null);
+ if (attr != null) {
+ attr.next = attributes;
+ attributes = attr;
}
}
+ u += 6 + readInt(u + 4);
+ }
+ u += 2;
- // visits the method's code, if any
- MethodVisitor mv = classVisitor.visitMethod(access,
- name,
- desc,
- signature,
- exceptions);
+ // visits the method declaration
+ MethodVisitor mv = classVisitor.visitMethod(access, name, desc,
+ signature, exceptions);
+ if (mv == null) {
+ return u;
+ }
- if (mv != null) {
- /*
- * if the returned MethodVisitor is in fact a MethodWriter, it
- * means there is no method adapter between the reader and the
- * writer. If, in addition, the writer's constant pool was
- * copied from this reader (mw.cw.cr == this), and the signature
- * and exceptions of the method have not been changed, then it
- * is possible to skip all visit events and just copy the
- * original code of the method to the writer (the access, name
- * and descriptor can have been changed, this is not important
- * since they are not copied as is from the reader).
- */
- if (WRITER && mv instanceof MethodWriter) {
- MethodWriter mw = (MethodWriter) mv;
- if (mw.cw.cr == this) {
- if (signature == mw.signature) {
- boolean sameExceptions = false;
- if (exceptions == null) {
- sameExceptions = mw.exceptionCount == 0;
- } else {
- if (exceptions.length == mw.exceptionCount) {
- sameExceptions = true;
- for (j = exceptions.length - 1; j >= 0; --j)
- {
- w -= 2;
- if (mw.exceptions[j] != readUnsignedShort(w))
- {
- sameExceptions = false;
- break;
- }
- }
- }
- }
- if (sameExceptions) {
- /*
- * we do not copy directly the code into
- * MethodWriter to save a byte array copy
- * operation. The real copy will be done in
- * ClassWriter.toByteArray().
- */
- mw.classReaderOffset = u0;
- mw.classReaderLength = u - u0;
- continue;
- }
+ /*
+ * if the returned MethodVisitor is in fact a MethodWriter, it means
+ * there is no method adapter between the reader and the writer. If, in
+ * addition, the writer's constant pool was copied from this reader
+ * (mw.cw.cr == this), and the signature and exceptions of the method
+ * have not been changed, then it is possible to skip all visit events
+ * and just copy the original code of the method to the writer (the
+ * access, name and descriptor can have been changed, this is not
+ * important since they are not copied as is from the reader).
+ */
+ if (WRITER && mv instanceof MethodWriter) {
+ MethodWriter mw = (MethodWriter) mv;
+ if (mw.cw.cr == this && signature == mw.signature) {
+ boolean sameExceptions = false;
+ if (exceptions == null) {
+ sameExceptions = mw.exceptionCount == 0;
+ } else if (exceptions.length == mw.exceptionCount) {
+ sameExceptions = true;
+ for (int j = exceptions.length - 1; j >= 0; --j) {
+ exception -= 2;
+ if (mw.exceptions[j] != readUnsignedShort(exception)) {
+ sameExceptions = false;
+ break;
}
}
}
-
- if (ANNOTATIONS && dann != 0) {
- AnnotationVisitor dv = mv.visitAnnotationDefault();
- readAnnotationValue(dann, c, null, dv);
- if (dv != null) {
- dv.visitEnd();
- }
- }
- if (ANNOTATIONS) {
- for (j = 1; j >= 0; --j) {
- w = j == 0 ? ianns : anns;
- if (w != 0) {
- k = readUnsignedShort(w);
- w += 2;
- for (; k > 0; --k) {
- w = readAnnotationValues(w + 2,
- c,
- true,
- mv.visitAnnotation(readUTF8(w, c), j != 0));
- }
- }
- }
+ if (sameExceptions) {
+ /*
+ * we do not copy the code directly into MethodWriter, to
+ * save a byte array copy operation. The real copy will be
+ * done in ClassWriter.toByteArray().
+ */
+ mw.classReaderOffset = firstAttribute;
+ mw.classReaderLength = u - firstAttribute;
+ return u;
}
- if (ANNOTATIONS && mpanns != 0) {
- readParameterAnnotations(mpanns, desc, c, true, mv);
+ }
+ }
+
+ // visits the method annotations
+ if (ANNOTATIONS && dann != 0) {
+ AnnotationVisitor dv = mv.visitAnnotationDefault();
+ readAnnotationValue(dann, c, null, dv);
+ if (dv != null) {
+ dv.visitEnd();
+ }
+ }
+ if (ANNOTATIONS && anns != 0) {
+ for (int i = readUnsignedShort(anns), v = anns + 2; i > 0; --i) {
+ v = readAnnotationValues(v + 2, c, true,
+ mv.visitAnnotation(readUTF8(v, c), true));
+ }
+ }
+ if (ANNOTATIONS && ianns != 0) {
+ for (int i = readUnsignedShort(ianns), v = ianns + 2; i > 0; --i) {
+ v = readAnnotationValues(v + 2, c, true,
+ mv.visitAnnotation(readUTF8(v, c), false));
+ }
+ }
+ if (ANNOTATIONS && mpanns != 0) {
+ readParameterAnnotations(mpanns, desc, c, true, mv);
+ }
+ if (ANNOTATIONS && impanns != 0) {
+ readParameterAnnotations(impanns, desc, c, false, mv);
+ }
+
+ // visits the method attributes
+ while (attributes != null) {
+ Attribute attr = attributes.next;
+ attributes.next = null;
+ mv.visitAttribute(attributes);
+ attributes = attr;
+ }
+
+ // visits the method code
+ if (code != 0) {
+ context.access = access;
+ context.name = name;
+ context.desc = desc;
+ mv.visitCode();
+ readCode(mv, context, code);
+ }
+
+ // visits the end of the method
+ mv.visitEnd();
+
+ return u;
+ }
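
A sketch of when the verbatim-copy shortcut above applies versus when readCode re-parses everything (class name FastCopyVsAdapter is invented; imports assume the repackaged scala.tools.asm namespace). The shortcut needs the visitor returned by visitMethod to be the writer's own MethodWriter, with the constant pool copied from this reader and unchanged signature/exceptions; wrapping the MethodVisitor defeats the instanceof check:

    import scala.tools.asm.ClassReader;
    import scala.tools.asm.ClassVisitor;
    import scala.tools.asm.ClassWriter;
    import scala.tools.asm.MethodVisitor;
    import scala.tools.asm.Opcodes;

    public class FastCopyVsAdapter {
        // Fast path: pool copied from this reader, no wrapping adapter, so
        // method bodies can be copied as raw bytes in ClassWriter.toByteArray().
        static byte[] straightCopy(byte[] in) {
            ClassReader cr = new ClassReader(in);
            ClassWriter cw = new ClassWriter(cr, 0);
            cr.accept(cw, 0);
            return cw.toByteArray();
        }
        // Wrapping the MethodWriter means the returned visitor is no longer a
        // MethodWriter, so readCode re-parses and re-emits every instruction.
        static byte[] throughAdapter(byte[] in) {
            ClassReader cr = new ClassReader(in);
            ClassWriter cw = new ClassWriter(cr, 0);
            ClassVisitor adapter = new ClassVisitor(Opcodes.ASM4, cw) {
                @Override
                public MethodVisitor visitMethod(int access, String name, String desc,
                        String signature, String[] exceptions) {
                    return new MethodVisitor(Opcodes.ASM4,
                            super.visitMethod(access, name, desc, signature, exceptions)) {
                    };
                }
            };
            cr.accept(adapter, 0);
            return cw.toByteArray();
        }
    }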
+
+ /**
+ * Reads the bytecode of a method and makes the given visitor visit it.
+ *
+ * @param mv
+ * the visitor that must visit the method's code.
+ * @param context
+ * information about the class being parsed.
+ * @param u
+ * the start offset of the code attribute in the class file.
+ */
+ private void readCode(final MethodVisitor mv, final Context context, int u) {
+ // reads the header
+ byte[] b = this.b;
+ char[] c = context.buffer;
+ int maxStack = readUnsignedShort(u);
+ int maxLocals = readUnsignedShort(u + 2);
+ int codeLength = readInt(u + 4);
+ u += 8;
+
+ // reads the bytecode to find the labels
+ int codeStart = u;
+ int codeEnd = u + codeLength;
+ Label[] labels = new Label[codeLength + 2];
+ readLabel(codeLength + 1, labels);
+ while (u < codeEnd) {
+ int offset = u - codeStart;
+ int opcode = b[u] & 0xFF;
+ switch (ClassWriter.TYPE[opcode]) {
+ case ClassWriter.NOARG_INSN:
+ case ClassWriter.IMPLVAR_INSN:
+ u += 1;
+ break;
+ case ClassWriter.LABEL_INSN:
+ readLabel(offset + readShort(u + 1), labels);
+ u += 3;
+ break;
+ case ClassWriter.LABELW_INSN:
+ readLabel(offset + readInt(u + 1), labels);
+ u += 5;
+ break;
+ case ClassWriter.WIDE_INSN:
+ opcode = b[u + 1] & 0xFF;
+ if (opcode == Opcodes.IINC) {
+ u += 6;
+ } else {
+ u += 4;
}
- if (ANNOTATIONS && impanns != 0) {
- readParameterAnnotations(impanns, desc, c, false, mv);
+ break;
+ case ClassWriter.TABL_INSN:
+ // skips 0 to 3 padding bytes
+ u = u + 4 - (offset & 3);
+ // reads instruction
+ readLabel(offset + readInt(u), labels);
+ for (int i = readInt(u + 8) - readInt(u + 4) + 1; i > 0; --i) {
+ readLabel(offset + readInt(u + 12), labels);
+ u += 4;
}
- while (cattrs != null) {
- attr = cattrs.next;
- cattrs.next = null;
- mv.visitAttribute(cattrs);
- cattrs = attr;
+ u += 12;
+ break;
+ case ClassWriter.LOOK_INSN:
+ // skips 0 to 3 padding bytes
+ u = u + 4 - (offset & 3);
+ // reads instruction
+ readLabel(offset + readInt(u), labels);
+ for (int i = readInt(u + 4); i > 0; --i) {
+ readLabel(offset + readInt(u + 12), labels);
+ u += 8;
}
+ u += 8;
+ break;
+ case ClassWriter.VAR_INSN:
+ case ClassWriter.SBYTE_INSN:
+ case ClassWriter.LDC_INSN:
+ u += 2;
+ break;
+ case ClassWriter.SHORT_INSN:
+ case ClassWriter.LDCW_INSN:
+ case ClassWriter.FIELDORMETH_INSN:
+ case ClassWriter.TYPE_INSN:
+ case ClassWriter.IINC_INSN:
+ u += 3;
+ break;
+ case ClassWriter.ITFMETH_INSN:
+ case ClassWriter.INDYMETH_INSN:
+ u += 5;
+ break;
+ // case MANA_INSN:
+ default:
+ u += 4;
+ break;
}
+ }
- if (mv != null && v != 0) {
- int maxStack = readUnsignedShort(v);
- int maxLocals = readUnsignedShort(v + 2);
- int codeLength = readInt(v + 4);
- v += 8;
+ // reads the try catch entries to find the labels, and also visits them
+ for (int i = readUnsignedShort(u); i > 0; --i) {
+ Label start = readLabel(readUnsignedShort(u + 2), labels);
+ Label end = readLabel(readUnsignedShort(u + 4), labels);
+ Label handler = readLabel(readUnsignedShort(u + 6), labels);
+ String type = readUTF8(items[readUnsignedShort(u + 8)], c);
+ mv.visitTryCatchBlock(start, end, handler, type);
+ u += 8;
+ }
+ u += 2;
- int codeStart = v;
- int codeEnd = v + codeLength;
-
- mv.visitCode();
-
- // 1st phase: finds the labels
- int label;
- Label[] labels = new Label[codeLength + 2];
- readLabel(codeLength + 1, labels);
- while (v < codeEnd) {
- w = v - codeStart;
- int opcode = b[v] & 0xFF;
- switch (ClassWriter.TYPE[opcode]) {
- case ClassWriter.NOARG_INSN:
- case ClassWriter.IMPLVAR_INSN:
- v += 1;
- break;
- case ClassWriter.LABEL_INSN:
- readLabel(w + readShort(v + 1), labels);
- v += 3;
- break;
- case ClassWriter.LABELW_INSN:
- readLabel(w + readInt(v + 1), labels);
- v += 5;
- break;
- case ClassWriter.WIDE_INSN:
- opcode = b[v + 1] & 0xFF;
- if (opcode == Opcodes.IINC) {
- v += 6;
- } else {
- v += 4;
- }
- break;
- case ClassWriter.TABL_INSN:
- // skips 0 to 3 padding bytes*
- v = v + 4 - (w & 3);
- // reads instruction
- readLabel(w + readInt(v), labels);
- j = readInt(v + 8) - readInt(v + 4) + 1;
- v += 12;
- for (; j > 0; --j) {
- readLabel(w + readInt(v), labels);
- v += 4;
- }
- break;
- case ClassWriter.LOOK_INSN:
- // skips 0 to 3 padding bytes*
- v = v + 4 - (w & 3);
- // reads instruction
- readLabel(w + readInt(v), labels);
- j = readInt(v + 4);
- v += 8;
- for (; j > 0; --j) {
- readLabel(w + readInt(v + 4), labels);
- v += 8;
- }
- break;
- case ClassWriter.VAR_INSN:
- case ClassWriter.SBYTE_INSN:
- case ClassWriter.LDC_INSN:
- v += 2;
- break;
- case ClassWriter.SHORT_INSN:
- case ClassWriter.LDCW_INSN:
- case ClassWriter.FIELDORMETH_INSN:
- case ClassWriter.TYPE_INSN:
- case ClassWriter.IINC_INSN:
- v += 3;
- break;
- case ClassWriter.ITFMETH_INSN:
- case ClassWriter.INDYMETH_INSN:
- v += 5;
- break;
- // case MANA_INSN:
- default:
- v += 4;
- break;
- }
- }
- // parses the try catch entries
- j = readUnsignedShort(v);
- v += 2;
- for (; j > 0; --j) {
- Label start = readLabel(readUnsignedShort(v), labels);
- Label end = readLabel(readUnsignedShort(v + 2), labels);
- Label handler = readLabel(readUnsignedShort(v + 4), labels);
- int type = readUnsignedShort(v + 6);
- if (type == 0) {
- mv.visitTryCatchBlock(start, end, handler, null);
- } else {
- mv.visitTryCatchBlock(start,
- end,
- handler,
- readUTF8(items[type], c));
- }
- v += 8;
- }
- // parses the local variable, line number tables, and code
- // attributes
- int varTable = 0;
- int varTypeTable = 0;
- int stackMap = 0;
- int stackMapSize = 0;
- int frameCount = 0;
- int frameMode = 0;
- int frameOffset = 0;
- int frameLocalCount = 0;
- int frameLocalDiff = 0;
- int frameStackCount = 0;
- Object[] frameLocal = null;
- Object[] frameStack = null;
- boolean zip = true;
- cattrs = null;
- j = readUnsignedShort(v);
- v += 2;
- for (; j > 0; --j) {
- attrName = readUTF8(v, c);
- if ("LocalVariableTable".equals(attrName)) {
- if (!skipDebug) {
- varTable = v + 6;
- k = readUnsignedShort(v + 6);
- w = v + 8;
- for (; k > 0; --k) {
- label = readUnsignedShort(w);
- if (labels[label] == null) {
- readLabel(label, labels).status |= Label.DEBUG;
- }
- label += readUnsignedShort(w + 2);
- if (labels[label] == null) {
- readLabel(label, labels).status |= Label.DEBUG;
- }
- w += 10;
- }
- }
- } else if ("LocalVariableTypeTable".equals(attrName)) {
- varTypeTable = v + 6;
- } else if ("LineNumberTable".equals(attrName)) {
- if (!skipDebug) {
- k = readUnsignedShort(v + 6);
- w = v + 8;
- for (; k > 0; --k) {
- label = readUnsignedShort(w);
- if (labels[label] == null) {
- readLabel(label, labels).status |= Label.DEBUG;
- }
- labels[label].line = readUnsignedShort(w + 2);
- w += 4;
- }
- }
- } else if (FRAMES && "StackMapTable".equals(attrName)) {
- if ((flags & SKIP_FRAMES) == 0) {
- stackMap = v + 8;
- stackMapSize = readInt(v + 2);
- frameCount = readUnsignedShort(v + 6);
+ // reads the code attributes
+ int varTable = 0;
+ int varTypeTable = 0;
+ boolean zip = true;
+ boolean unzip = (context.flags & EXPAND_FRAMES) != 0;
+ int stackMap = 0;
+ int stackMapSize = 0;
+ int frameCount = 0;
+ Context frame = null;
+ Attribute attributes = null;
+
+ for (int i = readUnsignedShort(u); i > 0; --i) {
+ String attrName = readUTF8(u + 2, c);
+ if ("LocalVariableTable".equals(attrName)) {
+ if ((context.flags & SKIP_DEBUG) == 0) {
+ varTable = u + 8;
+ for (int j = readUnsignedShort(u + 8), v = u; j > 0; --j) {
+ int label = readUnsignedShort(v + 10);
+ if (labels[label] == null) {
+ readLabel(label, labels).status |= Label.DEBUG;
}
- /*
- * here we do not extract the labels corresponding to
- * the attribute content. This would require a full
- * parsing of the attribute, which would need to be
- * repeated in the second phase (see below). Instead the
- * content of the attribute is read one frame at a time
- * (i.e. after a frame has been visited, the next frame
- * is read), and the labels it contains are also
- * extracted one frame at a time. Thanks to the ordering
- * of frames, having only a "one frame lookahead" is not
- * a problem, i.e. it is not possible to see an offset
- * smaller than the offset of the current insn and for
- * which no Label exist.
- */
- /*
- * This is not true for UNINITIALIZED type offsets. We
- * solve this by parsing the stack map table without a
- * full decoding (see below).
- */
- } else if (FRAMES && "StackMap".equals(attrName)) {
- if ((flags & SKIP_FRAMES) == 0) {
- stackMap = v + 8;
- stackMapSize = readInt(v + 2);
- frameCount = readUnsignedShort(v + 6);
- zip = false;
- }
- /*
- * IMPORTANT! here we assume that the frames are
- * ordered, as in the StackMapTable attribute, although
- * this is not guaranteed by the attribute format.
- */
- } else {
- for (k = 0; k < attrs.length; ++k) {
- if (attrs[k].type.equals(attrName)) {
- attr = attrs[k].read(this,
- v + 6,
- readInt(v + 2),
- c,
- codeStart - 8,
- labels);
- if (attr != null) {
- attr.next = cattrs;
- cattrs = attr;
- }
- }
+ label += readUnsignedShort(v + 12);
+ if (labels[label] == null) {
+ readLabel(label, labels).status |= Label.DEBUG;
}
+ v += 10;
}
- v += 6 + readInt(v + 2);
}
-
- // 2nd phase: visits each instruction
- if (FRAMES && stackMap != 0) {
- // creates the very first (implicit) frame from the method
- // descriptor
- frameLocal = new Object[maxLocals];
- frameStack = new Object[maxStack];
- if (unzip) {
- int local = 0;
- if ((access & Opcodes.ACC_STATIC) == 0) {
- if ("<init>".equals(name)) {
- frameLocal[local++] = Opcodes.UNINITIALIZED_THIS;
- } else {
- frameLocal[local++] = readClass(header + 2, c);
- }
- }
- j = 1;
- loop: while (true) {
- k = j;
- switch (desc.charAt(j++)) {
- case 'Z':
- case 'C':
- case 'B':
- case 'S':
- case 'I':
- frameLocal[local++] = Opcodes.INTEGER;
- break;
- case 'F':
- frameLocal[local++] = Opcodes.FLOAT;
- break;
- case 'J':
- frameLocal[local++] = Opcodes.LONG;
- break;
- case 'D':
- frameLocal[local++] = Opcodes.DOUBLE;
- break;
- case '[':
- while (desc.charAt(j) == '[') {
- ++j;
- }
- if (desc.charAt(j) == 'L') {
- ++j;
- while (desc.charAt(j) != ';') {
- ++j;
- }
- }
- frameLocal[local++] = desc.substring(k, ++j);
- break;
- case 'L':
- while (desc.charAt(j) != ';') {
- ++j;
- }
- frameLocal[local++] = desc.substring(k + 1,
- j++);
- break;
- default:
- break loop;
- }
+ } else if ("LocalVariableTypeTable".equals(attrName)) {
+ varTypeTable = u + 8;
+ } else if ("LineNumberTable".equals(attrName)) {
+ if ((context.flags & SKIP_DEBUG) == 0) {
+ for (int j = readUnsignedShort(u + 8), v = u; j > 0; --j) {
+ int label = readUnsignedShort(v + 10);
+ if (labels[label] == null) {
+ readLabel(label, labels).status |= Label.DEBUG;
}
- frameLocalCount = local;
+ labels[label].line = readUnsignedShort(v + 12);
+ v += 4;
}
- /*
- * for the first explicit frame the offset is not
- * offset_delta + 1 but only offset_delta; setting the
- * implicit frame offset to -1 allow the use of the
- * "offset_delta + 1" rule in all cases
- */
- frameOffset = -1;
- /*
- * Finds labels for UNINITIALIZED frame types. Instead of
- * decoding each element of the stack map table, we look
- * for 3 consecutive bytes that "look like" an UNINITIALIZED
- * type (tag 8, offset within code bounds, NEW instruction
- * at this offset). We may find false positives (i.e. not
- * real UNINITIALIZED types), but this should be rare, and
- * the only consequence will be the creation of an unneeded
- * label. This is better than creating a label for each NEW
- * instruction, and faster than fully decoding the whole
- * stack map table.
- */
- for (j = stackMap; j < stackMap + stackMapSize - 2; ++j) {
- if (b[j] == 8) { // UNINITIALIZED FRAME TYPE
- k = readUnsignedShort(j + 1);
- if (k >= 0 && k < codeLength) { // potential offset
- if ((b[codeStart + k] & 0xFF) == Opcodes.NEW) { // NEW at this offset
- readLabel(k, labels);
- }
- }
+ }
+ } else if (FRAMES && "StackMapTable".equals(attrName)) {
+ if ((context.flags & SKIP_FRAMES) == 0) {
+ stackMap = u + 10;
+ stackMapSize = readInt(u + 4);
+ frameCount = readUnsignedShort(u + 8);
+ }
+ /*
+ * here we do not extract the labels corresponding to the
+ * attribute content. This would require a full parsing of the
+ * attribute, which would need to be repeated in the second
+ * phase (see below). Instead the content of the attribute is
+ * read one frame at a time (i.e. after a frame has been
+ * visited, the next frame is read), and the labels it contains
+ * are also extracted one frame at a time. Thanks to the
+ * ordering of frames, having only a "one frame lookahead" is
+ * not a problem, i.e. it is not possible to see an offset
+ * smaller than the offset of the current insn and for which no
+ * Label exists.
+ */
+ /*
+ * This is not true for UNINITIALIZED type offsets. We solve
+ * this by parsing the stack map table without a full decoding
+ * (see below).
+ */
+ } else if (FRAMES && "StackMap".equals(attrName)) {
+ if ((context.flags & SKIP_FRAMES) == 0) {
+ zip = false;
+ stackMap = u + 10;
+ stackMapSize = readInt(u + 4);
+ frameCount = readUnsignedShort(u + 8);
+ }
+ /*
+ * IMPORTANT! here we assume that the frames are ordered, as in
+ * the StackMapTable attribute, although this is not guaranteed
+ * by the attribute format.
+ */
+ } else {
+ for (int j = 0; j < context.attrs.length; ++j) {
+ if (context.attrs[j].type.equals(attrName)) {
+ Attribute attr = context.attrs[j].read(this, u + 8,
+ readInt(u + 4), c, codeStart - 8, labels);
+ if (attr != null) {
+ attr.next = attributes;
+ attributes = attr;
}
}
}
- v = codeStart;
- Label l;
- while (v < codeEnd) {
- w = v - codeStart;
-
- l = labels[w];
- if (l != null) {
- mv.visitLabel(l);
- if (!skipDebug && l.line > 0) {
- mv.visitLineNumber(l.line, l);
+ }
+ u += 6 + readInt(u + 4);
+ }
+ u += 2;
+
+ // generates the first (implicit) stack map frame
+ if (FRAMES && stackMap != 0) {
+ /*
+ * for the first explicit frame the offset is not offset_delta + 1
+ * but only offset_delta; setting the implicit frame offset to -1
+ * allow the use of the "offset_delta + 1" rule in all cases
+ */
+ frame = context;
+ frame.offset = -1;
+ frame.mode = 0;
+ frame.localCount = 0;
+ frame.localDiff = 0;
+ frame.stackCount = 0;
+ frame.local = new Object[maxLocals];
+ frame.stack = new Object[maxStack];
+ if (unzip) {
+ getImplicitFrame(context);
+ }
+ /*
+ * Finds labels for UNINITIALIZED frame types. Instead of decoding
+ * each element of the stack map table, we look for 3 consecutive
+ * bytes that "look like" an UNINITIALIZED type (tag 8, offset
+ * within code bounds, NEW instruction at this offset). We may find
+ * false positives (i.e. not real UNINITIALIZED types), but this
+ * should be rare, and the only consequence will be the creation of
+ * an unneeded label. This is better than creating a label for each
+ * NEW instruction, and faster than fully decoding the whole stack
+ * map table.
+ */
+ for (int i = stackMap; i < stackMap + stackMapSize - 2; ++i) {
+ if (b[i] == 8) { // UNINITIALIZED FRAME TYPE
+ int v = readUnsignedShort(i + 1);
+ if (v >= 0 && v < codeLength) {
+ if ((b[codeStart + v] & 0xFF) == Opcodes.NEW) {
+ readLabel(v, labels);
}
}
+ }
+ }
+ }
- while (FRAMES && frameLocal != null
- && (frameOffset == w || frameOffset == -1))
- {
- // if there is a frame for this offset,
- // makes the visitor visit it,
- // and reads the next frame if there is one.
- if (!zip || unzip) {
- mv.visitFrame(Opcodes.F_NEW,
- frameLocalCount,
- frameLocal,
- frameStackCount,
- frameStack);
- } else if (frameOffset != -1) {
- mv.visitFrame(frameMode,
- frameLocalDiff,
- frameLocal,
- frameStackCount,
- frameStack);
- }
+ // visits the instructions
+ u = codeStart;
+ while (u < codeEnd) {
+ int offset = u - codeStart;
+
+ // visits the label and line number for this offset, if any
+ Label l = labels[offset];
+ if (l != null) {
+ mv.visitLabel(l);
+ if ((context.flags & SKIP_DEBUG) == 0 && l.line > 0) {
+ mv.visitLineNumber(l.line, l);
+ }
+ }
- if (frameCount > 0) {
- int tag, delta, n;
- if (zip) {
- tag = b[stackMap++] & 0xFF;
- } else {
- tag = MethodWriter.FULL_FRAME;
- frameOffset = -1;
- }
- frameLocalDiff = 0;
- if (tag < MethodWriter.SAME_LOCALS_1_STACK_ITEM_FRAME)
- {
- delta = tag;
- frameMode = Opcodes.F_SAME;
- frameStackCount = 0;
- } else if (tag < MethodWriter.RESERVED) {
- delta = tag
- - MethodWriter.SAME_LOCALS_1_STACK_ITEM_FRAME;
- stackMap = readFrameType(frameStack,
- 0,
- stackMap,
- c,
- labels);
- frameMode = Opcodes.F_SAME1;
- frameStackCount = 1;
- } else {
- delta = readUnsignedShort(stackMap);
- stackMap += 2;
- if (tag == MethodWriter.SAME_LOCALS_1_STACK_ITEM_FRAME_EXTENDED)
- {
- stackMap = readFrameType(frameStack,
- 0,
- stackMap,
- c,
- labels);
- frameMode = Opcodes.F_SAME1;
- frameStackCount = 1;
- } else if (tag >= MethodWriter.CHOP_FRAME
- && tag < MethodWriter.SAME_FRAME_EXTENDED)
- {
- frameMode = Opcodes.F_CHOP;
- frameLocalDiff = MethodWriter.SAME_FRAME_EXTENDED
- - tag;
- frameLocalCount -= frameLocalDiff;
- frameStackCount = 0;
- } else if (tag == MethodWriter.SAME_FRAME_EXTENDED)
- {
- frameMode = Opcodes.F_SAME;
- frameStackCount = 0;
- } else if (tag < MethodWriter.FULL_FRAME) {
- j = unzip ? frameLocalCount : 0;
- for (k = tag
- - MethodWriter.SAME_FRAME_EXTENDED; k > 0; k--)
- {
- stackMap = readFrameType(frameLocal,
- j++,
- stackMap,
- c,
- labels);
- }
- frameMode = Opcodes.F_APPEND;
- frameLocalDiff = tag
- - MethodWriter.SAME_FRAME_EXTENDED;
- frameLocalCount += frameLocalDiff;
- frameStackCount = 0;
- } else { // if (tag == FULL_FRAME) {
- frameMode = Opcodes.F_FULL;
- n = frameLocalDiff = frameLocalCount = readUnsignedShort(stackMap);
- stackMap += 2;
- for (j = 0; n > 0; n--) {
- stackMap = readFrameType(frameLocal,
- j++,
- stackMap,
- c,
- labels);
- }
- n = frameStackCount = readUnsignedShort(stackMap);
- stackMap += 2;
- for (j = 0; n > 0; n--) {
- stackMap = readFrameType(frameStack,
- j++,
- stackMap,
- c,
- labels);
- }
- }
- }
- frameOffset += delta + 1;
- readLabel(frameOffset, labels);
-
- --frameCount;
- } else {
- frameLocal = null;
- }
+ // visits the frame for this offset, if any
+ while (FRAMES && frame != null
+ && (frame.offset == offset || frame.offset == -1)) {
+ // if there is a frame for this offset, makes the visitor visit
+ // it, and reads the next frame if there is one.
+ if (frame.offset != -1) {
+ if (!zip || unzip) {
+ mv.visitFrame(Opcodes.F_NEW, frame.localCount,
+ frame.local, frame.stackCount, frame.stack);
+ } else {
+ mv.visitFrame(frame.mode, frame.localDiff, frame.local,
+ frame.stackCount, frame.stack);
}
+ }
+ if (frameCount > 0) {
+ stackMap = readFrame(stackMap, zip, unzip, labels, frame);
+ --frameCount;
+ } else {
+ frame = null;
+ }
+ }
- int opcode = b[v] & 0xFF;
- switch (ClassWriter.TYPE[opcode]) {
- case ClassWriter.NOARG_INSN:
- mv.visitInsn(opcode);
- v += 1;
- break;
- case ClassWriter.IMPLVAR_INSN:
- if (opcode > Opcodes.ISTORE) {
- opcode -= 59; // ISTORE_0
- mv.visitVarInsn(Opcodes.ISTORE + (opcode >> 2),
- opcode & 0x3);
- } else {
- opcode -= 26; // ILOAD_0
- mv.visitVarInsn(Opcodes.ILOAD + (opcode >> 2),
- opcode & 0x3);
- }
- v += 1;
- break;
- case ClassWriter.LABEL_INSN:
- mv.visitJumpInsn(opcode, labels[w
- + readShort(v + 1)]);
- v += 3;
- break;
- case ClassWriter.LABELW_INSN:
- mv.visitJumpInsn(opcode - 33, labels[w
- + readInt(v + 1)]);
- v += 5;
- break;
- case ClassWriter.WIDE_INSN:
- opcode = b[v + 1] & 0xFF;
- if (opcode == Opcodes.IINC) {
- mv.visitIincInsn(readUnsignedShort(v + 2),
- readShort(v + 4));
- v += 6;
- } else {
- mv.visitVarInsn(opcode,
- readUnsignedShort(v + 2));
- v += 4;
- }
- break;
- case ClassWriter.TABL_INSN:
- // skips 0 to 3 padding bytes
- v = v + 4 - (w & 3);
- // reads instruction
- label = w + readInt(v);
- int min = readInt(v + 4);
- int max = readInt(v + 8);
- v += 12;
- Label[] table = new Label[max - min + 1];
- for (j = 0; j < table.length; ++j) {
- table[j] = labels[w + readInt(v)];
- v += 4;
- }
- mv.visitTableSwitchInsn(min,
- max,
- labels[label],
- table);
- break;
- case ClassWriter.LOOK_INSN:
- // skips 0 to 3 padding bytes
- v = v + 4 - (w & 3);
- // reads instruction
- label = w + readInt(v);
- j = readInt(v + 4);
- v += 8;
- int[] keys = new int[j];
- Label[] values = new Label[j];
- for (j = 0; j < keys.length; ++j) {
- keys[j] = readInt(v);
- values[j] = labels[w + readInt(v + 4)];
- v += 8;
- }
- mv.visitLookupSwitchInsn(labels[label],
- keys,
- values);
- break;
- case ClassWriter.VAR_INSN:
- mv.visitVarInsn(opcode, b[v + 1] & 0xFF);
- v += 2;
- break;
- case ClassWriter.SBYTE_INSN:
- mv.visitIntInsn(opcode, b[v + 1]);
- v += 2;
- break;
- case ClassWriter.SHORT_INSN:
- mv.visitIntInsn(opcode, readShort(v + 1));
- v += 3;
- break;
- case ClassWriter.LDC_INSN:
- mv.visitLdcInsn(readConst(b[v + 1] & 0xFF, c));
- v += 2;
- break;
- case ClassWriter.LDCW_INSN:
- mv.visitLdcInsn(readConst(readUnsignedShort(v + 1),
- c));
- v += 3;
- break;
- case ClassWriter.FIELDORMETH_INSN:
- case ClassWriter.ITFMETH_INSN: {
- int cpIndex = items[readUnsignedShort(v + 1)];
- String iowner = readClass(cpIndex, c);
- cpIndex = items[readUnsignedShort(cpIndex + 2)];
- String iname = readUTF8(cpIndex, c);
- String idesc = readUTF8(cpIndex + 2, c);
- if (opcode < Opcodes.INVOKEVIRTUAL) {
- mv.visitFieldInsn(opcode, iowner, iname, idesc);
- } else {
- mv.visitMethodInsn(opcode, iowner, iname, idesc);
- }
- if (opcode == Opcodes.INVOKEINTERFACE) {
- v += 5;
- } else {
- v += 3;
- }
- break;
- }
- case ClassWriter.INDYMETH_INSN: {
- int cpIndex = items[readUnsignedShort(v + 1)];
- int bsmIndex = bootstrapMethods[readUnsignedShort(cpIndex)];
- cpIndex = items[readUnsignedShort(cpIndex + 2)];
- String iname = readUTF8(cpIndex, c);
- String idesc = readUTF8(cpIndex + 2, c);
-
- int mhIndex = readUnsignedShort(bsmIndex);
- Handle bsm = (Handle) readConst(mhIndex, c);
- int bsmArgCount = readUnsignedShort(bsmIndex + 2);
- Object[] bsmArgs = new Object[bsmArgCount];
- bsmIndex += 4;
- for(int a = 0; a < bsmArgCount; a++) {
- int argIndex = readUnsignedShort(bsmIndex);
- bsmArgs[a] = readConst(argIndex, c);
- bsmIndex += 2;
- }
- mv.visitInvokeDynamicInsn(iname, idesc, bsm, bsmArgs);
-
- v += 5;
- break;
- }
- case ClassWriter.TYPE_INSN:
- mv.visitTypeInsn(opcode, readClass(v + 1, c));
- v += 3;
- break;
- case ClassWriter.IINC_INSN:
- mv.visitIincInsn(b[v + 1] & 0xFF, b[v + 2]);
- v += 3;
- break;
- // case MANA_INSN:
- default:
- mv.visitMultiANewArrayInsn(readClass(v + 1, c),
- b[v + 3] & 0xFF);
- v += 4;
- break;
- }
+ // visits the instruction at this offset
+ int opcode = b[u] & 0xFF;
+ switch (ClassWriter.TYPE[opcode]) {
+ case ClassWriter.NOARG_INSN:
+ mv.visitInsn(opcode);
+ u += 1;
+ break;
+ case ClassWriter.IMPLVAR_INSN:
+ if (opcode > Opcodes.ISTORE) {
+ opcode -= 59; // ISTORE_0
+ mv.visitVarInsn(Opcodes.ISTORE + (opcode >> 2),
+ opcode & 0x3);
+ } else {
+ opcode -= 26; // ILOAD_0
+ mv.visitVarInsn(Opcodes.ILOAD + (opcode >> 2), opcode & 0x3);
}
- l = labels[codeEnd - codeStart];
- if (l != null) {
- mv.visitLabel(l);
+ u += 1;
+ break;
+ case ClassWriter.LABEL_INSN:
+ mv.visitJumpInsn(opcode, labels[offset + readShort(u + 1)]);
+ u += 3;
+ break;
+ case ClassWriter.LABELW_INSN:
+ mv.visitJumpInsn(opcode - 33, labels[offset + readInt(u + 1)]);
+ u += 5;
+ break;
+ case ClassWriter.WIDE_INSN:
+ opcode = b[u + 1] & 0xFF;
+ if (opcode == Opcodes.IINC) {
+ mv.visitIincInsn(readUnsignedShort(u + 2), readShort(u + 4));
+ u += 6;
+ } else {
+ mv.visitVarInsn(opcode, readUnsignedShort(u + 2));
+ u += 4;
}
- // visits the local variable tables
- if (!skipDebug && varTable != 0) {
- int[] typeTable = null;
- if (varTypeTable != 0) {
- k = readUnsignedShort(varTypeTable) * 3;
- w = varTypeTable + 2;
- typeTable = new int[k];
- while (k > 0) {
- typeTable[--k] = w + 6; // signature
- typeTable[--k] = readUnsignedShort(w + 8); // index
- typeTable[--k] = readUnsignedShort(w); // start
- w += 10;
- }
- }
- k = readUnsignedShort(varTable);
- w = varTable + 2;
- for (; k > 0; --k) {
- int start = readUnsignedShort(w);
- int length = readUnsignedShort(w + 2);
- int index = readUnsignedShort(w + 8);
- String vsignature = null;
- if (typeTable != null) {
- for (int a = 0; a < typeTable.length; a += 3) {
- if (typeTable[a] == start
- && typeTable[a + 1] == index)
- {
- vsignature = readUTF8(typeTable[a + 2], c);
- break;
- }
- }
- }
- mv.visitLocalVariable(readUTF8(w + 4, c),
- readUTF8(w + 6, c),
- vsignature,
- labels[start],
- labels[start + length],
- index);
- w += 10;
- }
+ break;
+ case ClassWriter.TABL_INSN: {
+ // skips 0 to 3 padding bytes
+ u = u + 4 - (offset & 3);
+ // reads instruction
+ int label = offset + readInt(u);
+ int min = readInt(u + 4);
+ int max = readInt(u + 8);
+ Label[] table = new Label[max - min + 1];
+ u += 12;
+ for (int i = 0; i < table.length; ++i) {
+ table[i] = labels[offset + readInt(u)];
+ u += 4;
+ }
+ mv.visitTableSwitchInsn(min, max, labels[label], table);
+ break;
+ }
+ case ClassWriter.LOOK_INSN: {
+ // skips 0 to 3 padding bytes
+ u = u + 4 - (offset & 3);
+ // reads instruction
+ int label = offset + readInt(u);
+ int len = readInt(u + 4);
+ int[] keys = new int[len];
+ Label[] values = new Label[len];
+ u += 8;
+ for (int i = 0; i < len; ++i) {
+ keys[i] = readInt(u);
+ values[i] = labels[offset + readInt(u + 4)];
+ u += 8;
+ }
+ mv.visitLookupSwitchInsn(labels[label], keys, values);
+ break;
+ }
+ case ClassWriter.VAR_INSN:
+ mv.visitVarInsn(opcode, b[u + 1] & 0xFF);
+ u += 2;
+ break;
+ case ClassWriter.SBYTE_INSN:
+ mv.visitIntInsn(opcode, b[u + 1]);
+ u += 2;
+ break;
+ case ClassWriter.SHORT_INSN:
+ mv.visitIntInsn(opcode, readShort(u + 1));
+ u += 3;
+ break;
+ case ClassWriter.LDC_INSN:
+ mv.visitLdcInsn(readConst(b[u + 1] & 0xFF, c));
+ u += 2;
+ break;
+ case ClassWriter.LDCW_INSN:
+ mv.visitLdcInsn(readConst(readUnsignedShort(u + 1), c));
+ u += 3;
+ break;
+ case ClassWriter.FIELDORMETH_INSN:
+ case ClassWriter.ITFMETH_INSN: {
+ int cpIndex = items[readUnsignedShort(u + 1)];
+ String iowner = readClass(cpIndex, c);
+ cpIndex = items[readUnsignedShort(cpIndex + 2)];
+ String iname = readUTF8(cpIndex, c);
+ String idesc = readUTF8(cpIndex + 2, c);
+ if (opcode < Opcodes.INVOKEVIRTUAL) {
+ mv.visitFieldInsn(opcode, iowner, iname, idesc);
+ } else {
+ mv.visitMethodInsn(opcode, iowner, iname, idesc);
}
- // visits the other attributes
- while (cattrs != null) {
- attr = cattrs.next;
- cattrs.next = null;
- mv.visitAttribute(cattrs);
- cattrs = attr;
+ if (opcode == Opcodes.INVOKEINTERFACE) {
+ u += 5;
+ } else {
+ u += 3;
}
- // visits the max stack and max locals values
- mv.visitMaxs(maxStack, maxLocals);
+ break;
}
+ case ClassWriter.INDYMETH_INSN: {
+ int cpIndex = items[readUnsignedShort(u + 1)];
+ int bsmIndex = context.bootstrapMethods[readUnsignedShort(cpIndex)];
+ Handle bsm = (Handle) readConst(readUnsignedShort(bsmIndex), c);
+ int bsmArgCount = readUnsignedShort(bsmIndex + 2);
+ Object[] bsmArgs = new Object[bsmArgCount];
+ bsmIndex += 4;
+ for (int i = 0; i < bsmArgCount; i++) {
+ bsmArgs[i] = readConst(readUnsignedShort(bsmIndex), c);
+ bsmIndex += 2;
+ }
+ cpIndex = items[readUnsignedShort(cpIndex + 2)];
+ String iname = readUTF8(cpIndex, c);
+ String idesc = readUTF8(cpIndex + 2, c);
+ mv.visitInvokeDynamicInsn(iname, idesc, bsm, bsmArgs);
+ u += 5;
+ break;
+ }
+ case ClassWriter.TYPE_INSN:
+ mv.visitTypeInsn(opcode, readClass(u + 1, c));
+ u += 3;
+ break;
+ case ClassWriter.IINC_INSN:
+ mv.visitIincInsn(b[u + 1] & 0xFF, b[u + 2]);
+ u += 3;
+ break;
+ // case MANA_INSN:
+ default:
+ mv.visitMultiANewArrayInsn(readClass(u + 1, c), b[u + 3] & 0xFF);
+ u += 4;
+ break;
+ }
+ }
+ if (labels[codeLength] != null) {
+ mv.visitLabel(labels[codeLength]);
+ }
- if (mv != null) {
- mv.visitEnd();
+ // visits the local variable tables
+ if ((context.flags & SKIP_DEBUG) == 0 && varTable != 0) {
+ int[] typeTable = null;
+ if (varTypeTable != 0) {
+ u = varTypeTable + 2;
+ typeTable = new int[readUnsignedShort(varTypeTable) * 3];
+ for (int i = typeTable.length; i > 0;) {
+ typeTable[--i] = u + 6; // signature
+ typeTable[--i] = readUnsignedShort(u + 8); // index
+ typeTable[--i] = readUnsignedShort(u); // start
+ u += 10;
+ }
+ }
+ u = varTable + 2;
+ for (int i = readUnsignedShort(varTable); i > 0; --i) {
+ int start = readUnsignedShort(u);
+ int length = readUnsignedShort(u + 2);
+ int index = readUnsignedShort(u + 8);
+ String vsignature = null;
+ if (typeTable != null) {
+ for (int j = 0; j < typeTable.length; j += 3) {
+ if (typeTable[j] == start && typeTable[j + 1] == index) {
+ vsignature = readUTF8(typeTable[j + 2], c);
+ break;
+ }
+ }
+ }
+ mv.visitLocalVariable(readUTF8(u + 4, c), readUTF8(u + 6, c),
+ vsignature, labels[start], labels[start + length],
+ index);
+ u += 10;
}
}
- // visits the end of the class
- classVisitor.visitEnd();
+ // visits the code attributes
+ while (attributes != null) {
+ Attribute attr = attributes.next;
+ attributes.next = null;
+ mv.visitAttribute(attributes);
+ attributes = attr;
+ }
+
+ // visits the max stack and max locals values
+ mv.visitMaxs(maxStack, maxLocals);
}
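The decoding loop above turns the raw Code attribute back into MethodVisitor events (visitTableSwitchInsn, visitLookupSwitchInsn, visitInvokeDynamicInsn, visitLocalVariable, visitMaxs, ...). A minimal sketch of the receiving side, assuming the class to inspect is named on the command line and can be loaded from the class path; the class name SwitchTracer and the printed output are illustrative, not part of this patch:

    import scala.tools.asm.ClassReader;
    import scala.tools.asm.ClassVisitor;
    import scala.tools.asm.Label;
    import scala.tools.asm.MethodVisitor;
    import scala.tools.asm.Opcodes;

    // Minimal tracer: prints a few of the events emitted by the decoding loop above.
    public class SwitchTracer extends ClassVisitor {
        public SwitchTracer() { super(Opcodes.ASM4); }

        @Override
        public MethodVisitor visitMethod(int access, String name, String desc,
                String signature, String[] exceptions) {
            final String methodName = name;
            return new MethodVisitor(Opcodes.ASM4) {
                @Override
                public void visitTableSwitchInsn(int min, int max, Label dflt, Label... labels) {
                    System.out.println(methodName + ": tableswitch " + min + ".." + max);
                }
                @Override
                public void visitLookupSwitchInsn(Label dflt, int[] keys, Label[] labels) {
                    System.out.println(methodName + ": lookupswitch with " + keys.length + " keys");
                }
                @Override
                public void visitInvokeDynamicInsn(String name, String desc,
                        scala.tools.asm.Handle bsm, Object... bsmArgs) {
                    System.out.println(methodName + ": invokedynamic " + name + desc);
                }
                @Override
                public void visitMaxs(int maxStack, int maxLocals) {
                    System.out.println(methodName + ": maxStack=" + maxStack
                            + " maxLocals=" + maxLocals);
                }
            };
        }

        public static void main(String[] args) throws Exception {
            // Assumes args[0] is a class name resolvable from the class path.
            new ClassReader(args[0]).accept(new SwitchTracer(), 0);
        }
    }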
/**
* Reads parameter annotations and makes the given visitor visit them.
*
- * @param v start offset in {@link #b b} of the annotations to be read.
- * @param desc the method descriptor.
- * @param buf buffer to be used to call {@link #readUTF8 readUTF8},
- * {@link #readClass(int,char[]) readClass} or
- * {@link #readConst readConst}.
- * @param visible <tt>true</tt> if the annotations to be read are visible
- * at runtime.
- * @param mv the visitor that must visit the annotations.
+ * @param v
+ * start offset in {@link #b b} of the annotations to be read.
+ * @param desc
+ * the method descriptor.
+ * @param buf
+ * buffer to be used to call {@link #readUTF8 readUTF8},
+ * {@link #readClass(int,char[]) readClass} or {@link #readConst
+ * readConst}.
+ * @param visible
+ * <tt>true</tt> if the annotations to be read are visible at
+ * runtime.
+ * @param mv
+ * the visitor that must visit the annotations.
*/
- private void readParameterAnnotations(
- int v,
- final String desc,
- final char[] buf,
- final boolean visible,
- final MethodVisitor mv)
- {
+ private void readParameterAnnotations(int v, final String desc,
+ final char[] buf, final boolean visible, final MethodVisitor mv) {
int i;
int n = b[v++] & 0xFF;
// workaround for a bug in javac (javac compiler generates a parameter
@@ -1679,21 +1458,22 @@ public class ClassReader {
/**
* Reads the values of an annotation and makes the given visitor visit them.
*
- * @param v the start offset in {@link #b b} of the values to be read
- * (including the unsigned short that gives the number of values).
- * @param buf buffer to be used to call {@link #readUTF8 readUTF8},
- * {@link #readClass(int,char[]) readClass} or
- * {@link #readConst readConst}.
- * @param named if the annotation values are named or not.
- * @param av the visitor that must visit the values.
+ * @param v
+ * the start offset in {@link #b b} of the values to be read
+ * (including the unsigned short that gives the number of
+ * values).
+ * @param buf
+ * buffer to be used to call {@link #readUTF8 readUTF8},
+ * {@link #readClass(int,char[]) readClass} or {@link #readConst
+ * readConst}.
+ * @param named
+ * if the annotation values are named or not.
+ * @param av
+ * the visitor that must visit the values.
* @return the end offset of the annotation values.
*/
- private int readAnnotationValues(
- int v,
- final char[] buf,
- final boolean named,
- final AnnotationVisitor av)
- {
+ private int readAnnotationValues(int v, final char[] buf,
+ final boolean named, final AnnotationVisitor av) {
int i = readUnsignedShort(v);
v += 2;
if (named) {
@@ -1714,210 +1494,371 @@ public class ClassReader {
/**
* Reads a value of an annotation and makes the given visitor visit it.
*
- * @param v the start offset in {@link #b b} of the value to be read (<i>not
- * including the value name constant pool index</i>).
- * @param buf buffer to be used to call {@link #readUTF8 readUTF8},
- * {@link #readClass(int,char[]) readClass} or
- * {@link #readConst readConst}.
- * @param name the name of the value to be read.
- * @param av the visitor that must visit the value.
+ * @param v
+ * the start offset in {@link #b b} of the value to be read
+ * (<i>not including the value name constant pool index</i>).
+ * @param buf
+ * buffer to be used to call {@link #readUTF8 readUTF8},
+ * {@link #readClass(int,char[]) readClass} or {@link #readConst
+ * readConst}.
+ * @param name
+ * the name of the value to be read.
+ * @param av
+ * the visitor that must visit the value.
* @return the end offset of the annotation value.
*/
- private int readAnnotationValue(
- int v,
- final char[] buf,
- final String name,
- final AnnotationVisitor av)
- {
+ private int readAnnotationValue(int v, final char[] buf, final String name,
+ final AnnotationVisitor av) {
int i;
if (av == null) {
switch (b[v] & 0xFF) {
- case 'e': // enum_const_value
- return v + 5;
- case '@': // annotation_value
- return readAnnotationValues(v + 3, buf, true, null);
- case '[': // array_value
- return readAnnotationValues(v + 1, buf, false, null);
- default:
- return v + 3;
+ case 'e': // enum_const_value
+ return v + 5;
+ case '@': // annotation_value
+ return readAnnotationValues(v + 3, buf, true, null);
+ case '[': // array_value
+ return readAnnotationValues(v + 1, buf, false, null);
+ default:
+ return v + 3;
}
}
switch (b[v++] & 0xFF) {
- case 'I': // pointer to CONSTANT_Integer
- case 'J': // pointer to CONSTANT_Long
- case 'F': // pointer to CONSTANT_Float
- case 'D': // pointer to CONSTANT_Double
- av.visit(name, readConst(readUnsignedShort(v), buf));
- v += 2;
- break;
- case 'B': // pointer to CONSTANT_Byte
- av.visit(name,
- new Byte((byte) readInt(items[readUnsignedShort(v)])));
- v += 2;
- break;
- case 'Z': // pointer to CONSTANT_Boolean
- av.visit(name, readInt(items[readUnsignedShort(v)]) == 0
- ? Boolean.FALSE
- : Boolean.TRUE);
- v += 2;
- break;
- case 'S': // pointer to CONSTANT_Short
- av.visit(name,
- new Short((short) readInt(items[readUnsignedShort(v)])));
- v += 2;
+ case 'I': // pointer to CONSTANT_Integer
+ case 'J': // pointer to CONSTANT_Long
+ case 'F': // pointer to CONSTANT_Float
+ case 'D': // pointer to CONSTANT_Double
+ av.visit(name, readConst(readUnsignedShort(v), buf));
+ v += 2;
+ break;
+ case 'B': // pointer to CONSTANT_Byte
+ av.visit(name,
+ new Byte((byte) readInt(items[readUnsignedShort(v)])));
+ v += 2;
+ break;
+ case 'Z': // pointer to CONSTANT_Boolean
+ av.visit(name,
+ readInt(items[readUnsignedShort(v)]) == 0 ? Boolean.FALSE
+ : Boolean.TRUE);
+ v += 2;
+ break;
+ case 'S': // pointer to CONSTANT_Short
+ av.visit(name, new Short(
+ (short) readInt(items[readUnsignedShort(v)])));
+ v += 2;
+ break;
+ case 'C': // pointer to CONSTANT_Char
+ av.visit(name, new Character(
+ (char) readInt(items[readUnsignedShort(v)])));
+ v += 2;
+ break;
+ case 's': // pointer to CONSTANT_Utf8
+ av.visit(name, readUTF8(v, buf));
+ v += 2;
+ break;
+ case 'e': // enum_const_value
+ av.visitEnum(name, readUTF8(v, buf), readUTF8(v + 2, buf));
+ v += 4;
+ break;
+ case 'c': // class_info
+ av.visit(name, Type.getType(readUTF8(v, buf)));
+ v += 2;
+ break;
+ case '@': // annotation_value
+ v = readAnnotationValues(v + 2, buf, true,
+ av.visitAnnotation(name, readUTF8(v, buf)));
+ break;
+ case '[': // array_value
+ int size = readUnsignedShort(v);
+ v += 2;
+ if (size == 0) {
+ return readAnnotationValues(v - 2, buf, false,
+ av.visitArray(name));
+ }
+ switch (this.b[v++] & 0xFF) {
+ case 'B':
+ byte[] bv = new byte[size];
+ for (i = 0; i < size; i++) {
+ bv[i] = (byte) readInt(items[readUnsignedShort(v)]);
+ v += 3;
+ }
+ av.visit(name, bv);
+ --v;
break;
- case 'C': // pointer to CONSTANT_Char
- av.visit(name,
- new Character((char) readInt(items[readUnsignedShort(v)])));
- v += 2;
+ case 'Z':
+ boolean[] zv = new boolean[size];
+ for (i = 0; i < size; i++) {
+ zv[i] = readInt(items[readUnsignedShort(v)]) != 0;
+ v += 3;
+ }
+ av.visit(name, zv);
+ --v;
break;
- case 's': // pointer to CONSTANT_Utf8
- av.visit(name, readUTF8(v, buf));
- v += 2;
+ case 'S':
+ short[] sv = new short[size];
+ for (i = 0; i < size; i++) {
+ sv[i] = (short) readInt(items[readUnsignedShort(v)]);
+ v += 3;
+ }
+ av.visit(name, sv);
+ --v;
break;
- case 'e': // enum_const_value
- av.visitEnum(name, readUTF8(v, buf), readUTF8(v + 2, buf));
- v += 4;
+ case 'C':
+ char[] cv = new char[size];
+ for (i = 0; i < size; i++) {
+ cv[i] = (char) readInt(items[readUnsignedShort(v)]);
+ v += 3;
+ }
+ av.visit(name, cv);
+ --v;
break;
- case 'c': // class_info
- av.visit(name, Type.getType(readUTF8(v, buf)));
- v += 2;
+ case 'I':
+ int[] iv = new int[size];
+ for (i = 0; i < size; i++) {
+ iv[i] = readInt(items[readUnsignedShort(v)]);
+ v += 3;
+ }
+ av.visit(name, iv);
+ --v;
break;
- case '@': // annotation_value
- v = readAnnotationValues(v + 2,
- buf,
- true,
- av.visitAnnotation(name, readUTF8(v, buf)));
+ case 'J':
+ long[] lv = new long[size];
+ for (i = 0; i < size; i++) {
+ lv[i] = readLong(items[readUnsignedShort(v)]);
+ v += 3;
+ }
+ av.visit(name, lv);
+ --v;
break;
- case '[': // array_value
- int size = readUnsignedShort(v);
- v += 2;
- if (size == 0) {
- return readAnnotationValues(v - 2,
- buf,
- false,
- av.visitArray(name));
+ case 'F':
+ float[] fv = new float[size];
+ for (i = 0; i < size; i++) {
+ fv[i] = Float
+ .intBitsToFloat(readInt(items[readUnsignedShort(v)]));
+ v += 3;
}
- switch (this.b[v++] & 0xFF) {
- case 'B':
- byte[] bv = new byte[size];
- for (i = 0; i < size; i++) {
- bv[i] = (byte) readInt(items[readUnsignedShort(v)]);
- v += 3;
- }
- av.visit(name, bv);
- --v;
- break;
- case 'Z':
- boolean[] zv = new boolean[size];
- for (i = 0; i < size; i++) {
- zv[i] = readInt(items[readUnsignedShort(v)]) != 0;
- v += 3;
- }
- av.visit(name, zv);
- --v;
- break;
- case 'S':
- short[] sv = new short[size];
- for (i = 0; i < size; i++) {
- sv[i] = (short) readInt(items[readUnsignedShort(v)]);
- v += 3;
- }
- av.visit(name, sv);
- --v;
- break;
- case 'C':
- char[] cv = new char[size];
- for (i = 0; i < size; i++) {
- cv[i] = (char) readInt(items[readUnsignedShort(v)]);
- v += 3;
- }
- av.visit(name, cv);
- --v;
- break;
- case 'I':
- int[] iv = new int[size];
- for (i = 0; i < size; i++) {
- iv[i] = readInt(items[readUnsignedShort(v)]);
- v += 3;
- }
- av.visit(name, iv);
- --v;
- break;
- case 'J':
- long[] lv = new long[size];
- for (i = 0; i < size; i++) {
- lv[i] = readLong(items[readUnsignedShort(v)]);
- v += 3;
- }
- av.visit(name, lv);
- --v;
- break;
- case 'F':
- float[] fv = new float[size];
- for (i = 0; i < size; i++) {
- fv[i] = Float.intBitsToFloat(readInt(items[readUnsignedShort(v)]));
- v += 3;
- }
- av.visit(name, fv);
- --v;
- break;
- case 'D':
- double[] dv = new double[size];
- for (i = 0; i < size; i++) {
- dv[i] = Double.longBitsToDouble(readLong(items[readUnsignedShort(v)]));
- v += 3;
- }
- av.visit(name, dv);
- --v;
- break;
- default:
- v = readAnnotationValues(v - 3,
- buf,
- false,
- av.visitArray(name));
+ av.visit(name, fv);
+ --v;
+ break;
+ case 'D':
+ double[] dv = new double[size];
+ for (i = 0; i < size; i++) {
+ dv[i] = Double
+ .longBitsToDouble(readLong(items[readUnsignedShort(v)]));
+ v += 3;
}
+ av.visit(name, dv);
+ --v;
+ break;
+ default:
+ v = readAnnotationValues(v - 3, buf, false, av.visitArray(name));
+ }
}
return v;
}
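readAnnotationValue dispatches on the element_value tag and forwards each decoded value to an AnnotationVisitor. A sketch of such a receiver that simply prints what it is given; AnnotationDumper is an invented name and the indentation scheme is illustrative only:

    import scala.tools.asm.AnnotationVisitor;
    import scala.tools.asm.Opcodes;

    // Receiving side of readAnnotationValue: prints every value the parser reports.
    public class AnnotationDumper extends AnnotationVisitor {
        private final String indent;

        public AnnotationDumper(String indent) {
            super(Opcodes.ASM4);
            this.indent = indent;
        }

        @Override
        public void visit(String name, Object value) {
            // Primitive, String, Type and primitive-array values end up here.
            System.out.println(indent + name + " = " + value);
        }

        @Override
        public void visitEnum(String name, String desc, String value) {
            System.out.println(indent + name + " = " + desc + "." + value);
        }

        @Override
        public AnnotationVisitor visitAnnotation(String name, String desc) {
            System.out.println(indent + name + " = @" + desc);
            return new AnnotationDumper(indent + "  ");
        }

        @Override
        public AnnotationVisitor visitArray(String name) {
            System.out.println(indent + name + " = [");
            return new AnnotationDumper(indent + "  ");
        }
    }

Returning an instance from ClassVisitor.visitAnnotation (or MethodVisitor.visitAnnotation) is enough to receive nested annotations and arrays as well, since the parser above recurses through the visitors returned by visitAnnotation and visitArray.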
- private int readFrameType(
- final Object[] frame,
- final int index,
- int v,
- final char[] buf,
- final Label[] labels)
- {
- int type = b[v++] & 0xFF;
- switch (type) {
- case 0:
- frame[index] = Opcodes.TOP;
- break;
- case 1:
- frame[index] = Opcodes.INTEGER;
- break;
- case 2:
- frame[index] = Opcodes.FLOAT;
+ /**
+ * Computes the implicit frame of the method currently being parsed (as
+ * defined in the given {@link Context}) and stores it in the given context.
+ *
+ * @param frame
+ * information about the class being parsed.
+ */
+ private void getImplicitFrame(final Context frame) {
+ String desc = frame.desc;
+ Object[] locals = frame.local;
+ int local = 0;
+ if ((frame.access & Opcodes.ACC_STATIC) == 0) {
+ if ("<init>".equals(frame.name)) {
+ locals[local++] = Opcodes.UNINITIALIZED_THIS;
+ } else {
+ locals[local++] = readClass(header + 2, frame.buffer);
+ }
+ }
+ int i = 1;
+ loop: while (true) {
+ int j = i;
+ switch (desc.charAt(i++)) {
+ case 'Z':
+ case 'C':
+ case 'B':
+ case 'S':
+ case 'I':
+ locals[local++] = Opcodes.INTEGER;
break;
- case 3:
- frame[index] = Opcodes.DOUBLE;
+ case 'F':
+ locals[local++] = Opcodes.FLOAT;
break;
- case 4:
- frame[index] = Opcodes.LONG;
+ case 'J':
+ locals[local++] = Opcodes.LONG;
break;
- case 5:
- frame[index] = Opcodes.NULL;
+ case 'D':
+ locals[local++] = Opcodes.DOUBLE;
break;
- case 6:
- frame[index] = Opcodes.UNINITIALIZED_THIS;
+ case '[':
+ while (desc.charAt(i) == '[') {
+ ++i;
+ }
+ if (desc.charAt(i) == 'L') {
+ ++i;
+ while (desc.charAt(i) != ';') {
+ ++i;
+ }
+ }
+ locals[local++] = desc.substring(j, ++i);
break;
- case 7: // Object
- frame[index] = readClass(v, buf);
- v += 2;
+ case 'L':
+ while (desc.charAt(i) != ';') {
+ ++i;
+ }
+ locals[local++] = desc.substring(j + 1, i++);
break;
- default: // Uninitialized
- frame[index] = readLabel(readUnsignedShort(v), labels);
- v += 2;
+ default:
+ break loop;
+ }
+ }
+ frame.localCount = local;
+ }
+
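getImplicitFrame derives the initial local-variable types from the method descriptor by walking it character by character. The same mapping can be expressed against the public Type API; a sketch that only covers the argument slots (the receiver slot handled at the top of getImplicitFrame is omitted), with ImplicitLocals as an invented name:

    import scala.tools.asm.Opcodes;
    import scala.tools.asm.Type;

    // Same descriptor-to-verification-type mapping as getImplicitFrame, expressed
    // with the public Type API instead of walking the descriptor by hand.
    public class ImplicitLocals {
        static Object[] argumentFrameTypes(String methodDesc) {
            Type[] args = Type.getArgumentTypes(methodDesc);
            Object[] locals = new Object[args.length];
            for (int i = 0; i < args.length; i++) {
                switch (args[i].getSort()) {
                case Type.BOOLEAN:
                case Type.CHAR:
                case Type.BYTE:
                case Type.SHORT:
                case Type.INT:
                    locals[i] = Opcodes.INTEGER;
                    break;
                case Type.FLOAT:
                    locals[i] = Opcodes.FLOAT;
                    break;
                case Type.LONG:
                    locals[i] = Opcodes.LONG;
                    break;
                case Type.DOUBLE:
                    locals[i] = Opcodes.DOUBLE;
                    break;
                case Type.ARRAY:
                    locals[i] = args[i].getDescriptor();   // e.g. "[Ljava/lang/String;"
                    break;
                default: // Type.OBJECT
                    locals[i] = args[i].getInternalName(); // e.g. "java/lang/String"
                    break;
                }
            }
            return locals;
        }

        public static void main(String[] args) {
            // Prints [1, 4, [Ljava/lang/String;] : the integer tags behind
            // Opcodes.INTEGER and Opcodes.LONG, then the array descriptor.
            System.out.println(java.util.Arrays.toString(
                    argumentFrameTypes("(IJ[Ljava/lang/String;)V")));
        }
    }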
+ /**
+ * Reads a stack map frame and stores the result in the given
+ * {@link Context} object.
+ *
+ * @param stackMap
+ * the start offset of a stack map frame in the class file.
+ * @param zip
+ * if the stack map frame at stackMap is compressed or not.
+ * @param unzip
+ * if the stack map frame must be uncompressed.
+ * @param labels
+ * the labels of the method currently being parsed, indexed by
+ * their offset. A new label for the parsed stack map frame is
+ * stored in this array if it does not already exist.
+ * @param frame
+ * where the parsed stack map frame must be stored.
+ * @return the offset of the first byte following the parsed frame.
+ */
+ private int readFrame(int stackMap, boolean zip, boolean unzip,
+ Label[] labels, Context frame) {
+ char[] c = frame.buffer;
+ int tag;
+ int delta;
+ if (zip) {
+ tag = b[stackMap++] & 0xFF;
+ } else {
+ tag = MethodWriter.FULL_FRAME;
+ frame.offset = -1;
+ }
+ frame.localDiff = 0;
+ if (tag < MethodWriter.SAME_LOCALS_1_STACK_ITEM_FRAME) {
+ delta = tag;
+ frame.mode = Opcodes.F_SAME;
+ frame.stackCount = 0;
+ } else if (tag < MethodWriter.RESERVED) {
+ delta = tag - MethodWriter.SAME_LOCALS_1_STACK_ITEM_FRAME;
+ stackMap = readFrameType(frame.stack, 0, stackMap, c, labels);
+ frame.mode = Opcodes.F_SAME1;
+ frame.stackCount = 1;
+ } else {
+ delta = readUnsignedShort(stackMap);
+ stackMap += 2;
+ if (tag == MethodWriter.SAME_LOCALS_1_STACK_ITEM_FRAME_EXTENDED) {
+ stackMap = readFrameType(frame.stack, 0, stackMap, c, labels);
+ frame.mode = Opcodes.F_SAME1;
+ frame.stackCount = 1;
+ } else if (tag >= MethodWriter.CHOP_FRAME
+ && tag < MethodWriter.SAME_FRAME_EXTENDED) {
+ frame.mode = Opcodes.F_CHOP;
+ frame.localDiff = MethodWriter.SAME_FRAME_EXTENDED - tag;
+ frame.localCount -= frame.localDiff;
+ frame.stackCount = 0;
+ } else if (tag == MethodWriter.SAME_FRAME_EXTENDED) {
+ frame.mode = Opcodes.F_SAME;
+ frame.stackCount = 0;
+ } else if (tag < MethodWriter.FULL_FRAME) {
+ int local = unzip ? frame.localCount : 0;
+ for (int i = tag - MethodWriter.SAME_FRAME_EXTENDED; i > 0; i--) {
+ stackMap = readFrameType(frame.local, local++, stackMap, c,
+ labels);
+ }
+ frame.mode = Opcodes.F_APPEND;
+ frame.localDiff = tag - MethodWriter.SAME_FRAME_EXTENDED;
+ frame.localCount += frame.localDiff;
+ frame.stackCount = 0;
+ } else { // if (tag == FULL_FRAME) {
+ frame.mode = Opcodes.F_FULL;
+ int n = readUnsignedShort(stackMap);
+ stackMap += 2;
+ frame.localDiff = n;
+ frame.localCount = n;
+ for (int local = 0; n > 0; n--) {
+ stackMap = readFrameType(frame.local, local++, stackMap, c,
+ labels);
+ }
+ n = readUnsignedShort(stackMap);
+ stackMap += 2;
+ frame.stackCount = n;
+ for (int stack = 0; n > 0; n--) {
+ stackMap = readFrameType(frame.stack, stack++, stackMap, c,
+ labels);
+ }
+ }
+ }
+ frame.offset += delta + 1;
+ readLabel(frame.offset, labels);
+ return stackMap;
+ }
+
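readFrame decompresses the StackMapTable encodings (SAME, SAME_LOCALS_1_STACK_ITEM, CHOP, APPEND, FULL_FRAME) into the Context object before they are reported through visitFrame. A sketch of a consumer, assuming EXPAND_FRAMES is passed so that every delivered frame is already uncompressed; FrameDumper and the command-line argument are illustrative:

    import scala.tools.asm.ClassReader;
    import scala.tools.asm.ClassVisitor;
    import scala.tools.asm.MethodVisitor;
    import scala.tools.asm.Opcodes;

    // Observes the frames decoded by readFrame. With EXPAND_FRAMES the reader
    // uncompresses every frame before delivering it.
    public class FrameDumper extends ClassVisitor {
        public FrameDumper() { super(Opcodes.ASM4); }

        @Override
        public MethodVisitor visitMethod(int access, String name, String desc,
                String signature, String[] exceptions) {
            final String methodName = name;
            return new MethodVisitor(Opcodes.ASM4) {
                @Override
                public void visitFrame(int type, int nLocal, Object[] local,
                        int nStack, Object[] stack) {
                    System.out.println(methodName + ": frame type=" + type
                            + " locals=" + nLocal + " stack=" + nStack);
                }
            };
        }

        public static void main(String[] args) throws Exception {
            // Assumes args[0] names a class on the class path.
            new ClassReader(args[0]).accept(new FrameDumper(), ClassReader.EXPAND_FRAMES);
        }
    }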
+ /**
+ * Reads a stack map frame type and stores it at the given index in the
+ * given array.
+ *
+ * @param frame
+ * the array where the parsed type must be stored.
+ * @param index
+ * the index in 'frame' where the parsed type must be stored.
+ * @param v
+ * the start offset of the stack map frame type to read.
+ * @param buf
+ * a buffer to read strings.
+ * @param labels
+ * the labels of the method currently being parsed, indexed by
+ * their offset. If the parsed type is an Uninitialized type, a
+ * new label for the corresponding NEW instruction is stored in
+ * this array if it does not already exist.
+ * @return the offset of the first byte after the parsed type.
+ */
+ private int readFrameType(final Object[] frame, final int index, int v,
+ final char[] buf, final Label[] labels) {
+ int type = b[v++] & 0xFF;
+ switch (type) {
+ case 0:
+ frame[index] = Opcodes.TOP;
+ break;
+ case 1:
+ frame[index] = Opcodes.INTEGER;
+ break;
+ case 2:
+ frame[index] = Opcodes.FLOAT;
+ break;
+ case 3:
+ frame[index] = Opcodes.DOUBLE;
+ break;
+ case 4:
+ frame[index] = Opcodes.LONG;
+ break;
+ case 5:
+ frame[index] = Opcodes.NULL;
+ break;
+ case 6:
+ frame[index] = Opcodes.UNINITIALIZED_THIS;
+ break;
+ case 7: // Object
+ frame[index] = readClass(v, buf);
+ v += 2;
+ break;
+ default: // Uninitialized
+ frame[index] = readLabel(readUnsignedShort(v), labels);
+ v += 2;
}
return v;
}
@@ -1927,10 +1868,12 @@ public class ClassReader {
* implementation of this method creates a label for the given offset if it
* has not been already created.
*
- * @param offset a bytecode offset in a method.
- * @param labels the already created labels, indexed by their offset. If a
- * label already exists for offset this method must not create a new
- * one. Otherwise it must store the new label in this array.
+ * @param offset
+ * a bytecode offset in a method.
+ * @param labels
+ * the already created labels, indexed by their offset. If a
+ * label already exists for offset this method must not create a
+ * new one. Otherwise it must store the new label in this array.
* @return a non null Label, which must be equal to labels[offset].
*/
protected Label readLabel(int offset, Label[] labels) {
@@ -1941,39 +1884,67 @@ public class ClassReader {
}
/**
+ * Returns the start index of the attribute_info structure of this class.
+ *
+ * @return the start index of the attribute_info structure of this class.
+ */
+ private int getAttributes() {
+ // skips the header
+ int u = header + 8 + readUnsignedShort(header + 6) * 2;
+ // skips fields and methods
+ for (int i = readUnsignedShort(u); i > 0; --i) {
+ for (int j = readUnsignedShort(u + 8); j > 0; --j) {
+ u += 6 + readInt(u + 12);
+ }
+ u += 8;
+ }
+ u += 2;
+ for (int i = readUnsignedShort(u); i > 0; --i) {
+ for (int j = readUnsignedShort(u + 8); j > 0; --j) {
+ u += 6 + readInt(u + 12);
+ }
+ u += 8;
+ }
+ // the attribute_info structure starts just after the methods
+ return u + 2;
+ }
+
+ /**
* Reads an attribute in {@link #b b}.
*
- * @param attrs prototypes of the attributes that must be parsed during the
- * visit of the class. Any attribute whose type is not equal to the
- * type of one the prototypes is ignored (i.e. an empty
- * {@link Attribute} instance is returned).
- * @param type the type of the attribute.
- * @param off index of the first byte of the attribute's content in
- * {@link #b b}. The 6 attribute header bytes, containing the type
- * and the length of the attribute, are not taken into account here
- * (they have already been read).
- * @param len the length of the attribute's content.
- * @param buf buffer to be used to call {@link #readUTF8 readUTF8},
- * {@link #readClass(int,char[]) readClass} or
- * {@link #readConst readConst}.
- * @param codeOff index of the first byte of code's attribute content in
- * {@link #b b}, or -1 if the attribute to be read is not a code
- * attribute. The 6 attribute header bytes, containing the type and
- * the length of the attribute, are not taken into account here.
- * @param labels the labels of the method's code, or <tt>null</tt> if the
- * attribute to be read is not a code attribute.
+ * @param attrs
+ * prototypes of the attributes that must be parsed during the
+ * visit of the class. Any attribute whose type is not equal to
+ * the type of one the prototypes is ignored (i.e. an empty
+ * {@link Attribute} instance is returned).
+ * @param type
+ * the type of the attribute.
+ * @param off
+ * index of the first byte of the attribute's content in
+ * {@link #b b}. The 6 attribute header bytes, containing the
+ * type and the length of the attribute, are not taken into
+ * account here (they have already been read).
+ * @param len
+ * the length of the attribute's content.
+ * @param buf
+ * buffer to be used to call {@link #readUTF8 readUTF8},
+ * {@link #readClass(int,char[]) readClass} or {@link #readConst
+ * readConst}.
+ * @param codeOff
+ * index of the first byte of code's attribute content in
+ * {@link #b b}, or -1 if the attribute to be read is not a code
+ * attribute. The 6 attribute header bytes, containing the type
+ * and the length of the attribute, are not taken into account
+ * here.
+ * @param labels
+ * the labels of the method's code, or <tt>null</tt> if the
+ * attribute to be read is not a code attribute.
* @return the attribute that has been read, or <tt>null</tt> to skip this
* attribute.
*/
- private Attribute readAttribute(
- final Attribute[] attrs,
- final String type,
- final int off,
- final int len,
- final char[] buf,
- final int codeOff,
- final Label[] labels)
- {
+ private Attribute readAttribute(final Attribute[] attrs, final String type,
+ final int off, final int len, final char[] buf, final int codeOff,
+ final Label[] labels) {
for (int i = 0; i < attrs.length; ++i) {
if (attrs[i].type.equals(type)) {
return attrs[i].read(this, off, len, buf, codeOff, labels);
@@ -1987,9 +1958,9 @@ public class ClassReader {
// ------------------------------------------------------------------------
/**
- * Returns the number of constant pool items in {@link #b b}.
+ * Returns the number of constant pool items in {@link #b b}.
*
- * @return the number of constant pool items in {@link #b b}.
+ * @return the number of constant pool items in {@link #b b}.
*/
public int getItemCount() {
return items.length;
@@ -2000,7 +1971,8 @@ public class ClassReader {
* one. <i>This method is intended for {@link Attribute} sub classes, and is
* normally not needed by class generators or adapters.</i>
*
- * @param item the index a constant pool item.
+ * @param item
+ * the index a constant pool item.
* @return the start index of the constant pool item in {@link #b b}, plus
* one.
*/
@@ -2024,7 +1996,8 @@ public class ClassReader {
* {@link Attribute} sub classes, and is normally not needed by class
* generators or adapters.</i>
*
- * @param index the start index of the value to be read in {@link #b b}.
+ * @param index
+ * the start index of the value to be read in {@link #b b}.
* @return the read value.
*/
public int readByte(final int index) {
@@ -2032,11 +2005,12 @@ public class ClassReader {
}
/**
- * Reads an unsigned short value in {@link #b b}. <i>This method is
- * intended for {@link Attribute} sub classes, and is normally not needed by
- * class generators or adapters.</i>
+ * Reads an unsigned short value in {@link #b b}. <i>This method is intended
+ * for {@link Attribute} sub classes, and is normally not needed by class
+ * generators or adapters.</i>
*
- * @param index the start index of the value to be read in {@link #b b}.
+ * @param index
+ * the start index of the value to be read in {@link #b b}.
* @return the read value.
*/
public int readUnsignedShort(final int index) {
@@ -2049,7 +2023,8 @@ public class ClassReader {
* for {@link Attribute} sub classes, and is normally not needed by class
* generators or adapters.</i>
*
- * @param index the start index of the value to be read in {@link #b b}.
+ * @param index
+ * the start index of the value to be read in {@link #b b}.
* @return the read value.
*/
public short readShort(final int index) {
@@ -2062,7 +2037,8 @@ public class ClassReader {
* {@link Attribute} sub classes, and is normally not needed by class
* generators or adapters.</i>
*
- * @param index the start index of the value to be read in {@link #b b}.
+ * @param index
+ * the start index of the value to be read in {@link #b b}.
* @return the read value.
*/
public int readInt(final int index) {
@@ -2072,11 +2048,12 @@ public class ClassReader {
}
/**
- * Reads a signed long value in {@link #b b}. <i>This method is intended
- * for {@link Attribute} sub classes, and is normally not needed by class
+ * Reads a signed long value in {@link #b b}. <i>This method is intended for
+ * {@link Attribute} sub classes, and is normally not needed by class
* generators or adapters.</i>
*
- * @param index the start index of the value to be read in {@link #b b}.
+ * @param index
+ * the start index of the value to be read in {@link #b b}.
* @return the read value.
*/
public long readLong(final int index) {
@@ -2090,14 +2067,19 @@ public class ClassReader {
* is intended for {@link Attribute} sub classes, and is normally not needed
* by class generators or adapters.</i>
*
- * @param index the start index of an unsigned short value in {@link #b b},
- * whose value is the index of an UTF8 constant pool item.
- * @param buf buffer to be used to read the item. This buffer must be
- * sufficiently large. It is not automatically resized.
+ * @param index
+ * the start index of an unsigned short value in {@link #b b},
+ * whose value is the index of an UTF8 constant pool item.
+ * @param buf
+ * buffer to be used to read the item. This buffer must be
+ * sufficiently large. It is not automatically resized.
* @return the String corresponding to the specified UTF8 item.
*/
public String readUTF8(int index, final char[] buf) {
int item = readUnsignedShort(index);
+ if (index == 0 || item == 0) {
+ return null;
+ }
String s = strings[item];
if (s != null) {
return s;
@@ -2109,10 +2091,13 @@ public class ClassReader {
/**
* Reads UTF8 string in {@link #b b}.
*
- * @param index start offset of the UTF8 string to be read.
- * @param utfLen length of the UTF8 string to be read.
- * @param buf buffer to be used to read the string. This buffer must be
- * sufficiently large. It is not automatically resized.
+ * @param index
+ * start offset of the UTF8 string to be read.
+ * @param utfLen
+ * length of the UTF8 string to be read.
+ * @param buf
+ * buffer to be used to read the string. This buffer must be
+ * sufficiently large. It is not automatically resized.
* @return the String corresponding to the specified UTF8 string.
*/
private String readUTF(int index, final int utfLen, final char[] buf) {
@@ -2125,28 +2110,28 @@ public class ClassReader {
while (index < endIndex) {
c = b[index++];
switch (st) {
- case 0:
- c = c & 0xFF;
- if (c < 0x80) { // 0xxxxxxx
- buf[strLen++] = (char) c;
- } else if (c < 0xE0 && c > 0xBF) { // 110x xxxx 10xx xxxx
- cc = (char) (c & 0x1F);
- st = 1;
- } else { // 1110 xxxx 10xx xxxx 10xx xxxx
- cc = (char) (c & 0x0F);
- st = 2;
- }
- break;
+ case 0:
+ c = c & 0xFF;
+ if (c < 0x80) { // 0xxxxxxx
+ buf[strLen++] = (char) c;
+ } else if (c < 0xE0 && c > 0xBF) { // 110x xxxx 10xx xxxx
+ cc = (char) (c & 0x1F);
+ st = 1;
+ } else { // 1110 xxxx 10xx xxxx 10xx xxxx
+ cc = (char) (c & 0x0F);
+ st = 2;
+ }
+ break;
- case 1: // byte 2 of 2-byte char or byte 3 of 3-byte char
- buf[strLen++] = (char) ((cc << 6) | (c & 0x3F));
- st = 0;
- break;
+ case 1: // byte 2 of 2-byte char or byte 3 of 3-byte char
+ buf[strLen++] = (char) ((cc << 6) | (c & 0x3F));
+ st = 0;
+ break;
- case 2: // byte 2 of 3-byte char
- cc = (char) ((cc << 6) | (c & 0x3F));
- st = 1;
- break;
+ case 2: // byte 2 of 3-byte char
+ cc = (char) ((cc << 6) | (c & 0x3F));
+ st = 1;
+ break;
}
}
return new String(buf, 0, strLen);
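The state machine above decodes the JVM's modified UTF-8 (one-, two- and three-byte sequences). Outside ASM, DataInputStream.readUTF consumes the same layout, a u2 length followed by the encoded bytes, so it can serve as a quick cross-check; a small sketch with a hand-built two-byte sequence, using only the JDK:

    import java.io.ByteArrayInputStream;
    import java.io.DataInputStream;

    // Cross-check of the modified-UTF-8 decoding with the JDK decoder: a two-byte
    // length followed by the encoded bytes, i.e. a CONSTANT_Utf8 entry minus its tag.
    public class Utf8Check {
        public static void main(String[] args) throws Exception {
            byte[] constantUtf8 = {
                0, 2,                     // length = 2 bytes
                (byte) 0xC3, (byte) 0xA9  // 110xxxxx 10xxxxxx -> U+00E9
            };
            String s = new DataInputStream(new ByteArrayInputStream(constantUtf8)).readUTF();
            System.out.println(s + " (" + (int) s.charAt(0) + ")"); // prints: é (233)
        }
    }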
@@ -2157,10 +2142,12 @@ public class ClassReader {
* intended for {@link Attribute} sub classes, and is normally not needed by
* class generators or adapters.</i>
*
- * @param index the start index of an unsigned short value in {@link #b b},
- * whose value is the index of a class constant pool item.
- * @param buf buffer to be used to read the item. This buffer must be
- * sufficiently large. It is not automatically resized.
+ * @param index
+ * the start index of an unsigned short value in {@link #b b},
+ * whose value is the index of a class constant pool item.
+ * @param buf
+ * buffer to be used to read the item. This buffer must be
+ * sufficiently large. It is not automatically resized.
* @return the String corresponding to the specified class item.
*/
public String readClass(final int index, final char[] buf) {
@@ -2175,9 +2162,11 @@ public class ClassReader {
* method is intended for {@link Attribute} sub classes, and is normally not
* needed by class generators or adapters.</i>
*
- * @param item the index of a constant pool item.
- * @param buf buffer to be used to read the item. This buffer must be
- * sufficiently large. It is not automatically resized.
+ * @param item
+ * the index of a constant pool item.
+ * @param buf
+ * buffer to be used to read the item. This buffer must be
+ * sufficiently large. It is not automatically resized.
* @return the {@link Integer}, {@link Float}, {@link Long}, {@link Double},
* {@link String}, {@link Type} or {@link Handle} corresponding to
* the given constant pool item.
@@ -2185,32 +2174,29 @@ public class ClassReader {
public Object readConst(final int item, final char[] buf) {
int index = items[item];
switch (b[index - 1]) {
- case ClassWriter.INT:
- return new Integer(readInt(index));
- case ClassWriter.FLOAT:
- return new Float(Float.intBitsToFloat(readInt(index)));
- case ClassWriter.LONG:
- return new Long(readLong(index));
- case ClassWriter.DOUBLE:
- return new Double(Double.longBitsToDouble(readLong(index)));
- case ClassWriter.CLASS:
- return Type.getObjectType(readUTF8(index, buf));
- case ClassWriter.STR:
- return readUTF8(index, buf);
- case ClassWriter.MTYPE:
- return Type.getMethodType(readUTF8(index, buf));
-
- //case ClassWriter.HANDLE_BASE + [1..9]:
- default: {
- int tag = readByte(index);
- int[] items = this.items;
- int cpIndex = items[readUnsignedShort(index + 1)];
- String owner = readClass(cpIndex, buf);
- cpIndex = items[readUnsignedShort(cpIndex + 2)];
- String name = readUTF8(cpIndex, buf);
- String desc = readUTF8(cpIndex + 2, buf);
- return new Handle(tag, owner, name, desc);
- }
+ case ClassWriter.INT:
+ return new Integer(readInt(index));
+ case ClassWriter.FLOAT:
+ return new Float(Float.intBitsToFloat(readInt(index)));
+ case ClassWriter.LONG:
+ return new Long(readLong(index));
+ case ClassWriter.DOUBLE:
+ return new Double(Double.longBitsToDouble(readLong(index)));
+ case ClassWriter.CLASS:
+ return Type.getObjectType(readUTF8(index, buf));
+ case ClassWriter.STR:
+ return readUTF8(index, buf);
+ case ClassWriter.MTYPE:
+ return Type.getMethodType(readUTF8(index, buf));
+ default: // case ClassWriter.HANDLE_BASE + [1..9]:
+ int tag = readByte(index);
+ int[] items = this.items;
+ int cpIndex = items[readUnsignedShort(index + 1)];
+ String owner = readClass(cpIndex, buf);
+ cpIndex = items[readUnsignedShort(cpIndex + 2)];
+ String name = readUTF8(cpIndex, buf);
+ String desc = readUTF8(cpIndex + 2, buf);
+ return new Handle(tag, owner, name, desc);
}
}
}
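readAttribute matches the attribute name against caller-supplied prototypes and delegates to Attribute.read, which in turn relies on the public readUnsignedShort / readUTF8 / readConst helpers declared in this hunk. A sketch of such a prototype for a hypothetical non-standard attribute named org.example.Marker whose two-byte payload is the constant-pool index of a UTF8 string; the attribute name and layout are invented for illustration:

    import scala.tools.asm.Attribute;
    import scala.tools.asm.ByteVector;
    import scala.tools.asm.ClassReader;
    import scala.tools.asm.ClassWriter;
    import scala.tools.asm.Label;

    // Hypothetical non-standard attribute: two bytes of content holding the
    // constant pool index of a UTF8 string.
    public class MarkerAttribute extends Attribute {
        String value;

        public MarkerAttribute() { super("org.example.Marker"); }
        MarkerAttribute(String value) { this(); this.value = value; }

        @Override
        protected Attribute read(ClassReader cr, int off, int len, char[] buf,
                int codeOff, Label[] labels) {
            // 'off' points at the attribute content, after the 6 header bytes.
            return new MarkerAttribute(cr.readUTF8(off, buf));
        }

        @Override
        protected ByteVector write(ClassWriter cw, byte[] code, int len,
                int maxStack, int maxLocals) {
            return new ByteVector().putShort(cw.newUTF8(value));
        }
    }

Passing a prototype in the array given to ClassReader.accept(cv, new Attribute[] { new MarkerAttribute() }, 0) is what lets readAttribute recognise it instead of returning an empty Attribute instance.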
diff --git a/src/asm/scala/tools/asm/ClassVisitor.java b/src/asm/scala/tools/asm/ClassVisitor.java
index ae38ae0ab9..3fc364d5e5 100644
--- a/src/asm/scala/tools/asm/ClassVisitor.java
+++ b/src/asm/scala/tools/asm/ClassVisitor.java
@@ -30,11 +30,11 @@
package scala.tools.asm;
/**
- * A visitor to visit a Java class. The methods of this class must be called
- * in the following order: <tt>visit</tt> [ <tt>visitSource</tt> ] [
+ * A visitor to visit a Java class. The methods of this class must be called in
+ * the following order: <tt>visit</tt> [ <tt>visitSource</tt> ] [
* <tt>visitOuterClass</tt> ] ( <tt>visitAnnotation</tt> |
- * <tt>visitAttribute</tt> )* ( <tt>visitInnerClass</tt> |
- * <tt>visitField</tt> | <tt>visitMethod</tt> )* <tt>visitEnd</tt>.
+ * <tt>visitAttribute</tt> )* ( <tt>visitInnerClass</tt> | <tt>visitField</tt> |
+ * <tt>visitMethod</tt> )* <tt>visitEnd</tt>.
*
* @author Eric Bruneton
*/
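The ordering contract above is easiest to see with a pass-through visitor that logs each event while delegating to an optional downstream visitor. A sketch, with VisitOrderLogger as an invented name and the inspected class taken from the command line:

    import scala.tools.asm.ClassReader;
    import scala.tools.asm.ClassVisitor;
    import scala.tools.asm.FieldVisitor;
    import scala.tools.asm.MethodVisitor;
    import scala.tools.asm.Opcodes;

    // Logs class-level events in the order defined by the contract above,
    // delegating to an optional downstream visitor.
    public class VisitOrderLogger extends ClassVisitor {
        public VisitOrderLogger(ClassVisitor next) { super(Opcodes.ASM4, next); }

        @Override
        public void visit(int version, int access, String name, String signature,
                String superName, String[] interfaces) {
            System.out.println("visit " + name + " extends " + superName);
            super.visit(version, access, name, signature, superName, interfaces);
        }

        @Override
        public void visitSource(String source, String debug) {
            System.out.println("visitSource " + source);
            super.visitSource(source, debug);
        }

        @Override
        public FieldVisitor visitField(int access, String name, String desc,
                String signature, Object value) {
            System.out.println("visitField " + name + " " + desc);
            return super.visitField(access, name, desc, signature, value);
        }

        @Override
        public MethodVisitor visitMethod(int access, String name, String desc,
                String signature, String[] exceptions) {
            System.out.println("visitMethod " + name + desc);
            return super.visitMethod(access, name, desc, signature, exceptions);
        }

        @Override
        public void visitEnd() {
            System.out.println("visitEnd");
            super.visitEnd();
        }

        public static void main(String[] args) throws Exception {
            // Assumes args[0] is a class name on the class path.
            new ClassReader(args[0]).accept(new VisitOrderLogger(null), 0);
        }
    }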
@@ -55,8 +55,9 @@ public abstract class ClassVisitor {
/**
* Constructs a new {@link ClassVisitor}.
*
- * @param api the ASM API version implemented by this visitor. Must be one
- * of {@link Opcodes#ASM4}.
+ * @param api
+ * the ASM API version implemented by this visitor. Must be one
+ * of {@link Opcodes#ASM4}.
*/
public ClassVisitor(final int api) {
this(api, null);
@@ -65,15 +66,17 @@ public abstract class ClassVisitor {
/**
* Constructs a new {@link ClassVisitor}.
*
- * @param api the ASM API version implemented by this visitor. Must be one
- * of {@link Opcodes#ASM4}.
- * @param cv the class visitor to which this visitor must delegate method
- * calls. May be null.
+ * @param api
+ * the ASM API version implemented by this visitor. Must be one
+ * of {@link Opcodes#ASM4}.
+ * @param cv
+ * the class visitor to which this visitor must delegate method
+ * calls. May be null.
*/
public ClassVisitor(final int api, final ClassVisitor cv) {
- /*if (api != Opcodes.ASM4) {
+ if (api != Opcodes.ASM4) {
throw new IllegalArgumentException();
- }*/
+ }
this.api = api;
this.cv = cv;
}
@@ -81,30 +84,30 @@ public abstract class ClassVisitor {
/**
* Visits the header of the class.
*
- * @param version the class version.
- * @param access the class's access flags (see {@link Opcodes}). This
- * parameter also indicates if the class is deprecated.
- * @param name the internal name of the class (see
- * {@link Type#getInternalName() getInternalName}).
- * @param signature the signature of this class. May be <tt>null</tt> if
- * the class is not a generic one, and does not extend or implement
- * generic classes or interfaces.
- * @param superName the internal of name of the super class (see
- * {@link Type#getInternalName() getInternalName}). For interfaces,
- * the super class is {@link Object}. May be <tt>null</tt>, but
- * only for the {@link Object} class.
- * @param interfaces the internal names of the class's interfaces (see
- * {@link Type#getInternalName() getInternalName}). May be
- * <tt>null</tt>.
+ * @param version
+ * the class version.
+ * @param access
+ * the class's access flags (see {@link Opcodes}). This parameter
+ * also indicates if the class is deprecated.
+ * @param name
+ * the internal name of the class (see
+ * {@link Type#getInternalName() getInternalName}).
+ * @param signature
+ * the signature of this class. May be <tt>null</tt> if the class
+ * is not a generic one, and does not extend or implement generic
+ * classes or interfaces.
+ * @param superName
+ *            the internal name of the super class (see
+ * {@link Type#getInternalName() getInternalName}). For
+ * interfaces, the super class is {@link Object}. May be
+ * <tt>null</tt>, but only for the {@link Object} class.
+ * @param interfaces
+ * the internal names of the class's interfaces (see
+ * {@link Type#getInternalName() getInternalName}). May be
+ * <tt>null</tt>.
*/
- public void visit(
- int version,
- int access,
- String name,
- String signature,
- String superName,
- String[] interfaces)
- {
+ public void visit(int version, int access, String name, String signature,
+ String superName, String[] interfaces) {
if (cv != null) {
cv.visit(version, access, name, signature, superName, interfaces);
}
@@ -113,11 +116,13 @@ public abstract class ClassVisitor {
/**
* Visits the source of the class.
*
- * @param source the name of the source file from which the class was
- * compiled. May be <tt>null</tt>.
- * @param debug additional debug information to compute the correspondance
- * between source and compiled elements of the class. May be
- * <tt>null</tt>.
+ * @param source
+ * the name of the source file from which the class was compiled.
+ * May be <tt>null</tt>.
+ * @param debug
+ *            additional debug information to compute the correspondence
+ * between source and compiled elements of the class. May be
+ * <tt>null</tt>.
*/
public void visitSource(String source, String debug) {
if (cv != null) {
@@ -129,16 +134,19 @@ public abstract class ClassVisitor {
* Visits the enclosing class of the class. This method must be called only
* if the class has an enclosing class.
*
- * @param owner internal name of the enclosing class of the class.
- * @param name the name of the method that contains the class, or
- * <tt>null</tt> if the class is not enclosed in a method of its
- * enclosing class.
- * @param desc the descriptor of the method that contains the class, or
- * <tt>null</tt> if the class is not enclosed in a method of its
- * enclosing class.
+ * @param owner
+ * internal name of the enclosing class of the class.
+ * @param name
+ * the name of the method that contains the class, or
+ * <tt>null</tt> if the class is not enclosed in a method of its
+ * enclosing class.
+ * @param desc
+ * the descriptor of the method that contains the class, or
+ * <tt>null</tt> if the class is not enclosed in a method of its
+ * enclosing class.
*/
public void visitOuterClass(String owner, String name, String desc) {
- if (cv != null) {
+ if (cv != null) {
cv.visitOuterClass(owner, name, desc);
}
}
@@ -146,8 +154,10 @@ public abstract class ClassVisitor {
/**
* Visits an annotation of the class.
*
- * @param desc the class descriptor of the annotation class.
- * @param visible <tt>true</tt> if the annotation is visible at runtime.
+ * @param desc
+ * the class descriptor of the annotation class.
+ * @param visible
+ * <tt>true</tt> if the annotation is visible at runtime.
* @return a visitor to visit the annotation values, or <tt>null</tt> if
* this visitor is not interested in visiting this annotation.
*/
@@ -161,7 +171,8 @@ public abstract class ClassVisitor {
/**
* Visits a non standard attribute of the class.
*
- * @param attr an attribute.
+ * @param attr
+ * an attribute.
*/
public void visitAttribute(Attribute attr) {
if (cv != null) {
@@ -173,23 +184,22 @@ public abstract class ClassVisitor {
* Visits information about an inner class. This inner class is not
* necessarily a member of the class being visited.
*
- * @param name the internal name of an inner class (see
- * {@link Type#getInternalName() getInternalName}).
- * @param outerName the internal name of the class to which the inner class
- * belongs (see {@link Type#getInternalName() getInternalName}). May
- * be <tt>null</tt> for not member classes.
- * @param innerName the (simple) name of the inner class inside its
- * enclosing class. May be <tt>null</tt> for anonymous inner
- * classes.
- * @param access the access flags of the inner class as originally declared
- * in the enclosing class.
+ * @param name
+ * the internal name of an inner class (see
+ * {@link Type#getInternalName() getInternalName}).
+ * @param outerName
+ * the internal name of the class to which the inner class
+ * belongs (see {@link Type#getInternalName() getInternalName}).
+ *            May be <tt>null</tt> for non-member classes.
+ * @param innerName
+ * the (simple) name of the inner class inside its enclosing
+ * class. May be <tt>null</tt> for anonymous inner classes.
+ * @param access
+ * the access flags of the inner class as originally declared in
+ * the enclosing class.
*/
- public void visitInnerClass(
- String name,
- String outerName,
- String innerName,
- int access)
- {
+ public void visitInnerClass(String name, String outerName,
+ String innerName, int access) {
if (cv != null) {
cv.visitInnerClass(name, outerName, innerName, access);
}
@@ -198,33 +208,32 @@ public abstract class ClassVisitor {
/**
* Visits a field of the class.
*
- * @param access the field's access flags (see {@link Opcodes}). This
- * parameter also indicates if the field is synthetic and/or
- * deprecated.
- * @param name the field's name.
- * @param desc the field's descriptor (see {@link Type Type}).
- * @param signature the field's signature. May be <tt>null</tt> if the
- * field's type does not use generic types.
- * @param value the field's initial value. This parameter, which may be
- * <tt>null</tt> if the field does not have an initial value, must
- * be an {@link Integer}, a {@link Float}, a {@link Long}, a
- * {@link Double} or a {@link String} (for <tt>int</tt>,
- * <tt>float</tt>, <tt>long</tt> or <tt>String</tt> fields
- * respectively). <i>This parameter is only used for static fields</i>.
- * Its value is ignored for non static fields, which must be
- * initialized through bytecode instructions in constructors or
- * methods.
+ * @param access
+ * the field's access flags (see {@link Opcodes}). This parameter
+ * also indicates if the field is synthetic and/or deprecated.
+ * @param name
+ * the field's name.
+ * @param desc
+ * the field's descriptor (see {@link Type Type}).
+ * @param signature
+ * the field's signature. May be <tt>null</tt> if the field's
+ * type does not use generic types.
+ * @param value
+ * the field's initial value. This parameter, which may be
+ * <tt>null</tt> if the field does not have an initial value,
+ * must be an {@link Integer}, a {@link Float}, a {@link Long}, a
+ * {@link Double} or a {@link String} (for <tt>int</tt>,
+ * <tt>float</tt>, <tt>long</tt> or <tt>String</tt> fields
+ *            <tt>float</tt>, <tt>long</tt>, <tt>double</tt> or <tt>String</tt> fields
+ * fields</i>. Its value is ignored for non static fields, which
+ * must be initialized through bytecode instructions in
+ * constructors or methods.
* @return a visitor to visit field annotations and attributes, or
- * <tt>null</tt> if this class visitor is not interested in
- * visiting these annotations and attributes.
+ * <tt>null</tt> if this class visitor is not interested in visiting
+ * these annotations and attributes.
*/
- public FieldVisitor visitField(
- int access,
- String name,
- String desc,
- String signature,
- Object value)
- {
+ public FieldVisitor visitField(int access, String name, String desc,
+ String signature, Object value) {
if (cv != null) {
return cv.visitField(access, name, desc, signature, value);
}
@@ -233,31 +242,31 @@ public abstract class ClassVisitor {
/**
* Visits a method of the class. This method <i>must</i> return a new
- * {@link MethodVisitor} instance (or <tt>null</tt>) each time it is
- * called, i.e., it should not return a previously returned visitor.
+ * {@link MethodVisitor} instance (or <tt>null</tt>) each time it is called,
+ * i.e., it should not return a previously returned visitor.
*
- * @param access the method's access flags (see {@link Opcodes}). This
- * parameter also indicates if the method is synthetic and/or
- * deprecated.
- * @param name the method's name.
- * @param desc the method's descriptor (see {@link Type Type}).
- * @param signature the method's signature. May be <tt>null</tt> if the
- * method parameters, return type and exceptions do not use generic
- * types.
- * @param exceptions the internal names of the method's exception classes
- * (see {@link Type#getInternalName() getInternalName}). May be
- * <tt>null</tt>.
+ * @param access
+ * the method's access flags (see {@link Opcodes}). This
+ * parameter also indicates if the method is synthetic and/or
+ * deprecated.
+ * @param name
+ * the method's name.
+ * @param desc
+ * the method's descriptor (see {@link Type Type}).
+ * @param signature
+ * the method's signature. May be <tt>null</tt> if the method
+ * parameters, return type and exceptions do not use generic
+ * types.
+ * @param exceptions
+ * the internal names of the method's exception classes (see
+ * {@link Type#getInternalName() getInternalName}). May be
+ * <tt>null</tt>.
* @return an object to visit the byte code of the method, or <tt>null</tt>
* if this class visitor is not interested in visiting the code of
* this method.
*/
- public MethodVisitor visitMethod(
- int access,
- String name,
- String desc,
- String signature,
- String[] exceptions)
- {
+ public MethodVisitor visitMethod(int access, String name, String desc,
+ String signature, String[] exceptions) {
if (cv != null) {
return cv.visitMethod(access, name, desc, signature, exceptions);
}
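Because visitMethod must hand out a fresh MethodVisitor on every call, adapters typically wrap the delegate returned by super.visitMethod. A sketch that rewrites the owner of method calls; CallRedirector and the old/Owner and new/Owner names are placeholders, and the four-argument visitMethodInsn is the ASM 4 form used throughout this patch:

    import scala.tools.asm.ClassVisitor;
    import scala.tools.asm.MethodVisitor;
    import scala.tools.asm.Opcodes;

    // Returns a fresh wrapper from every visitMethod call, as the contract above
    // requires, and rewrites the owner of matching method calls.
    public class CallRedirector extends ClassVisitor {
        public CallRedirector(ClassVisitor next) { super(Opcodes.ASM4, next); }

        @Override
        public MethodVisitor visitMethod(int access, String name, String desc,
                String signature, String[] exceptions) {
            MethodVisitor mv = super.visitMethod(access, name, desc, signature, exceptions);
            return mv == null ? null : new MethodVisitor(Opcodes.ASM4, mv) {
                @Override
                public void visitMethodInsn(int opcode, String owner, String name, String desc) {
                    // Swap the owner if it matches the placeholder old/Owner.
                    super.visitMethodInsn(opcode,
                            "old/Owner".equals(owner) ? "new/Owner" : owner, name, desc);
                }
            };
        }
    }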
diff --git a/src/asm/scala/tools/asm/ClassWriter.java b/src/asm/scala/tools/asm/ClassWriter.java
index c7a0736b51..93ed7313c7 100644
--- a/src/asm/scala/tools/asm/ClassWriter.java
+++ b/src/asm/scala/tools/asm/ClassWriter.java
@@ -66,12 +66,18 @@ public class ClassWriter extends ClassVisitor {
public static final int COMPUTE_FRAMES = 2;
/**
- * Pseudo access flag to distinguish between the synthetic attribute and
- * the synthetic access flag.
+ * Pseudo access flag to distinguish between the synthetic attribute and the
+ * synthetic access flag.
*/
static final int ACC_SYNTHETIC_ATTRIBUTE = 0x40000;
/**
+ * Factor to convert from ACC_SYNTHETIC_ATTRIBUTE to Opcode.ACC_SYNTHETIC.
+ */
+ static final int TO_ACC_SYNTHETIC = ACC_SYNTHETIC_ATTRIBUTE
+ / Opcodes.ACC_SYNTHETIC;
+
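The factor works because ACC_SYNTHETIC_ATTRIBUTE (0x40000) is an exact multiple of Opcodes.ACC_SYNTHETIC (0x1000): dividing a masked access value by TO_ACC_SYNTHETIC (0x40) slides the pseudo flag down onto the real ACC_SYNTHETIC bit, which is how the access mask built later in this patch also clears the real flag whenever the pseudo flag was set. A minimal check, with the two constant values restated as assumptions:

    // Minimal check of the flag arithmetic, assuming ACC_SYNTHETIC_ATTRIBUTE = 0x40000
    // and Opcodes.ACC_SYNTHETIC = 0x1000 as in this patch.
    public class SyntheticFlagMath {
        public static void main(String[] args) {
            int ACC_SYNTHETIC = 0x1000;            // Opcodes.ACC_SYNTHETIC
            int ACC_SYNTHETIC_ATTRIBUTE = 0x40000; // ClassWriter pseudo flag
            int TO_ACC_SYNTHETIC = ACC_SYNTHETIC_ATTRIBUTE / ACC_SYNTHETIC; // 0x40
            int access = 0x0001 /* ACC_PUBLIC */ | ACC_SYNTHETIC_ATTRIBUTE;
            // Dividing moves bit 0x40000 down to bit 0x1000, i.e. the real flag.
            System.out.println(
                (access & ACC_SYNTHETIC_ATTRIBUTE) / TO_ACC_SYNTHETIC == ACC_SYNTHETIC); // true
        }
    }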
+ /**
* The type of instructions without any argument.
*/
static final int NOARG_INSN = 0;
@@ -238,8 +244,8 @@ public class ClassWriter extends ClassVisitor {
/**
* The base value for all CONSTANT_MethodHandle constant pool items.
- * Internally, ASM store the 9 variations of CONSTANT_MethodHandle into
- * 9 different items.
+ * Internally, ASM stores the 9 variations of CONSTANT_MethodHandle into 9
+ * different items.
*/
static final int HANDLE_BASE = 20;
@@ -266,9 +272,8 @@ public class ClassWriter extends ClassVisitor {
static final int TYPE_MERGED = 32;
/**
- * The type of BootstrapMethods items. These items are stored in a
- * special class attribute named BootstrapMethods and
- * not in the constant pool.
+ * The type of BootstrapMethods items. These items are stored in a special
+ * class attribute named BootstrapMethods and not in the constant pool.
*/
static final int BSM = 33;
@@ -327,10 +332,10 @@ public class ClassWriter extends ClassVisitor {
* necessarily be stored in the constant pool. This type table is used by
* the control flow and data flow analysis algorithm used to compute stack
* map frames from scratch. This array associates to each index <tt>i</tt>
- * the Item whose index is <tt>i</tt>. All Item objects stored in this
- * array are also stored in the {@link #items} hash table. These two arrays
- * allow to retrieve an Item from its index or, conversely, to get the index
- * of an Item from its value. Each Item stores an internal name in its
+ * the Item whose index is <tt>i</tt>. All Item objects stored in this array
+ * are also stored in the {@link #items} hash table. These two arrays make it
+ * possible to retrieve an Item from its index or, conversely, to get the index of an
+ * Item from its value. Each Item stores an internal name in its
* {@link Item#strVal1} field.
*/
Item[] typeTable;
@@ -439,16 +444,16 @@ public class ClassWriter extends ClassVisitor {
/**
* The fields of this class. These fields are stored in a linked list of
* {@link FieldWriter} objects, linked to each other by their
- * {@link FieldWriter#fv} field. This field stores the first element of
- * this list.
+ * {@link FieldWriter#fv} field. This field stores the first element of this
+ * list.
*/
FieldWriter firstField;
/**
* The fields of this class. These fields are stored in a linked list of
* {@link FieldWriter} objects, linked to each other by their
- * {@link FieldWriter#fv} field. This field stores the last element of
- * this list.
+ * {@link FieldWriter#fv} field. This field stores the last element of this
+ * list.
*/
FieldWriter lastField;
@@ -463,8 +468,8 @@ public class ClassWriter extends ClassVisitor {
/**
* The methods of this class. These methods are stored in a linked list of
* {@link MethodWriter} objects, linked to each other by their
- * {@link MethodWriter#mv} field. This field stores the last element of
- * this list.
+ * {@link MethodWriter#mv} field. This field stores the last element of this
+ * list.
*/
MethodWriter lastMethod;
@@ -584,8 +589,10 @@ public class ClassWriter extends ClassVisitor {
/**
* Constructs a new {@link ClassWriter} object.
*
- * @param flags option flags that can be used to modify the default behavior
- * of this class. See {@link #COMPUTE_MAXS}, {@link #COMPUTE_FRAMES}.
+ * @param flags
+ * option flags that can be used to modify the default behavior
+ * of this class. See {@link #COMPUTE_MAXS},
+ * {@link #COMPUTE_FRAMES}.
*/
public ClassWriter(final int flags) {
super(Opcodes.ASM4);
@@ -606,26 +613,32 @@ public class ClassWriter extends ClassVisitor {
* "mostly add" bytecode transformations. These optimizations are the
* following:
*
- * <ul> <li>The constant pool from the original class is copied as is in the
- * new class, which saves time. New constant pool entries will be added at
- * the end if necessary, but unused constant pool entries <i>won't be
- * removed</i>.</li> <li>Methods that are not transformed are copied as is
- * in the new class, directly from the original class bytecode (i.e. without
- * emitting visit events for all the method instructions), which saves a
- * <i>lot</i> of time. Untransformed methods are detected by the fact that
- * the {@link ClassReader} receives {@link MethodVisitor} objects that come
- * from a {@link ClassWriter} (and not from any other {@link ClassVisitor}
- * instance).</li> </ul>
+ * <ul>
+ * <li>The constant pool from the original class is copied as is in the new
+ * class, which saves time. New constant pool entries will be added at the
+ * end if necessary, but unused constant pool entries <i>won't be
+ * removed</i>.</li>
+ * <li>Methods that are not transformed are copied as is in the new class,
+ * directly from the original class bytecode (i.e. without emitting visit
+ * events for all the method instructions), which saves a <i>lot</i> of
+ * time. Untransformed methods are detected by the fact that the
+ * {@link ClassReader} receives {@link MethodVisitor} objects that come from
+ * a {@link ClassWriter} (and not from any other {@link ClassVisitor}
+ * instance).</li>
+ * </ul>
*
- * @param classReader the {@link ClassReader} used to read the original
- * class. It will be used to copy the entire constant pool from the
- * original class and also to copy other fragments of original
- * bytecode where applicable.
- * @param flags option flags that can be used to modify the default behavior
- * of this class. <i>These option flags do not affect methods that
- * are copied as is in the new class. This means that the maximum
- * stack size nor the stack frames will be computed for these
- * methods</i>. See {@link #COMPUTE_MAXS}, {@link #COMPUTE_FRAMES}.
+ * @param classReader
+ * the {@link ClassReader} used to read the original class. It
+ * will be used to copy the entire constant pool from the
+ * original class and also to copy other fragments of original
+ * bytecode where applicable.
+ * @param flags
+ * option flags that can be used to modify the default behavior
+ * of this class. <i>These option flags do not affect methods
+ *            that are copied as is in the new class. This means that neither
+ *            the maximum stack size nor the stack frames will be computed for
+ * these methods</i>. See {@link #COMPUTE_MAXS},
+ * {@link #COMPUTE_FRAMES}.
*/
public ClassWriter(final ClassReader classReader, final int flags) {
this(flags);
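The constructor documented above enables the "mostly add" fast path: the writer reuses the reader's constant pool and copies untouched methods verbatim. A sketch of the usual round trip, assuming an identity adapter (a real transformer would override visitMethod or visitField); RoundTrip is an invented name:

    import scala.tools.asm.ClassReader;
    import scala.tools.asm.ClassVisitor;
    import scala.tools.asm.ClassWriter;
    import scala.tools.asm.Opcodes;

    // Typical "mostly add" round trip: the writer reuses the reader's constant pool
    // and copies untransformed methods directly from the original bytecode.
    public class RoundTrip {
        public static byte[] rewrite(byte[] classFile) {
            ClassReader cr = new ClassReader(classFile);
            ClassWriter cw = new ClassWriter(cr, 0);
            // An identity pass; a real adapter would sit between cr and cw here.
            cr.accept(new ClassVisitor(Opcodes.ASM4, cw) { }, 0);
            return cw.toByteArray();
        }
    }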
@@ -638,14 +651,9 @@ public class ClassWriter extends ClassVisitor {
// ------------------------------------------------------------------------
@Override
- public final void visit(
- final int version,
- final int access,
- final String name,
- final String signature,
- final String superName,
- final String[] interfaces)
- {
+ public final void visit(final int version, final int access,
+ final String name, final String signature, final String superName,
+ final String[] interfaces) {
this.version = version;
this.access = access;
this.name = newClass(name);
@@ -674,11 +682,8 @@ public class ClassWriter extends ClassVisitor {
}
@Override
- public final void visitOuterClass(
- final String owner,
- final String name,
- final String desc)
- {
+ public final void visitOuterClass(final String owner, final String name,
+ final String desc) {
enclosingMethodOwner = newClass(owner);
if (name != null && desc != null) {
enclosingMethod = newNameType(name, desc);
@@ -686,10 +691,8 @@ public class ClassWriter extends ClassVisitor {
}
@Override
- public final AnnotationVisitor visitAnnotation(
- final String desc,
- final boolean visible)
- {
+ public final AnnotationVisitor visitAnnotation(final String desc,
+ final boolean visible) {
if (!ClassReader.ANNOTATIONS) {
return null;
}
@@ -714,12 +717,8 @@ public class ClassWriter extends ClassVisitor {
}
@Override
- public final void visitInnerClass(
- final String name,
- final String outerName,
- final String innerName,
- final int access)
- {
+ public final void visitInnerClass(final String name,
+ final String outerName, final String innerName, final int access) {
if (innerClasses == null) {
innerClasses = new ByteVector();
}
@@ -731,32 +730,16 @@ public class ClassWriter extends ClassVisitor {
}
@Override
- public final FieldVisitor visitField(
- final int access,
- final String name,
- final String desc,
- final String signature,
- final Object value)
- {
+ public final FieldVisitor visitField(final int access, final String name,
+ final String desc, final String signature, final Object value) {
return new FieldWriter(this, access, name, desc, signature, value);
}
@Override
- public final MethodVisitor visitMethod(
- final int access,
- final String name,
- final String desc,
- final String signature,
- final String[] exceptions)
- {
- return new MethodWriter(this,
- access,
- name,
- desc,
- signature,
- exceptions,
- computeMaxs,
- computeFrames);
+ public final MethodVisitor visitMethod(final int access, final String name,
+ final String desc, final String signature, final String[] exceptions) {
+ return new MethodWriter(this, access, name, desc, signature,
+ exceptions, computeMaxs, computeFrames);
}
@Override
@@ -773,7 +756,7 @@ public class ClassWriter extends ClassVisitor {
     * @return the bytecode of the class that was built with this class writer.
*/
public byte[] toByteArray() {
- if (index > Short.MAX_VALUE) {
+ if (index > 0xFFFF) {
throw new RuntimeException("Class file too large!");
}
// computes the real size of the bytecode of this class
@@ -793,8 +776,9 @@ public class ClassWriter extends ClassVisitor {
mb = (MethodWriter) mb.mv;
}
int attributeCount = 0;
- if (bootstrapMethods != null) { // we put it as first argument in order
- // to improve a bit ClassReader.copyBootstrapMethods
+ if (bootstrapMethods != null) {
+            // we put it as the first attribute in order to improve
+            // ClassReader.copyBootstrapMethods a bit
++attributeCount;
size += 8 + bootstrapMethods.length;
newUTF8("BootstrapMethods");
@@ -824,12 +808,13 @@ public class ClassWriter extends ClassVisitor {
size += 6;
newUTF8("Deprecated");
}
- if ((access & Opcodes.ACC_SYNTHETIC) != 0
- && ((version & 0xFFFF) < Opcodes.V1_5 || (access & ACC_SYNTHETIC_ATTRIBUTE) != 0))
- {
- ++attributeCount;
- size += 6;
- newUTF8("Synthetic");
+ if ((access & Opcodes.ACC_SYNTHETIC) != 0) {
+ if ((version & 0xFFFF) < Opcodes.V1_5
+ || (access & ACC_SYNTHETIC_ATTRIBUTE) != 0) {
+ ++attributeCount;
+ size += 6;
+ newUTF8("Synthetic");
+ }
}
if (innerClasses != null) {
++attributeCount;
@@ -856,9 +841,8 @@ public class ClassWriter extends ClassVisitor {
ByteVector out = new ByteVector(size);
out.putInt(0xCAFEBABE).putInt(version);
out.putShort(index).putByteArray(pool.data, 0, pool.length);
- int mask = Opcodes.ACC_DEPRECATED
- | ClassWriter.ACC_SYNTHETIC_ATTRIBUTE
- | ((access & ClassWriter.ACC_SYNTHETIC_ATTRIBUTE) / (ClassWriter.ACC_SYNTHETIC_ATTRIBUTE / Opcodes.ACC_SYNTHETIC));
+ int mask = Opcodes.ACC_DEPRECATED | ACC_SYNTHETIC_ATTRIBUTE
+ | ((access & ACC_SYNTHETIC_ATTRIBUTE) / TO_ACC_SYNTHETIC);
out.putShort(access & ~mask).putShort(name).putShort(superName);
out.putShort(interfaceCount);
for (int i = 0; i < interfaceCount; ++i) {
@@ -877,9 +861,10 @@ public class ClassWriter extends ClassVisitor {
mb = (MethodWriter) mb.mv;
}
out.putShort(attributeCount);
- if (bootstrapMethods != null) { // should be the first class attribute ?
+ if (bootstrapMethods != null) {
out.putShort(newUTF8("BootstrapMethods"));
- out.putInt(bootstrapMethods.length + 2).putShort(bootstrapMethodsCount);
+ out.putInt(bootstrapMethods.length + 2).putShort(
+ bootstrapMethodsCount);
out.putByteArray(bootstrapMethods.data, 0, bootstrapMethods.length);
}
if (ClassReader.SIGNATURES && signature != 0) {
@@ -900,10 +885,11 @@ public class ClassWriter extends ClassVisitor {
if ((access & Opcodes.ACC_DEPRECATED) != 0) {
out.putShort(newUTF8("Deprecated")).putInt(0);
}
- if ((access & Opcodes.ACC_SYNTHETIC) != 0
- && ((version & 0xFFFF) < Opcodes.V1_5 || (access & ACC_SYNTHETIC_ATTRIBUTE) != 0))
- {
- out.putShort(newUTF8("Synthetic")).putInt(0);
+ if ((access & Opcodes.ACC_SYNTHETIC) != 0) {
+ if ((version & 0xFFFF) < Opcodes.V1_5
+ || (access & ACC_SYNTHETIC_ATTRIBUTE) != 0) {
+ out.putShort(newUTF8("Synthetic")).putInt(0);
+ }
}
if (innerClasses != null) {
out.putShort(newUTF8("InnerClasses"));
@@ -937,10 +923,11 @@ public class ClassWriter extends ClassVisitor {
* Adds a number or string constant to the constant pool of the class being
* built. Does nothing if the constant pool already contains a similar item.
*
- * @param cst the value of the constant to be added to the constant pool.
- * This parameter must be an {@link Integer}, a {@link Float}, a
- * {@link Long}, a {@link Double}, a {@link String} or a
- * {@link Type}.
+ * @param cst
+ * the value of the constant to be added to the constant pool.
+ * This parameter must be an {@link Integer}, a {@link Float}, a
+ * {@link Long}, a {@link Double}, a {@link String} or a
+ * {@link Type}.
* @return a new or already existing constant item with the given value.
*/
Item newConstItem(final Object cst) {
@@ -973,12 +960,12 @@ public class ClassWriter extends ClassVisitor {
} else if (cst instanceof Type) {
Type t = (Type) cst;
int s = t.getSort();
- if (s == Type.ARRAY) {
- return newClassItem(t.getDescriptor());
- } else if (s == Type.OBJECT) {
+ if (s == Type.OBJECT) {
return newClassItem(t.getInternalName());
- } else { // s == Type.METHOD
+ } else if (s == Type.METHOD) {
return newMethodTypeItem(t.getDescriptor());
+ } else { // s == primitive type or array
+ return newClassItem(t.getDescriptor());
}
} else if (cst instanceof Handle) {
Handle h = (Handle) cst;
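After this reordering, OBJECT sorts are pooled by internal name, METHOD sorts become CONSTANT_MethodType entries, and every other sort (primitives as well as arrays) is pooled by its descriptor. A small sketch through the public newConst API; the concrete types are chosen purely for illustration:

import scala.tools.asm.ClassWriter;
import scala.tools.asm.Type;

final class NewConstExample {
    static void demo() {
        ClassWriter cw = new ClassWriter(0);
        cw.newConst(Type.getObjectType("java/lang/String")); // OBJECT -> CONSTANT_Class("java/lang/String")
        cw.newConst(Type.getType("[I"));                     // array  -> CONSTANT_Class("[I"), i.e. the descriptor
        cw.newConst(Type.getMethodType("(I)V"));             // METHOD -> CONSTANT_MethodType("(I)V")
    }
}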
@@ -994,9 +981,10 @@ public class ClassWriter extends ClassVisitor {
* <i>This method is intended for {@link Attribute} sub classes, and is
* normally not needed by class generators or adapters.</i>
*
- * @param cst the value of the constant to be added to the constant pool.
- * This parameter must be an {@link Integer}, a {@link Float}, a
- * {@link Long}, a {@link Double} or a {@link String}.
+ * @param cst
+ * the value of the constant to be added to the constant pool.
+ * This parameter must be an {@link Integer}, a {@link Float}, a
+ * {@link Long}, a {@link Double} or a {@link String}.
* @return the index of a new or already existing constant item with the
* given value.
*/
@@ -1010,7 +998,8 @@ public class ClassWriter extends ClassVisitor {
* method is intended for {@link Attribute} sub classes, and is normally not
* needed by class generators or adapters.</i>
*
- * @param value the String value.
+ * @param value
+ * the String value.
* @return the index of a new or already existing UTF8 item.
*/
public int newUTF8(final String value) {
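As the Javadoc states, the new* methods are idempotent: adding the same constant twice yields a single pool entry. A minimal check using only the public API shown here:

import scala.tools.asm.ClassWriter;

final class DedupExample {
    static void demo() {
        ClassWriter cw = new ClassWriter(0);
        int first = cw.newUTF8("Hello");
        int second = cw.newUTF8("Hello");
        // The hash table lookup finds the existing CONSTANT_Utf8 item,
        // so both calls return the same index.
        assert first == second;
    }
}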
@@ -1030,7 +1019,8 @@ public class ClassWriter extends ClassVisitor {
* <i>This method is intended for {@link Attribute} sub classes, and is
* normally not needed by class generators or adapters.</i>
*
- * @param value the internal name of the class.
+ * @param value
+ * the internal name of the class.
* @return a new or already existing class reference item.
*/
Item newClassItem(final String value) {
@@ -1050,7 +1040,8 @@ public class ClassWriter extends ClassVisitor {
* <i>This method is intended for {@link Attribute} sub classes, and is
* normally not needed by class generators or adapters.</i>
*
- * @param value the internal name of the class.
+ * @param value
+ * the internal name of the class.
* @return the index of a new or already existing class reference item.
*/
public int newClass(final String value) {
@@ -1063,7 +1054,8 @@ public class ClassWriter extends ClassVisitor {
* <i>This method is intended for {@link Attribute} sub classes, and is
* normally not needed by class generators or adapters.</i>
*
- * @param methodDesc method descriptor of the method type.
+ * @param methodDesc
+ * method descriptor of the method type.
* @return a new or already existing method type reference item.
*/
Item newMethodTypeItem(final String methodDesc) {
@@ -1083,7 +1075,8 @@ public class ClassWriter extends ClassVisitor {
* <i>This method is intended for {@link Attribute} sub classes, and is
* normally not needed by class generators or adapters.</i>
*
- * @param methodDesc method descriptor of the method type.
+ * @param methodDesc
+ * method descriptor of the method type.
* @return the index of a new or already existing method type reference
* item.
*/
@@ -1097,33 +1090,34 @@ public class ClassWriter extends ClassVisitor {
* intended for {@link Attribute} sub classes, and is normally not needed by
* class generators or adapters.</i>
*
- * @param tag the kind of this handle. Must be {@link Opcodes#H_GETFIELD},
- * {@link Opcodes#H_GETSTATIC}, {@link Opcodes#H_PUTFIELD},
- * {@link Opcodes#H_PUTSTATIC}, {@link Opcodes#H_INVOKEVIRTUAL},
- * {@link Opcodes#H_INVOKESTATIC}, {@link Opcodes#H_INVOKESPECIAL},
- * {@link Opcodes#H_NEWINVOKESPECIAL} or
- * {@link Opcodes#H_INVOKEINTERFACE}.
- * @param owner the internal name of the field or method owner class.
- * @param name the name of the field or method.
- * @param desc the descriptor of the field or method.
+ * @param tag
+ * the kind of this handle. Must be {@link Opcodes#H_GETFIELD},
+ * {@link Opcodes#H_GETSTATIC}, {@link Opcodes#H_PUTFIELD},
+ * {@link Opcodes#H_PUTSTATIC}, {@link Opcodes#H_INVOKEVIRTUAL},
+ * {@link Opcodes#H_INVOKESTATIC},
+ * {@link Opcodes#H_INVOKESPECIAL},
+ * {@link Opcodes#H_NEWINVOKESPECIAL} or
+ * {@link Opcodes#H_INVOKEINTERFACE}.
+ * @param owner
+ * the internal name of the field or method owner class.
+ * @param name
+ * the name of the field or method.
+ * @param desc
+ * the descriptor of the field or method.
* @return a new or an already existing method handle reference item.
*/
- Item newHandleItem(
- final int tag,
- final String owner,
- final String name,
- final String desc)
- {
+ Item newHandleItem(final int tag, final String owner, final String name,
+ final String desc) {
key4.set(HANDLE_BASE + tag, owner, name, desc);
Item result = get(key4);
if (result == null) {
if (tag <= Opcodes.H_PUTSTATIC) {
put112(HANDLE, tag, newField(owner, name, desc));
} else {
- put112(HANDLE, tag, newMethod(owner,
- name,
- desc,
- tag == Opcodes.H_INVOKEINTERFACE));
+ put112(HANDLE,
+ tag,
+ newMethod(owner, name, desc,
+ tag == Opcodes.H_INVOKEINTERFACE));
}
result = new Item(index++, key4);
put(result);
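newHandleItem writes a CONSTANT_MethodHandle whose reference is a field entry for the H_GETFIELD..H_PUTSTATIC tags and a method entry otherwise. A usage sketch of the public wrapper, pointing at a JDK method purely for illustration:

import scala.tools.asm.ClassWriter;
import scala.tools.asm.Opcodes;

final class HandleExample {
    static int demo() {
        ClassWriter cw = new ClassWriter(0);
        // CONSTANT_MethodHandle for the static method java.lang.Math.max(int, int).
        return cw.newHandle(Opcodes.H_INVOKESTATIC, "java/lang/Math", "max", "(II)I");
    }
}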
@@ -1132,29 +1126,30 @@ public class ClassWriter extends ClassVisitor {
}
/**
- * Adds a handle to the constant pool of the class being
- * build. Does nothing if the constant pool already contains a similar item.
- * <i>This method is intended for {@link Attribute} sub classes, and is
- * normally not needed by class generators or adapters.</i>
+ * Adds a handle to the constant pool of the class being built. Does nothing
+ * if the constant pool already contains a similar item. <i>This method is
+ * intended for {@link Attribute} sub classes, and is normally not needed by
+ * class generators or adapters.</i>
*
- * @param tag the kind of this handle. Must be {@link Opcodes#H_GETFIELD},
- * {@link Opcodes#H_GETSTATIC}, {@link Opcodes#H_PUTFIELD},
- * {@link Opcodes#H_PUTSTATIC}, {@link Opcodes#H_INVOKEVIRTUAL},
- * {@link Opcodes#H_INVOKESTATIC}, {@link Opcodes#H_INVOKESPECIAL},
- * {@link Opcodes#H_NEWINVOKESPECIAL} or
- * {@link Opcodes#H_INVOKEINTERFACE}.
- * @param owner the internal name of the field or method owner class.
- * @param name the name of the field or method.
- * @param desc the descriptor of the field or method.
+ * @param tag
+ * the kind of this handle. Must be {@link Opcodes#H_GETFIELD},
+ * {@link Opcodes#H_GETSTATIC}, {@link Opcodes#H_PUTFIELD},
+ * {@link Opcodes#H_PUTSTATIC}, {@link Opcodes#H_INVOKEVIRTUAL},
+ * {@link Opcodes#H_INVOKESTATIC},
+ * {@link Opcodes#H_INVOKESPECIAL},
+ * {@link Opcodes#H_NEWINVOKESPECIAL} or
+ * {@link Opcodes#H_INVOKEINTERFACE}.
+ * @param owner
+ * the internal name of the field or method owner class.
+ * @param name
+ * the name of the field or method.
+ * @param desc
+ * the descriptor of the field or method.
* @return the index of a new or already existing method handle reference
* item.
*/
- public int newHandle(
- final int tag,
- final String owner,
- final String name,
- final String desc)
- {
+ public int newHandle(final int tag, final String owner, final String name,
+ final String desc) {
return newHandleItem(tag, owner, name, desc).index;
}
@@ -1164,19 +1159,19 @@ public class ClassWriter extends ClassVisitor {
* <i>This method is intended for {@link Attribute} sub classes, and is
* normally not needed by class generators or adapters.</i>
*
- * @param name name of the invoked method.
- * @param desc descriptor of the invoke method.
- * @param bsm the bootstrap method.
- * @param bsmArgs the bootstrap method constant arguments.
+ * @param name
+ * name of the invoked method.
+ * @param desc
+ * descriptor of the invoked method.
+ * @param bsm
+ * the bootstrap method.
+ * @param bsmArgs
+ * the bootstrap method constant arguments.
*
* @return a new or an already existing invokedynamic type reference item.
*/
- Item newInvokeDynamicItem(
- final String name,
- final String desc,
- final Handle bsm,
- final Object... bsmArgs)
- {
+ Item newInvokeDynamicItem(final String name, final String desc,
+ final Handle bsm, final Object... bsmArgs) {
// cache for performance
ByteVector bootstrapMethods = this.bootstrapMethods;
if (bootstrapMethods == null) {
@@ -1186,9 +1181,7 @@ public class ClassWriter extends ClassVisitor {
int position = bootstrapMethods.length; // record current position
int hashCode = bsm.hashCode();
- bootstrapMethods.putShort(newHandle(bsm.tag,
- bsm.owner,
- bsm.name,
+ bootstrapMethods.putShort(newHandle(bsm.tag, bsm.owner, bsm.name,
bsm.desc));
int argsLength = bsmArgs.length;
@@ -1250,20 +1243,20 @@ public class ClassWriter extends ClassVisitor {
* <i>This method is intended for {@link Attribute} sub classes, and is
* normally not needed by class generators or adapters.</i>
*
- * @param name name of the invoked method.
- * @param desc descriptor of the invoke method.
- * @param bsm the bootstrap method.
- * @param bsmArgs the bootstrap method constant arguments.
+ * @param name
+ * name of the invoked method.
+ * @param desc
+ * descriptor of the invoked method.
+ * @param bsm
+ * the bootstrap method.
+ * @param bsmArgs
+ * the bootstrap method constant arguments.
*
- * @return the index of a new or already existing invokedynamic
- * reference item.
- */
- public int newInvokeDynamic(
- final String name,
- final String desc,
- final Handle bsm,
- final Object... bsmArgs)
- {
+ * @return the index of a new or already existing invokedynamic reference
+ * item.
+ */
+ public int newInvokeDynamic(final String name, final String desc,
+ final Handle bsm, final Object... bsmArgs) {
return newInvokeDynamicItem(name, desc, bsm, bsmArgs).index;
}
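newInvokeDynamicItem both registers the bootstrap method (handle plus constant arguments) in the BootstrapMethods table, reusing an identical row if one exists, and adds a CONSTANT_InvokeDynamic entry pointing at it. A sketch through the public wrapper; demo/Bootstraps is a hypothetical bootstrap class used only to show the call shape:

import scala.tools.asm.ClassWriter;
import scala.tools.asm.Handle;
import scala.tools.asm.Opcodes;

final class IndyExample {
    static int demo() {
        ClassWriter cw = new ClassWriter(0);
        Handle bsm = new Handle(Opcodes.H_INVOKESTATIC, "demo/Bootstraps", "bootstrap",
                "(Ljava/lang/invoke/MethodHandles$Lookup;Ljava/lang/String;"
                        + "Ljava/lang/invoke/MethodType;)Ljava/lang/invoke/CallSite;");
        // One BootstrapMethods row for bsm (deduplicated), one CONSTANT_InvokeDynamic entry.
        return cw.newInvokeDynamic("greet", "()Ljava/lang/Runnable;", bsm);
    }
}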
@@ -1271,13 +1264,15 @@ public class ClassWriter extends ClassVisitor {
* Adds a field reference to the constant pool of the class being built.
* Does nothing if the constant pool already contains a similar item.
*
- * @param owner the internal name of the field's owner class.
- * @param name the field's name.
- * @param desc the field's descriptor.
+ * @param owner
+ * the internal name of the field's owner class.
+ * @param name
+ * the field's name.
+ * @param desc
+ * the field's descriptor.
* @return a new or already existing field reference item.
*/
- Item newFieldItem(final String owner, final String name, final String desc)
- {
+ Item newFieldItem(final String owner, final String name, final String desc) {
key3.set(FIELD, owner, name, desc);
Item result = get(key3);
if (result == null) {
@@ -1294,13 +1289,15 @@ public class ClassWriter extends ClassVisitor {
* <i>This method is intended for {@link Attribute} sub classes, and is
* normally not needed by class generators or adapters.</i>
*
- * @param owner the internal name of the field's owner class.
- * @param name the field's name.
- * @param desc the field's descriptor.
+ * @param owner
+ * the internal name of the field's owner class.
+ * @param name
+ * the field's name.
+ * @param desc
+ * the field's descriptor.
* @return the index of a new or already existing field reference item.
*/
- public int newField(final String owner, final String name, final String desc)
- {
+ public int newField(final String owner, final String name, final String desc) {
return newFieldItem(owner, name, desc).index;
}
@@ -1308,18 +1305,18 @@ public class ClassWriter extends ClassVisitor {
* Adds a method reference to the constant pool of the class being built.
* Does nothing if the constant pool already contains a similar item.
*
- * @param owner the internal name of the method's owner class.
- * @param name the method's name.
- * @param desc the method's descriptor.
- * @param itf <tt>true</tt> if <tt>owner</tt> is an interface.
+ * @param owner
+ * the internal name of the method's owner class.
+ * @param name
+ * the method's name.
+ * @param desc
+ * the method's descriptor.
+ * @param itf
+ * <tt>true</tt> if <tt>owner</tt> is an interface.
* @return a new or already existing method reference item.
*/
- Item newMethodItem(
- final String owner,
- final String name,
- final String desc,
- final boolean itf)
- {
+ Item newMethodItem(final String owner, final String name,
+ final String desc, final boolean itf) {
int type = itf ? IMETH : METH;
key3.set(type, owner, name, desc);
Item result = get(key3);
@@ -1337,18 +1334,18 @@ public class ClassWriter extends ClassVisitor {
* <i>This method is intended for {@link Attribute} sub classes, and is
* normally not needed by class generators or adapters.</i>
*
- * @param owner the internal name of the method's owner class.
- * @param name the method's name.
- * @param desc the method's descriptor.
- * @param itf <tt>true</tt> if <tt>owner</tt> is an interface.
+ * @param owner
+ * the internal name of the method's owner class.
+ * @param name
+ * the method's name.
+ * @param desc
+ * the method's descriptor.
+ * @param itf
+ * <tt>true</tt> if <tt>owner</tt> is an interface.
* @return the index of a new or already existing method reference item.
*/
- public int newMethod(
- final String owner,
- final String name,
- final String desc,
- final boolean itf)
- {
+ public int newMethod(final String owner, final String name,
+ final String desc, final boolean itf) {
return newMethodItem(owner, name, desc, itf).index;
}
@@ -1356,7 +1353,8 @@ public class ClassWriter extends ClassVisitor {
* Adds an integer to the constant pool of the class being built. Does
* nothing if the constant pool already contains a similar item.
*
- * @param value the int value.
+ * @param value
+ * the int value.
* @return a new or already existing int item.
*/
Item newInteger(final int value) {
@@ -1374,7 +1372,8 @@ public class ClassWriter extends ClassVisitor {
* Adds a float to the constant pool of the class being built. Does nothing
* if the constant pool already contains a similar item.
*
- * @param value the float value.
+ * @param value
+ * the float value.
* @return a new or already existing float item.
*/
Item newFloat(final float value) {
@@ -1392,7 +1391,8 @@ public class ClassWriter extends ClassVisitor {
* Adds a long to the constant pool of the class being built. Does nothing
* if the constant pool already contains a similar item.
*
- * @param value the long value.
+ * @param value
+ * the long value.
* @return a new or already existing long item.
*/
Item newLong(final long value) {
@@ -1411,7 +1411,8 @@ public class ClassWriter extends ClassVisitor {
* Adds a double to the constant pool of the class being built. Does nothing
* if the constant pool already contains a similar item.
*
- * @param value the double value.
+ * @param value
+ * the double value.
* @return a new or already existing double item.
*/
Item newDouble(final double value) {
@@ -1430,7 +1431,8 @@ public class ClassWriter extends ClassVisitor {
* Adds a string to the constant pool of the class being built. Does nothing
* if the constant pool already contains a similar item.
*
- * @param value the String value.
+ * @param value
+ * the String value.
* @return a new or already existing string item.
*/
private Item newString(final String value) {
@@ -1450,8 +1452,10 @@ public class ClassWriter extends ClassVisitor {
* method is intended for {@link Attribute} sub classes, and is normally not
* needed by class generators or adapters.</i>
*
- * @param name a name.
- * @param desc a type descriptor.
+ * @param name
+ * a name.
+ * @param desc
+ * a type descriptor.
* @return the index of a new or already existing name and type item.
*/
public int newNameType(final String name, final String desc) {
@@ -1462,8 +1466,10 @@ public class ClassWriter extends ClassVisitor {
* Adds a name and type to the constant pool of the class being built. Does
* nothing if the constant pool already contains a similar item.
*
- * @param name a name.
- * @param desc a type descriptor.
+ * @param name
+ * a name.
+ * @param desc
+ * a type descriptor.
* @return a new or already existing name and type item.
*/
Item newNameTypeItem(final String name, final String desc) {
@@ -1481,7 +1487,8 @@ public class ClassWriter extends ClassVisitor {
* Adds the given internal name to {@link #typeTable} and returns its index.
* Does nothing if the type table already contains this internal name.
*
- * @param type the internal name to be added to the type table.
+ * @param type
+ * the internal name to be added to the type table.
* @return the index of this internal name in the type table.
*/
int addType(final String type) {
@@ -1498,9 +1505,11 @@ public class ClassWriter extends ClassVisitor {
* index. This method is used for UNINITIALIZED types, made of an internal
* name and a bytecode offset.
*
- * @param type the internal name to be added to the type table.
- * @param offset the bytecode offset of the NEW instruction that created
- * this UNINITIALIZED type value.
+ * @param type
+ * the internal name to be added to the type table.
+ * @param offset
+ * the bytecode offset of the NEW instruction that created this
+ * UNINITIALIZED type value.
* @return the index of this internal name in the type table.
*/
int addUninitializedType(final String type, final int offset) {
@@ -1518,7 +1527,8 @@ public class ClassWriter extends ClassVisitor {
/**
* Adds the given Item to {@link #typeTable}.
*
- * @param item the value to be added to the type table.
+ * @param item
+ * the value to be added to the type table.
* @return the added Item, which is a new Item instance with the same value as
* the given Item.
*/
@@ -1544,8 +1554,10 @@ public class ClassWriter extends ClassVisitor {
* {@link #items} hash table to speedup future calls with the same
* parameters.
*
- * @param type1 index of an internal name in {@link #typeTable}.
- * @param type2 index of an internal name in {@link #typeTable}.
+ * @param type1
+ * index of an internal name in {@link #typeTable}.
+ * @param type2
+ * index of an internal name in {@link #typeTable}.
* @return the index of the common super type of the two given types.
*/
int getMergedType(final int type1, final int type2) {
@@ -1572,13 +1584,14 @@ public class ClassWriter extends ClassVisitor {
* that is currently being generated by this ClassWriter, which can of
* course not be loaded since it is under construction.
*
- * @param type1 the internal name of a class.
- * @param type2 the internal name of another class.
+ * @param type1
+ * the internal name of a class.
+ * @param type2
+ * the internal name of another class.
* @return the internal name of the common super class of the two given
* classes.
*/
- protected String getCommonSuperClass(final String type1, final String type2)
- {
+ protected String getCommonSuperClass(final String type1, final String type2) {
Class<?> c, d;
ClassLoader classLoader = getClass().getClassLoader();
try {
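getCommonSuperClass is the hook COMPUTE_FRAMES uses to merge two reference types, and the default implementation shown here loads both classes through the writer's own ClassLoader, which is often unwanted (the types may not be loadable yet, or loading them may have side effects). A common customization, sketched below, is to override it; returning java/lang/Object avoids class loading but yields coarser frames, so it is only appropriate when merge points never need a more specific common supertype:

import scala.tools.asm.ClassWriter;

// Sketch of the usual override pattern; whether the coarse answer is acceptable
// depends on the bytecode being generated.
class NoLoadClassWriter extends ClassWriter {
    NoLoadClassWriter(int flags) {
        super(flags);
    }

    @Override
    protected String getCommonSuperClass(String type1, String type2) {
        return "java/lang/Object";
    }
}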
@@ -1607,7 +1620,8 @@ public class ClassWriter extends ClassVisitor {
* Returns the constant pool's hash table item which is equal to the given
* item.
*
- * @param key a constant pool item.
+ * @param key
+ * a constant pool item.
* @return the constant pool's hash table item which is equal to the given
* item, or <tt>null</tt> if there is no such item.
*/
@@ -1623,7 +1637,8 @@ public class ClassWriter extends ClassVisitor {
* Puts the given item in the constant pool's hash table. The hash table
* <i>must</i> not already contain this item.
*
- * @param i the item to be added to the constant pool's hash table.
+ * @param i
+ * the item to be added to the constant pool's hash table.
*/
private void put(final Item i) {
if (index + typeCount > threshold) {
@@ -1651,9 +1666,12 @@ public class ClassWriter extends ClassVisitor {
/**
* Puts one byte and two shorts into the constant pool.
*
- * @param b a byte.
- * @param s1 a short.
- * @param s2 another short.
+ * @param b
+ * a byte.
+ * @param s1
+ * a short.
+ * @param s2
+ * another short.
*/
private void put122(final int b, final int s1, final int s2) {
pool.put12(b, s1).putShort(s2);
@@ -1662,9 +1680,12 @@ public class ClassWriter extends ClassVisitor {
/**
* Puts two bytes and one short into the constant pool.
*
- * @param b1 a byte.
- * @param b2 another byte.
- * @param s a short.
+ * @param b1
+ * a byte.
+ * @param b2
+ * another byte.
+ * @param s
+ * a short.
*/
private void put112(final int b1, final int b2, final int s) {
pool.put11(b1, b2).putShort(s);
diff --git a/src/asm/scala/tools/asm/Context.java b/src/asm/scala/tools/asm/Context.java
new file mode 100644
index 0000000000..7b3a2ad9dd
--- /dev/null
+++ b/src/asm/scala/tools/asm/Context.java
@@ -0,0 +1,110 @@
+/***
+ * ASM: a very small and fast Java bytecode manipulation framework
+ * Copyright (c) 2000-2011 INRIA, France Telecom
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of the copyright holders nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package scala.tools.asm;
+
+/**
+ * Information about a class being parsed in a {@link ClassReader}.
+ *
+ * @author Eric Bruneton
+ */
+class Context {
+
+ /**
+ * Prototypes of the attributes that must be parsed for this class.
+ */
+ Attribute[] attrs;
+
+ /**
+ * The {@link ClassReader} option flags for the parsing of this class.
+ */
+ int flags;
+
+ /**
+ * The buffer used to read strings.
+ */
+ char[] buffer;
+
+ /**
+ * The start index of each bootstrap method.
+ */
+ int[] bootstrapMethods;
+
+ /**
+ * The access flags of the method currently being parsed.
+ */
+ int access;
+
+ /**
+ * The name of the method currently being parsed.
+ */
+ String name;
+
+ /**
+ * The descriptor of the method currently being parsed.
+ */
+ String desc;
+
+ /**
+ * The offset of the latest stack map frame that has been parsed.
+ */
+ int offset;
+
+ /**
+ * The encoding of the latest stack map frame that has been parsed.
+ */
+ int mode;
+
+ /**
+ * The number of locals in the latest stack map frame that has been parsed.
+ */
+ int localCount;
+
+ /**
+ * The number of locals in the latest stack map frame that has been parsed,
+ * minus the number of locals in the previous frame.
+ */
+ int localDiff;
+
+ /**
+ * The local values of the latest stack map frame that has been parsed.
+ */
+ Object[] local;
+
+ /**
+ * The stack size of the latest stack map frame that has been parsed.
+ */
+ int stackCount;
+
+ /**
+ * The stack values of the latest stack map frame that has been parsed.
+ */
+ Object[] stack;
+}
diff --git a/src/asm/scala/tools/asm/FieldVisitor.java b/src/asm/scala/tools/asm/FieldVisitor.java
index 9ac0f6236f..9171f331e5 100644
--- a/src/asm/scala/tools/asm/FieldVisitor.java
+++ b/src/asm/scala/tools/asm/FieldVisitor.java
@@ -30,9 +30,9 @@
package scala.tools.asm;
/**
- * A visitor to visit a Java field. The methods of this class must be called
- * in the following order: ( <tt>visitAnnotation</tt> |
- * <tt>visitAttribute</tt> )* <tt>visitEnd</tt>.
+ * A visitor to visit a Java field. The methods of this class must be called in
+ * the following order: ( <tt>visitAnnotation</tt> | <tt>visitAttribute</tt> )*
+ * <tt>visitEnd</tt>.
*
* @author Eric Bruneton
*/
@@ -53,8 +53,9 @@ public abstract class FieldVisitor {
/**
* Constructs a new {@link FieldVisitor}.
*
- * @param api the ASM API version implemented by this visitor. Must be one
- * of {@link Opcodes#ASM4}.
+ * @param api
+ * the ASM API version implemented by this visitor. Must be one
+ * of {@link Opcodes#ASM4}.
*/
public FieldVisitor(final int api) {
this(api, null);
@@ -63,15 +64,17 @@ public abstract class FieldVisitor {
/**
* Constructs a new {@link FieldVisitor}.
*
- * @param api the ASM API version implemented by this visitor. Must be one
- * of {@link Opcodes#ASM4}.
- * @param fv the field visitor to which this visitor must delegate method
- * calls. May be null.
+ * @param api
+ * the ASM API version implemented by this visitor. Must be one
+ * of {@link Opcodes#ASM4}.
+ * @param fv
+ * the field visitor to which this visitor must delegate method
+ * calls. May be null.
*/
public FieldVisitor(final int api, final FieldVisitor fv) {
- /*if (api != Opcodes.ASM4) {
+ if (api != Opcodes.ASM4) {
throw new IllegalArgumentException();
- }*/
+ }
this.api = api;
this.fv = fv;
}
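With the previously commented-out check restored, a FieldVisitor now rejects any api value other than Opcodes.ASM4 at construction time. A minimal subclass that passes the check:

import scala.tools.asm.FieldVisitor;
import scala.tools.asm.Opcodes;

final class MyFieldVisitor extends FieldVisitor {
    MyFieldVisitor(FieldVisitor delegate) {
        // Any other api value would now throw IllegalArgumentException.
        super(Opcodes.ASM4, delegate);
    }
}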
@@ -79,8 +82,10 @@ public abstract class FieldVisitor {
/**
* Visits an annotation of the field.
*
- * @param desc the class descriptor of the annotation class.
- * @param visible <tt>true</tt> if the annotation is visible at runtime.
+ * @param desc
+ * the class descriptor of the annotation class.
+ * @param visible
+ * <tt>true</tt> if the annotation is visible at runtime.
* @return a visitor to visit the annotation values, or <tt>null</tt> if
* this visitor is not interested in visiting this annotation.
*/
@@ -94,7 +99,8 @@ public abstract class FieldVisitor {
/**
* Visits a non standard attribute of the field.
*
- * @param attr an attribute.
+ * @param attr
+ * an attribute.
*/
public void visitAttribute(Attribute attr) {
if (fv != null) {
diff --git a/src/asm/scala/tools/asm/FieldWriter.java b/src/asm/scala/tools/asm/FieldWriter.java
index 45ef6d0df3..02c6059b91 100644
--- a/src/asm/scala/tools/asm/FieldWriter.java
+++ b/src/asm/scala/tools/asm/FieldWriter.java
@@ -92,21 +92,21 @@ final class FieldWriter extends FieldVisitor {
/**
* Constructs a new {@link FieldWriter}.
*
- * @param cw the class writer to which this field must be added.
- * @param access the field's access flags (see {@link Opcodes}).
- * @param name the field's name.
- * @param desc the field's descriptor (see {@link Type}).
- * @param signature the field's signature. May be <tt>null</tt>.
- * @param value the field's constant value. May be <tt>null</tt>.
+ * @param cw
+ * the class writer to which this field must be added.
+ * @param access
+ * the field's access flags (see {@link Opcodes}).
+ * @param name
+ * the field's name.
+ * @param desc
+ * the field's descriptor (see {@link Type}).
+ * @param signature
+ * the field's signature. May be <tt>null</tt>.
+ * @param value
+ * the field's constant value. May be <tt>null</tt>.
*/
- FieldWriter(
- final ClassWriter cw,
- final int access,
- final String name,
- final String desc,
- final String signature,
- final Object value)
- {
+ FieldWriter(final ClassWriter cw, final int access, final String name,
+ final String desc, final String signature, final Object value) {
super(Opcodes.ASM4);
if (cw.firstField == null) {
cw.firstField = this;
@@ -131,10 +131,8 @@ final class FieldWriter extends FieldVisitor {
// ------------------------------------------------------------------------
@Override
- public AnnotationVisitor visitAnnotation(
- final String desc,
- final boolean visible)
- {
+ public AnnotationVisitor visitAnnotation(final String desc,
+ final boolean visible) {
if (!ClassReader.ANNOTATIONS) {
return null;
}
@@ -177,11 +175,12 @@ final class FieldWriter extends FieldVisitor {
cw.newUTF8("ConstantValue");
size += 8;
}
- if ((access & Opcodes.ACC_SYNTHETIC) != 0
- && ((cw.version & 0xFFFF) < Opcodes.V1_5 || (access & ClassWriter.ACC_SYNTHETIC_ATTRIBUTE) != 0))
- {
- cw.newUTF8("Synthetic");
- size += 6;
+ if ((access & Opcodes.ACC_SYNTHETIC) != 0) {
+ if ((cw.version & 0xFFFF) < Opcodes.V1_5
+ || (access & ClassWriter.ACC_SYNTHETIC_ATTRIBUTE) != 0) {
+ cw.newUTF8("Synthetic");
+ size += 6;
+ }
}
if ((access & Opcodes.ACC_DEPRECATED) != 0) {
cw.newUTF8("Deprecated");
@@ -208,21 +207,23 @@ final class FieldWriter extends FieldVisitor {
/**
* Puts the content of this field into the given byte vector.
*
- * @param out where the content of this field must be put.
+ * @param out
+ * where the content of this field must be put.
*/
void put(final ByteVector out) {
- int mask = Opcodes.ACC_DEPRECATED
- | ClassWriter.ACC_SYNTHETIC_ATTRIBUTE
- | ((access & ClassWriter.ACC_SYNTHETIC_ATTRIBUTE) / (ClassWriter.ACC_SYNTHETIC_ATTRIBUTE / Opcodes.ACC_SYNTHETIC));
+ final int FACTOR = ClassWriter.TO_ACC_SYNTHETIC;
+ int mask = Opcodes.ACC_DEPRECATED | ClassWriter.ACC_SYNTHETIC_ATTRIBUTE
+ | ((access & ClassWriter.ACC_SYNTHETIC_ATTRIBUTE) / FACTOR);
out.putShort(access & ~mask).putShort(name).putShort(desc);
int attributeCount = 0;
if (value != 0) {
++attributeCount;
}
- if ((access & Opcodes.ACC_SYNTHETIC) != 0
- && ((cw.version & 0xFFFF) < Opcodes.V1_5 || (access & ClassWriter.ACC_SYNTHETIC_ATTRIBUTE) != 0))
- {
- ++attributeCount;
+ if ((access & Opcodes.ACC_SYNTHETIC) != 0) {
+ if ((cw.version & 0xFFFF) < Opcodes.V1_5
+ || (access & ClassWriter.ACC_SYNTHETIC_ATTRIBUTE) != 0) {
+ ++attributeCount;
+ }
}
if ((access & Opcodes.ACC_DEPRECATED) != 0) {
++attributeCount;
@@ -244,10 +245,11 @@ final class FieldWriter extends FieldVisitor {
out.putShort(cw.newUTF8("ConstantValue"));
out.putInt(2).putShort(value);
}
- if ((access & Opcodes.ACC_SYNTHETIC) != 0
- && ((cw.version & 0xFFFF) < Opcodes.V1_5 || (access & ClassWriter.ACC_SYNTHETIC_ATTRIBUTE) != 0))
- {
- out.putShort(cw.newUTF8("Synthetic")).putInt(0);
+ if ((access & Opcodes.ACC_SYNTHETIC) != 0) {
+ if ((cw.version & 0xFFFF) < Opcodes.V1_5
+ || (access & ClassWriter.ACC_SYNTHETIC_ATTRIBUTE) != 0) {
+ out.putShort(cw.newUTF8("Synthetic")).putInt(0);
+ }
}
if ((access & Opcodes.ACC_DEPRECATED) != 0) {
out.putShort(cw.newUTF8("Deprecated")).putInt(0);
diff --git a/src/asm/scala/tools/asm/Frame.java b/src/asm/scala/tools/asm/Frame.java
index 387b56796d..bcc3e8450b 100644
--- a/src/asm/scala/tools/asm/Frame.java
+++ b/src/asm/scala/tools/asm/Frame.java
@@ -80,13 +80,13 @@ final class Frame {
* table contains only internal type names (array type descriptors are
* forbidden - dimensions must be represented through the DIM field).
*
- * The LONG and DOUBLE types are always represented by using two slots (LONG +
- * TOP or DOUBLE + TOP), for local variable types as well as in the operand
- * stack. This is necessary to be able to simulate DUPx_y instructions,
- * whose effect would be dependent on the actual type values if types were
- * always represented by a single slot in the stack (and this is not
- * possible, since actual type values are not always known - cf LOCAL and
- * STACK type kinds).
+ * The LONG and DOUBLE types are always represented by using two slots (LONG
+ * + TOP or DOUBLE + TOP), for local variable types as well as in the
+ * operand stack. This is necessary to be able to simulate DUPx_y
+ * instructions, whose effect would be dependent on the actual type values
+ * if types were always represented by a single slot in the stack (and this
+ * is not possible, since actual type values are not always known - cf LOCAL
+ * and STACK type kinds).
*/
/**
@@ -117,9 +117,9 @@ final class Frame {
/**
* Flag used for LOCAL and STACK types. Indicates that if this type happens
* to be a long or double type (during the computations of input frames),
- * then it must be set to TOP because the second word of this value has
- * been reused to store other data in the basic block. Hence the first word
- * no longer stores a valid long or double value.
+ * then it must be set to TOP because the second word of this value has been
+ * reused to store other data in the basic block. Hence the first word no
+ * longer stores a valid long or double value.
*/
static final int TOP_IF_LONG_OR_DOUBLE = 0x800000;
@@ -523,7 +523,8 @@ final class Frame {
/**
* Returns the output frame local variable type at the given index.
*
- * @param local the index of the local that must be returned.
+ * @param local
+ * the index of the local that must be returned.
* @return the output frame local variable type at the given index.
*/
private int get(final int local) {
@@ -545,8 +546,10 @@ final class Frame {
/**
* Sets the output frame local variable type at the given index.
*
- * @param local the index of the local that must be set.
- * @param type the value of the local that must be set.
+ * @param local
+ * the index of the local that must be set.
+ * @param type
+ * the value of the local that must be set.
*/
private void set(final int local, final int type) {
// creates and/or resizes the output local variables array if necessary
@@ -566,7 +569,8 @@ final class Frame {
/**
* Pushes a new type onto the output frame stack.
*
- * @param type the type that must be pushed.
+ * @param type
+ * the type that must be pushed.
*/
private void push(final int type) {
// creates and/or resizes the output stack array if necessary
@@ -591,10 +595,12 @@ final class Frame {
/**
* Pushes a new type onto the output frame stack.
*
- * @param cw the ClassWriter to which this label belongs.
- * @param desc the descriptor of the type to be pushed. Can also be a method
- * descriptor (in this case this method pushes its return type onto
- * the output frame stack).
+ * @param cw
+ * the ClassWriter to which this label belongs.
+ * @param desc
+ * the descriptor of the type to be pushed. Can also be a method
+ * descriptor (in this case this method pushes its return type
+ * onto the output frame stack).
*/
private void push(final ClassWriter cw, final String desc) {
int type = type(cw, desc);
@@ -609,72 +615,74 @@ final class Frame {
/**
* Returns the int encoding of the given type.
*
- * @param cw the ClassWriter to which this label belongs.
- * @param desc a type descriptor.
+ * @param cw
+ * the ClassWriter to which this label belongs.
+ * @param desc
+ * a type descriptor.
* @return the int encoding of the given type.
*/
private static int type(final ClassWriter cw, final String desc) {
String t;
int index = desc.charAt(0) == '(' ? desc.indexOf(')') + 1 : 0;
switch (desc.charAt(index)) {
- case 'V':
- return 0;
+ case 'V':
+ return 0;
+ case 'Z':
+ case 'C':
+ case 'B':
+ case 'S':
+ case 'I':
+ return INTEGER;
+ case 'F':
+ return FLOAT;
+ case 'J':
+ return LONG;
+ case 'D':
+ return DOUBLE;
+ case 'L':
+ // stores the internal name, not the descriptor!
+ t = desc.substring(index + 1, desc.length() - 1);
+ return OBJECT | cw.addType(t);
+ // case '[':
+ default:
+ // extracts the dimensions and the element type
+ int data;
+ int dims = index + 1;
+ while (desc.charAt(dims) == '[') {
+ ++dims;
+ }
+ switch (desc.charAt(dims)) {
case 'Z':
+ data = BOOLEAN;
+ break;
case 'C':
+ data = CHAR;
+ break;
case 'B':
+ data = BYTE;
+ break;
case 'S':
+ data = SHORT;
+ break;
case 'I':
- return INTEGER;
+ data = INTEGER;
+ break;
case 'F':
- return FLOAT;
+ data = FLOAT;
+ break;
case 'J':
- return LONG;
+ data = LONG;
+ break;
case 'D':
- return DOUBLE;
- case 'L':
- // stores the internal name, not the descriptor!
- t = desc.substring(index + 1, desc.length() - 1);
- return OBJECT | cw.addType(t);
- // case '[':
+ data = DOUBLE;
+ break;
+ // case 'L':
default:
- // extracts the dimensions and the element type
- int data;
- int dims = index + 1;
- while (desc.charAt(dims) == '[') {
- ++dims;
- }
- switch (desc.charAt(dims)) {
- case 'Z':
- data = BOOLEAN;
- break;
- case 'C':
- data = CHAR;
- break;
- case 'B':
- data = BYTE;
- break;
- case 'S':
- data = SHORT;
- break;
- case 'I':
- data = INTEGER;
- break;
- case 'F':
- data = FLOAT;
- break;
- case 'J':
- data = LONG;
- break;
- case 'D':
- data = DOUBLE;
- break;
- // case 'L':
- default:
- // stores the internal name, not the descriptor
- t = desc.substring(dims + 1, desc.length() - 1);
- data = OBJECT | cw.addType(t);
- }
- return (dims - index) << 28 | data;
+ // stores the internal name, not the descriptor
+ t = desc.substring(dims + 1, desc.length() - 1);
+ data = OBJECT | cw.addType(t);
+ }
+ return (dims - index) << 28 | data;
}
}
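The re-indented switch still encodes a descriptor as a single int: the element kind goes in the low bits and, for arrays, the number of leading '[' characters is packed into the top four bits by the << 28 at the end. A worked example under that reading; the INTEGER constant is internal to Frame, so an illustrative stand-in value is used:

// Worked example for the field descriptor "[[I": two '[' characters, element type int.
final class TypeEncodingExample {
    static int encode() {
        final int INTEGER = 1;       // assumed stand-in for Frame.INTEGER (internal constant)
        int dimensions = 2;          // "[[I" -> dims - index == 2 in type(...)
        return (dimensions << 28) | INTEGER; // dimension count in bits 28-31, element type below
    }
}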
@@ -695,7 +703,8 @@ final class Frame {
/**
* Pops the given number of types from the output frame stack.
*
- * @param elements the number of types that must be popped.
+ * @param elements
+ * the number of types that must be popped.
*/
private void pop(final int elements) {
if (outputStackTop >= elements) {
@@ -712,9 +721,10 @@ final class Frame {
/**
* Pops a type from the output frame stack.
*
- * @param desc the descriptor of the type to be popped. Can also be a method
- * descriptor (in this case this method pops the types corresponding
- * to the method arguments).
+ * @param desc
+ * the descriptor of the type to be popped. Can also be a method
+ * descriptor (in this case this method pops the types
+ * corresponding to the method arguments).
*/
private void pop(final String desc) {
char c = desc.charAt(0);
@@ -731,7 +741,8 @@ final class Frame {
* Adds a new type to the list of types on which a constructor is invoked in
* the basic block.
*
- * @param var a type on a which a constructor is invoked.
+ * @param var
+ * a type on which a constructor is invoked.
*/
private void init(final int var) {
// creates and/or resizes the initializations array if necessary
@@ -752,8 +763,10 @@ final class Frame {
* Replaces the given type with the appropriate type if it is one of the
* types on which a constructor is invoked in the basic block.
*
- * @param cw the ClassWriter to which this label belongs.
- * @param t a type
+ * @param cw
+ * the ClassWriter to which this label belongs.
+ * @param t
+ * a type
* @return t or, if t is one of the types on which a constructor is invoked
* in the basic block, the type corresponding to this constructor.
*/
@@ -787,17 +800,17 @@ final class Frame {
* Initializes the input frame of the first basic block from the method
* descriptor.
*
- * @param cw the ClassWriter to which this label belongs.
- * @param access the access flags of the method to which this label belongs.
- * @param args the formal parameter types of this method.
- * @param maxLocals the maximum number of local variables of this method.
+ * @param cw
+ * the ClassWriter to which this label belongs.
+ * @param access
+ * the access flags of the method to which this label belongs.
+ * @param args
+ * the formal parameter types of this method.
+ * @param maxLocals
+ * the maximum number of local variables of this method.
*/
- void initInputFrame(
- final ClassWriter cw,
- final int access,
- final Type[] args,
- final int maxLocals)
- {
+ void initInputFrame(final ClassWriter cw, final int access,
+ final Type[] args, final int maxLocals) {
inputLocals = new int[maxLocals];
inputStack = new int[0];
int i = 0;
@@ -823,435 +836,435 @@ final class Frame {
/**
* Simulates the action of the given instruction on the output stack frame.
*
- * @param opcode the opcode of the instruction.
- * @param arg the operand of the instruction, if any.
- * @param cw the class writer to which this label belongs.
- * @param item the operand of the instructions, if any.
+ * @param opcode
+ * the opcode of the instruction.
+ * @param arg
+ * the operand of the instruction, if any.
+ * @param cw
+ * the class writer to which this label belongs.
+ * @param item
+ * the operand of the instructions, if any.
*/
- void execute(
- final int opcode,
- final int arg,
- final ClassWriter cw,
- final Item item)
- {
+ void execute(final int opcode, final int arg, final ClassWriter cw,
+ final Item item) {
int t1, t2, t3, t4;
switch (opcode) {
- case Opcodes.NOP:
- case Opcodes.INEG:
- case Opcodes.LNEG:
- case Opcodes.FNEG:
- case Opcodes.DNEG:
- case Opcodes.I2B:
- case Opcodes.I2C:
- case Opcodes.I2S:
- case Opcodes.GOTO:
- case Opcodes.RETURN:
- break;
- case Opcodes.ACONST_NULL:
- push(NULL);
- break;
- case Opcodes.ICONST_M1:
- case Opcodes.ICONST_0:
- case Opcodes.ICONST_1:
- case Opcodes.ICONST_2:
- case Opcodes.ICONST_3:
- case Opcodes.ICONST_4:
- case Opcodes.ICONST_5:
- case Opcodes.BIPUSH:
- case Opcodes.SIPUSH:
- case Opcodes.ILOAD:
+ case Opcodes.NOP:
+ case Opcodes.INEG:
+ case Opcodes.LNEG:
+ case Opcodes.FNEG:
+ case Opcodes.DNEG:
+ case Opcodes.I2B:
+ case Opcodes.I2C:
+ case Opcodes.I2S:
+ case Opcodes.GOTO:
+ case Opcodes.RETURN:
+ break;
+ case Opcodes.ACONST_NULL:
+ push(NULL);
+ break;
+ case Opcodes.ICONST_M1:
+ case Opcodes.ICONST_0:
+ case Opcodes.ICONST_1:
+ case Opcodes.ICONST_2:
+ case Opcodes.ICONST_3:
+ case Opcodes.ICONST_4:
+ case Opcodes.ICONST_5:
+ case Opcodes.BIPUSH:
+ case Opcodes.SIPUSH:
+ case Opcodes.ILOAD:
+ push(INTEGER);
+ break;
+ case Opcodes.LCONST_0:
+ case Opcodes.LCONST_1:
+ case Opcodes.LLOAD:
+ push(LONG);
+ push(TOP);
+ break;
+ case Opcodes.FCONST_0:
+ case Opcodes.FCONST_1:
+ case Opcodes.FCONST_2:
+ case Opcodes.FLOAD:
+ push(FLOAT);
+ break;
+ case Opcodes.DCONST_0:
+ case Opcodes.DCONST_1:
+ case Opcodes.DLOAD:
+ push(DOUBLE);
+ push(TOP);
+ break;
+ case Opcodes.LDC:
+ switch (item.type) {
+ case ClassWriter.INT:
push(INTEGER);
break;
- case Opcodes.LCONST_0:
- case Opcodes.LCONST_1:
- case Opcodes.LLOAD:
+ case ClassWriter.LONG:
push(LONG);
push(TOP);
break;
- case Opcodes.FCONST_0:
- case Opcodes.FCONST_1:
- case Opcodes.FCONST_2:
- case Opcodes.FLOAD:
+ case ClassWriter.FLOAT:
push(FLOAT);
break;
- case Opcodes.DCONST_0:
- case Opcodes.DCONST_1:
- case Opcodes.DLOAD:
+ case ClassWriter.DOUBLE:
push(DOUBLE);
push(TOP);
break;
- case Opcodes.LDC:
- switch (item.type) {
- case ClassWriter.INT:
- push(INTEGER);
- break;
- case ClassWriter.LONG:
- push(LONG);
- push(TOP);
- break;
- case ClassWriter.FLOAT:
- push(FLOAT);
- break;
- case ClassWriter.DOUBLE:
- push(DOUBLE);
- push(TOP);
- break;
- case ClassWriter.CLASS:
- push(OBJECT | cw.addType("java/lang/Class"));
- break;
- case ClassWriter.STR:
- push(OBJECT | cw.addType("java/lang/String"));
- break;
- case ClassWriter.MTYPE:
- push(OBJECT | cw.addType("java/lang/invoke/MethodType"));
- break;
- // case ClassWriter.HANDLE_BASE + [1..9]:
- default:
- push(OBJECT | cw.addType("java/lang/invoke/MethodHandle"));
- }
- break;
- case Opcodes.ALOAD:
- push(get(arg));
- break;
- case Opcodes.IALOAD:
- case Opcodes.BALOAD:
- case Opcodes.CALOAD:
- case Opcodes.SALOAD:
- pop(2);
- push(INTEGER);
- break;
- case Opcodes.LALOAD:
- case Opcodes.D2L:
- pop(2);
- push(LONG);
- push(TOP);
+ case ClassWriter.CLASS:
+ push(OBJECT | cw.addType("java/lang/Class"));
break;
- case Opcodes.FALOAD:
- pop(2);
- push(FLOAT);
+ case ClassWriter.STR:
+ push(OBJECT | cw.addType("java/lang/String"));
break;
- case Opcodes.DALOAD:
- case Opcodes.L2D:
- pop(2);
- push(DOUBLE);
- push(TOP);
+ case ClassWriter.MTYPE:
+ push(OBJECT | cw.addType("java/lang/invoke/MethodType"));
break;
- case Opcodes.AALOAD:
- pop(1);
- t1 = pop();
- push(ELEMENT_OF + t1);
- break;
- case Opcodes.ISTORE:
- case Opcodes.FSTORE:
- case Opcodes.ASTORE:
- t1 = pop();
- set(arg, t1);
- if (arg > 0) {
- t2 = get(arg - 1);
- // if t2 is of kind STACK or LOCAL we cannot know its size!
- if (t2 == LONG || t2 == DOUBLE) {
- set(arg - 1, TOP);
- } else if ((t2 & KIND) != BASE) {
- set(arg - 1, t2 | TOP_IF_LONG_OR_DOUBLE);
- }
+ // case ClassWriter.HANDLE_BASE + [1..9]:
+ default:
+ push(OBJECT | cw.addType("java/lang/invoke/MethodHandle"));
+ }
+ break;
+ case Opcodes.ALOAD:
+ push(get(arg));
+ break;
+ case Opcodes.IALOAD:
+ case Opcodes.BALOAD:
+ case Opcodes.CALOAD:
+ case Opcodes.SALOAD:
+ pop(2);
+ push(INTEGER);
+ break;
+ case Opcodes.LALOAD:
+ case Opcodes.D2L:
+ pop(2);
+ push(LONG);
+ push(TOP);
+ break;
+ case Opcodes.FALOAD:
+ pop(2);
+ push(FLOAT);
+ break;
+ case Opcodes.DALOAD:
+ case Opcodes.L2D:
+ pop(2);
+ push(DOUBLE);
+ push(TOP);
+ break;
+ case Opcodes.AALOAD:
+ pop(1);
+ t1 = pop();
+ push(ELEMENT_OF + t1);
+ break;
+ case Opcodes.ISTORE:
+ case Opcodes.FSTORE:
+ case Opcodes.ASTORE:
+ t1 = pop();
+ set(arg, t1);
+ if (arg > 0) {
+ t2 = get(arg - 1);
+ // if t2 is of kind STACK or LOCAL we cannot know its size!
+ if (t2 == LONG || t2 == DOUBLE) {
+ set(arg - 1, TOP);
+ } else if ((t2 & KIND) != BASE) {
+ set(arg - 1, t2 | TOP_IF_LONG_OR_DOUBLE);
}
- break;
- case Opcodes.LSTORE:
- case Opcodes.DSTORE:
- pop(1);
- t1 = pop();
- set(arg, t1);
- set(arg + 1, TOP);
- if (arg > 0) {
- t2 = get(arg - 1);
- // if t2 is of kind STACK or LOCAL we cannot know its size!
- if (t2 == LONG || t2 == DOUBLE) {
- set(arg - 1, TOP);
- } else if ((t2 & KIND) != BASE) {
- set(arg - 1, t2 | TOP_IF_LONG_OR_DOUBLE);
- }
+ }
+ break;
+ case Opcodes.LSTORE:
+ case Opcodes.DSTORE:
+ pop(1);
+ t1 = pop();
+ set(arg, t1);
+ set(arg + 1, TOP);
+ if (arg > 0) {
+ t2 = get(arg - 1);
+ // if t2 is of kind STACK or LOCAL we cannot know its size!
+ if (t2 == LONG || t2 == DOUBLE) {
+ set(arg - 1, TOP);
+ } else if ((t2 & KIND) != BASE) {
+ set(arg - 1, t2 | TOP_IF_LONG_OR_DOUBLE);
}
- break;
- case Opcodes.IASTORE:
- case Opcodes.BASTORE:
- case Opcodes.CASTORE:
- case Opcodes.SASTORE:
- case Opcodes.FASTORE:
- case Opcodes.AASTORE:
- pop(3);
- break;
- case Opcodes.LASTORE:
- case Opcodes.DASTORE:
- pop(4);
- break;
- case Opcodes.POP:
- case Opcodes.IFEQ:
- case Opcodes.IFNE:
- case Opcodes.IFLT:
- case Opcodes.IFGE:
- case Opcodes.IFGT:
- case Opcodes.IFLE:
- case Opcodes.IRETURN:
- case Opcodes.FRETURN:
- case Opcodes.ARETURN:
- case Opcodes.TABLESWITCH:
- case Opcodes.LOOKUPSWITCH:
- case Opcodes.ATHROW:
- case Opcodes.MONITORENTER:
- case Opcodes.MONITOREXIT:
- case Opcodes.IFNULL:
- case Opcodes.IFNONNULL:
- pop(1);
- break;
- case Opcodes.POP2:
- case Opcodes.IF_ICMPEQ:
- case Opcodes.IF_ICMPNE:
- case Opcodes.IF_ICMPLT:
- case Opcodes.IF_ICMPGE:
- case Opcodes.IF_ICMPGT:
- case Opcodes.IF_ICMPLE:
- case Opcodes.IF_ACMPEQ:
- case Opcodes.IF_ACMPNE:
- case Opcodes.LRETURN:
- case Opcodes.DRETURN:
- pop(2);
- break;
- case Opcodes.DUP:
- t1 = pop();
- push(t1);
- push(t1);
- break;
- case Opcodes.DUP_X1:
- t1 = pop();
- t2 = pop();
- push(t1);
- push(t2);
- push(t1);
- break;
- case Opcodes.DUP_X2:
- t1 = pop();
- t2 = pop();
- t3 = pop();
- push(t1);
- push(t3);
- push(t2);
- push(t1);
- break;
- case Opcodes.DUP2:
- t1 = pop();
- t2 = pop();
- push(t2);
- push(t1);
- push(t2);
- push(t1);
- break;
- case Opcodes.DUP2_X1:
- t1 = pop();
- t2 = pop();
- t3 = pop();
- push(t2);
- push(t1);
- push(t3);
- push(t2);
- push(t1);
- break;
- case Opcodes.DUP2_X2:
- t1 = pop();
- t2 = pop();
- t3 = pop();
- t4 = pop();
- push(t2);
- push(t1);
- push(t4);
- push(t3);
- push(t2);
- push(t1);
- break;
- case Opcodes.SWAP:
+ }
+ break;
+ case Opcodes.IASTORE:
+ case Opcodes.BASTORE:
+ case Opcodes.CASTORE:
+ case Opcodes.SASTORE:
+ case Opcodes.FASTORE:
+ case Opcodes.AASTORE:
+ pop(3);
+ break;
+ case Opcodes.LASTORE:
+ case Opcodes.DASTORE:
+ pop(4);
+ break;
+ case Opcodes.POP:
+ case Opcodes.IFEQ:
+ case Opcodes.IFNE:
+ case Opcodes.IFLT:
+ case Opcodes.IFGE:
+ case Opcodes.IFGT:
+ case Opcodes.IFLE:
+ case Opcodes.IRETURN:
+ case Opcodes.FRETURN:
+ case Opcodes.ARETURN:
+ case Opcodes.TABLESWITCH:
+ case Opcodes.LOOKUPSWITCH:
+ case Opcodes.ATHROW:
+ case Opcodes.MONITORENTER:
+ case Opcodes.MONITOREXIT:
+ case Opcodes.IFNULL:
+ case Opcodes.IFNONNULL:
+ pop(1);
+ break;
+ case Opcodes.POP2:
+ case Opcodes.IF_ICMPEQ:
+ case Opcodes.IF_ICMPNE:
+ case Opcodes.IF_ICMPLT:
+ case Opcodes.IF_ICMPGE:
+ case Opcodes.IF_ICMPGT:
+ case Opcodes.IF_ICMPLE:
+ case Opcodes.IF_ACMPEQ:
+ case Opcodes.IF_ACMPNE:
+ case Opcodes.LRETURN:
+ case Opcodes.DRETURN:
+ pop(2);
+ break;
+ case Opcodes.DUP:
+ t1 = pop();
+ push(t1);
+ push(t1);
+ break;
+ case Opcodes.DUP_X1:
+ t1 = pop();
+ t2 = pop();
+ push(t1);
+ push(t2);
+ push(t1);
+ break;
+ case Opcodes.DUP_X2:
+ t1 = pop();
+ t2 = pop();
+ t3 = pop();
+ push(t1);
+ push(t3);
+ push(t2);
+ push(t1);
+ break;
+ case Opcodes.DUP2:
+ t1 = pop();
+ t2 = pop();
+ push(t2);
+ push(t1);
+ push(t2);
+ push(t1);
+ break;
+ case Opcodes.DUP2_X1:
+ t1 = pop();
+ t2 = pop();
+ t3 = pop();
+ push(t2);
+ push(t1);
+ push(t3);
+ push(t2);
+ push(t1);
+ break;
+ case Opcodes.DUP2_X2:
+ t1 = pop();
+ t2 = pop();
+ t3 = pop();
+ t4 = pop();
+ push(t2);
+ push(t1);
+ push(t4);
+ push(t3);
+ push(t2);
+ push(t1);
+ break;
+ case Opcodes.SWAP:
+ t1 = pop();
+ t2 = pop();
+ push(t1);
+ push(t2);
+ break;
+ case Opcodes.IADD:
+ case Opcodes.ISUB:
+ case Opcodes.IMUL:
+ case Opcodes.IDIV:
+ case Opcodes.IREM:
+ case Opcodes.IAND:
+ case Opcodes.IOR:
+ case Opcodes.IXOR:
+ case Opcodes.ISHL:
+ case Opcodes.ISHR:
+ case Opcodes.IUSHR:
+ case Opcodes.L2I:
+ case Opcodes.D2I:
+ case Opcodes.FCMPL:
+ case Opcodes.FCMPG:
+ pop(2);
+ push(INTEGER);
+ break;
+ case Opcodes.LADD:
+ case Opcodes.LSUB:
+ case Opcodes.LMUL:
+ case Opcodes.LDIV:
+ case Opcodes.LREM:
+ case Opcodes.LAND:
+ case Opcodes.LOR:
+ case Opcodes.LXOR:
+ pop(4);
+ push(LONG);
+ push(TOP);
+ break;
+ case Opcodes.FADD:
+ case Opcodes.FSUB:
+ case Opcodes.FMUL:
+ case Opcodes.FDIV:
+ case Opcodes.FREM:
+ case Opcodes.L2F:
+ case Opcodes.D2F:
+ pop(2);
+ push(FLOAT);
+ break;
+ case Opcodes.DADD:
+ case Opcodes.DSUB:
+ case Opcodes.DMUL:
+ case Opcodes.DDIV:
+ case Opcodes.DREM:
+ pop(4);
+ push(DOUBLE);
+ push(TOP);
+ break;
+ case Opcodes.LSHL:
+ case Opcodes.LSHR:
+ case Opcodes.LUSHR:
+ pop(3);
+ push(LONG);
+ push(TOP);
+ break;
+ case Opcodes.IINC:
+ set(arg, INTEGER);
+ break;
+ case Opcodes.I2L:
+ case Opcodes.F2L:
+ pop(1);
+ push(LONG);
+ push(TOP);
+ break;
+ case Opcodes.I2F:
+ pop(1);
+ push(FLOAT);
+ break;
+ case Opcodes.I2D:
+ case Opcodes.F2D:
+ pop(1);
+ push(DOUBLE);
+ push(TOP);
+ break;
+ case Opcodes.F2I:
+ case Opcodes.ARRAYLENGTH:
+ case Opcodes.INSTANCEOF:
+ pop(1);
+ push(INTEGER);
+ break;
+ case Opcodes.LCMP:
+ case Opcodes.DCMPL:
+ case Opcodes.DCMPG:
+ pop(4);
+ push(INTEGER);
+ break;
+ case Opcodes.JSR:
+ case Opcodes.RET:
+ throw new RuntimeException(
+ "JSR/RET are not supported with computeFrames option");
+ case Opcodes.GETSTATIC:
+ push(cw, item.strVal3);
+ break;
+ case Opcodes.PUTSTATIC:
+ pop(item.strVal3);
+ break;
+ case Opcodes.GETFIELD:
+ pop(1);
+ push(cw, item.strVal3);
+ break;
+ case Opcodes.PUTFIELD:
+ pop(item.strVal3);
+ pop();
+ break;
+ case Opcodes.INVOKEVIRTUAL:
+ case Opcodes.INVOKESPECIAL:
+ case Opcodes.INVOKESTATIC:
+ case Opcodes.INVOKEINTERFACE:
+ pop(item.strVal3);
+ if (opcode != Opcodes.INVOKESTATIC) {
t1 = pop();
- t2 = pop();
- push(t1);
- push(t2);
- break;
- case Opcodes.IADD:
- case Opcodes.ISUB:
- case Opcodes.IMUL:
- case Opcodes.IDIV:
- case Opcodes.IREM:
- case Opcodes.IAND:
- case Opcodes.IOR:
- case Opcodes.IXOR:
- case Opcodes.ISHL:
- case Opcodes.ISHR:
- case Opcodes.IUSHR:
- case Opcodes.L2I:
- case Opcodes.D2I:
- case Opcodes.FCMPL:
- case Opcodes.FCMPG:
- pop(2);
- push(INTEGER);
- break;
- case Opcodes.LADD:
- case Opcodes.LSUB:
- case Opcodes.LMUL:
- case Opcodes.LDIV:
- case Opcodes.LREM:
- case Opcodes.LAND:
- case Opcodes.LOR:
- case Opcodes.LXOR:
- pop(4);
- push(LONG);
- push(TOP);
- break;
- case Opcodes.FADD:
- case Opcodes.FSUB:
- case Opcodes.FMUL:
- case Opcodes.FDIV:
- case Opcodes.FREM:
- case Opcodes.L2F:
- case Opcodes.D2F:
- pop(2);
- push(FLOAT);
- break;
- case Opcodes.DADD:
- case Opcodes.DSUB:
- case Opcodes.DMUL:
- case Opcodes.DDIV:
- case Opcodes.DREM:
- pop(4);
- push(DOUBLE);
- push(TOP);
- break;
- case Opcodes.LSHL:
- case Opcodes.LSHR:
- case Opcodes.LUSHR:
- pop(3);
- push(LONG);
- push(TOP);
- break;
- case Opcodes.IINC:
- set(arg, INTEGER);
- break;
- case Opcodes.I2L:
- case Opcodes.F2L:
- pop(1);
- push(LONG);
- push(TOP);
- break;
- case Opcodes.I2F:
- pop(1);
- push(FLOAT);
- break;
- case Opcodes.I2D:
- case Opcodes.F2D:
- pop(1);
- push(DOUBLE);
- push(TOP);
- break;
- case Opcodes.F2I:
- case Opcodes.ARRAYLENGTH:
- case Opcodes.INSTANCEOF:
- pop(1);
- push(INTEGER);
- break;
- case Opcodes.LCMP:
- case Opcodes.DCMPL:
- case Opcodes.DCMPG:
- pop(4);
- push(INTEGER);
- break;
- case Opcodes.JSR:
- case Opcodes.RET:
- throw new RuntimeException("JSR/RET are not supported with computeFrames option");
- case Opcodes.GETSTATIC:
- push(cw, item.strVal3);
- break;
- case Opcodes.PUTSTATIC:
- pop(item.strVal3);
- break;
- case Opcodes.GETFIELD:
- pop(1);
- push(cw, item.strVal3);
- break;
- case Opcodes.PUTFIELD:
- pop(item.strVal3);
- pop();
- break;
- case Opcodes.INVOKEVIRTUAL:
- case Opcodes.INVOKESPECIAL:
- case Opcodes.INVOKESTATIC:
- case Opcodes.INVOKEINTERFACE:
- pop(item.strVal3);
- if (opcode != Opcodes.INVOKESTATIC) {
- t1 = pop();
- if (opcode == Opcodes.INVOKESPECIAL
- && item.strVal2.charAt(0) == '<')
- {
- init(t1);
- }
+ if (opcode == Opcodes.INVOKESPECIAL
+ && item.strVal2.charAt(0) == '<') {
+ init(t1);
}
- push(cw, item.strVal3);
+ }
+ push(cw, item.strVal3);
+ break;
+ case Opcodes.INVOKEDYNAMIC:
+ pop(item.strVal2);
+ push(cw, item.strVal2);
+ break;
+ case Opcodes.NEW:
+ push(UNINITIALIZED | cw.addUninitializedType(item.strVal1, arg));
+ break;
+ case Opcodes.NEWARRAY:
+ pop();
+ switch (arg) {
+ case Opcodes.T_BOOLEAN:
+ push(ARRAY_OF | BOOLEAN);
break;
- case Opcodes.INVOKEDYNAMIC:
- pop(item.strVal2);
- push(cw, item.strVal2);
+ case Opcodes.T_CHAR:
+ push(ARRAY_OF | CHAR);
break;
- case Opcodes.NEW:
- push(UNINITIALIZED | cw.addUninitializedType(item.strVal1, arg));
+ case Opcodes.T_BYTE:
+ push(ARRAY_OF | BYTE);
break;
- case Opcodes.NEWARRAY:
- pop();
- switch (arg) {
- case Opcodes.T_BOOLEAN:
- push(ARRAY_OF | BOOLEAN);
- break;
- case Opcodes.T_CHAR:
- push(ARRAY_OF | CHAR);
- break;
- case Opcodes.T_BYTE:
- push(ARRAY_OF | BYTE);
- break;
- case Opcodes.T_SHORT:
- push(ARRAY_OF | SHORT);
- break;
- case Opcodes.T_INT:
- push(ARRAY_OF | INTEGER);
- break;
- case Opcodes.T_FLOAT:
- push(ARRAY_OF | FLOAT);
- break;
- case Opcodes.T_DOUBLE:
- push(ARRAY_OF | DOUBLE);
- break;
- // case Opcodes.T_LONG:
- default:
- push(ARRAY_OF | LONG);
- break;
- }
+ case Opcodes.T_SHORT:
+ push(ARRAY_OF | SHORT);
break;
- case Opcodes.ANEWARRAY:
- String s = item.strVal1;
- pop();
- if (s.charAt(0) == '[') {
- push(cw, '[' + s);
- } else {
- push(ARRAY_OF | OBJECT | cw.addType(s));
- }
+ case Opcodes.T_INT:
+ push(ARRAY_OF | INTEGER);
break;
- case Opcodes.CHECKCAST:
- s = item.strVal1;
- pop();
- if (s.charAt(0) == '[') {
- push(cw, s);
- } else {
- push(OBJECT | cw.addType(s));
- }
+ case Opcodes.T_FLOAT:
+ push(ARRAY_OF | FLOAT);
break;
- // case Opcodes.MULTIANEWARRAY:
+ case Opcodes.T_DOUBLE:
+ push(ARRAY_OF | DOUBLE);
+ break;
+ // case Opcodes.T_LONG:
default:
- pop(arg);
- push(cw, item.strVal1);
+ push(ARRAY_OF | LONG);
break;
+ }
+ break;
+ case Opcodes.ANEWARRAY:
+ String s = item.strVal1;
+ pop();
+ if (s.charAt(0) == '[') {
+ push(cw, '[' + s);
+ } else {
+ push(ARRAY_OF | OBJECT | cw.addType(s));
+ }
+ break;
+ case Opcodes.CHECKCAST:
+ s = item.strVal1;
+ pop();
+ if (s.charAt(0) == '[') {
+ push(cw, s);
+ } else {
+ push(OBJECT | cw.addType(s));
+ }
+ break;
+ // case Opcodes.MULTIANEWARRAY:
+ default:
+ pop(arg);
+ push(cw, item.strVal1);
+ break;
}
}
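
The reindented switch above is the core of Frame.execute(): each opcode pops its operand slots and pushes an abstract result type, with an extra TOP pushed after LONG or DOUBLE because those values occupy two slots. A minimal standalone sketch of that bookkeeping, not taken from ASM, with placeholder type constants standing in for ASM's internal encodings:

    import java.util.ArrayDeque;
    import java.util.Deque;

    // Illustrative only: a toy re-implementation of the pop/push bookkeeping
    // performed by Frame.execute() for a few of the opcodes shown above.
    public class StackEffectSketch {
        static final int TOP = 0, INTEGER = 1, FLOAT = 2, LONG = 3, DOUBLE = 4;

        private final Deque<Integer> stack = new ArrayDeque<>();

        void push(int type) { stack.push(type); }
        void pop(int n) { for (int i = 0; i < n; i++) stack.pop(); }

        // LADD-style opcodes: pop two longs (4 slots), push one long (2 slots).
        void lAdd() { pop(4); push(LONG); push(TOP); }

        // I2F-style opcodes: pop one int slot, push one float slot.
        void i2f() { pop(1); push(FLOAT); }

        public static void main(String[] args) {
            StackEffectSketch f = new StackEffectSketch();
            f.push(LONG); f.push(TOP); f.push(LONG); f.push(TOP); // two longs
            f.lAdd();
            System.out.println("slots after LADD: " + f.stack.size()); // prints 2
        }
    }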
@@ -1260,10 +1273,13 @@ final class Frame {
* frames of this basic block. Returns <tt>true</tt> if the input frame of
* the given label has been changed by this operation.
*
- * @param cw the ClassWriter to which this label belongs.
- * @param frame the basic block whose input frame must be updated.
- * @param edge the kind of the {@link Edge} between this label and 'label'.
- * See {@link Edge#info}.
+ * @param cw
+ * the ClassWriter to which this label belongs.
+ * @param frame
+ * the basic block whose input frame must be updated.
+ * @param edge
+ * the kind of the {@link Edge} between this label and 'label'.
+ * See {@link Edge#info}.
* @return <tt>true</tt> if the input frame of the given label has been
* changed by this operation.
*/
@@ -1294,7 +1310,8 @@ final class Frame {
} else {
t = dim + inputStack[nStack - (s & VALUE)];
}
- if ((s & TOP_IF_LONG_OR_DOUBLE) != 0 && (t == LONG || t == DOUBLE)) {
+ if ((s & TOP_IF_LONG_OR_DOUBLE) != 0
+ && (t == LONG || t == DOUBLE)) {
t = TOP;
}
}
@@ -1346,7 +1363,8 @@ final class Frame {
} else {
t = dim + inputStack[nStack - (s & VALUE)];
}
- if ((s & TOP_IF_LONG_OR_DOUBLE) != 0 && (t == LONG || t == DOUBLE)) {
+ if ((s & TOP_IF_LONG_OR_DOUBLE) != 0
+ && (t == LONG || t == DOUBLE)) {
t = TOP;
}
}
@@ -1363,19 +1381,19 @@ final class Frame {
* type. Returns <tt>true</tt> if the type array has been modified by this
* operation.
*
- * @param cw the ClassWriter to which this label belongs.
- * @param t the type with which the type array element must be merged.
- * @param types an array of types.
- * @param index the index of the type that must be merged in 'types'.
+ * @param cw
+ * the ClassWriter to which this label belongs.
+ * @param t
+ * the type with which the type array element must be merged.
+ * @param types
+ * an array of types.
+ * @param index
+ * the index of the type that must be merged in 'types'.
* @return <tt>true</tt> if the type array has been modified by this
* operation.
*/
- private static boolean merge(
- final ClassWriter cw,
- int t,
- final int[] types,
- final int index)
- {
+ private static boolean merge(final ClassWriter cw, int t,
+ final int[] types, final int index) {
int u = types[index];
if (u == t) {
// if the types are equal, merge(u,t)=u, so there is no change
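
merge() above is the primitive used by the frame computation: each basic block's output frame is repeatedly merged into its successors' input frames until nothing changes. A generic worklist sketch of that fixpoint pattern, assuming a toy bit-set lattice rather than ASM's type encodings:

    import java.util.ArrayDeque;
    import java.util.ArrayList;
    import java.util.Deque;
    import java.util.List;

    // Generic fixpoint/worklist sketch; Block and mergeInto() are hypothetical
    // stand-ins for ASM's Label/Frame machinery.
    public class FixpointSketch {
        static class Block {
            int gen;                       // facts this block generates (bit set)
            int input;                     // facts known on entry (bit set)
            final List<Block> successors = new ArrayList<>();
        }

        // Merge 'facts' into the block's input; return true if it changed.
        static boolean mergeInto(Block b, int facts) {
            int merged = b.input | facts;  // toy lattice join: set union
            if (merged == b.input) return false;
            b.input = merged;
            return true;
        }

        static void fixpoint(Block entry) {
            Deque<Block> worklist = new ArrayDeque<>();
            worklist.add(entry);
            while (!worklist.isEmpty()) {
                Block b = worklist.remove();
                int output = b.input | b.gen;       // toy transfer function
                for (Block s : b.successors) {
                    if (mergeInto(s, output)) {
                        worklist.add(s);            // input changed: revisit it
                    }
                }
            }
        }

        public static void main(String[] args) {
            Block a = new Block(); a.gen = 0b01;
            Block b = new Block(); b.gen = 0b10;
            a.successors.add(b);
            b.successors.add(a);                    // a cycle, to show termination
            fixpoint(a);
            // prints 11: around the cycle, both facts eventually reach b's entry
            System.out.println(Integer.toBinaryString(b.input));
        }
    }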
diff --git a/src/asm/scala/tools/asm/Handle.java b/src/asm/scala/tools/asm/Handle.java
index be8f334192..5dd06a54b9 100644
--- a/src/asm/scala/tools/asm/Handle.java
+++ b/src/asm/scala/tools/asm/Handle.java
@@ -66,18 +66,23 @@ public final class Handle {
/**
* Constructs a new field or method handle.
*
- * @param tag the kind of field or method designated by this Handle. Must be
- * {@link Opcodes#H_GETFIELD}, {@link Opcodes#H_GETSTATIC},
- * {@link Opcodes#H_PUTFIELD}, {@link Opcodes#H_PUTSTATIC},
- * {@link Opcodes#H_INVOKEVIRTUAL}, {@link Opcodes#H_INVOKESTATIC},
- * {@link Opcodes#H_INVOKESPECIAL},
- * {@link Opcodes#H_NEWINVOKESPECIAL} or
- * {@link Opcodes#H_INVOKEINTERFACE}.
- * @param owner the internal name of the field or method designed by this
- * handle.
- * @param name the name of the field or method designated by this handle.
- * @param desc the descriptor of the field or method designated by this
- * handle.
+ * @param tag
+ * the kind of field or method designated by this Handle. Must be
+ * {@link Opcodes#H_GETFIELD}, {@link Opcodes#H_GETSTATIC},
+ * {@link Opcodes#H_PUTFIELD}, {@link Opcodes#H_PUTSTATIC},
+ * {@link Opcodes#H_INVOKEVIRTUAL},
+ * {@link Opcodes#H_INVOKESTATIC},
+ * {@link Opcodes#H_INVOKESPECIAL},
+ * {@link Opcodes#H_NEWINVOKESPECIAL} or
+ * {@link Opcodes#H_INVOKEINTERFACE}.
+ * @param owner
+ * the internal name of the field or method designated by this
+ * handle.
+ * @param name
+ * the name of the field or method designated by this handle.
+ * @param desc
+ * the descriptor of the field or method designated by this
+ * handle.
*/
public Handle(int tag, String owner, String name, String desc) {
this.tag = tag;
@@ -101,11 +106,9 @@ public final class Handle {
}
/**
- * Returns the internal name of the field or method designed by this
- * handle.
+ * Returns the internal name of the field or method designated by this handle.
*
- * @return the internal name of the field or method designed by this
- * handle.
+ * @return the internal name of the field or method designated by this handle.
*/
public String getOwner() {
return owner;
@@ -138,8 +141,8 @@ public final class Handle {
return false;
}
Handle h = (Handle) obj;
- return tag == h.tag && owner.equals(h.owner)
- && name.equals(h.name) && desc.equals(h.desc);
+ return tag == h.tag && owner.equals(h.owner) && name.equals(h.name)
+ && desc.equals(h.desc);
}
@Override
@@ -149,8 +152,13 @@ public final class Handle {
/**
* Returns the textual representation of this handle. The textual
- * representation is: <pre>owner '.' name desc ' ' '(' tag ')'</pre>. As
- * this format is unambiguous, it can be parsed if necessary.
+ * representation is:
+ *
+ * <pre>
+ * owner '.' name desc ' ' '(' tag ')'
+ * </pre>
+ *
+ * As this format is unambiguous, it can be parsed if necessary.
*/
@Override
public String toString() {
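
Given the constructor and the toString() format documented above, a handle prints as owner '.' name desc ' ' '(' tag ')'. A small usage sketch against this fork's package (the target method is only an example; the tag prints as its numeric value, 6 for H_INVOKESTATIC):

    import scala.tools.asm.Handle;
    import scala.tools.asm.Opcodes;

    public class HandleDemo {
        public static void main(String[] args) {
            // A handle designating String.valueOf(int) as an H_INVOKESTATIC target.
            Handle h = new Handle(Opcodes.H_INVOKESTATIC,
                    "java/lang/String", "valueOf", "(I)Ljava/lang/String;");
            // Expected output per the format above:
            // java/lang/String.valueOf(I)Ljava/lang/String; (6)
            System.out.println(h);
        }
    }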
diff --git a/src/asm/scala/tools/asm/Handler.java b/src/asm/scala/tools/asm/Handler.java
index 9e92bb98be..a06cb8152a 100644
--- a/src/asm/scala/tools/asm/Handler.java
+++ b/src/asm/scala/tools/asm/Handler.java
@@ -72,9 +72,12 @@ class Handler {
* Removes the range between start and end from the given exception
* handlers.
*
- * @param h an exception handler list.
- * @param start the start of the range to be removed.
- * @param end the end of the range to be removed. Maybe null.
+ * @param h
+ * an exception handler list.
+ * @param start
+ * the start of the range to be removed.
+ * @param end
+ * the end of the range to be removed. May be null.
* @return the exception handler list with the start-end range removed.
*/
static Handler remove(Handler h, Label start, Label end) {
diff --git a/src/asm/scala/tools/asm/Item.java b/src/asm/scala/tools/asm/Item.java
index 021a0b11d3..94195a1082 100644
--- a/src/asm/scala/tools/asm/Item.java
+++ b/src/asm/scala/tools/asm/Item.java
@@ -53,8 +53,8 @@ final class Item {
* {@link ClassWriter#METH}, {@link ClassWriter#IMETH},
* {@link ClassWriter#MTYPE}, {@link ClassWriter#INDY}.
*
- * MethodHandle constant 9 variations are stored using a range
- * of 9 values from {@link ClassWriter#HANDLE_BASE} + 1 to
+ * The 9 MethodHandle constant variations are stored using a range of 9
+ * values from {@link ClassWriter#HANDLE_BASE} + 1 to
* {@link ClassWriter#HANDLE_BASE} + 9.
*
* Special Item types are used for Items that are stored in the ClassWriter
@@ -115,7 +115,8 @@ final class Item {
* Constructs an uninitialized {@link Item} for constant pool element at
* given position.
*
- * @param index index of the item to be constructed.
+ * @param index
+ * index of the item to be constructed.
*/
Item(final int index) {
this.index = index;
@@ -124,8 +125,10 @@ final class Item {
/**
* Constructs a copy of the given item.
*
- * @param index index of the item to be constructed.
- * @param i the item that must be copied into the item to be constructed.
+ * @param index
+ * index of the item to be constructed.
+ * @param i
+ * the item that must be copied into the item to be constructed.
*/
Item(final int index, final Item i) {
this.index = index;
@@ -141,7 +144,8 @@ final class Item {
/**
* Sets this item to an integer item.
*
- * @param intVal the value of this item.
+ * @param intVal
+ * the value of this item.
*/
void set(final int intVal) {
this.type = ClassWriter.INT;
@@ -152,7 +156,8 @@ final class Item {
/**
* Sets this item to a long item.
*
- * @param longVal the value of this item.
+ * @param longVal
+ * the value of this item.
*/
void set(final long longVal) {
this.type = ClassWriter.LONG;
@@ -163,7 +168,8 @@ final class Item {
/**
* Sets this item to a float item.
*
- * @param floatVal the value of this item.
+ * @param floatVal
+ * the value of this item.
*/
void set(final float floatVal) {
this.type = ClassWriter.FLOAT;
@@ -174,7 +180,8 @@ final class Item {
/**
* Sets this item to a double item.
*
- * @param doubleVal the value of this item.
+ * @param doubleVal
+ * the value of this item.
*/
void set(final double doubleVal) {
this.type = ClassWriter.DOUBLE;
@@ -185,49 +192,53 @@ final class Item {
/**
* Sets this item to an item that does not hold a primitive value.
*
- * @param type the type of this item.
- * @param strVal1 first part of the value of this item.
- * @param strVal2 second part of the value of this item.
- * @param strVal3 third part of the value of this item.
+ * @param type
+ * the type of this item.
+ * @param strVal1
+ * first part of the value of this item.
+ * @param strVal2
+ * second part of the value of this item.
+ * @param strVal3
+ * third part of the value of this item.
*/
- void set(
- final int type,
- final String strVal1,
- final String strVal2,
- final String strVal3)
- {
+ void set(final int type, final String strVal1, final String strVal2,
+ final String strVal3) {
this.type = type;
this.strVal1 = strVal1;
this.strVal2 = strVal2;
this.strVal3 = strVal3;
switch (type) {
- case ClassWriter.UTF8:
- case ClassWriter.STR:
- case ClassWriter.CLASS:
- case ClassWriter.MTYPE:
- case ClassWriter.TYPE_NORMAL:
- hashCode = 0x7FFFFFFF & (type + strVal1.hashCode());
- return;
- case ClassWriter.NAME_TYPE:
- hashCode = 0x7FFFFFFF & (type + strVal1.hashCode()
- * strVal2.hashCode());
- return;
- // ClassWriter.FIELD:
- // ClassWriter.METH:
- // ClassWriter.IMETH:
- // ClassWriter.HANDLE_BASE + 1..9
- default:
- hashCode = 0x7FFFFFFF & (type + strVal1.hashCode()
- * strVal2.hashCode() * strVal3.hashCode());
+ case ClassWriter.UTF8:
+ case ClassWriter.STR:
+ case ClassWriter.CLASS:
+ case ClassWriter.MTYPE:
+ case ClassWriter.TYPE_NORMAL:
+ hashCode = 0x7FFFFFFF & (type + strVal1.hashCode());
+ return;
+ case ClassWriter.NAME_TYPE: {
+ hashCode = 0x7FFFFFFF & (type + strVal1.hashCode()
+ * strVal2.hashCode());
+ return;
+ }
+ // ClassWriter.FIELD:
+ // ClassWriter.METH:
+ // ClassWriter.IMETH:
+ // ClassWriter.HANDLE_BASE + 1..9
+ default:
+ hashCode = 0x7FFFFFFF & (type + strVal1.hashCode()
+ * strVal2.hashCode() * strVal3.hashCode());
}
}
/**
* Sets the item to an InvokeDynamic item.
*
- * @param name invokedynamic's name.
- * @param desc invokedynamic's desc.
- * @param bsmIndex zero based index into the class attribute BootrapMethods.
+ * @param name
+ * invokedynamic's name.
+ * @param desc
+ * invokedynamic's desc.
+ * @param bsmIndex
+ * zero-based index into the class attribute BootstrapMethods.
*/
void set(String name, String desc, int bsmIndex) {
this.type = ClassWriter.INDY;
@@ -241,10 +252,12 @@ final class Item {
/**
* Sets the item to a BootstrapMethod item.
*
- * @param position position in byte in the class attribute BootrapMethods.
- * @param hashCode hashcode of the item. This hashcode is processed from
- * the hashcode of the bootstrap method and the hashcode of
- * all bootstrap arguments.
+ * @param position
+ * position in bytes in the class attribute BootstrapMethods.
+ * @param hashCode
+ * hashcode of the item. This hashcode is computed from the
+ * hashcode of the bootstrap method and the hashcode of all
+ * bootstrap arguments.
*/
void set(int position, int hashCode) {
this.type = ClassWriter.BSM;
@@ -256,41 +269,42 @@ final class Item {
* Indicates if the given item is equal to this one. <i>This method assumes
* that the two items have the same {@link #type}</i>.
*
- * @param i the item to be compared to this one. Both items must have the
- * same {@link #type}.
+ * @param i
+ * the item to be compared to this one. Both items must have the
+ * same {@link #type}.
* @return <tt>true</tt> if the given item is equal to this one,
* <tt>false</tt> otherwise.
*/
boolean isEqualTo(final Item i) {
switch (type) {
- case ClassWriter.UTF8:
- case ClassWriter.STR:
- case ClassWriter.CLASS:
- case ClassWriter.MTYPE:
- case ClassWriter.TYPE_NORMAL:
- return i.strVal1.equals(strVal1);
- case ClassWriter.TYPE_MERGED:
- case ClassWriter.LONG:
- case ClassWriter.DOUBLE:
- return i.longVal == longVal;
- case ClassWriter.INT:
- case ClassWriter.FLOAT:
- return i.intVal == intVal;
- case ClassWriter.TYPE_UNINIT:
- return i.intVal == intVal && i.strVal1.equals(strVal1);
- case ClassWriter.NAME_TYPE:
- return i.strVal1.equals(strVal1) && i.strVal2.equals(strVal2);
- case ClassWriter.INDY:
- return i.longVal == longVal && i.strVal1.equals(strVal1)
- && i.strVal2.equals(strVal2);
-
- // case ClassWriter.FIELD:
- // case ClassWriter.METH:
- // case ClassWriter.IMETH:
- // case ClassWriter.HANDLE_BASE + 1..9
- default:
- return i.strVal1.equals(strVal1) && i.strVal2.equals(strVal2)
- && i.strVal3.equals(strVal3);
+ case ClassWriter.UTF8:
+ case ClassWriter.STR:
+ case ClassWriter.CLASS:
+ case ClassWriter.MTYPE:
+ case ClassWriter.TYPE_NORMAL:
+ return i.strVal1.equals(strVal1);
+ case ClassWriter.TYPE_MERGED:
+ case ClassWriter.LONG:
+ case ClassWriter.DOUBLE:
+ return i.longVal == longVal;
+ case ClassWriter.INT:
+ case ClassWriter.FLOAT:
+ return i.intVal == intVal;
+ case ClassWriter.TYPE_UNINIT:
+ return i.intVal == intVal && i.strVal1.equals(strVal1);
+ case ClassWriter.NAME_TYPE:
+ return i.strVal1.equals(strVal1) && i.strVal2.equals(strVal2);
+ case ClassWriter.INDY: {
+ return i.longVal == longVal && i.strVal1.equals(strVal1)
+ && i.strVal2.equals(strVal2);
+ }
+ // case ClassWriter.FIELD:
+ // case ClassWriter.METH:
+ // case ClassWriter.IMETH:
+ // case ClassWriter.HANDLE_BASE + 1..9
+ default:
+ return i.strVal1.equals(strVal1) && i.strVal2.equals(strVal2)
+ && i.strVal3.equals(strVal3);
}
}
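
The set(...) overloads earlier in this hunk always mask the combined hash with 0x7FFFFFFF so it is never negative before being used as a bucket index, and isEqualTo(...) above then disambiguates items that landed in the same bucket. A tiny standalone illustration of the masking (plain Java; the tag value is a placeholder, not one of ClassWriter's actual constants):

    public class HashMaskDemo {
        public static void main(String[] args) {
            int type = 12;                          // placeholder item tag
            String strVal1 = "toString";
            String strVal2 = "()Ljava/lang/String;";
            // Same shape as Item.set(): combine the parts, then clear the sign bit.
            int raw = type + strVal1.hashCode() * strVal2.hashCode();
            int hashCode = 0x7FFFFFFF & raw;
            System.out.println(raw + " -> " + hashCode + " (never negative)");
        }
    }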
diff --git a/src/asm/scala/tools/asm/Label.java b/src/asm/scala/tools/asm/Label.java
index 712c7f251f..5d5529ce74 100644
--- a/src/asm/scala/tools/asm/Label.java
+++ b/src/asm/scala/tools/asm/Label.java
@@ -32,9 +32,9 @@ package scala.tools.asm;
/**
* A label represents a position in the bytecode of a method. Labels are used
* for jump, goto, and switch instructions, and for try catch blocks. A label
- * designates the <i>instruction</i> that is just after. Note however that
- * there can be other elements between a label and the instruction it
- * designates (such as other labels, stack map frames, line numbers, etc.).
+ * designates the <i>instruction</i> that is just after. Note however that there
+ * can be other elements between a label and the instruction it designates (such
+ * as other labels, stack map frames, line numbers, etc.).
*
* @author Eric Bruneton
*/
@@ -110,8 +110,8 @@ public class Label {
/**
* Field used to associate user information to a label. Warning: this field
* is used by the ASM tree package. In order to use it with the ASM tree
- * package you must override the {@link
- * org.objectweb.asm.tree.MethodNode#getLabelNode} method.
+ * package you must override the
+ * {@link scala.tools.asm.tree.MethodNode#getLabelNode} method.
*/
public Object info;
@@ -154,7 +154,7 @@ public class Label {
* indicates if this reference uses 2 or 4 bytes, and its absolute value
* gives the position of the bytecode instruction. This array is also used
* as a bitset to store the subroutines to which a basic block belongs. This
- * information is needed in {@linked MethodWriter#visitMaxs}, after all
+ * information is needed in {@link MethodWriter#visitMaxs}, after all
* forward references have been resolved. Hence the same array can be used
* for both purposes without problems.
*/
@@ -177,11 +177,11 @@ public class Label {
* state of the local variables and the operand stack at the end of each
* basic block, called the "output frame", <i>relatively</i> to the frame
* state at the beginning of the basic block, which is called the "input
- * frame", and which is <i>unknown</i> during this step. The second step,
- * in {@link MethodWriter#visitMaxs}, is a fix point algorithm that
- * computes information about the input frame of each basic block, from the
- * input state of the first basic block (known from the method signature),
- * and by the using the previously computed relative output frames.
+ * frame", and which is <i>unknown</i> during this step. The second step, in
+ * {@link MethodWriter#visitMaxs}, is a fix point algorithm that computes
+ * information about the input frame of each basic block, from the input
+ * state of the first basic block (known from the method signature), and by
+ * using the previously computed relative output frames.
*
* The algorithm used to compute the maximum stack size only computes the
* relative output and absolute input stack heights, while the algorithm
@@ -266,11 +266,13 @@ public class Label {
* generators or adapters.</i>
*
* @return the offset corresponding to this label.
- * @throws IllegalStateException if this label is not resolved yet.
+ * @throws IllegalStateException
+ * if this label is not resolved yet.
*/
public int getOffset() {
if ((status & RESOLVED) == 0) {
- throw new IllegalStateException("Label offset position has not been resolved yet");
+ throw new IllegalStateException(
+ "Label offset position has not been resolved yet");
}
return position;
}
@@ -281,21 +283,21 @@ public class Label {
* directly. Otherwise, a null offset is written and a new forward reference
* is declared for this label.
*
- * @param owner the code writer that calls this method.
- * @param out the bytecode of the method.
- * @param source the position of first byte of the bytecode instruction that
- * contains this label.
- * @param wideOffset <tt>true</tt> if the reference must be stored in 4
- * bytes, or <tt>false</tt> if it must be stored with 2 bytes.
- * @throws IllegalArgumentException if this label has not been created by
- * the given code writer.
- */
- void put(
- final MethodWriter owner,
- final ByteVector out,
- final int source,
- final boolean wideOffset)
- {
+ * @param owner
+ * the code writer that calls this method.
+ * @param out
+ * the bytecode of the method.
+ * @param source
+ * the position of first byte of the bytecode instruction that
+ * contains this label.
+ * @param wideOffset
+ * <tt>true</tt> if the reference must be stored in 4 bytes, or
+ * <tt>false</tt> if it must be stored with 2 bytes.
+ * @throws IllegalArgumentException
+ * if this label has not been created by the given code writer.
+ */
+ void put(final MethodWriter owner, final ByteVector out, final int source,
+ final boolean wideOffset) {
if ((status & RESOLVED) == 0) {
if (wideOffset) {
addReference(-1 - source, out.length);
@@ -319,25 +321,21 @@ public class Label {
* yet. For backward references, the offset of the reference can be, and
* must be, computed and stored directly.
*
- * @param sourcePosition the position of the referencing instruction. This
- * position will be used to compute the offset of this forward
- * reference.
- * @param referencePosition the position where the offset for this forward
- * reference must be stored.
- */
- private void addReference(
- final int sourcePosition,
- final int referencePosition)
- {
+ * @param sourcePosition
+ * the position of the referencing instruction. This position
+ * will be used to compute the offset of this forward reference.
+ * @param referencePosition
+ * the position where the offset for this forward reference must
+ * be stored.
+ */
+ private void addReference(final int sourcePosition,
+ final int referencePosition) {
if (srcAndRefPositions == null) {
srcAndRefPositions = new int[6];
}
if (referenceCount >= srcAndRefPositions.length) {
int[] a = new int[srcAndRefPositions.length + 6];
- System.arraycopy(srcAndRefPositions,
- 0,
- a,
- 0,
+ System.arraycopy(srcAndRefPositions, 0, a, 0,
srcAndRefPositions.length);
srcAndRefPositions = a;
}
@@ -351,9 +349,12 @@ public class Label {
* position becomes known. This method fills in the blanks that were left
* in the bytecode by each forward reference previously added to this label.
*
- * @param owner the code writer that calls this method.
- * @param position the position of this label in the bytecode.
- * @param data the bytecode of the method.
+ * @param owner
+ * the code writer that calls this method.
+ * @param position
+ * the position of this label in the bytecode.
+ * @param data
+ * the bytecode of the method.
* @return <tt>true</tt> if a blank that was left for this label was to
* small to store the offset. In such a case the corresponding jump
* instruction is replaced with a pseudo instruction (using unused
@@ -361,14 +362,12 @@ public class Label {
* instructions will need to be replaced with true instructions with
* wider offsets (4 bytes instead of 2). This is done in
* {@link MethodWriter#resizeInstructions}.
- * @throws IllegalArgumentException if this label has already been resolved,
- * or if it has not been created by the given code writer.
- */
- boolean resolve(
- final MethodWriter owner,
- final int position,
- final byte[] data)
- {
+ * @throws IllegalArgumentException
+ * if this label has already been resolved, or if it has not
+ * been created by the given code writer.
+ */
+ boolean resolve(final MethodWriter owner, final int position,
+ final byte[] data) {
boolean needUpdate = false;
this.status |= RESOLVED;
this.position = position;
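
The two hunks above implement forward references in two steps: addReference() records where a blank offset was written, and resolve() patches every recorded blank once the label's position is known. A simplified sketch of that pattern over a raw byte array (2-byte offsets only, not ASM's full encoding):

    import java.util.ArrayList;
    import java.util.List;

    // Toy version of the forward-reference mechanism: jumps to a label whose
    // position is still unknown write a blank 2-byte offset and remember where
    // it is; resolve() later fills in all the blanks.
    public class ForwardRefSketch {
        private final byte[] code = new byte[32];
        private int length;
        // Pairs of (position of the jump instruction, position of its blank offset).
        private final List<int[]> pending = new ArrayList<>();

        void emitJumpToUnresolvedLabel() {
            int source = length;
            code[length++] = (byte) 0xA7;                 // GOTO-like opcode
            pending.add(new int[] { source, length });
            length += 2;                                  // leave a 2-byte blank
        }

        void resolve(int labelPosition) {
            for (int[] ref : pending) {
                int offset = labelPosition - ref[0];      // branch is relative
                code[ref[1]] = (byte) (offset >>> 8);
                code[ref[1] + 1] = (byte) offset;
            }
            pending.clear();
        }

        public static void main(String[] args) {
            ForwardRefSketch w = new ForwardRefSketch();
            w.emitJumpToUnresolvedLabel();                // jump forward
            w.length += 5;                                // pretend 5 more bytes follow
            w.resolve(w.length);                          // the label lands here
            System.out.println("patched offset = "
                    + (((w.code[1] & 0xFF) << 8) | (w.code[2] & 0xFF))); // prints 8
        }
    }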
@@ -431,7 +430,8 @@ public class Label {
/**
* Returns true if this basic block belongs to the given subroutine.
*
- * @param id a subroutine id.
+ * @param id
+ * a subroutine id.
* @return true if this basic block belongs to the given subroutine.
*/
boolean inSubroutine(final long id) {
@@ -445,7 +445,8 @@ public class Label {
* Returns true if this basic block and the given one belong to a common
* subroutine.
*
- * @param block another basic block.
+ * @param block
+ * another basic block.
* @return true if this basic block and the given one belong to a common
* subroutine.
*/
@@ -464,8 +465,10 @@ public class Label {
/**
* Marks this basic block as belonging to the given subroutine.
*
- * @param id a subroutine id.
- * @param nbSubroutines the total number of subroutines in the method.
+ * @param id
+ * a subroutine id.
+ * @param nbSubroutines
+ * the total number of subroutines in the method.
*/
void addToSubroutine(final long id, final int nbSubroutines) {
if ((status & VISITED) == 0) {
@@ -481,14 +484,16 @@ public class Label {
* flow graph to find all the blocks that are reachable from the current
* block WITHOUT following any JSR target.
*
- * @param JSR a JSR block that jumps to this subroutine. If this JSR is not
- * null it is added to the successor of the RET blocks found in the
- * subroutine.
- * @param id the id of this subroutine.
- * @param nbSubroutines the total number of subroutines in the method.
- */
- void visitSubroutine(final Label JSR, final long id, final int nbSubroutines)
- {
+ * @param JSR
+ * a JSR block that jumps to this subroutine. If this JSR is not
+ * null it is added to the successor of the RET blocks found in
+ * the subroutine.
+ * @param id
+ * the id of this subroutine.
+ * @param nbSubroutines
+ * the total number of subroutines in the method.
+ */
+ void visitSubroutine(final Label JSR, final long id, final int nbSubroutines) {
// user managed stack of labels, to avoid using a recursive method
// (recursion can lead to stack overflow with very large methods)
Label stack = this;
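
As the comment above notes, visitSubroutine() walks the control flow graph with a user-managed stack of labels rather than recursion, so very large methods cannot overflow the call stack. A generic sketch of that traversal pattern, with a hypothetical Node type standing in for Label:

    import java.util.ArrayDeque;
    import java.util.ArrayList;
    import java.util.Deque;
    import java.util.List;

    // Iterative depth-first traversal with an explicit stack.
    public class IterativeDfsSketch {
        static class Node {
            boolean visited;
            final List<Node> successors = new ArrayList<>();
        }

        static void visitAll(Node start) {
            Deque<Node> stack = new ArrayDeque<>();   // user-managed, not the call stack
            stack.push(start);
            while (!stack.isEmpty()) {
                Node n = stack.pop();
                if (n.visited) {
                    continue;                         // already processed
                }
                n.visited = true;
                for (Node s : n.successors) {
                    stack.push(s);                    // defer instead of recursing
                }
            }
        }
    }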
diff --git a/src/asm/scala/tools/asm/MethodVisitor.java b/src/asm/scala/tools/asm/MethodVisitor.java
index a8a859a6a9..e43ca97823 100644
--- a/src/asm/scala/tools/asm/MethodVisitor.java
+++ b/src/asm/scala/tools/asm/MethodVisitor.java
@@ -30,19 +30,19 @@
package scala.tools.asm;
/**
- * A visitor to visit a Java method. The methods of this class must be
- * called in the following order: [ <tt>visitAnnotationDefault</tt> ] (
+ * A visitor to visit a Java method. The methods of this class must be called in
+ * the following order: [ <tt>visitAnnotationDefault</tt> ] (
* <tt>visitAnnotation</tt> | <tt>visitParameterAnnotation</tt> |
* <tt>visitAttribute</tt> )* [ <tt>visitCode</tt> ( <tt>visitFrame</tt> |
- * <tt>visit</tt><i>X</i>Insn</tt> | <tt>visitLabel</tt> | <tt>visitTryCatchBlock</tt> |
- * <tt>visitLocalVariable</tt> | <tt>visitLineNumber</tt> )* <tt>visitMaxs</tt> ]
- * <tt>visitEnd</tt>. In addition, the <tt>visit</tt><i>X</i>Insn</tt>
- * and <tt>visitLabel</tt> methods must be called in the sequential order of
- * the bytecode instructions of the visited code, <tt>visitTryCatchBlock</tt>
- * must be called <i>before</i> the labels passed as arguments have been
- * visited, and the <tt>visitLocalVariable</tt> and <tt>visitLineNumber</tt>
- * methods must be called <i>after</i> the labels passed as arguments have been
- * visited.
+ * <tt>visit<i>X</i>Insn</tt> | <tt>visitLabel</tt> |
+ * <tt>visitTryCatchBlock</tt> | <tt>visitLocalVariable</tt> |
+ * <tt>visitLineNumber</tt> )* <tt>visitMaxs</tt> ] <tt>visitEnd</tt>. In
+ * addition, the <tt>visit<i>X</i>Insn</tt> and <tt>visitLabel</tt> methods
+ * must be called in the sequential order of the bytecode instructions of the
+ * visited code, <tt>visitTryCatchBlock</tt> must be called <i>before</i> the
+ * labels passed as arguments have been visited, and the
+ * <tt>visitLocalVariable</tt> and <tt>visitLineNumber</tt> methods must be
+ * called <i>after</i> the labels passed as arguments have been visited.
*
* @author Eric Bruneton
*/
@@ -63,8 +63,9 @@ public abstract class MethodVisitor {
/**
* Constructs a new {@link MethodVisitor}.
*
- * @param api the ASM API version implemented by this visitor. Must be one
- * of {@link Opcodes#ASM4}.
+ * @param api
+ * the ASM API version implemented by this visitor. Must be one
+ * of {@link Opcodes#ASM4}.
*/
public MethodVisitor(final int api) {
this(api, null);
@@ -73,15 +74,17 @@ public abstract class MethodVisitor {
/**
* Constructs a new {@link MethodVisitor}.
*
- * @param api the ASM API version implemented by this visitor. Must be one
- * of {@link Opcodes#ASM4}.
- * @param mv the method visitor to which this visitor must delegate method
- * calls. May be null.
+ * @param api
+ * the ASM API version implemented by this visitor. Must be one
+ * of {@link Opcodes#ASM4}.
+ * @param mv
+ * the method visitor to which this visitor must delegate method
+ * calls. May be null.
*/
public MethodVisitor(final int api, final MethodVisitor mv) {
- /*if (api != Opcodes.ASM4) {
+ if (api != Opcodes.ASM4) {
throw new IllegalArgumentException();
- }*/
+ }
this.api = api;
this.mv = mv;
}
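
With the version check re-enabled above, passing anything other than Opcodes.ASM4 to this constructor now fails immediately. A typical delegating adapter built on this API might therefore look like the following sketch:

    import scala.tools.asm.MethodVisitor;
    import scala.tools.asm.Opcodes;

    // A minimal delegating adapter: declares the API version it implements and
    // forwards everything it does not override to the wrapped visitor.
    public class CountingMethodVisitor extends MethodVisitor {
        private int insnCount;

        public CountingMethodVisitor(MethodVisitor mv) {
            super(Opcodes.ASM4, mv);       // must be ASM4, or the constructor throws
        }

        @Override
        public void visitInsn(int opcode) {
            insnCount++;
            super.visitInsn(opcode);       // keep delegating down the chain
        }

        @Override
        public void visitMaxs(int maxStack, int maxLocals) {
            System.out.println("zero-operand instructions seen: " + insnCount);
            super.visitMaxs(maxStack, maxLocals);
        }
    }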
@@ -94,8 +97,8 @@ public abstract class MethodVisitor {
* Visits the default value of this annotation interface method.
*
* @return a visitor to visit the actual default value of this
- * annotation interface method, or <tt>null</tt> if this visitor
- * is not interested in visiting this default value. The 'name'
+ * annotation interface method, or <tt>null</tt> if this visitor is
+ * not interested in visiting this default value. The 'name'
* parameters passed to the methods of this annotation visitor are
* ignored. Moreover, exactly one visit method must be called on this
* annotation visitor, followed by visitEnd.
@@ -110,8 +113,10 @@ public abstract class MethodVisitor {
/**
* Visits an annotation of this method.
*
- * @param desc the class descriptor of the annotation class.
- * @param visible <tt>true</tt> if the annotation is visible at runtime.
+ * @param desc
+ * the class descriptor of the annotation class.
+ * @param visible
+ * <tt>true</tt> if the annotation is visible at runtime.
* @return a visitor to visit the annotation values, or <tt>null</tt> if
* this visitor is not interested in visiting this annotation.
*/
@@ -125,17 +130,17 @@ public abstract class MethodVisitor {
/**
* Visits an annotation of a parameter of this method.
*
- * @param parameter the parameter index.
- * @param desc the class descriptor of the annotation class.
- * @param visible <tt>true</tt> if the annotation is visible at runtime.
+ * @param parameter
+ * the parameter index.
+ * @param desc
+ * the class descriptor of the annotation class.
+ * @param visible
+ * <tt>true</tt> if the annotation is visible at runtime.
* @return a visitor to visit the annotation values, or <tt>null</tt> if
* this visitor is not interested in visiting this annotation.
*/
- public AnnotationVisitor visitParameterAnnotation(
- int parameter,
- String desc,
- boolean visible)
- {
+ public AnnotationVisitor visitParameterAnnotation(int parameter,
+ String desc, boolean visible) {
if (mv != null) {
return mv.visitParameterAnnotation(parameter, desc, visible);
}
@@ -145,7 +150,8 @@ public abstract class MethodVisitor {
/**
* Visits a non standard attribute of this method.
*
- * @param attr an attribute.
+ * @param attr
+ * an attribute.
*/
public void visitAttribute(Attribute attr) {
if (mv != null) {
@@ -169,57 +175,74 @@ public abstract class MethodVisitor {
* such as GOTO or THROW, that is the target of a jump instruction, or that
* starts an exception handler block. The visited types must describe the
* values of the local variables and of the operand stack elements <i>just
- * before</i> <b>i</b> is executed. <br> <br> (*) this is mandatory only
- * for classes whose version is greater than or equal to
- * {@link Opcodes#V1_6 V1_6}. <br> <br> Packed frames are basically
- * "deltas" from the state of the previous frame (very first frame is
- * implicitly defined by the method's parameters and access flags): <ul>
+ * before</i> <b>i</b> is executed.<br>
+ * <br>
+ * (*) this is mandatory only for classes whose version is greater than or
+ * equal to {@link Opcodes#V1_6 V1_6}. <br>
+ * <br>
+ * The frames of a method must be given either in expanded form, or in
+ * compressed form (all frames must use the same format, i.e. you must not
+ * mix expanded and compressed frames within a single method):
+ * <ul>
+ * <li>In expanded form, all frames must have the F_NEW type.</li>
+ * <li>In compressed form, frames are basically "deltas" from the state of
+ * the previous frame:
+ * <ul>
* <li>{@link Opcodes#F_SAME} representing frame with exactly the same
- * locals as the previous frame and with the empty stack.</li> <li>{@link Opcodes#F_SAME1}
- * representing frame with exactly the same locals as the previous frame and
- * with single value on the stack (<code>nStack</code> is 1 and
- * <code>stack[0]</code> contains value for the type of the stack item).</li>
+ * locals as the previous frame and with the empty stack.</li>
+ * <li>{@link Opcodes#F_SAME1} representing frame with exactly the same
+ * locals as the previous frame and with single value on the stack (
+ * <code>nStack</code> is 1 and <code>stack[0]</code> contains value for the
+ * type of the stack item).</li>
* <li>{@link Opcodes#F_APPEND} representing frame with current locals are
* the same as the locals in the previous frame, except that additional
* locals are defined (<code>nLocal</code> is 1, 2 or 3 and
* <code>local</code> elements contains values representing added types).</li>
- * <li>{@link Opcodes#F_CHOP} representing frame with current locals are
- * the same as the locals in the previous frame, except that the last 1-3
- * locals are absent and with the empty stack (<code>nLocals</code> is 1,
- * 2 or 3). </li> <li>{@link Opcodes#F_FULL} representing complete frame
- * data.</li> </li> </ul>
+ * <li>{@link Opcodes#F_CHOP} representing frame with current locals are the
+ * same as the locals in the previous frame, except that the last 1-3 locals
+ * are absent and with the empty stack (<code>nLocals</code> is 1, 2 or 3).</li>
+ * <li>{@link Opcodes#F_FULL} representing complete frame data.</li>
+ * </ul>
+ * </ul> <br>
+ * In both cases the first frame, corresponding to the method's parameters
+ * and access flags, is implicit and must not be visited. Also, it is
+ * illegal to visit two or more frames for the same code location (i.e., at
+ * least one instruction must be visited between two calls to visitFrame).
*
- * @param type the type of this stack map frame. Must be
- * {@link Opcodes#F_NEW} for expanded frames, or
- * {@link Opcodes#F_FULL}, {@link Opcodes#F_APPEND},
- * {@link Opcodes#F_CHOP}, {@link Opcodes#F_SAME} or
- * {@link Opcodes#F_APPEND}, {@link Opcodes#F_SAME1} for compressed
- * frames.
- * @param nLocal the number of local variables in the visited frame.
- * @param local the local variable types in this frame. This array must not
- * be modified. Primitive types are represented by
- * {@link Opcodes#TOP}, {@link Opcodes#INTEGER},
- * {@link Opcodes#FLOAT}, {@link Opcodes#LONG},
- * {@link Opcodes#DOUBLE},{@link Opcodes#NULL} or
- * {@link Opcodes#UNINITIALIZED_THIS} (long and double are
- * represented by a single element). Reference types are represented
- * by String objects (representing internal names), and uninitialized
- * types by Label objects (this label designates the NEW instruction
- * that created this uninitialized value).
- * @param nStack the number of operand stack elements in the visited frame.
- * @param stack the operand stack types in this frame. This array must not
- * be modified. Its content has the same format as the "local" array.
- * @throws IllegalStateException if a frame is visited just after another
- * one, without any instruction between the two (unless this frame
- * is a Opcodes#F_SAME frame, in which case it is silently ignored).
+ * @param type
+ * the type of this stack map frame. Must be
+ * {@link Opcodes#F_NEW} for expanded frames, or
+ * {@link Opcodes#F_FULL}, {@link Opcodes#F_APPEND},
+ * {@link Opcodes#F_CHOP}, {@link Opcodes#F_SAME} or
+ * {@link Opcodes#F_SAME1} for compressed frames.
+ * @param nLocal
+ * the number of local variables in the visited frame.
+ * @param local
+ * the local variable types in this frame. This array must not be
+ * modified. Primitive types are represented by
+ * {@link Opcodes#TOP}, {@link Opcodes#INTEGER},
+ * {@link Opcodes#FLOAT}, {@link Opcodes#LONG},
+ * {@link Opcodes#DOUBLE}, {@link Opcodes#NULL} or
+ * {@link Opcodes#UNINITIALIZED_THIS} (long and double are
+ * represented by a single element). Reference types are
+ * represented by String objects (representing internal names),
+ * and uninitialized types by Label objects (this label
+ * designates the NEW instruction that created this uninitialized
+ * value).
+ * @param nStack
+ * the number of operand stack elements in the visited frame.
+ * @param stack
+ * the operand stack types in this frame. This array must not be
+ * modified. Its content has the same format as the "local"
+ * array.
+ * @throws IllegalStateException
+ * if a frame is visited just after another one, without any
+ * instruction between the two (unless this frame is a
+ * Opcodes#F_SAME frame, in which case it is silently ignored).
*/
- public void visitFrame(
- int type,
- int nLocal,
- Object[] local,
- int nStack,
- Object[] stack)
- {
+ public void visitFrame(int type, int nLocal, Object[] local, int nStack,
+ Object[] stack) {
if (mv != null) {
mv.visitFrame(type, nLocal, local, nStack, stack);
}
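
The expanded documentation above pins down the rules: frames are either all expanded (F_NEW) or all compressed, the implicit first frame is never visited, and at most one frame may be visited per code location. A hedged sketch of two compressed-frame visits (indices and types are illustrative only):

    import scala.tools.asm.MethodVisitor;
    import scala.tools.asm.Opcodes;

    public class FrameSketch {
        // Sketch only: visits two compressed frames through whatever
        // MethodVisitor the caller supplies.
        static void visitCompressedFrames(MethodVisitor mv) {
            // At a jump target: same locals as before plus one extra int, empty stack.
            mv.visitFrame(Opcodes.F_APPEND, 1, new Object[] { Opcodes.INTEGER }, 0, null);
            // At a later target: that extra local is gone again, still an empty stack.
            mv.visitFrame(Opcodes.F_CHOP, 1, null, 0, null);
        }
    }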
@@ -232,20 +255,22 @@ public abstract class MethodVisitor {
/**
* Visits a zero operand instruction.
*
- * @param opcode the opcode of the instruction to be visited. This opcode is
- * either NOP, ACONST_NULL, ICONST_M1, ICONST_0, ICONST_1, ICONST_2,
- * ICONST_3, ICONST_4, ICONST_5, LCONST_0, LCONST_1, FCONST_0,
- * FCONST_1, FCONST_2, DCONST_0, DCONST_1, IALOAD, LALOAD, FALOAD,
- * DALOAD, AALOAD, BALOAD, CALOAD, SALOAD, IASTORE, LASTORE, FASTORE,
- * DASTORE, AASTORE, BASTORE, CASTORE, SASTORE, POP, POP2, DUP,
- * DUP_X1, DUP_X2, DUP2, DUP2_X1, DUP2_X2, SWAP, IADD, LADD, FADD,
- * DADD, ISUB, LSUB, FSUB, DSUB, IMUL, LMUL, FMUL, DMUL, IDIV, LDIV,
- * FDIV, DDIV, IREM, LREM, FREM, DREM, INEG, LNEG, FNEG, DNEG, ISHL,
- * LSHL, ISHR, LSHR, IUSHR, LUSHR, IAND, LAND, IOR, LOR, IXOR, LXOR,
- * I2L, I2F, I2D, L2I, L2F, L2D, F2I, F2L, F2D, D2I, D2L, D2F, I2B,
- * I2C, I2S, LCMP, FCMPL, FCMPG, DCMPL, DCMPG, IRETURN, LRETURN,
- * FRETURN, DRETURN, ARETURN, RETURN, ARRAYLENGTH, ATHROW,
- * MONITORENTER, or MONITOREXIT.
+ * @param opcode
+ * the opcode of the instruction to be visited. This opcode is
+ * either NOP, ACONST_NULL, ICONST_M1, ICONST_0, ICONST_1,
+ * ICONST_2, ICONST_3, ICONST_4, ICONST_5, LCONST_0, LCONST_1,
+ * FCONST_0, FCONST_1, FCONST_2, DCONST_0, DCONST_1, IALOAD,
+ * LALOAD, FALOAD, DALOAD, AALOAD, BALOAD, CALOAD, SALOAD,
+ * IASTORE, LASTORE, FASTORE, DASTORE, AASTORE, BASTORE, CASTORE,
+ * SASTORE, POP, POP2, DUP, DUP_X1, DUP_X2, DUP2, DUP2_X1,
+ * DUP2_X2, SWAP, IADD, LADD, FADD, DADD, ISUB, LSUB, FSUB, DSUB,
+ * IMUL, LMUL, FMUL, DMUL, IDIV, LDIV, FDIV, DDIV, IREM, LREM,
+ * FREM, DREM, INEG, LNEG, FNEG, DNEG, ISHL, LSHL, ISHR, LSHR,
+ * IUSHR, LUSHR, IAND, LAND, IOR, LOR, IXOR, LXOR, I2L, I2F, I2D,
+ * L2I, L2F, L2D, F2I, F2L, F2D, D2I, D2L, D2F, I2B, I2C, I2S,
+ * LCMP, FCMPL, FCMPG, DCMPL, DCMPG, IRETURN, LRETURN, FRETURN,
+ * DRETURN, ARETURN, RETURN, ARRAYLENGTH, ATHROW, MONITORENTER,
+ * or MONITOREXIT.
*/
public void visitInsn(int opcode) {
if (mv != null) {
@@ -256,17 +281,20 @@ public abstract class MethodVisitor {
/**
* Visits an instruction with a single int operand.
*
- * @param opcode the opcode of the instruction to be visited. This opcode is
- * either BIPUSH, SIPUSH or NEWARRAY.
- * @param operand the operand of the instruction to be visited.<br> When
- * opcode is BIPUSH, operand value should be between Byte.MIN_VALUE
- * and Byte.MAX_VALUE.<br> When opcode is SIPUSH, operand value
- * should be between Short.MIN_VALUE and Short.MAX_VALUE.<br> When
- * opcode is NEWARRAY, operand value should be one of
- * {@link Opcodes#T_BOOLEAN}, {@link Opcodes#T_CHAR},
- * {@link Opcodes#T_FLOAT}, {@link Opcodes#T_DOUBLE},
- * {@link Opcodes#T_BYTE}, {@link Opcodes#T_SHORT},
- * {@link Opcodes#T_INT} or {@link Opcodes#T_LONG}.
+ * @param opcode
+ * the opcode of the instruction to be visited. This opcode is
+ * either BIPUSH, SIPUSH or NEWARRAY.
+ * @param operand
+ * the operand of the instruction to be visited.<br>
+ * When opcode is BIPUSH, operand value should be between
+ * Byte.MIN_VALUE and Byte.MAX_VALUE.<br>
+ * When opcode is SIPUSH, operand value should be between
+ * Short.MIN_VALUE and Short.MAX_VALUE.<br>
+ * When opcode is NEWARRAY, operand value should be one of
+ * {@link Opcodes#T_BOOLEAN}, {@link Opcodes#T_CHAR},
+ * {@link Opcodes#T_FLOAT}, {@link Opcodes#T_DOUBLE},
+ * {@link Opcodes#T_BYTE}, {@link Opcodes#T_SHORT},
+ * {@link Opcodes#T_INT} or {@link Opcodes#T_LONG}.
*/
public void visitIntInsn(int opcode, int operand) {
if (mv != null) {
@@ -278,11 +306,13 @@ public abstract class MethodVisitor {
* Visits a local variable instruction. A local variable instruction is an
* instruction that loads or stores the value of a local variable.
*
- * @param opcode the opcode of the local variable instruction to be visited.
- * This opcode is either ILOAD, LLOAD, FLOAD, DLOAD, ALOAD, ISTORE,
- * LSTORE, FSTORE, DSTORE, ASTORE or RET.
- * @param var the operand of the instruction to be visited. This operand is
- * the index of a local variable.
+ * @param opcode
+ * the opcode of the local variable instruction to be visited.
+ * This opcode is either ILOAD, LLOAD, FLOAD, DLOAD, ALOAD,
+ * ISTORE, LSTORE, FSTORE, DSTORE, ASTORE or RET.
+ * @param var
+ * the operand of the instruction to be visited. This operand is
+ * the index of a local variable.
*/
public void visitVarInsn(int opcode, int var) {
if (mv != null) {
@@ -294,11 +324,13 @@ public abstract class MethodVisitor {
* Visits a type instruction. A type instruction is an instruction that
* takes the internal name of a class as parameter.
*
- * @param opcode the opcode of the type instruction to be visited. This
- * opcode is either NEW, ANEWARRAY, CHECKCAST or INSTANCEOF.
- * @param type the operand of the instruction to be visited. This operand
- * must be the internal name of an object or array class (see {@link
- * Type#getInternalName() getInternalName}).
+ * @param opcode
+ * the opcode of the type instruction to be visited. This opcode
+ * is either NEW, ANEWARRAY, CHECKCAST or INSTANCEOF.
+ * @param type
+ * the operand of the instruction to be visited. This operand
+ * must be the internal name of an object or array class (see
+ * {@link Type#getInternalName() getInternalName}).
*/
public void visitTypeInsn(int opcode, String type) {
if (mv != null) {
@@ -310,14 +342,19 @@ public abstract class MethodVisitor {
* Visits a field instruction. A field instruction is an instruction that
* loads or stores the value of a field of an object.
*
- * @param opcode the opcode of the type instruction to be visited. This
- * opcode is either GETSTATIC, PUTSTATIC, GETFIELD or PUTFIELD.
- * @param owner the internal name of the field's owner class (see {@link
- * Type#getInternalName() getInternalName}).
- * @param name the field's name.
- * @param desc the field's descriptor (see {@link Type Type}).
+ * @param opcode
+ * the opcode of the type instruction to be visited. This opcode
+ * is either GETSTATIC, PUTSTATIC, GETFIELD or PUTFIELD.
+ * @param owner
+ * the internal name of the field's owner class (see
+ * {@link Type#getInternalName() getInternalName}).
+ * @param name
+ * the field's name.
+ * @param desc
+ * the field's descriptor (see {@link Type Type}).
*/
- public void visitFieldInsn(int opcode, String owner, String name, String desc) {
+ public void visitFieldInsn(int opcode, String owner, String name,
+ String desc) {
if (mv != null) {
mv.visitFieldInsn(opcode, owner, name, desc);
}
@@ -327,15 +364,20 @@ public abstract class MethodVisitor {
* Visits a method instruction. A method instruction is an instruction that
* invokes a method.
*
- * @param opcode the opcode of the type instruction to be visited. This
- * opcode is either INVOKEVIRTUAL, INVOKESPECIAL, INVOKESTATIC
- * or INVOKEINTERFACE.
- * @param owner the internal name of the method's owner class (see {@link
- * Type#getInternalName() getInternalName}).
- * @param name the method's name.
- * @param desc the method's descriptor (see {@link Type Type}).
+ * @param opcode
+ * the opcode of the type instruction to be visited. This opcode
+ * is either INVOKEVIRTUAL, INVOKESPECIAL, INVOKESTATIC or
+ * INVOKEINTERFACE.
+ * @param owner
+ * the internal name of the method's owner class (see
+ * {@link Type#getInternalName() getInternalName}).
+ * @param name
+ * the method's name.
+ * @param desc
+ * the method's descriptor (see {@link Type Type}).
*/
- public void visitMethodInsn(int opcode, String owner, String name, String desc) {
+ public void visitMethodInsn(int opcode, String owner, String name,
+ String desc) {
if (mv != null) {
mv.visitMethodInsn(opcode, owner, name, desc);
}
@@ -344,16 +386,21 @@ public abstract class MethodVisitor {
/**
* Visits an invokedynamic instruction.
*
- * @param name the method's name.
- * @param desc the method's descriptor (see {@link Type Type}).
- * @param bsm the bootstrap method.
- * @param bsmArgs the bootstrap method constant arguments. Each argument
- * must be an {@link Integer}, {@link Float}, {@link Long},
- * {@link Double}, {@link String}, {@link Type} or {@link Handle}
- * value. This method is allowed to modify the content of the array
- * so a caller should expect that this array may change.
+ * @param name
+ * the method's name.
+ * @param desc
+ * the method's descriptor (see {@link Type Type}).
+ * @param bsm
+ * the bootstrap method.
+ * @param bsmArgs
+ * the bootstrap method constant arguments. Each argument must be
+ * an {@link Integer}, {@link Float}, {@link Long},
+ * {@link Double}, {@link String}, {@link Type} or {@link Handle}
+ * value. This method is allowed to modify the content of the
+ * array so a caller should expect that this array may change.
*/
- public void visitInvokeDynamicInsn(String name, String desc, Handle bsm, Object... bsmArgs) {
+ public void visitInvokeDynamicInsn(String name, String desc, Handle bsm,
+ Object... bsmArgs) {
if (mv != null) {
mv.visitInvokeDynamicInsn(name, desc, bsm, bsmArgs);
}
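
A sketch of how these parameters fit together: the Handle names the bootstrap method, and the name/desc pair describes the dynamic call site itself. The bootstrap class and method below are made up for illustration; only the (Lookup, String, MethodType) -> CallSite descriptor shape is standard:

    import scala.tools.asm.Handle;
    import scala.tools.asm.MethodVisitor;
    import scala.tools.asm.Opcodes;

    public class IndySketch {
        // Emits an invokedynamic whose call site takes no arguments and returns
        // a String. "demo/Bootstraps.bootstrap" is a hypothetical bootstrap method.
        static void emitGreeting(MethodVisitor mv) {
            Handle bsm = new Handle(
                    Opcodes.H_INVOKESTATIC,
                    "demo/Bootstraps",
                    "bootstrap",
                    "(Ljava/lang/invoke/MethodHandles$Lookup;Ljava/lang/String;"
                            + "Ljava/lang/invoke/MethodType;)Ljava/lang/invoke/CallSite;");
            mv.visitInvokeDynamicInsn("greet", "()Ljava/lang/String;", bsm);
        }
    }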
@@ -363,13 +410,15 @@ public abstract class MethodVisitor {
* Visits a jump instruction. A jump instruction is an instruction that may
* jump to another instruction.
*
- * @param opcode the opcode of the type instruction to be visited. This
- * opcode is either IFEQ, IFNE, IFLT, IFGE, IFGT, IFLE, IF_ICMPEQ,
- * IF_ICMPNE, IF_ICMPLT, IF_ICMPGE, IF_ICMPGT, IF_ICMPLE, IF_ACMPEQ,
- * IF_ACMPNE, GOTO, JSR, IFNULL or IFNONNULL.
- * @param label the operand of the instruction to be visited. This operand
- * is a label that designates the instruction to which the jump
- * instruction may jump.
+ * @param opcode
+ * the opcode of the type instruction to be visited. This opcode
+ * is either IFEQ, IFNE, IFLT, IFGE, IFGT, IFLE, IF_ICMPEQ,
+ * IF_ICMPNE, IF_ICMPLT, IF_ICMPGE, IF_ICMPGT, IF_ICMPLE,
+ * IF_ACMPEQ, IF_ACMPNE, GOTO, JSR, IFNULL or IFNONNULL.
+ * @param label
+ * the operand of the instruction to be visited. This operand is
+ * a label that designates the instruction to which the jump
+ * instruction may jump.
*/
public void visitJumpInsn(int opcode, Label label) {
if (mv != null) {
@@ -381,7 +430,8 @@ public abstract class MethodVisitor {
* Visits a label. A label designates the instruction that will be visited
* just after it.
*
- * @param label a {@link Label Label} object.
+ * @param label
+ * a {@link Label Label} object.
*/
public void visitLabel(Label label) {
if (mv != null) {
@@ -398,41 +448,44 @@ public abstract class MethodVisitor {
* future versions of the Java Virtual Machine. To easily detect new
* constant types, implementations of this method should check for
* unexpected constant types, like this:
+ *
* <pre>
* if (cst instanceof Integer) {
- * // ...
+ * // ...
* } else if (cst instanceof Float) {
- * // ...
+ * // ...
* } else if (cst instanceof Long) {
- * // ...
- * } else if (cst instanceof Double) {
- * // ...
- * } else if (cst instanceof String) {
- * // ...
- * } else if (cst instanceof Type) {
- * int sort = ((Type) cst).getSort();
- * if (sort == Type.OBJECT) {
* // ...
- * } else if (sort == Type.ARRAY) {
+ * } else if (cst instanceof Double) {
* // ...
- * } else if (sort == Type.METHOD) {
+ * } else if (cst instanceof String) {
* // ...
- * } else {
- * // throw an exception
- * }
+ * } else if (cst instanceof Type) {
+ * int sort = ((Type) cst).getSort();
+ * if (sort == Type.OBJECT) {
+ * // ...
+ * } else if (sort == Type.ARRAY) {
+ * // ...
+ * } else if (sort == Type.METHOD) {
+ * // ...
+ * } else {
+ * // throw an exception
+ * }
* } else if (cst instanceof Handle) {
- * // ...
+ * // ...
* } else {
- * // throw an exception
- * }</pre>
+ * // throw an exception
+ * }
+ * </pre>
*
- * @param cst the constant to be loaded on the stack. This parameter must be
- * a non null {@link Integer}, a {@link Float}, a {@link Long}, a
- * {@link Double}, a {@link String}, a {@link Type} of OBJECT or ARRAY
- * sort for <tt>.class</tt> constants, for classes whose version is
- * 49.0, a {@link Type} of METHOD sort or a {@link Handle} for
- * MethodType and MethodHandle constants, for classes whose version
- * is 51.0.
+ * @param cst
+ * the constant to be loaded on the stack. This parameter must be
+ * a non null {@link Integer}, a {@link Float}, a {@link Long}, a
+ * {@link Double}, a {@link String}, a {@link Type} of OBJECT or
+ * ARRAY sort for <tt>.class</tt> constants, for classes whose
+ * version is 49.0, a {@link Type} of METHOD sort or a
+ * {@link Handle} for MethodType and MethodHandle constants, for
+ * classes whose version is 51.0.
*/
public void visitLdcInsn(Object cst) {
if (mv != null) {
@@ -443,8 +496,10 @@ public abstract class MethodVisitor {
/**
* Visits an IINC instruction.
*
- * @param var index of the local variable to be incremented.
- * @param increment amount to increment the local variable by.
+ * @param var
+ * index of the local variable to be incremented.
+ * @param increment
+ * amount to increment the local variable by.
*/
public void visitIincInsn(int var, int increment) {
if (mv != null) {
@@ -455,13 +510,18 @@ public abstract class MethodVisitor {
/**
* Visits a TABLESWITCH instruction.
*
- * @param min the minimum key value.
- * @param max the maximum key value.
- * @param dflt beginning of the default handler block.
- * @param labels beginnings of the handler blocks. <tt>labels[i]</tt> is
- * the beginning of the handler block for the <tt>min + i</tt> key.
+ * @param min
+ * the minimum key value.
+ * @param max
+ * the maximum key value.
+ * @param dflt
+ * beginning of the default handler block.
+ * @param labels
+ * beginnings of the handler blocks. <tt>labels[i]</tt> is the
+ * beginning of the handler block for the <tt>min + i</tt> key.
*/
- public void visitTableSwitchInsn(int min, int max, Label dflt, Label... labels) {
+ public void visitTableSwitchInsn(int min, int max, Label dflt,
+ Label... labels) {
if (mv != null) {
mv.visitTableSwitchInsn(min, max, dflt, labels);
}
@@ -470,10 +530,13 @@ public abstract class MethodVisitor {
/**
* Visits a LOOKUPSWITCH instruction.
*
- * @param dflt beginning of the default handler block.
- * @param keys the values of the keys.
- * @param labels beginnings of the handler blocks. <tt>labels[i]</tt> is
- * the beginning of the handler block for the <tt>keys[i]</tt> key.
+ * @param dflt
+ * beginning of the default handler block.
+ * @param keys
+ * the values of the keys.
+ * @param labels
+ * beginnings of the handler blocks. <tt>labels[i]</tt> is the
+ * beginning of the handler block for the <tt>keys[i]</tt> key.
*/
public void visitLookupSwitchInsn(Label dflt, int[] keys, Label[] labels) {
if (mv != null) {
@@ -484,8 +547,10 @@ public abstract class MethodVisitor {
/**
* Visits a MULTIANEWARRAY instruction.
*
- * @param desc an array type descriptor (see {@link Type Type}).
- * @param dims number of dimensions of the array to allocate.
+ * @param desc
+ * an array type descriptor (see {@link Type Type}).
+ * @param dims
+ * number of dimensions of the array to allocate.
*/
public void visitMultiANewArrayInsn(String desc, int dims) {
if (mv != null) {
@@ -500,17 +565,22 @@ public abstract class MethodVisitor {
/**
* Visits a try catch block.
*
- * @param start beginning of the exception handler's scope (inclusive).
- * @param end end of the exception handler's scope (exclusive).
- * @param handler beginning of the exception handler's code.
- * @param type internal name of the type of exceptions handled by the
- * handler, or <tt>null</tt> to catch any exceptions (for "finally"
- * blocks).
- * @throws IllegalArgumentException if one of the labels has already been
- * visited by this visitor (by the {@link #visitLabel visitLabel}
- * method).
+ * @param start
+ * beginning of the exception handler's scope (inclusive).
+ * @param end
+ * end of the exception handler's scope (exclusive).
+ * @param handler
+ * beginning of the exception handler's code.
+ * @param type
+ * internal name of the type of exceptions handled by the
+ * handler, or <tt>null</tt> to catch any exceptions (for
+ * "finally" blocks).
+ * @throws IllegalArgumentException
+ * if one of the labels has already been visited by this visitor
+ * (by the {@link #visitLabel visitLabel} method).
*/
- public void visitTryCatchBlock(Label start, Label end, Label handler, String type) {
+ public void visitTryCatchBlock(Label start, Label end, Label handler,
+ String type) {
if (mv != null) {
mv.visitTryCatchBlock(start, end, handler, type);
}
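
The ordering constraint documented above is easy to trip over: visitTryCatchBlock must be called before any of its three labels is visited. A sketch of the correct ordering (exception type and instructions chosen only for illustration):

    import scala.tools.asm.Label;
    import scala.tools.asm.MethodVisitor;
    import scala.tools.asm.Opcodes;

    public class TryCatchSketch {
        // Declares the handler range first, then visits the labels in code order.
        static void emitGuardedRegion(MethodVisitor mv) {
            Label start = new Label();
            Label end = new Label();
            Label handler = new Label();

            // Must come before any of the three labels is visited.
            mv.visitTryCatchBlock(start, end, handler, "java/io/IOException");

            mv.visitLabel(start);
            // ... protected instructions would be visited here ...
            mv.visitLabel(end);
            mv.visitInsn(Opcodes.RETURN);

            mv.visitLabel(handler);
            mv.visitInsn(Opcodes.ATHROW);  // rethrow, just to keep the sketch short
        }
    }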
@@ -519,28 +589,28 @@ public abstract class MethodVisitor {
/**
* Visits a local variable declaration.
*
- * @param name the name of a local variable.
- * @param desc the type descriptor of this local variable.
- * @param signature the type signature of this local variable. May be
- * <tt>null</tt> if the local variable type does not use generic
- * types.
- * @param start the first instruction corresponding to the scope of this
- * local variable (inclusive).
- * @param end the last instruction corresponding to the scope of this local
- * variable (exclusive).
- * @param index the local variable's index.
- * @throws IllegalArgumentException if one of the labels has not already
- * been visited by this visitor (by the
- * {@link #visitLabel visitLabel} method).
+ * @param name
+ * the name of a local variable.
+ * @param desc
+ * the type descriptor of this local variable.
+ * @param signature
+ * the type signature of this local variable. May be
+ * <tt>null</tt> if the local variable type does not use generic
+ * types.
+ * @param start
+ * the first instruction corresponding to the scope of this local
+ * variable (inclusive).
+ * @param end
+ * the last instruction corresponding to the scope of this local
+ * variable (exclusive).
+ * @param index
+ * the local variable's index.
+ * @throws IllegalArgumentException
+ * if one of the labels has not already been visited by this
+ * visitor (by the {@link #visitLabel visitLabel} method).
*/
- public void visitLocalVariable(
- String name,
- String desc,
- String signature,
- Label start,
- Label end,
- int index)
- {
+ public void visitLocalVariable(String name, String desc, String signature,
+ Label start, Label end, int index) {
if (mv != null) {
mv.visitLocalVariable(name, desc, signature, start, end, index);
}
@@ -549,12 +619,14 @@ public abstract class MethodVisitor {
/**
* Visits a line number declaration.
*
- * @param line a line number. This number refers to the source file from
- * which the class was compiled.
- * @param start the first instruction corresponding to this line number.
- * @throws IllegalArgumentException if <tt>start</tt> has not already been
- * visited by this visitor (by the {@link #visitLabel visitLabel}
- * method).
+ * @param line
+ * a line number. This number refers to the source file from
+ * which the class was compiled.
+ * @param start
+ * the first instruction corresponding to this line number.
+ * @throws IllegalArgumentException
+ * if <tt>start</tt> has not already been visited by this
+ * visitor (by the {@link #visitLabel visitLabel} method).
*/
public void visitLineNumber(int line, Label start) {
if (mv != null) {
@@ -566,8 +638,10 @@ public abstract class MethodVisitor {
* Visits the maximum stack size and the maximum number of local variables
* of the method.
*
- * @param maxStack maximum stack size of the method.
- * @param maxLocals maximum number of local variables for the method.
+ * @param maxStack
+ * maximum stack size of the method.
+ * @param maxLocals
+ * maximum number of local variables for the method.
*/
public void visitMaxs(int maxStack, int maxLocals) {
if (mv != null) {
diff --git a/src/asm/scala/tools/asm/MethodWriter.java b/src/asm/scala/tools/asm/MethodWriter.java
index 321bacb6fc..f5fbd1e74f 100644
--- a/src/asm/scala/tools/asm/MethodWriter.java
+++ b/src/asm/scala/tools/asm/MethodWriter.java
@@ -42,7 +42,7 @@ class MethodWriter extends MethodVisitor {
/**
* Pseudo access flag used to denote constructors.
*/
- static final int ACC_CONSTRUCTOR = 262144;
+ static final int ACC_CONSTRUCTOR = 0x80000;
/**
* Frame has exactly the same locals as the previous stack map frame and
@@ -229,7 +229,7 @@ class MethodWriter extends MethodVisitor {
private int maxLocals;
/**
- * Number of local variables in the current stack map frame.
+ * Number of local variables in the current stack map frame.
*/
private int currentLocals;
@@ -257,11 +257,6 @@ class MethodWriter extends MethodVisitor {
private int[] previousFrame;
/**
- * Index of the next element to be added in {@link #frame}.
- */
- private int frameIndex;
-
- /**
* The current stack map frame. The first element contains the offset of the
* instruction to which the frame corresponds, the second element is the
* number of locals and the third one is the number of stack elements. The
@@ -357,7 +352,8 @@ class MethodWriter extends MethodVisitor {
* A list of labels. This list is the list of basic blocks in the method,
* i.e. a list of Label objects linked to each other by their
* {@link Label#successor} field, in the order they are visited by
- * {@link MethodVisitor#visitLabel}, and starting with the first basic block.
+ * {@link MethodVisitor#visitLabel}, and starting with the first basic
+ * block.
*/
private Label labels;
@@ -396,28 +392,30 @@ class MethodWriter extends MethodVisitor {
/**
* Constructs a new {@link MethodWriter}.
*
- * @param cw the class writer in which the method must be added.
- * @param access the method's access flags (see {@link Opcodes}).
- * @param name the method's name.
- * @param desc the method's descriptor (see {@link Type}).
- * @param signature the method's signature. May be <tt>null</tt>.
- * @param exceptions the internal names of the method's exceptions. May be
- * <tt>null</tt>.
- * @param computeMaxs <tt>true</tt> if the maximum stack size and number
- * of local variables must be automatically computed.
- * @param computeFrames <tt>true</tt> if the stack map tables must be
- * recomputed from scratch.
- */
- MethodWriter(
- final ClassWriter cw,
- final int access,
- final String name,
- final String desc,
- final String signature,
- final String[] exceptions,
- final boolean computeMaxs,
- final boolean computeFrames)
- {
+ * @param cw
+ * the class writer in which the method must be added.
+ * @param access
+ * the method's access flags (see {@link Opcodes}).
+ * @param name
+ * the method's name.
+ * @param desc
+ * the method's descriptor (see {@link Type}).
+ * @param signature
+ * the method's signature. May be <tt>null</tt>.
+ * @param exceptions
+ * the internal names of the method's exceptions. May be
+ * <tt>null</tt>.
+ * @param computeMaxs
+ * <tt>true</tt> if the maximum stack size and number of local
+ * variables must be automatically computed.
+ * @param computeFrames
+ * <tt>true</tt> if the stack map tables must be recomputed from
+ * scratch.
+ */
+ MethodWriter(final ClassWriter cw, final int access, final String name,
+ final String desc, final String signature,
+ final String[] exceptions, final boolean computeMaxs,
+ final boolean computeFrames) {
super(Opcodes.ASM4);
if (cw.firstMethod == null) {
cw.firstMethod = this;
@@ -427,6 +425,9 @@ class MethodWriter extends MethodVisitor {
cw.lastMethod = this;
this.cw = cw;
this.access = access;
+ if ("<init>".equals(name)) {
+ this.access |= ACC_CONSTRUCTOR;
+ }
this.name = cw.newUTF8(name);
this.desc = cw.newUTF8(desc);
this.descriptor = desc;
@@ -442,9 +443,6 @@ class MethodWriter extends MethodVisitor {
}
this.compute = computeFrames ? FRAMES : (computeMaxs ? MAXS : NOTHING);
if (computeMaxs || computeFrames) {
- if (computeFrames && "<init>".equals(name)) {
- this.access |= ACC_CONSTRUCTOR;
- }
// updates maxLocals
int size = Type.getArgumentsAndReturnSizes(descriptor) >> 2;
if ((access & Opcodes.ACC_STATIC) != 0) {
@@ -473,10 +471,8 @@ class MethodWriter extends MethodVisitor {
}
@Override
- public AnnotationVisitor visitAnnotation(
- final String desc,
- final boolean visible)
- {
+ public AnnotationVisitor visitAnnotation(final String desc,
+ final boolean visible) {
if (!ClassReader.ANNOTATIONS) {
return null;
}
@@ -495,11 +491,8 @@ class MethodWriter extends MethodVisitor {
}
@Override
- public AnnotationVisitor visitParameterAnnotation(
- final int parameter,
- final String desc,
- final boolean visible)
- {
+ public AnnotationVisitor visitParameterAnnotation(final int parameter,
+ final String desc, final boolean visible) {
if (!ClassReader.ANNOTATIONS) {
return null;
}
@@ -545,20 +538,18 @@ class MethodWriter extends MethodVisitor {
}
@Override
- public void visitFrame(
- final int type,
- final int nLocal,
- final Object[] local,
- final int nStack,
- final Object[] stack)
- {
+ public void visitFrame(final int type, final int nLocal,
+ final Object[] local, final int nStack, final Object[] stack) {
if (!ClassReader.FRAMES || compute == FRAMES) {
return;
}
if (type == Opcodes.F_NEW) {
+ if (previousFrame == null) {
+ visitImplicitFirstFrame();
+ }
currentLocals = nLocal;
- startFrame(code.length, nLocal, nStack);
+ int frameIndex = startFrame(code.length, nLocal, nStack);
for (int i = 0; i < nLocal; ++i) {
if (local[i] instanceof String) {
frame[frameIndex++] = Frame.OBJECT
@@ -601,48 +592,44 @@ class MethodWriter extends MethodVisitor {
}
switch (type) {
- case Opcodes.F_FULL:
- currentLocals = nLocal;
- stackMap.putByte(FULL_FRAME)
- .putShort(delta)
- .putShort(nLocal);
- for (int i = 0; i < nLocal; ++i) {
- writeFrameType(local[i]);
- }
- stackMap.putShort(nStack);
- for (int i = 0; i < nStack; ++i) {
- writeFrameType(stack[i]);
- }
- break;
- case Opcodes.F_APPEND:
- currentLocals += nLocal;
- stackMap.putByte(SAME_FRAME_EXTENDED + nLocal)
- .putShort(delta);
- for (int i = 0; i < nLocal; ++i) {
- writeFrameType(local[i]);
- }
- break;
- case Opcodes.F_CHOP:
- currentLocals -= nLocal;
- stackMap.putByte(SAME_FRAME_EXTENDED - nLocal)
+ case Opcodes.F_FULL:
+ currentLocals = nLocal;
+ stackMap.putByte(FULL_FRAME).putShort(delta).putShort(nLocal);
+ for (int i = 0; i < nLocal; ++i) {
+ writeFrameType(local[i]);
+ }
+ stackMap.putShort(nStack);
+ for (int i = 0; i < nStack; ++i) {
+ writeFrameType(stack[i]);
+ }
+ break;
+ case Opcodes.F_APPEND:
+ currentLocals += nLocal;
+ stackMap.putByte(SAME_FRAME_EXTENDED + nLocal).putShort(delta);
+ for (int i = 0; i < nLocal; ++i) {
+ writeFrameType(local[i]);
+ }
+ break;
+ case Opcodes.F_CHOP:
+ currentLocals -= nLocal;
+ stackMap.putByte(SAME_FRAME_EXTENDED - nLocal).putShort(delta);
+ break;
+ case Opcodes.F_SAME:
+ if (delta < 64) {
+ stackMap.putByte(delta);
+ } else {
+ stackMap.putByte(SAME_FRAME_EXTENDED).putShort(delta);
+ }
+ break;
+ case Opcodes.F_SAME1:
+ if (delta < 64) {
+ stackMap.putByte(SAME_LOCALS_1_STACK_ITEM_FRAME + delta);
+ } else {
+ stackMap.putByte(SAME_LOCALS_1_STACK_ITEM_FRAME_EXTENDED)
.putShort(delta);
- break;
- case Opcodes.F_SAME:
- if (delta < 64) {
- stackMap.putByte(delta);
- } else {
- stackMap.putByte(SAME_FRAME_EXTENDED).putShort(delta);
- }
- break;
- case Opcodes.F_SAME1:
- if (delta < 64) {
- stackMap.putByte(SAME_LOCALS_1_STACK_ITEM_FRAME + delta);
- } else {
- stackMap.putByte(SAME_LOCALS_1_STACK_ITEM_FRAME_EXTENDED)
- .putShort(delta);
- }
- writeFrameType(stack[0]);
- break;
+ }
+ writeFrameType(stack[0]);
+ break;
}
previousFrameOffset = code.length;
@@ -672,8 +659,7 @@ class MethodWriter extends MethodVisitor {
}
// if opcode == ATHROW or xRETURN, ends current block (no successor)
if ((opcode >= Opcodes.IRETURN && opcode <= Opcodes.RETURN)
- || opcode == Opcodes.ATHROW)
- {
+ || opcode == Opcodes.ATHROW) {
noSuccessor();
}
}
@@ -731,8 +717,7 @@ class MethodWriter extends MethodVisitor {
// updates max locals
int n;
if (opcode == Opcodes.LLOAD || opcode == Opcodes.DLOAD
- || opcode == Opcodes.LSTORE || opcode == Opcodes.DSTORE)
- {
+ || opcode == Opcodes.LSTORE || opcode == Opcodes.DSTORE) {
n = var + 2;
} else {
n = var + 1;
@@ -784,12 +769,8 @@ class MethodWriter extends MethodVisitor {
}
@Override
- public void visitFieldInsn(
- final int opcode,
- final String owner,
- final String name,
- final String desc)
- {
+ public void visitFieldInsn(final int opcode, final String owner,
+ final String name, final String desc) {
Item i = cw.newFieldItem(owner, name, desc);
// Label currentBlock = this.currentBlock;
if (currentBlock != null) {
@@ -800,19 +781,19 @@ class MethodWriter extends MethodVisitor {
// computes the stack size variation
char c = desc.charAt(0);
switch (opcode) {
- case Opcodes.GETSTATIC:
- size = stackSize + (c == 'D' || c == 'J' ? 2 : 1);
- break;
- case Opcodes.PUTSTATIC:
- size = stackSize + (c == 'D' || c == 'J' ? -2 : -1);
- break;
- case Opcodes.GETFIELD:
- size = stackSize + (c == 'D' || c == 'J' ? 1 : 0);
- break;
- // case Constants.PUTFIELD:
- default:
- size = stackSize + (c == 'D' || c == 'J' ? -3 : -2);
- break;
+ case Opcodes.GETSTATIC:
+ size = stackSize + (c == 'D' || c == 'J' ? 2 : 1);
+ break;
+ case Opcodes.PUTSTATIC:
+ size = stackSize + (c == 'D' || c == 'J' ? -2 : -1);
+ break;
+ case Opcodes.GETFIELD:
+ size = stackSize + (c == 'D' || c == 'J' ? 1 : 0);
+ break;
+ // case Constants.PUTFIELD:
+ default:
+ size = stackSize + (c == 'D' || c == 'J' ? -3 : -2);
+ break;
}
// updates current and max stack sizes
if (size > maxStackSize) {
@@ -826,12 +807,8 @@ class MethodWriter extends MethodVisitor {
}
@Override
- public void visitMethodInsn(
- final int opcode,
- final String owner,
- final String name,
- final String desc)
- {
+ public void visitMethodInsn(final int opcode, final String owner,
+ final String name, final String desc) {
boolean itf = opcode == Opcodes.INVOKEINTERFACE;
Item i = cw.newMethodItem(owner, name, desc, itf);
int argSize = i.intVal;
@@ -882,12 +859,8 @@ class MethodWriter extends MethodVisitor {
}
@Override
- public void visitInvokeDynamicInsn(
- final String name,
- final String desc,
- final Handle bsm,
- final Object... bsmArgs)
- {
+ public void visitInvokeDynamicInsn(final String name, final String desc,
+ final Handle bsm, final Object... bsmArgs) {
Item i = cw.newInvokeDynamicItem(name, desc, bsm, bsmArgs);
int argSize = i.intVal;
// Label currentBlock = this.currentBlock;
@@ -967,8 +940,7 @@ class MethodWriter extends MethodVisitor {
}
// adds the instruction to the bytecode of the method
if ((label.status & Label.RESOLVED) != 0
- && label.position - code.length < Short.MIN_VALUE)
- {
+ && label.position - code.length < Short.MIN_VALUE) {
/*
* case of a backward jump with an offset < -32768. In this case we
* automatically replace GOTO with GOTO_W, JSR with JSR_W and IFxxx
@@ -986,8 +958,7 @@ class MethodWriter extends MethodVisitor {
if (nextInsn != null) {
nextInsn.status |= Label.TARGET;
}
- code.putByte(opcode <= 166
- ? ((opcode + 1) ^ 1) - 1
+ code.putByte(opcode <= 166 ? ((opcode + 1) ^ 1) - 1
: opcode ^ 1);
code.putShort(8); // jump offset
code.putByte(200); // GOTO_W
@@ -1082,8 +1053,7 @@ class MethodWriter extends MethodVisitor {
} else {
int size;
// computes the stack size variation
- if (i.type == ClassWriter.LONG || i.type == ClassWriter.DOUBLE)
- {
+ if (i.type == ClassWriter.LONG || i.type == ClassWriter.DOUBLE) {
size = stackSize + 2;
} else {
size = stackSize + 1;
@@ -1122,8 +1092,7 @@ class MethodWriter extends MethodVisitor {
}
// adds the instruction to the bytecode of the method
if ((var > 255) || (increment > 127) || (increment < -128)) {
- code.putByte(196 /* WIDE */)
- .put12(Opcodes.IINC, var)
+ code.putByte(196 /* WIDE */).put12(Opcodes.IINC, var)
.putShort(increment);
} else {
code.putByte(Opcodes.IINC).put11(var, increment);
@@ -1131,12 +1100,8 @@ class MethodWriter extends MethodVisitor {
}
@Override
- public void visitTableSwitchInsn(
- final int min,
- final int max,
- final Label dflt,
- final Label... labels)
- {
+ public void visitTableSwitchInsn(final int min, final int max,
+ final Label dflt, final Label... labels) {
// adds the instruction to the bytecode of the method
int source = code.length;
code.putByte(Opcodes.TABLESWITCH);
@@ -1151,11 +1116,8 @@ class MethodWriter extends MethodVisitor {
}
@Override
- public void visitLookupSwitchInsn(
- final Label dflt,
- final int[] keys,
- final Label[] labels)
- {
+ public void visitLookupSwitchInsn(final Label dflt, final int[] keys,
+ final Label[] labels) {
// adds the instruction to the bytecode of the method
int source = code.length;
code.putByte(Opcodes.LOOKUPSWITCH);
@@ -1214,12 +1176,8 @@ class MethodWriter extends MethodVisitor {
}
@Override
- public void visitTryCatchBlock(
- final Label start,
- final Label end,
- final Label handler,
- final String type)
- {
+ public void visitTryCatchBlock(final Label start, final Label end,
+ final Label handler, final String type) {
++handlerCount;
Handler h = new Handler();
h.start = start;
@@ -1236,14 +1194,9 @@ class MethodWriter extends MethodVisitor {
}
@Override
- public void visitLocalVariable(
- final String name,
- final String desc,
- final String signature,
- final Label start,
- final Label end,
- final int index)
- {
+ public void visitLocalVariable(final String name, final String desc,
+ final String signature, final Label start, final Label end,
+ final int index) {
if (signature != null) {
if (localVarType == null) {
localVarType = new ByteVector();
@@ -1251,8 +1204,7 @@ class MethodWriter extends MethodVisitor {
++localVarTypeCount;
localVarType.putShort(start.position)
.putShort(end.position - start.position)
- .putShort(cw.newUTF8(name))
- .putShort(cw.newUTF8(signature))
+ .putShort(cw.newUTF8(name)).putShort(cw.newUTF8(signature))
.putShort(index);
}
if (localVar == null) {
@@ -1261,8 +1213,7 @@ class MethodWriter extends MethodVisitor {
++localVarCount;
localVar.putShort(start.position)
.putShort(end.position - start.position)
- .putShort(cw.newUTF8(name))
- .putShort(cw.newUTF8(desc))
+ .putShort(cw.newUTF8(name)).putShort(cw.newUTF8(desc))
.putShort(index);
if (compute != NOTHING) {
// updates max locals
@@ -1294,8 +1245,7 @@ class MethodWriter extends MethodVisitor {
Label h = handler.handler.getFirst();
Label e = handler.end.getFirst();
// computes the kind of the edges to 'h'
- String t = handler.desc == null
- ? "java/lang/Throwable"
+ String t = handler.desc == null ? "java/lang/Throwable"
: handler.desc;
int kind = Frame.OBJECT | cw.addType(t);
// h is an exception handler
@@ -1382,11 +1332,12 @@ class MethodWriter extends MethodVisitor {
}
code.data[end] = (byte) Opcodes.ATHROW;
// emits a frame for this unreachable block
- startFrame(start, 0, 1);
- frame[frameIndex++] = Frame.OBJECT
+ int frameIndex = startFrame(start, 0, 1);
+ frame[frameIndex] = Frame.OBJECT
| cw.addType("java/lang/Throwable");
endFrame();
- // removes the start-end range from the exception handlers
+ // removes the start-end range from the exception
+ // handlers
firstHandler = Handler.remove(firstHandler, l, k);
}
}
@@ -1535,8 +1486,10 @@ class MethodWriter extends MethodVisitor {
/**
* Adds a successor to the {@link #currentBlock currentBlock} block.
*
- * @param info information about the control flow edge to be added.
- * @param successor the successor block to be added to the current block.
+ * @param info
+ * information about the control flow edge to be added.
+ * @param successor
+ * the successor block to be added to the current block.
*/
private void addSuccessor(final int info, final Label successor) {
// creates and initializes an Edge object...
@@ -1573,7 +1526,8 @@ class MethodWriter extends MethodVisitor {
/**
* Visits a frame that has been computed from scratch.
*
- * @param f the frame that must be visited.
+ * @param f
+ * the frame that must be visited.
*/
private void visitFrame(final Frame f) {
int i, t;
@@ -1606,7 +1560,7 @@ class MethodWriter extends MethodVisitor {
}
}
// visits the frame and its content
- startFrame(f.owner.position, nLocal, nStack);
+ int frameIndex = startFrame(f.owner.position, nLocal, nStack);
for (i = 0; nLocal > 0; ++i, --nLocal) {
t = locals[i];
frame[frameIndex++] = t;
@@ -1625,15 +1579,78 @@ class MethodWriter extends MethodVisitor {
}
/**
+ * Visit the implicit first frame of this method.
+ */
+ private void visitImplicitFirstFrame() {
+ // There can be at most descriptor.length() + 1 locals
+ int frameIndex = startFrame(0, descriptor.length() + 1, 0);
+ if ((access & Opcodes.ACC_STATIC) == 0) {
+ if ((access & ACC_CONSTRUCTOR) == 0) {
+ frame[frameIndex++] = Frame.OBJECT | cw.addType(cw.thisName);
+ } else {
+ frame[frameIndex++] = 6; // Opcodes.UNINITIALIZED_THIS;
+ }
+ }
+ int i = 1;
+ loop: while (true) {
+ int j = i;
+ switch (descriptor.charAt(i++)) {
+ case 'Z':
+ case 'C':
+ case 'B':
+ case 'S':
+ case 'I':
+ frame[frameIndex++] = 1; // Opcodes.INTEGER;
+ break;
+ case 'F':
+ frame[frameIndex++] = 2; // Opcodes.FLOAT;
+ break;
+ case 'J':
+ frame[frameIndex++] = 4; // Opcodes.LONG;
+ break;
+ case 'D':
+ frame[frameIndex++] = 3; // Opcodes.DOUBLE;
+ break;
+ case '[':
+ while (descriptor.charAt(i) == '[') {
+ ++i;
+ }
+ if (descriptor.charAt(i) == 'L') {
+ ++i;
+ while (descriptor.charAt(i) != ';') {
+ ++i;
+ }
+ }
+ frame[frameIndex++] = Frame.OBJECT
+ | cw.addType(descriptor.substring(j, ++i));
+ break;
+ case 'L':
+ while (descriptor.charAt(i) != ';') {
+ ++i;
+ }
+ frame[frameIndex++] = Frame.OBJECT
+ | cw.addType(descriptor.substring(j + 1, i++));
+ break;
+ default:
+ break loop;
+ }
+ }
+ frame[1] = frameIndex - 3;
+ endFrame();
+ }
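
As a worked trace of the descriptor loop above: for a virtual, non-constructor method with descriptor (I[JLjava/lang/String;)V declared in a hypothetical class Example, visitImplicitFirstFrame fills in, after the three header slots written by startFrame:

    // frame[3] = Frame.OBJECT | cw.addType("Example")           (the receiver, since ACC_STATIC is clear)
    // frame[4] = 1                                               (Opcodes.INTEGER, for the int argument)
    // frame[5] = Frame.OBJECT | cw.addType("[J")                 (the long[] argument, '[' branch)
    // frame[6] = Frame.OBJECT | cw.addType("java/lang/String")   (the String argument, 'L' branch)
    // frame[1] = frameIndex - 3 = 4                              (actual number of locals in the frame)

The loop then hits ')' in the descriptor and exits via break loop.
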
+
+ /**
* Starts the visit of a stack map frame.
*
- * @param offset the offset of the instruction to which the frame
- * corresponds.
- * @param nLocal the number of local variables in the frame.
- * @param nStack the number of stack elements in the frame.
- */
- private void startFrame(final int offset, final int nLocal, final int nStack)
- {
+ * @param offset
+ * the offset of the instruction to which the frame corresponds.
+ * @param nLocal
+ * the number of local variables in the frame.
+ * @param nStack
+ * the number of stack elements in the frame.
+ * @return the index of the next element to be written in this frame.
+ */
+ private int startFrame(final int offset, final int nLocal, final int nStack) {
int n = 3 + nLocal + nStack;
if (frame == null || frame.length < n) {
frame = new int[n];
@@ -1641,7 +1658,7 @@ class MethodWriter extends MethodVisitor {
frame[0] = offset;
frame[1] = nLocal;
frame[2] = nStack;
- frameIndex = 3;
+ return 3;
}
/**
@@ -1686,24 +1703,23 @@ class MethodWriter extends MethodVisitor {
if (cstackSize == 0) {
k = clocalsSize - localsSize;
switch (k) {
- case -3:
- case -2:
- case -1:
- type = CHOP_FRAME;
- localsSize = clocalsSize;
- break;
- case 0:
- type = delta < 64 ? SAME_FRAME : SAME_FRAME_EXTENDED;
- break;
- case 1:
- case 2:
- case 3:
- type = APPEND_FRAME;
- break;
+ case -3:
+ case -2:
+ case -1:
+ type = CHOP_FRAME;
+ localsSize = clocalsSize;
+ break;
+ case 0:
+ type = delta < 64 ? SAME_FRAME : SAME_FRAME_EXTENDED;
+ break;
+ case 1:
+ case 2:
+ case 3:
+ type = APPEND_FRAME;
+ break;
}
} else if (clocalsSize == localsSize && cstackSize == 1) {
- type = delta < 63
- ? SAME_LOCALS_1_STACK_ITEM_FRAME
+ type = delta < 63 ? SAME_LOCALS_1_STACK_ITEM_FRAME
: SAME_LOCALS_1_STACK_ITEM_FRAME_EXTENDED;
}
if (type != FULL_FRAME) {
@@ -1718,36 +1734,34 @@ class MethodWriter extends MethodVisitor {
}
}
switch (type) {
- case SAME_FRAME:
- stackMap.putByte(delta);
- break;
- case SAME_LOCALS_1_STACK_ITEM_FRAME:
- stackMap.putByte(SAME_LOCALS_1_STACK_ITEM_FRAME + delta);
- writeFrameTypes(3 + clocalsSize, 4 + clocalsSize);
- break;
- case SAME_LOCALS_1_STACK_ITEM_FRAME_EXTENDED:
- stackMap.putByte(SAME_LOCALS_1_STACK_ITEM_FRAME_EXTENDED)
- .putShort(delta);
- writeFrameTypes(3 + clocalsSize, 4 + clocalsSize);
- break;
- case SAME_FRAME_EXTENDED:
- stackMap.putByte(SAME_FRAME_EXTENDED).putShort(delta);
- break;
- case CHOP_FRAME:
- stackMap.putByte(SAME_FRAME_EXTENDED + k).putShort(delta);
- break;
- case APPEND_FRAME:
- stackMap.putByte(SAME_FRAME_EXTENDED + k).putShort(delta);
- writeFrameTypes(3 + localsSize, 3 + clocalsSize);
- break;
- // case FULL_FRAME:
- default:
- stackMap.putByte(FULL_FRAME)
- .putShort(delta)
- .putShort(clocalsSize);
- writeFrameTypes(3, 3 + clocalsSize);
- stackMap.putShort(cstackSize);
- writeFrameTypes(3 + clocalsSize, 3 + clocalsSize + cstackSize);
+ case SAME_FRAME:
+ stackMap.putByte(delta);
+ break;
+ case SAME_LOCALS_1_STACK_ITEM_FRAME:
+ stackMap.putByte(SAME_LOCALS_1_STACK_ITEM_FRAME + delta);
+ writeFrameTypes(3 + clocalsSize, 4 + clocalsSize);
+ break;
+ case SAME_LOCALS_1_STACK_ITEM_FRAME_EXTENDED:
+ stackMap.putByte(SAME_LOCALS_1_STACK_ITEM_FRAME_EXTENDED).putShort(
+ delta);
+ writeFrameTypes(3 + clocalsSize, 4 + clocalsSize);
+ break;
+ case SAME_FRAME_EXTENDED:
+ stackMap.putByte(SAME_FRAME_EXTENDED).putShort(delta);
+ break;
+ case CHOP_FRAME:
+ stackMap.putByte(SAME_FRAME_EXTENDED + k).putShort(delta);
+ break;
+ case APPEND_FRAME:
+ stackMap.putByte(SAME_FRAME_EXTENDED + k).putShort(delta);
+ writeFrameTypes(3 + localsSize, 3 + clocalsSize);
+ break;
+ // case FULL_FRAME:
+ default:
+ stackMap.putByte(FULL_FRAME).putShort(delta).putShort(clocalsSize);
+ writeFrameTypes(3, 3 + clocalsSize);
+ stackMap.putShort(cstackSize);
+ writeFrameTypes(3 + clocalsSize, 3 + clocalsSize + cstackSize);
}
}
@@ -1757,8 +1771,10 @@ class MethodWriter extends MethodVisitor {
* in {@link Label} to the format used in StackMapTable attributes. In
* particular, it converts type table indexes to constant pool indexes.
*
- * @param start index of the first type in {@link #frame} to write.
- * @param end index of last type in {@link #frame} to write (exclusive).
+ * @param start
+ * index of the first type in {@link #frame} to write.
+ * @param end
+ * index of last type in {@link #frame} to write (exclusive).
*/
private void writeFrameTypes(final int start, final int end) {
for (int i = start; i < end; ++i) {
@@ -1767,15 +1783,15 @@ class MethodWriter extends MethodVisitor {
if (d == 0) {
int v = t & Frame.BASE_VALUE;
switch (t & Frame.BASE_KIND) {
- case Frame.OBJECT:
- stackMap.putByte(7)
- .putShort(cw.newClass(cw.typeTable[v].strVal1));
- break;
- case Frame.UNINITIALIZED:
- stackMap.putByte(8).putShort(cw.typeTable[v].intVal);
- break;
- default:
- stackMap.putByte(v);
+ case Frame.OBJECT:
+ stackMap.putByte(7).putShort(
+ cw.newClass(cw.typeTable[v].strVal1));
+ break;
+ case Frame.UNINITIALIZED:
+ stackMap.putByte(8).putShort(cw.typeTable[v].intVal);
+ break;
+ default:
+ stackMap.putByte(v);
}
} else {
StringBuffer buf = new StringBuffer();
@@ -1789,29 +1805,29 @@ class MethodWriter extends MethodVisitor {
buf.append(';');
} else {
switch (t & 0xF) {
- case 1:
- buf.append('I');
- break;
- case 2:
- buf.append('F');
- break;
- case 3:
- buf.append('D');
- break;
- case 9:
- buf.append('Z');
- break;
- case 10:
- buf.append('B');
- break;
- case 11:
- buf.append('C');
- break;
- case 12:
- buf.append('S');
- break;
- default:
- buf.append('J');
+ case 1:
+ buf.append('I');
+ break;
+ case 2:
+ buf.append('F');
+ break;
+ case 3:
+ buf.append('D');
+ break;
+ case 9:
+ buf.append('Z');
+ break;
+ case 10:
+ buf.append('B');
+ break;
+ case 11:
+ buf.append('C');
+ break;
+ case 12:
+ buf.append('S');
+ break;
+ default:
+ buf.append('J');
}
}
stackMap.putByte(7).putShort(cw.newClass(buf.toString()));
@@ -1875,10 +1891,7 @@ class MethodWriter extends MethodVisitor {
size += 8 + stackMap.length;
}
if (cattrs != null) {
- size += cattrs.getSize(cw,
- code.data,
- code.length,
- maxStack,
+ size += cattrs.getSize(cw, code.data, code.length, maxStack,
maxLocals);
}
}
@@ -1886,11 +1899,12 @@ class MethodWriter extends MethodVisitor {
cw.newUTF8("Exceptions");
size += 8 + 2 * exceptionCount;
}
- if ((access & Opcodes.ACC_SYNTHETIC) != 0
- && ((cw.version & 0xFFFF) < Opcodes.V1_5 || (access & ClassWriter.ACC_SYNTHETIC_ATTRIBUTE) != 0))
- {
- cw.newUTF8("Synthetic");
- size += 6;
+ if ((access & Opcodes.ACC_SYNTHETIC) != 0) {
+ if ((cw.version & 0xFFFF) < Opcodes.V1_5
+ || (access & ClassWriter.ACC_SYNTHETIC_ATTRIBUTE) != 0) {
+ cw.newUTF8("Synthetic");
+ size += 6;
+ }
}
if ((access & Opcodes.ACC_DEPRECATED) != 0) {
cw.newUTF8("Deprecated");
@@ -1936,13 +1950,15 @@ class MethodWriter extends MethodVisitor {
/**
* Puts the bytecode of this method in the given byte vector.
*
- * @param out the byte vector into which the bytecode of this method must be
- * copied.
+ * @param out
+ * the byte vector into which the bytecode of this method must be
+ * copied.
*/
final void put(final ByteVector out) {
- int mask = Opcodes.ACC_DEPRECATED
+ final int FACTOR = ClassWriter.TO_ACC_SYNTHETIC;
+ int mask = ACC_CONSTRUCTOR | Opcodes.ACC_DEPRECATED
| ClassWriter.ACC_SYNTHETIC_ATTRIBUTE
- | ((access & ClassWriter.ACC_SYNTHETIC_ATTRIBUTE) / (ClassWriter.ACC_SYNTHETIC_ATTRIBUTE / Opcodes.ACC_SYNTHETIC));
+ | ((access & ClassWriter.ACC_SYNTHETIC_ATTRIBUTE) / FACTOR);
out.putShort(access & ~mask).putShort(name).putShort(desc);
if (classReaderOffset != 0) {
out.putByteArray(cw.cr.b, classReaderOffset, classReaderLength);
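
Two related details of this hunk: the ACC_CONSTRUCTOR literal changed earlier in this file from 262144 (== 0x40000) to 0x80000, i.e. the pseudo-flag moved up one bit, and it is now part of mask here, so access & ~mask strips it before the real access flags are written. A small sanity check of that arithmetic — illustrative only; the simplified mask below ignores the other pseudo-flags handled by put():

    import scala.tools.asm.Opcodes;

    public class PseudoFlagCheck {
        public static void main(String[] args) {
            assert 262144 == 0x40000;              // the old ACC_CONSTRUCTOR literal
            assert 0x80000 == 2 * 0x40000;         // the new one is the next higher bit

            // The pseudo-flag never reaches the class file: put() clears it with access & ~mask.
            int accConstructor = 0x80000;          // value taken from this diff
            int access = Opcodes.ACC_PUBLIC | accConstructor;
            int mask = accConstructor;             // simplified; the real mask also covers the Deprecated/Synthetic handling
            assert (access & ~mask) == Opcodes.ACC_PUBLIC;
        }
    }
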
@@ -1955,10 +1971,11 @@ class MethodWriter extends MethodVisitor {
if (exceptionCount > 0) {
++attributeCount;
}
- if ((access & Opcodes.ACC_SYNTHETIC) != 0
- && ((cw.version & 0xFFFF) < Opcodes.V1_5 || (access & ClassWriter.ACC_SYNTHETIC_ATTRIBUTE) != 0))
- {
- ++attributeCount;
+ if ((access & Opcodes.ACC_SYNTHETIC) != 0) {
+ if ((cw.version & 0xFFFF) < Opcodes.V1_5
+ || (access & ClassWriter.ACC_SYNTHETIC_ATTRIBUTE) != 0) {
+ ++attributeCount;
+ }
}
if ((access & Opcodes.ACC_DEPRECATED) != 0) {
++attributeCount;
@@ -2000,10 +2017,7 @@ class MethodWriter extends MethodVisitor {
size += 8 + stackMap.length;
}
if (cattrs != null) {
- size += cattrs.getSize(cw,
- code.data,
- code.length,
- maxStack,
+ size += cattrs.getSize(cw, code.data, code.length, maxStack,
maxLocals);
}
out.putShort(cw.newUTF8("Code")).putInt(size);
@@ -2013,10 +2027,8 @@ class MethodWriter extends MethodVisitor {
if (handlerCount > 0) {
Handler h = firstHandler;
while (h != null) {
- out.putShort(h.start.position)
- .putShort(h.end.position)
- .putShort(h.handler.position)
- .putShort(h.type);
+ out.putShort(h.start.position).putShort(h.end.position)
+ .putShort(h.handler.position).putShort(h.type);
h = h.next;
}
}
@@ -2063,24 +2075,24 @@ class MethodWriter extends MethodVisitor {
}
}
if (exceptionCount > 0) {
- out.putShort(cw.newUTF8("Exceptions"))
- .putInt(2 * exceptionCount + 2);
+ out.putShort(cw.newUTF8("Exceptions")).putInt(
+ 2 * exceptionCount + 2);
out.putShort(exceptionCount);
for (int i = 0; i < exceptionCount; ++i) {
out.putShort(exceptions[i]);
}
}
- if ((access & Opcodes.ACC_SYNTHETIC) != 0
- && ((cw.version & 0xFFFF) < Opcodes.V1_5 || (access & ClassWriter.ACC_SYNTHETIC_ATTRIBUTE) != 0))
- {
- out.putShort(cw.newUTF8("Synthetic")).putInt(0);
+ if ((access & Opcodes.ACC_SYNTHETIC) != 0) {
+ if ((cw.version & 0xFFFF) < Opcodes.V1_5
+ || (access & ClassWriter.ACC_SYNTHETIC_ATTRIBUTE) != 0) {
+ out.putShort(cw.newUTF8("Synthetic")).putInt(0);
+ }
}
if ((access & Opcodes.ACC_DEPRECATED) != 0) {
out.putShort(cw.newUTF8("Deprecated")).putInt(0);
}
if (ClassReader.SIGNATURES && signature != null) {
- out.putShort(cw.newUTF8("Signature"))
- .putInt(2)
+ out.putShort(cw.newUTF8("Signature")).putInt(2)
.putShort(cw.newUTF8(signature));
}
if (ClassReader.ANNOTATIONS && annd != null) {
@@ -2123,10 +2135,12 @@ class MethodWriter extends MethodVisitor {
* 32768, in which case IFEQ 32766 must be replaced with IFNEQ 8 GOTO_W
* 32765. This, in turn, may require to increase the size of another jump
* instruction, and so on... All these operations are handled automatically
- * by this method. <p> <i>This method must be called after all the method
- * that is being built has been visited</i>. In particular, the
- * {@link Label Label} objects used to construct the method are no longer
- * valid after this method has been called.
+ * by this method.
+ * <p>
+ * <i>This method must be called after all the method that is being built
+ * has been visited</i>. In particular, the {@link Label Label} objects used
+ * to construct the method are no longer valid after this method has been
+ * called.
*/
private void resizeInstructions() {
byte[] b = code.data; // bytecode of the method
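
The expression opcode <= 166 ? ((opcode + 1) ^ 1) - 1 : opcode ^ 1, which appears at several places in this class, computes the "opposite" conditional branch mentioned in the comments (IFNE for IFEQ, IFNONNULL for IFNULL, and so on). A quick numeric check using the standard JVM opcode values:

    public class OppositeOpcodeCheck {
        public static void main(String[] args) {
            // IFEQ = 153, IFNE = 154, IFLT = 155, IFGE = 156, IFNULL = 198, IFNONNULL = 199
            assert opposite(153) == 154;   // IFEQ   -> IFNE
            assert opposite(154) == 153;   // IFNE   -> IFEQ
            assert opposite(155) == 156;   // IFLT   -> IFGE
            assert opposite(198) == 199;   // IFNULL -> IFNONNULL (opcodes > 166 just flip the low bit)
        }

        static int opposite(int opcode) {
            return opcode <= 166 ? ((opcode + 1) ^ 1) - 1 : opcode ^ 1;
        }
    }
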
@@ -2176,158 +2190,14 @@ class MethodWriter extends MethodVisitor {
int insert = 0; // bytes to be added after this instruction
switch (ClassWriter.TYPE[opcode]) {
- case ClassWriter.NOARG_INSN:
- case ClassWriter.IMPLVAR_INSN:
- u += 1;
- break;
- case ClassWriter.LABEL_INSN:
- if (opcode > 201) {
- // converts temporary opcodes 202 to 217, 218 and
- // 219 to IFEQ ... JSR (inclusive), IFNULL and
- // IFNONNULL
- opcode = opcode < 218 ? opcode - 49 : opcode - 20;
- label = u + readUnsignedShort(b, u + 1);
- } else {
- label = u + readShort(b, u + 1);
- }
- newOffset = getNewOffset(allIndexes, allSizes, u, label);
- if (newOffset < Short.MIN_VALUE
- || newOffset > Short.MAX_VALUE)
- {
- if (!resize[u]) {
- if (opcode == Opcodes.GOTO
- || opcode == Opcodes.JSR)
- {
- // two additional bytes will be required to
- // replace this GOTO or JSR instruction with
- // a GOTO_W or a JSR_W
- insert = 2;
- } else {
- // five additional bytes will be required to
- // replace this IFxxx <l> instruction with
- // IFNOTxxx <l'> GOTO_W <l>, where IFNOTxxx
- // is the "opposite" opcode of IFxxx (i.e.,
- // IFNE for IFEQ) and where <l'> designates
- // the instruction just after the GOTO_W.
- insert = 5;
- }
- resize[u] = true;
- }
- }
- u += 3;
- break;
- case ClassWriter.LABELW_INSN:
- u += 5;
- break;
- case ClassWriter.TABL_INSN:
- if (state == 1) {
- // true number of bytes to be added (or removed)
- // from this instruction = (future number of padding
- // bytes - current number of padding byte) -
- // previously over estimated variation =
- // = ((3 - newOffset%4) - (3 - u%4)) - u%4
- // = (-newOffset%4 + u%4) - u%4
- // = -(newOffset & 3)
- newOffset = getNewOffset(allIndexes, allSizes, 0, u);
- insert = -(newOffset & 3);
- } else if (!resize[u]) {
- // over estimation of the number of bytes to be
- // added to this instruction = 3 - current number
- // of padding bytes = 3 - (3 - u%4) = u%4 = u & 3
- insert = u & 3;
- resize[u] = true;
- }
- // skips instruction
- u = u + 4 - (u & 3);
- u += 4 * (readInt(b, u + 8) - readInt(b, u + 4) + 1) + 12;
- break;
- case ClassWriter.LOOK_INSN:
- if (state == 1) {
- // like TABL_INSN
- newOffset = getNewOffset(allIndexes, allSizes, 0, u);
- insert = -(newOffset & 3);
- } else if (!resize[u]) {
- // like TABL_INSN
- insert = u & 3;
- resize[u] = true;
- }
- // skips instruction
- u = u + 4 - (u & 3);
- u += 8 * readInt(b, u + 4) + 8;
- break;
- case ClassWriter.WIDE_INSN:
- opcode = b[u + 1] & 0xFF;
- if (opcode == Opcodes.IINC) {
- u += 6;
- } else {
- u += 4;
- }
- break;
- case ClassWriter.VAR_INSN:
- case ClassWriter.SBYTE_INSN:
- case ClassWriter.LDC_INSN:
- u += 2;
- break;
- case ClassWriter.SHORT_INSN:
- case ClassWriter.LDCW_INSN:
- case ClassWriter.FIELDORMETH_INSN:
- case ClassWriter.TYPE_INSN:
- case ClassWriter.IINC_INSN:
- u += 3;
- break;
- case ClassWriter.ITFMETH_INSN:
- case ClassWriter.INDYMETH_INSN:
- u += 5;
- break;
- // case ClassWriter.MANA_INSN:
- default:
- u += 4;
- break;
- }
- if (insert != 0) {
- // adds a new (u, insert) entry in the allIndexes and
- // allSizes arrays
- int[] newIndexes = new int[allIndexes.length + 1];
- int[] newSizes = new int[allSizes.length + 1];
- System.arraycopy(allIndexes,
- 0,
- newIndexes,
- 0,
- allIndexes.length);
- System.arraycopy(allSizes, 0, newSizes, 0, allSizes.length);
- newIndexes[allIndexes.length] = u;
- newSizes[allSizes.length] = insert;
- allIndexes = newIndexes;
- allSizes = newSizes;
- if (insert > 0) {
- state = 3;
- }
- }
- }
- if (state < 3) {
- --state;
- }
- } while (state != 0);
-
- // 2nd step:
- // copies the bytecode of the method into a new bytevector, updates the
- // offsets, and inserts (or removes) bytes as requested.
-
- ByteVector newCode = new ByteVector(code.length);
-
- u = 0;
- while (u < code.length) {
- int opcode = b[u] & 0xFF;
- switch (ClassWriter.TYPE[opcode]) {
case ClassWriter.NOARG_INSN:
case ClassWriter.IMPLVAR_INSN:
- newCode.putByte(opcode);
u += 1;
break;
case ClassWriter.LABEL_INSN:
if (opcode > 201) {
- // changes temporary opcodes 202 to 217 (inclusive), 218
- // and 219 to IFEQ ... JSR (inclusive), IFNULL and
+ // converts temporary opcodes 202 to 217, 218 and
+ // 219 to IFEQ ... JSR (inclusive), IFNULL and
// IFNONNULL
opcode = opcode < 218 ? opcode - 49 : opcode - 20;
label = u + readUnsignedShort(b, u + 1);
@@ -2335,100 +2205,78 @@ class MethodWriter extends MethodVisitor {
label = u + readShort(b, u + 1);
}
newOffset = getNewOffset(allIndexes, allSizes, u, label);
- if (resize[u]) {
- // replaces GOTO with GOTO_W, JSR with JSR_W and IFxxx
- // <l> with IFNOTxxx <l'> GOTO_W <l>, where IFNOTxxx is
- // the "opposite" opcode of IFxxx (i.e., IFNE for IFEQ)
- // and where <l'> designates the instruction just after
- // the GOTO_W.
- if (opcode == Opcodes.GOTO) {
- newCode.putByte(200); // GOTO_W
- } else if (opcode == Opcodes.JSR) {
- newCode.putByte(201); // JSR_W
- } else {
- newCode.putByte(opcode <= 166
- ? ((opcode + 1) ^ 1) - 1
- : opcode ^ 1);
- newCode.putShort(8); // jump offset
- newCode.putByte(200); // GOTO_W
- // newOffset now computed from start of GOTO_W
- newOffset -= 3;
+ if (newOffset < Short.MIN_VALUE
+ || newOffset > Short.MAX_VALUE) {
+ if (!resize[u]) {
+ if (opcode == Opcodes.GOTO || opcode == Opcodes.JSR) {
+ // two additional bytes will be required to
+ // replace this GOTO or JSR instruction with
+ // a GOTO_W or a JSR_W
+ insert = 2;
+ } else {
+ // five additional bytes will be required to
+ // replace this IFxxx <l> instruction with
+ // IFNOTxxx <l'> GOTO_W <l>, where IFNOTxxx
+ // is the "opposite" opcode of IFxxx (i.e.,
+ // IFNE for IFEQ) and where <l'> designates
+ // the instruction just after the GOTO_W.
+ insert = 5;
+ }
+ resize[u] = true;
}
- newCode.putInt(newOffset);
- } else {
- newCode.putByte(opcode);
- newCode.putShort(newOffset);
}
u += 3;
break;
case ClassWriter.LABELW_INSN:
- label = u + readInt(b, u + 1);
- newOffset = getNewOffset(allIndexes, allSizes, u, label);
- newCode.putByte(opcode);
- newCode.putInt(newOffset);
u += 5;
break;
case ClassWriter.TABL_INSN:
- // skips 0 to 3 padding bytes
- v = u;
- u = u + 4 - (v & 3);
- // reads and copies instruction
- newCode.putByte(Opcodes.TABLESWITCH);
- newCode.putByteArray(null, 0, (4 - newCode.length % 4) % 4);
- label = v + readInt(b, u);
- u += 4;
- newOffset = getNewOffset(allIndexes, allSizes, v, label);
- newCode.putInt(newOffset);
- j = readInt(b, u);
- u += 4;
- newCode.putInt(j);
- j = readInt(b, u) - j + 1;
- u += 4;
- newCode.putInt(readInt(b, u - 4));
- for (; j > 0; --j) {
- label = v + readInt(b, u);
- u += 4;
- newOffset = getNewOffset(allIndexes, allSizes, v, label);
- newCode.putInt(newOffset);
+ if (state == 1) {
+ // true number of bytes to be added (or removed)
+ // from this instruction = (future number of padding
+ // bytes - current number of padding byte) -
+ // previously over estimated variation =
+ // = ((3 - newOffset%4) - (3 - u%4)) - u%4
+ // = (-newOffset%4 + u%4) - u%4
+ // = -(newOffset & 3)
+ newOffset = getNewOffset(allIndexes, allSizes, 0, u);
+ insert = -(newOffset & 3);
+ } else if (!resize[u]) {
+ // over estimation of the number of bytes to be
+ // added to this instruction = 3 - current number
+ // of padding bytes = 3 - (3 - u%4) = u%4 = u & 3
+ insert = u & 3;
+ resize[u] = true;
}
+ // skips instruction
+ u = u + 4 - (u & 3);
+ u += 4 * (readInt(b, u + 8) - readInt(b, u + 4) + 1) + 12;
break;
case ClassWriter.LOOK_INSN:
- // skips 0 to 3 padding bytes
- v = u;
- u = u + 4 - (v & 3);
- // reads and copies instruction
- newCode.putByte(Opcodes.LOOKUPSWITCH);
- newCode.putByteArray(null, 0, (4 - newCode.length % 4) % 4);
- label = v + readInt(b, u);
- u += 4;
- newOffset = getNewOffset(allIndexes, allSizes, v, label);
- newCode.putInt(newOffset);
- j = readInt(b, u);
- u += 4;
- newCode.putInt(j);
- for (; j > 0; --j) {
- newCode.putInt(readInt(b, u));
- u += 4;
- label = v + readInt(b, u);
- u += 4;
- newOffset = getNewOffset(allIndexes, allSizes, v, label);
- newCode.putInt(newOffset);
+ if (state == 1) {
+ // like TABL_INSN
+ newOffset = getNewOffset(allIndexes, allSizes, 0, u);
+ insert = -(newOffset & 3);
+ } else if (!resize[u]) {
+ // like TABL_INSN
+ insert = u & 3;
+ resize[u] = true;
}
+ // skips instruction
+ u = u + 4 - (u & 3);
+ u += 8 * readInt(b, u + 4) + 8;
break;
case ClassWriter.WIDE_INSN:
opcode = b[u + 1] & 0xFF;
if (opcode == Opcodes.IINC) {
- newCode.putByteArray(b, u, 6);
u += 6;
} else {
- newCode.putByteArray(b, u, 4);
u += 4;
}
break;
case ClassWriter.VAR_INSN:
case ClassWriter.SBYTE_INSN:
case ClassWriter.LDC_INSN:
- newCode.putByteArray(b, u, 2);
u += 2;
break;
case ClassWriter.SHORT_INSN:
@@ -2436,19 +2284,178 @@ class MethodWriter extends MethodVisitor {
case ClassWriter.FIELDORMETH_INSN:
case ClassWriter.TYPE_INSN:
case ClassWriter.IINC_INSN:
- newCode.putByteArray(b, u, 3);
u += 3;
break;
case ClassWriter.ITFMETH_INSN:
case ClassWriter.INDYMETH_INSN:
- newCode.putByteArray(b, u, 5);
u += 5;
break;
- // case MANA_INSN:
+ // case ClassWriter.MANA_INSN:
default:
- newCode.putByteArray(b, u, 4);
u += 4;
break;
+ }
+ if (insert != 0) {
+ // adds a new (u, insert) entry in the allIndexes and
+ // allSizes arrays
+ int[] newIndexes = new int[allIndexes.length + 1];
+ int[] newSizes = new int[allSizes.length + 1];
+ System.arraycopy(allIndexes, 0, newIndexes, 0,
+ allIndexes.length);
+ System.arraycopy(allSizes, 0, newSizes, 0, allSizes.length);
+ newIndexes[allIndexes.length] = u;
+ newSizes[allSizes.length] = insert;
+ allIndexes = newIndexes;
+ allSizes = newSizes;
+ if (insert > 0) {
+ state = 3;
+ }
+ }
+ }
+ if (state < 3) {
+ --state;
+ }
+ } while (state != 0);
+
+ // 2nd step:
+ // copies the bytecode of the method into a new bytevector, updates the
+ // offsets, and inserts (or removes) bytes as requested.
+
+ ByteVector newCode = new ByteVector(code.length);
+
+ u = 0;
+ while (u < code.length) {
+ int opcode = b[u] & 0xFF;
+ switch (ClassWriter.TYPE[opcode]) {
+ case ClassWriter.NOARG_INSN:
+ case ClassWriter.IMPLVAR_INSN:
+ newCode.putByte(opcode);
+ u += 1;
+ break;
+ case ClassWriter.LABEL_INSN:
+ if (opcode > 201) {
+ // changes temporary opcodes 202 to 217 (inclusive), 218
+ // and 219 to IFEQ ... JSR (inclusive), IFNULL and
+ // IFNONNULL
+ opcode = opcode < 218 ? opcode - 49 : opcode - 20;
+ label = u + readUnsignedShort(b, u + 1);
+ } else {
+ label = u + readShort(b, u + 1);
+ }
+ newOffset = getNewOffset(allIndexes, allSizes, u, label);
+ if (resize[u]) {
+ // replaces GOTO with GOTO_W, JSR with JSR_W and IFxxx
+ // <l> with IFNOTxxx <l'> GOTO_W <l>, where IFNOTxxx is
+ // the "opposite" opcode of IFxxx (i.e., IFNE for IFEQ)
+ // and where <l'> designates the instruction just after
+ // the GOTO_W.
+ if (opcode == Opcodes.GOTO) {
+ newCode.putByte(200); // GOTO_W
+ } else if (opcode == Opcodes.JSR) {
+ newCode.putByte(201); // JSR_W
+ } else {
+ newCode.putByte(opcode <= 166 ? ((opcode + 1) ^ 1) - 1
+ : opcode ^ 1);
+ newCode.putShort(8); // jump offset
+ newCode.putByte(200); // GOTO_W
+ // newOffset now computed from start of GOTO_W
+ newOffset -= 3;
+ }
+ newCode.putInt(newOffset);
+ } else {
+ newCode.putByte(opcode);
+ newCode.putShort(newOffset);
+ }
+ u += 3;
+ break;
+ case ClassWriter.LABELW_INSN:
+ label = u + readInt(b, u + 1);
+ newOffset = getNewOffset(allIndexes, allSizes, u, label);
+ newCode.putByte(opcode);
+ newCode.putInt(newOffset);
+ u += 5;
+ break;
+ case ClassWriter.TABL_INSN:
+ // skips 0 to 3 padding bytes
+ v = u;
+ u = u + 4 - (v & 3);
+ // reads and copies instruction
+ newCode.putByte(Opcodes.TABLESWITCH);
+ newCode.putByteArray(null, 0, (4 - newCode.length % 4) % 4);
+ label = v + readInt(b, u);
+ u += 4;
+ newOffset = getNewOffset(allIndexes, allSizes, v, label);
+ newCode.putInt(newOffset);
+ j = readInt(b, u);
+ u += 4;
+ newCode.putInt(j);
+ j = readInt(b, u) - j + 1;
+ u += 4;
+ newCode.putInt(readInt(b, u - 4));
+ for (; j > 0; --j) {
+ label = v + readInt(b, u);
+ u += 4;
+ newOffset = getNewOffset(allIndexes, allSizes, v, label);
+ newCode.putInt(newOffset);
+ }
+ break;
+ case ClassWriter.LOOK_INSN:
+ // skips 0 to 3 padding bytes
+ v = u;
+ u = u + 4 - (v & 3);
+ // reads and copies instruction
+ newCode.putByte(Opcodes.LOOKUPSWITCH);
+ newCode.putByteArray(null, 0, (4 - newCode.length % 4) % 4);
+ label = v + readInt(b, u);
+ u += 4;
+ newOffset = getNewOffset(allIndexes, allSizes, v, label);
+ newCode.putInt(newOffset);
+ j = readInt(b, u);
+ u += 4;
+ newCode.putInt(j);
+ for (; j > 0; --j) {
+ newCode.putInt(readInt(b, u));
+ u += 4;
+ label = v + readInt(b, u);
+ u += 4;
+ newOffset = getNewOffset(allIndexes, allSizes, v, label);
+ newCode.putInt(newOffset);
+ }
+ break;
+ case ClassWriter.WIDE_INSN:
+ opcode = b[u + 1] & 0xFF;
+ if (opcode == Opcodes.IINC) {
+ newCode.putByteArray(b, u, 6);
+ u += 6;
+ } else {
+ newCode.putByteArray(b, u, 4);
+ u += 4;
+ }
+ break;
+ case ClassWriter.VAR_INSN:
+ case ClassWriter.SBYTE_INSN:
+ case ClassWriter.LDC_INSN:
+ newCode.putByteArray(b, u, 2);
+ u += 2;
+ break;
+ case ClassWriter.SHORT_INSN:
+ case ClassWriter.LDCW_INSN:
+ case ClassWriter.FIELDORMETH_INSN:
+ case ClassWriter.TYPE_INSN:
+ case ClassWriter.IINC_INSN:
+ newCode.putByteArray(b, u, 3);
+ u += 3;
+ break;
+ case ClassWriter.ITFMETH_INSN:
+ case ClassWriter.INDYMETH_INSN:
+ newCode.putByteArray(b, u, 5);
+ u += 5;
+ break;
+ // case MANA_INSN:
+ default:
+ newCode.putByteArray(b, u, 4);
+ u += 4;
+ break;
}
}
@@ -2471,8 +2478,7 @@ class MethodWriter extends MethodVisitor {
* must therefore never have been called for this label.
*/
u = l.position - 3;
- if ((l.status & Label.STORE) != 0 || (u >= 0 && resize[u]))
- {
+ if ((l.status & Label.STORE) != 0 || (u >= 0 && resize[u])) {
getNewOffset(allIndexes, allSizes, l);
// TODO update offsets in UNINITIALIZED values
visitFrame(l.frame);
@@ -2528,10 +2534,11 @@ class MethodWriter extends MethodVisitor {
b = lineNumber.data;
u = 0;
while (u < lineNumber.length) {
- writeShort(b, u, getNewOffset(allIndexes,
- allSizes,
- 0,
- readUnsignedShort(b, u)));
+ writeShort(
+ b,
+ u,
+ getNewOffset(allIndexes, allSizes, 0,
+ readUnsignedShort(b, u)));
u += 4;
}
}
@@ -2554,8 +2561,10 @@ class MethodWriter extends MethodVisitor {
/**
* Reads an unsigned short value in the given byte array.
*
- * @param b a byte array.
- * @param index the start index of the value to be read.
+ * @param b
+ * a byte array.
+ * @param index
+ * the start index of the value to be read.
* @return the read value.
*/
static int readUnsignedShort(final byte[] b, final int index) {
@@ -2565,8 +2574,10 @@ class MethodWriter extends MethodVisitor {
/**
* Reads a signed short value in the given byte array.
*
- * @param b a byte array.
- * @param index the start index of the value to be read.
+ * @param b
+ * a byte array.
+ * @param index
+ * the start index of the value to be read.
* @return the read value.
*/
static short readShort(final byte[] b, final int index) {
@@ -2576,8 +2587,10 @@ class MethodWriter extends MethodVisitor {
/**
* Reads a signed int value in the given byte array.
*
- * @param b a byte array.
- * @param index the start index of the value to be read.
+ * @param b
+ * a byte array.
+ * @param index
+ * the start index of the value to be read.
* @return the read value.
*/
static int readInt(final byte[] b, final int index) {
@@ -2588,9 +2601,12 @@ class MethodWriter extends MethodVisitor {
/**
* Writes a short value in the given byte array.
*
- * @param b a byte array.
- * @param index where the first byte of the short value must be written.
- * @param s the value to be written in the given byte array.
+ * @param b
+ * a byte array.
+ * @param index
+ * where the first byte of the short value must be written.
+ * @param s
+ * the value to be written in the given byte array.
*/
static void writeShort(final byte[] b, final int index, final int s) {
b[index] = (byte) (s >>> 8);
@@ -2598,32 +2614,34 @@ class MethodWriter extends MethodVisitor {
}
/**
- * Computes the future value of a bytecode offset. <p> Note: it is possible
- * to have several entries for the same instruction in the <tt>indexes</tt>
- * and <tt>sizes</tt>: two entries (index=a,size=b) and (index=a,size=b')
- * are equivalent to a single entry (index=a,size=b+b').
+ * Computes the future value of a bytecode offset.
+ * <p>
+ * Note: it is possible to have several entries for the same instruction in
+ * the <tt>indexes</tt> and <tt>sizes</tt>: two entries (index=a,size=b) and
+ * (index=a,size=b') are equivalent to a single entry (index=a,size=b+b').
*
- * @param indexes current positions of the instructions to be resized. Each
- * instruction must be designated by the index of its <i>last</i>
- * byte, plus one (or, in other words, by the index of the <i>first</i>
- * byte of the <i>next</i> instruction).
- * @param sizes the number of bytes to be <i>added</i> to the above
- * instructions. More precisely, for each i < <tt>len</tt>,
- * <tt>sizes</tt>[i] bytes will be added at the end of the
- * instruction designated by <tt>indexes</tt>[i] or, if
- * <tt>sizes</tt>[i] is negative, the <i>last</i> |<tt>sizes[i]</tt>|
- * bytes of the instruction will be removed (the instruction size
- * <i>must not</i> become negative or null).
- * @param begin index of the first byte of the source instruction.
- * @param end index of the first byte of the target instruction.
+ * @param indexes
+ * current positions of the instructions to be resized. Each
+ * instruction must be designated by the index of its <i>last</i>
+ * byte, plus one (or, in other words, by the index of the
+ * <i>first</i> byte of the <i>next</i> instruction).
+ * @param sizes
+ * the number of bytes to be <i>added</i> to the above
+ * instructions. More precisely, for each i < <tt>len</tt>,
+ * <tt>sizes</tt>[i] bytes will be added at the end of the
+ * instruction designated by <tt>indexes</tt>[i] or, if
+ * <tt>sizes</tt>[i] is negative, the <i>last</i> |
+ * <tt>sizes[i]</tt>| bytes of the instruction will be removed
+ * (the instruction size <i>must not</i> become negative or
+ * null).
+ * @param begin
+ * index of the first byte of the source instruction.
+ * @param end
+ * index of the first byte of the target instruction.
* @return the future value of the given bytecode offset.
*/
- static int getNewOffset(
- final int[] indexes,
- final int[] sizes,
- final int begin,
- final int end)
- {
+ static int getNewOffset(final int[] indexes, final int[] sizes,
+ final int begin, final int end) {
int offset = end - begin;
for (int i = 0; i < indexes.length; ++i) {
if (begin < indexes[i] && indexes[i] <= end) {
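
As a worked example of the contract described above — a sketch; the helper class is hypothetical and must live in the same package because getNewOffset is package-private:

    package scala.tools.asm;

    class GetNewOffsetExample {
        public static void main(String[] args) {
            int[] indexes = { 10, 40 };   // last byte + 1 of each resized instruction
            int[] sizes   = { +5, -2 };   // bytes added at / removed from those instructions
            assert MethodWriter.getNewOffset(indexes, sizes, 0, 50) == 53; // 50 + 5 - 2: crosses both patches
            assert MethodWriter.getNewOffset(indexes, sizes, 0, 20) == 25; // 20 + 5: index 40 is not crossed
        }
    }
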
@@ -2640,24 +2658,25 @@ class MethodWriter extends MethodVisitor {
/**
* Updates the offset of the given label.
*
- * @param indexes current positions of the instructions to be resized. Each
- * instruction must be designated by the index of its <i>last</i>
- * byte, plus one (or, in other words, by the index of the <i>first</i>
- * byte of the <i>next</i> instruction).
- * @param sizes the number of bytes to be <i>added</i> to the above
- * instructions. More precisely, for each i < <tt>len</tt>,
- * <tt>sizes</tt>[i] bytes will be added at the end of the
- * instruction designated by <tt>indexes</tt>[i] or, if
- * <tt>sizes</tt>[i] is negative, the <i>last</i> |<tt>sizes[i]</tt>|
- * bytes of the instruction will be removed (the instruction size
- * <i>must not</i> become negative or null).
- * @param label the label whose offset must be updated.
- */
- static void getNewOffset(
- final int[] indexes,
- final int[] sizes,
- final Label label)
- {
+ * @param indexes
+ * current positions of the instructions to be resized. Each
+ * instruction must be designated by the index of its <i>last</i>
+ * byte, plus one (or, in other words, by the index of the
+ * <i>first</i> byte of the <i>next</i> instruction).
+ * @param sizes
+ * the number of bytes to be <i>added</i> to the above
+ * instructions. More precisely, for each i < <tt>len</tt>,
+ * <tt>sizes</tt>[i] bytes will be added at the end of the
+ * instruction designated by <tt>indexes</tt>[i] or, if
+ * <tt>sizes</tt>[i] is negative, the <i>last</i> |
+ * <tt>sizes[i]</tt>| bytes of the instruction will be removed
+ * (the instruction size <i>must not</i> become negative or
+ * null).
+ * @param label
+ * the label whose offset must be updated.
+ */
+ static void getNewOffset(final int[] indexes, final int[] sizes,
+ final Label label) {
if ((label.status & Label.RESIZED) == 0) {
label.position = getNewOffset(indexes, sizes, 0, label.position);
label.status |= Label.RESIZED;
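
For context on the F_NEW handling added to visitFrame earlier in this file: expanded frames are what a ClassReader delivers under EXPAND_FRAMES, so a round-trip through a ClassWriter that does not recompute frames is the typical way that code path is exercised. A minimal sketch; the original byte array stands for some existing classfile:

    import scala.tools.asm.ClassReader;
    import scala.tools.asm.ClassWriter;

    public class ExpandFramesRoundTrip {
        public static byte[] roundTrip(byte[] original) {
            ClassReader cr = new ClassReader(original);
            ClassWriter cw = new ClassWriter(0);          // neither COMPUTE_MAXS nor COMPUTE_FRAMES
            cr.accept(cw, ClassReader.EXPAND_FRAMES);     // delivers stack map frames as F_NEW frames
            return cw.toByteArray();
        }
    }
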
diff --git a/src/asm/scala/tools/asm/Type.java b/src/asm/scala/tools/asm/Type.java
index bf1107182a..7821a492e6 100644
--- a/src/asm/scala/tools/asm/Type.java
+++ b/src/asm/scala/tools/asm/Type.java
@@ -190,13 +190,16 @@ public class Type {
/**
* Constructs a reference type.
*
- * @param sort the sort of the reference type to be constructed.
- * @param buf a buffer containing the descriptor of the previous type.
- * @param off the offset of this descriptor in the previous buffer.
- * @param len the length of this descriptor.
- */
- private Type(final int sort, final char[] buf, final int off, final int len)
- {
+ * @param sort
+ * the sort of the reference type to be constructed.
+ * @param buf
+ * a buffer containing the descriptor of the previous type.
+ * @param off
+ * the offset of this descriptor in the previous buffer.
+ * @param len
+ * the length of this descriptor.
+ */
+ private Type(final int sort, final char[] buf, final int off, final int len) {
this.sort = sort;
this.buf = buf;
this.off = off;
@@ -206,7 +209,8 @@ public class Type {
/**
* Returns the Java type corresponding to the given type descriptor.
*
- * @param typeDescriptor a field or method type descriptor.
+ * @param typeDescriptor
+ * a field or method type descriptor.
* @return the Java type corresponding to the given type descriptor.
*/
public static Type getType(final String typeDescriptor) {
@@ -216,7 +220,8 @@ public class Type {
/**
* Returns the Java type corresponding to the given internal name.
*
- * @param internalName an internal name.
+ * @param internalName
+ * an internal name.
* @return the Java type corresponding to the given internal name.
*/
public static Type getObjectType(final String internalName) {
@@ -228,7 +233,8 @@ public class Type {
* Returns the Java type corresponding to the given method descriptor.
* Equivalent to <code>Type.getType(methodDescriptor)</code>.
*
- * @param methodDescriptor a method descriptor.
+ * @param methodDescriptor
+ * a method descriptor.
* @return the Java type corresponding to the given method descriptor.
*/
public static Type getMethodType(final String methodDescriptor) {
@@ -239,18 +245,23 @@ public class Type {
* Returns the Java method type corresponding to the given argument and
* return types.
*
- * @param returnType the return type of the method.
- * @param argumentTypes the argument types of the method.
- * @return the Java type corresponding to the given argument and return types.
+ * @param returnType
+ * the return type of the method.
+ * @param argumentTypes
+ * the argument types of the method.
+ * @return the Java type corresponding to the given argument and return
+ * types.
*/
- public static Type getMethodType(final Type returnType, final Type... argumentTypes) {
+ public static Type getMethodType(final Type returnType,
+ final Type... argumentTypes) {
return getType(getMethodDescriptor(returnType, argumentTypes));
}
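
A small illustration of the two factory styles above; both values denote the method type int f(String) and share the descriptor "(Ljava/lang/String;)I" (the class name is hypothetical):

    import scala.tools.asm.Type;

    public class MethodTypeExample {
        public static void main(String[] args) {
            Type string = Type.getType(String.class);
            Type m1 = Type.getMethodType(Type.INT_TYPE, string);   // from return and argument types
            Type m2 = Type.getMethodType("(Ljava/lang/String;)I"); // from the method descriptor
            assert m1.getDescriptor().equals("(Ljava/lang/String;)I");
            assert m1.equals(m2);
        }
    }
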
/**
* Returns the Java type corresponding to the given class.
*
- * @param c a class.
+ * @param c
+ * a class.
* @return the Java type corresponding to the given class.
*/
public static Type getType(final Class<?> c) {
@@ -282,7 +293,8 @@ public class Type {
/**
* Returns the Java method type corresponding to the given constructor.
*
- * @param c a {@link Constructor Constructor} object.
+ * @param c
+ * a {@link Constructor Constructor} object.
* @return the Java method type corresponding to the given constructor.
*/
public static Type getType(final Constructor<?> c) {
@@ -292,7 +304,8 @@ public class Type {
/**
* Returns the Java method type corresponding to the given method.
*
- * @param m a {@link Method Method} object.
+ * @param m
+ * a {@link Method Method} object.
* @return the Java method type corresponding to the given method.
*/
public static Type getType(final Method m) {
@@ -303,7 +316,8 @@ public class Type {
* Returns the Java types corresponding to the argument types of the given
* method descriptor.
*
- * @param methodDescriptor a method descriptor.
+ * @param methodDescriptor
+ * a method descriptor.
* @return the Java types corresponding to the argument types of the given
* method descriptor.
*/
@@ -338,7 +352,8 @@ public class Type {
* Returns the Java types corresponding to the argument types of the given
* method.
*
- * @param method a method.
+ * @param method
+ * a method.
* @return the Java types corresponding to the argument types of the given
* method.
*/
@@ -355,7 +370,8 @@ public class Type {
* Returns the Java type corresponding to the return type of the given
* method descriptor.
*
- * @param methodDescriptor a method descriptor.
+ * @param methodDescriptor
+ * a method descriptor.
* @return the Java type corresponding to the return type of the given
* method descriptor.
*/
@@ -368,7 +384,8 @@ public class Type {
* Returns the Java type corresponding to the return type of the given
* method.
*
- * @param method a method.
+ * @param method
+ * a method.
* @return the Java type corresponding to the return type of the given
* method.
*/
@@ -379,12 +396,13 @@ public class Type {
/**
* Computes the size of the arguments and of the return value of a method.
*
- * @param desc the descriptor of a method.
+ * @param desc
+ * the descriptor of a method.
* @return the size of the arguments of the method (plus one for the
* implicit this argument), argSize, and the size of its return
* value, retSize, packed into a single int i =
- * <tt>(argSize << 2) | retSize</tt> (argSize is therefore equal
- * to <tt>i >> 2</tt>, and retSize to <tt>i & 0x03</tt>).
+ * <tt>(argSize << 2) | retSize</tt> (argSize is therefore equal to
+ * <tt>i >> 2</tt>, and retSize to <tt>i & 0x03</tt>).
*/
public static int getArgumentsAndReturnSizes(final String desc) {
int n = 1;
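
For example, for the descriptor "(IJ)D" the packed value is 18: argSize = 1 (implicit this) + 1 (int) + 2 (long) = 4 and retSize = 2 (double), so i = (4 << 2) | 2. Callers that know the method is static, such as the MethodWriter constructor earlier in this diff, subtract the implicit slot again. A tiny check:

    import scala.tools.asm.Type;

    public class ArgSizesCheck {
        public static void main(String[] args) {
            int i = Type.getArgumentsAndReturnSizes("(IJ)D");
            assert i == 18;          // (4 << 2) | 2
            assert (i >> 2) == 4;    // this + int + long
            assert (i & 0x03) == 2;  // double return value
        }
    }
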
@@ -419,52 +437,54 @@ public class Type {
* method descriptors, buf is supposed to contain nothing more than the
* descriptor itself.
*
- * @param buf a buffer containing a type descriptor.
- * @param off the offset of this descriptor in the previous buffer.
+ * @param buf
+ * a buffer containing a type descriptor.
+ * @param off
+ * the offset of this descriptor in the previous buffer.
* @return the Java type corresponding to the given type descriptor.
*/
private static Type getType(final char[] buf, final int off) {
int len;
switch (buf[off]) {
- case 'V':
- return VOID_TYPE;
- case 'Z':
- return BOOLEAN_TYPE;
- case 'C':
- return CHAR_TYPE;
- case 'B':
- return BYTE_TYPE;
- case 'S':
- return SHORT_TYPE;
- case 'I':
- return INT_TYPE;
- case 'F':
- return FLOAT_TYPE;
- case 'J':
- return LONG_TYPE;
- case 'D':
- return DOUBLE_TYPE;
- case '[':
- len = 1;
- while (buf[off + len] == '[') {
- ++len;
- }
- if (buf[off + len] == 'L') {
- ++len;
- while (buf[off + len] != ';') {
- ++len;
- }
- }
- return new Type(ARRAY, buf, off, len + 1);
- case 'L':
- len = 1;
+ case 'V':
+ return VOID_TYPE;
+ case 'Z':
+ return BOOLEAN_TYPE;
+ case 'C':
+ return CHAR_TYPE;
+ case 'B':
+ return BYTE_TYPE;
+ case 'S':
+ return SHORT_TYPE;
+ case 'I':
+ return INT_TYPE;
+ case 'F':
+ return FLOAT_TYPE;
+ case 'J':
+ return LONG_TYPE;
+ case 'D':
+ return DOUBLE_TYPE;
+ case '[':
+ len = 1;
+ while (buf[off + len] == '[') {
+ ++len;
+ }
+ if (buf[off + len] == 'L') {
+ ++len;
while (buf[off + len] != ';') {
++len;
}
- return new Type(OBJECT, buf, off + 1, len - 1);
+ }
+ return new Type(ARRAY, buf, off, len + 1);
+ case 'L':
+ len = 1;
+ while (buf[off + len] != ';') {
+ ++len;
+ }
+ return new Type(OBJECT, buf, off + 1, len - 1);
// case '(':
- default:
- return new Type(METHOD, buf, 0, buf.length);
+ default:
+ return new Type(METHOD, buf, off, buf.length - off);
}
}
@@ -475,11 +495,11 @@ public class Type {
/**
* Returns the sort of this Java type.
*
- * @return {@link #VOID VOID}, {@link #BOOLEAN BOOLEAN},
- * {@link #CHAR CHAR}, {@link #BYTE BYTE}, {@link #SHORT SHORT},
- * {@link #INT INT}, {@link #FLOAT FLOAT}, {@link #LONG LONG},
- * {@link #DOUBLE DOUBLE}, {@link #ARRAY ARRAY},
- * {@link #OBJECT OBJECT} or {@link #METHOD METHOD}.
+ * @return {@link #VOID VOID}, {@link #BOOLEAN BOOLEAN}, {@link #CHAR CHAR},
+ * {@link #BYTE BYTE}, {@link #SHORT SHORT}, {@link #INT INT},
+ * {@link #FLOAT FLOAT}, {@link #LONG LONG}, {@link #DOUBLE DOUBLE},
+ * {@link #ARRAY ARRAY}, {@link #OBJECT OBJECT} or {@link #METHOD
+ * METHOD}.
*/
public int getSort() {
return sort;
@@ -517,34 +537,34 @@ public class Type {
*/
public String getClassName() {
switch (sort) {
- case VOID:
- return "void";
- case BOOLEAN:
- return "boolean";
- case CHAR:
- return "char";
- case BYTE:
- return "byte";
- case SHORT:
- return "short";
- case INT:
- return "int";
- case FLOAT:
- return "float";
- case LONG:
- return "long";
- case DOUBLE:
- return "double";
- case ARRAY:
- StringBuffer b = new StringBuffer(getElementType().getClassName());
- for (int i = getDimensions(); i > 0; --i) {
- b.append("[]");
- }
- return b.toString();
- case OBJECT:
- return new String(buf, off, len).replace('/', '.');
- default:
- return null;
+ case VOID:
+ return "void";
+ case BOOLEAN:
+ return "boolean";
+ case CHAR:
+ return "char";
+ case BYTE:
+ return "byte";
+ case SHORT:
+ return "short";
+ case INT:
+ return "int";
+ case FLOAT:
+ return "float";
+ case LONG:
+ return "long";
+ case DOUBLE:
+ return "double";
+ case ARRAY:
+ StringBuffer b = new StringBuffer(getElementType().getClassName());
+ for (int i = getDimensions(); i > 0; --i) {
+ b.append("[]");
+ }
+ return b.toString();
+ case OBJECT:
+ return new String(buf, off, len).replace('/', '.');
+ default:
+ return null;
}
}
@@ -613,15 +633,15 @@ public class Type {
* Returns the descriptor corresponding to the given argument and return
* types.
*
- * @param returnType the return type of the method.
- * @param argumentTypes the argument types of the method.
+ * @param returnType
+ * the return type of the method.
+ * @param argumentTypes
+ * the argument types of the method.
* @return the descriptor corresponding to the given argument and return
* types.
*/
- public static String getMethodDescriptor(
- final Type returnType,
- final Type... argumentTypes)
- {
+ public static String getMethodDescriptor(final Type returnType,
+ final Type... argumentTypes) {
StringBuffer buf = new StringBuffer();
buf.append('(');
for (int i = 0; i < argumentTypes.length; ++i) {
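The reformatted getMethodDescriptor(Type, Type...) above builds '(' plus the argument descriptors, ')' and the return descriptor. A small usage sketch, assuming the Type constants and the Class-based overload of getType match upstream ASM:

import scala.tools.asm.Type;

public class MethodDescriptorDemo {
    public static void main(String[] args) {
        String desc = Type.getMethodDescriptor(
                Type.INT_TYPE,              // return type
                Type.getType(String.class), // first argument
                Type.BOOLEAN_TYPE);         // second argument
        System.out.println(desc); // (Ljava/lang/String;)Z -> no: prints (Ljava/lang/String;Z)I
    }
}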
@@ -636,11 +656,13 @@ public class Type {
* Appends the descriptor corresponding to this Java type to the given
* string buffer.
*
- * @param buf the string buffer to which the descriptor must be appended.
+ * @param buf
+ * the string buffer to which the descriptor must be appended.
*/
private void getDescriptor(final StringBuffer buf) {
if (this.buf == null) {
- // descriptor is in byte 3 of 'off' for primitive types (buf == null)
+ // descriptor is in byte 3 of 'off' for primitive types (buf ==
+ // null)
buf.append((char) ((off & 0xFF000000) >>> 24));
} else if (sort == OBJECT) {
buf.append('L');
@@ -661,7 +683,8 @@ public class Type {
* class is its fully qualified name, as returned by Class.getName(), where
* '.' are replaced by '/'.
*
- * @param c an object or array class.
+ * @param c
+ * an object or array class.
* @return the internal name of the given class.
*/
public static String getInternalName(final Class<?> c) {
@@ -671,7 +694,8 @@ public class Type {
/**
* Returns the descriptor corresponding to the given Java type.
*
- * @param c an object class, a primitive class or an array class.
+ * @param c
+ * an object class, a primitive class or an array class.
* @return the descriptor corresponding to the given class.
*/
public static String getDescriptor(final Class<?> c) {
@@ -683,7 +707,8 @@ public class Type {
/**
* Returns the descriptor corresponding to the given constructor.
*
- * @param c a {@link Constructor Constructor} object.
+ * @param c
+ * a {@link Constructor Constructor} object.
* @return the descriptor of the given constructor.
*/
public static String getConstructorDescriptor(final Constructor<?> c) {
@@ -699,7 +724,8 @@ public class Type {
/**
* Returns the descriptor corresponding to the given method.
*
- * @param m a {@link Method Method} object.
+ * @param m
+ * a {@link Method Method} object.
* @return the descriptor of the given method.
*/
public static String getMethodDescriptor(final Method m) {
@@ -717,8 +743,10 @@ public class Type {
/**
* Appends the descriptor of the given class to the given string buffer.
*
- * @param buf the string buffer to which the descriptor must be appended.
- * @param c the class whose descriptor must be computed.
+ * @param buf
+ * the string buffer to which the descriptor must be appended.
+ * @param c
+ * the class whose descriptor must be computed.
*/
private static void getDescriptor(final StringBuffer buf, final Class<?> c) {
Class<?> d = c;
@@ -783,9 +811,10 @@ public class Type {
* Returns a JVM instruction opcode adapted to this Java type. This method
* must not be used for method types.
*
- * @param opcode a JVM instruction opcode. This opcode must be one of ILOAD,
- * ISTORE, IALOAD, IASTORE, IADD, ISUB, IMUL, IDIV, IREM, INEG, ISHL,
- * ISHR, IUSHR, IAND, IOR, IXOR and IRETURN.
+ * @param opcode
+ * a JVM instruction opcode. This opcode must be one of ILOAD,
+ * ISTORE, IALOAD, IASTORE, IADD, ISUB, IMUL, IDIV, IREM, INEG,
+ * ISHL, ISHR, IUSHR, IAND, IOR, IXOR and IRETURN.
* @return an opcode that is similar to the given opcode, but adapted to
* this Java type. For example, if this type is <tt>float</tt> and
* <tt>opcode</tt> is IRETURN, this method returns FRETURN.
@@ -809,7 +838,8 @@ public class Type {
/**
* Tests if the given object is equal to this type.
*
- * @param o the object to be compared to this type.
+ * @param o
+ * the object to be compared to this type.
* @return <tt>true</tt> if the given object is equal to this type.
*/
@Override
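Rounding off the Type.java hunks, a sketch of the reflection-based helpers and getOpcode touched above; the calls shown (getInternalName(Class), getDescriptor(Class), getConstructorDescriptor, getOpcode) are assumed to keep their upstream ASM signatures.

import java.lang.reflect.Constructor;
import scala.tools.asm.Opcodes;
import scala.tools.asm.Type;

public class TypeHelpersDemo {
    public static void main(String[] args) throws Exception {
        System.out.println(Type.getInternalName(String.class));  // java/lang/String
        System.out.println(Type.getDescriptor(int[].class));     // [I

        Constructor<String> ctor = String.class.getConstructor(String.class);
        System.out.println(Type.getConstructorDescriptor(ctor)); // (Ljava/lang/String;)V

        // getOpcode adapts a generic int-flavoured opcode to this type.
        System.out.println(Type.FLOAT_TYPE.getOpcode(Opcodes.IRETURN) == Opcodes.FRETURN); // true
    }
}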
diff --git a/src/asm/scala/tools/asm/signature/SignatureReader.java b/src/asm/scala/tools/asm/signature/SignatureReader.java
index 22e6427e63..9c7c3880d9 100644
--- a/src/asm/scala/tools/asm/signature/SignatureReader.java
+++ b/src/asm/scala/tools/asm/signature/SignatureReader.java
@@ -46,8 +46,9 @@ public class SignatureReader {
/**
* Constructs a {@link SignatureReader} for the given signature.
*
- * @param signature A <i>ClassSignature</i>, <i>MethodTypeSignature</i>,
- * or <i>FieldTypeSignature</i>.
+ * @param signature
+ * A <i>ClassSignature</i>, <i>MethodTypeSignature</i>, or
+ * <i>FieldTypeSignature</i>.
*/
public SignatureReader(final String signature) {
this.signature = signature;
@@ -58,15 +59,15 @@ public class SignatureReader {
* {@link SignatureReader}. This signature is the one specified in the
* constructor (see {@link #SignatureReader(String) SignatureReader}). This
* method is intended to be called on a {@link SignatureReader} that was
- * created using a <i>ClassSignature</i> (such as the
+ * created using a <i>ClassSignature</i> (such as the <code>signature</code>
+ * parameter of the {@link scala.tools.asm.ClassVisitor#visit
+ * ClassVisitor.visit} method) or a <i>MethodTypeSignature</i> (such as the
* <code>signature</code> parameter of the
- * {@link org.objectweb.asm.ClassVisitor#visit ClassVisitor.visit} method)
- * or a <i>MethodTypeSignature</i> (such as the <code>signature</code>
- * parameter of the
- * {@link org.objectweb.asm.ClassVisitor#visitMethod ClassVisitor.visitMethod}
- * method).
+ * {@link scala.tools.asm.ClassVisitor#visitMethod
+ * ClassVisitor.visitMethod} method).
*
- * @param v the visitor that must visit this signature.
+ * @param v
+ * the visitor that must visit this signature.
*/
public void accept(final SignatureVisitor v) {
String signature = this.signature;
@@ -118,12 +119,12 @@ public class SignatureReader {
* method is intended to be called on a {@link SignatureReader} that was
* created using a <i>FieldTypeSignature</i>, such as the
* <code>signature</code> parameter of the
- * {@link org.objectweb.asm.ClassVisitor#visitField
- * ClassVisitor.visitField} or {@link
- * org.objectweb.asm.MethodVisitor#visitLocalVariable
+ * {@link scala.tools.asm.ClassVisitor#visitField ClassVisitor.visitField}
+ * or {@link scala.tools.asm.MethodVisitor#visitLocalVariable
* MethodVisitor.visitLocalVariable} methods.
*
- * @param v the visitor that must visit this signature.
+ * @param v
+ * the visitor that must visit this signature.
*/
public void acceptType(final SignatureVisitor v) {
parseType(this.signature, 0, v);
@@ -132,98 +133,96 @@ public class SignatureReader {
/**
* Parses a field type signature and makes the given visitor visit it.
*
- * @param signature a string containing the signature that must be parsed.
- * @param pos index of the first character of the signature to parsed.
- * @param v the visitor that must visit this signature.
+ * @param signature
+ * a string containing the signature that must be parsed.
+ * @param pos
+ * index of the first character of the signature to be parsed.
+ * @param v
+ * the visitor that must visit this signature.
* @return the index of the first character after the parsed signature.
*/
- private static int parseType(
- final String signature,
- int pos,
- final SignatureVisitor v)
- {
+ private static int parseType(final String signature, int pos,
+ final SignatureVisitor v) {
char c;
int start, end;
boolean visited, inner;
String name;
switch (c = signature.charAt(pos++)) {
- case 'Z':
- case 'C':
- case 'B':
- case 'S':
- case 'I':
- case 'F':
- case 'J':
- case 'D':
- case 'V':
- v.visitBaseType(c);
- return pos;
+ case 'Z':
+ case 'C':
+ case 'B':
+ case 'S':
+ case 'I':
+ case 'F':
+ case 'J':
+ case 'D':
+ case 'V':
+ v.visitBaseType(c);
+ return pos;
- case '[':
- return parseType(signature, pos, v.visitArrayType());
+ case '[':
+ return parseType(signature, pos, v.visitArrayType());
- case 'T':
- end = signature.indexOf(';', pos);
- v.visitTypeVariable(signature.substring(pos, end));
- return end + 1;
+ case 'T':
+ end = signature.indexOf(';', pos);
+ v.visitTypeVariable(signature.substring(pos, end));
+ return end + 1;
- default: // case 'L':
- start = pos;
- visited = false;
- inner = false;
- for (;;) {
- switch (c = signature.charAt(pos++)) {
- case '.':
- case ';':
- if (!visited) {
- name = signature.substring(start, pos - 1);
- if (inner) {
- v.visitInnerClassType(name);
- } else {
- v.visitClassType(name);
- }
- }
- if (c == ';') {
- v.visitEnd();
- return pos;
- }
- start = pos;
- visited = false;
- inner = true;
- break;
+ default: // case 'L':
+ start = pos;
+ visited = false;
+ inner = false;
+ for (;;) {
+ switch (c = signature.charAt(pos++)) {
+ case '.':
+ case ';':
+ if (!visited) {
+ name = signature.substring(start, pos - 1);
+ if (inner) {
+ v.visitInnerClassType(name);
+ } else {
+ v.visitClassType(name);
+ }
+ }
+ if (c == ';') {
+ v.visitEnd();
+ return pos;
+ }
+ start = pos;
+ visited = false;
+ inner = true;
+ break;
- case '<':
- name = signature.substring(start, pos - 1);
- if (inner) {
- v.visitInnerClassType(name);
- } else {
- v.visitClassType(name);
- }
- visited = true;
- top: for (;;) {
- switch (c = signature.charAt(pos)) {
- case '>':
- break top;
- case '*':
- ++pos;
- v.visitTypeArgument();
- break;
- case '+':
- case '-':
- pos = parseType(signature,
- pos + 1,
- v.visitTypeArgument(c));
- break;
- default:
- pos = parseType(signature,
- pos,
- v.visitTypeArgument('='));
- break;
- }
- }
+ case '<':
+ name = signature.substring(start, pos - 1);
+ if (inner) {
+ v.visitInnerClassType(name);
+ } else {
+ v.visitClassType(name);
+ }
+ visited = true;
+ top: for (;;) {
+ switch (c = signature.charAt(pos)) {
+ case '>':
+ break top;
+ case '*':
+ ++pos;
+ v.visitTypeArgument();
+ break;
+ case '+':
+ case '-':
+ pos = parseType(signature, pos + 1,
+ v.visitTypeArgument(c));
+ break;
+ default:
+ pos = parseType(signature, pos,
+ v.visitTypeArgument('='));
+ break;
+ }
}
}
+ }
}
}
}
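To illustrate acceptType on a FieldTypeSignature as described above, a minimal visitor sketch; the anonymous SignatureVisitor subclass and the signature string are made up for the example, while the SignatureReader/SignatureVisitor entry points are assumed to match upstream ASM.

import scala.tools.asm.Opcodes;
import scala.tools.asm.signature.SignatureReader;
import scala.tools.asm.signature.SignatureVisitor;

public class SignatureReaderDemo {
    public static void main(String[] args) {
        String fieldSignature = "Ljava/util/Map<Ljava/lang/String;Ljava/lang/Integer;>;";
        new SignatureReader(fieldSignature).acceptType(new SignatureVisitor(Opcodes.ASM4) {
            @Override
            public void visitClassType(String name) {
                System.out.println("class type: " + name);
            }
        });
        // Prints java/util/Map, java/lang/String and java/lang/Integer in visit order.
    }
}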
diff --git a/src/asm/scala/tools/asm/signature/SignatureVisitor.java b/src/asm/scala/tools/asm/signature/SignatureVisitor.java
index 2fc364e374..f38f81f53b 100644
--- a/src/asm/scala/tools/asm/signature/SignatureVisitor.java
+++ b/src/asm/scala/tools/asm/signature/SignatureVisitor.java
@@ -35,21 +35,21 @@ import scala.tools.asm.Opcodes;
* A visitor to visit a generic signature. The methods of this interface must be
* called in one of the three following orders (the last one is the only valid
* order for a {@link SignatureVisitor} that is returned by a method of this
- * interface): <ul> <li><i>ClassSignature</i> = (
- * <tt>visitFormalTypeParameter</tt>
- * <tt>visitClassBound</tt>?
- * <tt>visitInterfaceBound</tt>* )* ( <tt>visitSuperClass</tt>
- * <tt>visitInterface</tt>* )</li>
+ * interface):
+ * <ul>
+ * <li><i>ClassSignature</i> = ( <tt>visitFormalTypeParameter</tt>
+ * <tt>visitClassBound</tt>? <tt>visitInterfaceBound</tt>* )* (
+ * <tt>visitSuperClass</tt> <tt>visitInterface</tt>* )</li>
* <li><i>MethodSignature</i> = ( <tt>visitFormalTypeParameter</tt>
- * <tt>visitClassBound</tt>?
- * <tt>visitInterfaceBound</tt>* )* ( <tt>visitParameterType</tt>*
- * <tt>visitReturnType</tt>
- * <tt>visitExceptionType</tt>* )</li> <li><i>TypeSignature</i> =
- * <tt>visitBaseType</tt> | <tt>visitTypeVariable</tt> |
- * <tt>visitArrayType</tt> | (
+ * <tt>visitClassBound</tt>? <tt>visitInterfaceBound</tt>* )* (
+ * <tt>visitParameterType</tt>* <tt>visitReturnType</tt>
+ * <tt>visitExceptionType</tt>* )</li>
+ * <li><i>TypeSignature</i> = <tt>visitBaseType</tt> |
+ * <tt>visitTypeVariable</tt> | <tt>visitArrayType</tt> | (
* <tt>visitClassType</tt> <tt>visitTypeArgument</tt>* (
- * <tt>visitInnerClassType</tt> <tt>visitTypeArgument</tt>* )*
- * <tt>visitEnd</tt> ) )</li> </ul>
+ * <tt>visitInnerClassType</tt> <tt>visitTypeArgument</tt>* )* <tt>visitEnd</tt>
+ * ) )</li>
+ * </ul>
*
* @author Thomas Hallgren
* @author Eric Bruneton
@@ -80,8 +80,9 @@ public abstract class SignatureVisitor {
/**
* Constructs a new {@link SignatureVisitor}.
*
- * @param api the ASM API version implemented by this visitor. Must be one
- * of {@link Opcodes#ASM4}.
+ * @param api
+ * the ASM API version implemented by this visitor. Must be one
+ * of {@link Opcodes#ASM4}.
*/
public SignatureVisitor(final int api) {
this.api = api;
@@ -90,7 +91,8 @@ public abstract class SignatureVisitor {
/**
* Visits a formal type parameter.
*
- * @param name the name of the formal parameter.
+ * @param name
+ * the name of the formal parameter.
*/
public void visitFormalTypeParameter(String name) {
}
@@ -162,8 +164,9 @@ public abstract class SignatureVisitor {
/**
* Visits a signature corresponding to a primitive type.
*
- * @param descriptor the descriptor of the primitive type, or 'V' for
- * <tt>void</tt>.
+ * @param descriptor
+ * the descriptor of the primitive type, or 'V' for <tt>void</tt>
+ * .
*/
public void visitBaseType(char descriptor) {
}
@@ -171,7 +174,8 @@ public abstract class SignatureVisitor {
/**
* Visits a signature corresponding to a type variable.
*
- * @param name the name of the type variable.
+ * @param name
+ * the name of the type variable.
*/
public void visitTypeVariable(String name) {
}
@@ -190,7 +194,8 @@ public abstract class SignatureVisitor {
* Starts the visit of a signature corresponding to a class or interface
* type.
*
- * @param name the internal name of the class or interface.
+ * @param name
+ * the internal name of the class or interface.
*/
public void visitClassType(String name) {
}
@@ -198,7 +203,8 @@ public abstract class SignatureVisitor {
/**
* Visits an inner class.
*
- * @param name the local name of the inner class in its enclosing class.
+ * @param name
+ * the local name of the inner class in its enclosing class.
*/
public void visitInnerClassType(String name) {
}
@@ -213,7 +219,8 @@ public abstract class SignatureVisitor {
/**
* Visits a type argument of the last visited class or inner class type.
*
- * @param wildcard '+', '-' or '='.
+ * @param wildcard
+ * '+', '-' or '='.
* @return a non null visitor to visit the signature of the type argument.
*/
public SignatureVisitor visitTypeArgument(char wildcard) {
diff --git a/src/asm/scala/tools/asm/signature/SignatureWriter.java b/src/asm/scala/tools/asm/signature/SignatureWriter.java
index a59fdfde2b..ebf4fe07b4 100644
--- a/src/asm/scala/tools/asm/signature/SignatureWriter.java
+++ b/src/asm/scala/tools/asm/signature/SignatureWriter.java
@@ -224,4 +224,4 @@ public class SignatureWriter extends SignatureVisitor {
}
argumentStack /= 2;
}
-}
\ No newline at end of file
+}
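Since SignatureWriter is the identity SignatureVisitor, a quick round-trip sketch ties these signature hunks together; it assumes SignatureWriter.toString() returns the rebuilt signature as in upstream ASM.

import scala.tools.asm.signature.SignatureReader;
import scala.tools.asm.signature.SignatureWriter;

public class SignatureRoundTripDemo {
    public static void main(String[] args) {
        String original = "Ljava/util/List<Ljava/lang/String;>;";
        SignatureWriter writer = new SignatureWriter();
        new SignatureReader(original).acceptType(writer);
        // The writer re-emits exactly what the reader parsed.
        System.out.println(original.equals(writer.toString())); // true
    }
}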
diff --git a/src/asm/scala/tools/asm/tree/AbstractInsnNode.java b/src/asm/scala/tools/asm/tree/AbstractInsnNode.java
index 471f842ffc..411eead3c7 100644
--- a/src/asm/scala/tools/asm/tree/AbstractInsnNode.java
+++ b/src/asm/scala/tools/asm/tree/AbstractInsnNode.java
@@ -148,7 +148,8 @@ public abstract class AbstractInsnNode {
/**
* Constructs a new {@link AbstractInsnNode}.
*
- * @param opcode the opcode of the instruction to be constructed.
+ * @param opcode
+ * the opcode of the instruction to be constructed.
*/
protected AbstractInsnNode(final int opcode) {
this.opcode = opcode;
@@ -197,38 +198,47 @@ public abstract class AbstractInsnNode {
/**
* Makes the given code visitor visit this instruction.
*
- * @param cv a code visitor.
+ * @param cv
+ * a code visitor.
*/
public abstract void accept(final MethodVisitor cv);
/**
* Returns a copy of this instruction.
*
- * @param labels a map from LabelNodes to cloned LabelNodes.
+ * @param labels
+ * a map from LabelNodes to cloned LabelNodes.
* @return a copy of this instruction. The returned instruction does not
* belong to any {@link InsnList}.
*/
- public abstract AbstractInsnNode clone(final Map<LabelNode, LabelNode> labels);
+ public abstract AbstractInsnNode clone(
+ final Map<LabelNode, LabelNode> labels);
/**
* Returns the clone of the given label.
*
- * @param label a label.
- * @param map a map from LabelNodes to cloned LabelNodes.
+ * @param label
+ * a label.
+ * @param map
+ * a map from LabelNodes to cloned LabelNodes.
* @return the clone of the given label.
*/
- static LabelNode clone(final LabelNode label, final Map<LabelNode, LabelNode> map) {
+ static LabelNode clone(final LabelNode label,
+ final Map<LabelNode, LabelNode> map) {
return map.get(label);
}
/**
* Returns the clones of the given labels.
*
- * @param labels a list of labels.
- * @param map a map from LabelNodes to cloned LabelNodes.
+ * @param labels
+ * a list of labels.
+ * @param map
+ * a map from LabelNodes to cloned LabelNodes.
* @return the clones of the given labels.
*/
- static LabelNode[] clone(final List<LabelNode> labels, final Map<LabelNode, LabelNode> map) {
+ static LabelNode[] clone(final List<LabelNode> labels,
+ final Map<LabelNode, LabelNode> map) {
LabelNode[] clones = new LabelNode[labels.size()];
for (int i = 0; i < clones.length; ++i) {
clones[i] = map.get(labels.get(i));
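As context for the clone(...) helpers reformatted above, a sketch of cloning a label-bearing instruction through a LabelNode map; JumpInsnNode and LabelNode are the usual tree classes, assumed unchanged from upstream ASM.

import java.util.HashMap;
import java.util.Map;
import scala.tools.asm.Opcodes;
import scala.tools.asm.tree.AbstractInsnNode;
import scala.tools.asm.tree.JumpInsnNode;
import scala.tools.asm.tree.LabelNode;

public class CloneInsnDemo {
    public static void main(String[] args) {
        LabelNode oldTarget = new LabelNode();
        Map<LabelNode, LabelNode> labelMap = new HashMap<LabelNode, LabelNode>();
        labelMap.put(oldTarget, new LabelNode());

        AbstractInsnNode jump = new JumpInsnNode(Opcodes.GOTO, oldTarget);
        AbstractInsnNode copy = jump.clone(labelMap); // refers to the mapped LabelNode
        System.out.println(copy != jump); // true: a fresh node, not yet in any InsnList
    }
}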
diff --git a/src/asm/scala/tools/asm/tree/AnnotationNode.java b/src/asm/scala/tools/asm/tree/AnnotationNode.java
index 9f132550e6..1f4beef9f7 100644
--- a/src/asm/scala/tools/asm/tree/AnnotationNode.java
+++ b/src/asm/scala/tools/asm/tree/AnnotationNode.java
@@ -52,11 +52,11 @@ public class AnnotationNode extends AnnotationVisitor {
* as two consecutive elements in the list. The name is a {@link String},
* and the value may be a {@link Byte}, {@link Boolean}, {@link Character},
* {@link Short}, {@link Integer}, {@link Long}, {@link Float},
- * {@link Double}, {@link String} or {@link org.objectweb.asm.Type}, or an
+ * {@link Double}, {@link String} or {@link scala.tools.asm.Type}, or a
* two-element String array (for enumeration values), a
* {@link AnnotationNode}, or a {@link List} of values of one of the
- * preceding types. The list may be <tt>null</tt> if there is no name
- * value pair.
+ * preceding types. The list may be <tt>null</tt> if there is no name value
+ * pair.
*/
public List<Object> values;
@@ -65,7 +65,8 @@ public class AnnotationNode extends AnnotationVisitor {
* constructor</i>. Instead, they must use the
* {@link #AnnotationNode(int, String)} version.
*
- * @param desc the class descriptor of the annotation class.
+ * @param desc
+ * the class descriptor of the annotation class.
*/
public AnnotationNode(final String desc) {
this(Opcodes.ASM4, desc);
@@ -74,9 +75,11 @@ public class AnnotationNode extends AnnotationVisitor {
/**
* Constructs a new {@link AnnotationNode}.
*
- * @param api the ASM API version implemented by this visitor. Must be one
- * of {@link Opcodes#ASM4}.
- * @param desc the class descriptor of the annotation class.
+ * @param api
+ * the ASM API version implemented by this visitor. Must be one
+ * of {@link Opcodes#ASM4}.
+ * @param desc
+ * the class descriptor of the annotation class.
*/
public AnnotationNode(final int api, final String desc) {
super(api);
@@ -86,7 +89,8 @@ public class AnnotationNode extends AnnotationVisitor {
/**
* Constructs a new {@link AnnotationNode} to visit an array value.
*
- * @param values where the visited values must be stored.
+ * @param values
+ * where the visited values must be stored.
*/
AnnotationNode(final List<Object> values) {
super(Opcodes.ASM4);
@@ -109,11 +113,8 @@ public class AnnotationNode extends AnnotationVisitor {
}
@Override
- public void visitEnum(
- final String name,
- final String desc,
- final String value)
- {
+ public void visitEnum(final String name, final String desc,
+ final String value) {
if (values == null) {
values = new ArrayList<Object>(this.desc != null ? 2 : 1);
}
@@ -124,10 +125,8 @@ public class AnnotationNode extends AnnotationVisitor {
}
@Override
- public AnnotationVisitor visitAnnotation(
- final String name,
- final String desc)
- {
+ public AnnotationVisitor visitAnnotation(final String name,
+ final String desc) {
if (values == null) {
values = new ArrayList<Object>(this.desc != null ? 2 : 1);
}
@@ -166,7 +165,8 @@ public class AnnotationNode extends AnnotationVisitor {
* recursively, do not contain elements that were introduced in more recent
* versions of the ASM API than the given version.
*
- * @param api an ASM API version. Must be one of {@link Opcodes#ASM4}.
+ * @param api
+ * an ASM API version. Must be one of {@link Opcodes#ASM4}.
*/
public void check(final int api) {
// nothing to do
@@ -175,7 +175,8 @@ public class AnnotationNode extends AnnotationVisitor {
/**
* Makes the given visitor visit this annotation.
*
- * @param av an annotation visitor. Maybe <tt>null</tt>.
+ * @param av
+ * an annotation visitor. May be <tt>null</tt>.
*/
public void accept(final AnnotationVisitor av) {
if (av != null) {
@@ -193,15 +194,15 @@ public class AnnotationNode extends AnnotationVisitor {
/**
* Makes the given visitor visit a given annotation value.
*
- * @param av an annotation visitor. Maybe <tt>null</tt>.
- * @param name the value name.
- * @param value the actual value.
+ * @param av
+ * an annotation visitor. May be <tt>null</tt>.
+ * @param name
+ * the value name.
+ * @param value
+ * the actual value.
*/
- static void accept(
- final AnnotationVisitor av,
- final String name,
- final Object value)
- {
+ static void accept(final AnnotationVisitor av, final String name,
+ final Object value) {
if (av != null) {
if (value instanceof String[]) {
String[] typeconst = (String[]) value;
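A small sketch of how the name/value list documented above gets populated; it assumes AnnotationNode keeps ASM's AnnotationVisitor methods (visit, visitEnd), and the annotation descriptor is purely illustrative.

import scala.tools.asm.tree.AnnotationNode;

public class AnnotationNodeDemo {
    public static void main(String[] args) {
        // Hypothetical annotation descriptor used only for the example.
        AnnotationNode an = new AnnotationNode("Lcom/example/Marker;");
        an.visit("priority", Integer.valueOf(42));
        an.visit("label", "demo");
        an.visitEnd();
        // values holds consecutive name/value pairs: [priority, 42, label, demo]
        System.out.println(an.values);
    }
}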
diff --git a/src/asm/scala/tools/asm/tree/ClassNode.java b/src/asm/scala/tools/asm/tree/ClassNode.java
index 64effae698..c3d999985a 100644
--- a/src/asm/scala/tools/asm/tree/ClassNode.java
+++ b/src/asm/scala/tools/asm/tree/ClassNode.java
@@ -53,33 +53,33 @@ public class ClassNode extends ClassVisitor {
public int version;
/**
- * The class's access flags (see {@link org.objectweb.asm.Opcodes}). This
+ * The class's access flags (see {@link scala.tools.asm.Opcodes}). This
* field also indicates if the class is deprecated.
*/
public int access;
/**
* The internal name of the class (see
- * {@link org.objectweb.asm.Type#getInternalName() getInternalName}).
+ * {@link scala.tools.asm.Type#getInternalName() getInternalName}).
*/
public String name;
/**
- * The signature of the class. Mayt be <tt>null</tt>.
+ * The signature of the class. May be <tt>null</tt>.
*/
public String signature;
/**
* The internal name of the super class (see
- * {@link org.objectweb.asm.Type#getInternalName() getInternalName}). For
- * interfaces, the super class is {@link Object}. May be <tt>null</tt>,
- * but only for the {@link Object} class.
+ * {@link scala.tools.asm.Type#getInternalName() getInternalName}). For
+ * interfaces, the super class is {@link Object}. May be <tt>null</tt>, but
+ * only for the {@link Object} class.
*/
public String superName;
/**
* The internal names of the class's interfaces (see
- * {@link org.objectweb.asm.Type#getInternalName() getInternalName}). This
+ * {@link scala.tools.asm.Type#getInternalName() getInternalName}). This
* list is a list of {@link String} objects.
*/
public List<String> interfaces;
@@ -91,7 +91,7 @@ public class ClassNode extends ClassVisitor {
public String sourceFile;
/**
- * Debug information to compute the correspondance between source and
+ * Debug information to compute the correspondence between source and
* compiled elements of the class. May be <tt>null</tt>.
*/
public String sourceDebug;
@@ -109,8 +109,8 @@ public class ClassNode extends ClassVisitor {
public String outerMethod;
/**
- * The descriptor of the method that contains the class, or <tt>null</tt>
- * if the class is not enclosed in a method.
+ * The descriptor of the method that contains the class, or <tt>null</tt> if
+ * the class is not enclosed in a method.
*/
public String outerMethodDesc;
@@ -118,7 +118,7 @@ public class ClassNode extends ClassVisitor {
* The runtime visible annotations of this class. This list is a list of
* {@link AnnotationNode} objects. May be <tt>null</tt>.
*
- * @associates org.objectweb.asm.tree.AnnotationNode
+ * @associates scala.tools.asm.tree.AnnotationNode
* @label visible
*/
public List<AnnotationNode> visibleAnnotations;
@@ -127,7 +127,7 @@ public class ClassNode extends ClassVisitor {
* The runtime invisible annotations of this class. This list is a list of
* {@link AnnotationNode} objects. May be <tt>null</tt>.
*
- * @associates org.objectweb.asm.tree.AnnotationNode
+ * @associates scala.tools.asm.tree.AnnotationNode
* @label invisible
*/
public List<AnnotationNode> invisibleAnnotations;
@@ -136,7 +136,7 @@ public class ClassNode extends ClassVisitor {
* The non standard attributes of this class. This list is a list of
* {@link Attribute} objects. May be <tt>null</tt>.
*
- * @associates org.objectweb.asm.Attribute
+ * @associates scala.tools.asm.Attribute
*/
public List<Attribute> attrs;
@@ -144,7 +144,7 @@ public class ClassNode extends ClassVisitor {
* Information about the inner classes of this class. This list is a list
* of {@link InnerClassNode} objects.
*
- * @associates org.objectweb.asm.tree.InnerClassNode
+ * @associates scala.tools.asm.tree.InnerClassNode
*/
public List<InnerClassNode> innerClasses;
@@ -152,7 +152,7 @@ public class ClassNode extends ClassVisitor {
* The fields of this class. This list is a list of {@link FieldNode}
* objects.
*
- * @associates org.objectweb.asm.tree.FieldNode
+ * @associates scala.tools.asm.tree.FieldNode
*/
public List<FieldNode> fields;
@@ -160,7 +160,7 @@ public class ClassNode extends ClassVisitor {
* The methods of this class. This list is a list of {@link MethodNode}
* objects.
*
- * @associates org.objectweb.asm.tree.MethodNode
+ * @associates scala.tools.asm.tree.MethodNode
*/
public List<MethodNode> methods;
@@ -176,8 +176,9 @@ public class ClassNode extends ClassVisitor {
/**
* Constructs a new {@link ClassNode}.
*
- * @param api the ASM API version implemented by this visitor. Must be one
- * of {@link Opcodes#ASM4}.
+ * @param api
+ * the ASM API version implemented by this visitor. Must be one
+ * of {@link Opcodes#ASM4}.
*/
public ClassNode(final int api) {
super(api);
@@ -192,14 +193,9 @@ public class ClassNode extends ClassVisitor {
// ------------------------------------------------------------------------
@Override
- public void visit(
- final int version,
- final int access,
- final String name,
- final String signature,
- final String superName,
- final String[] interfaces)
- {
+ public void visit(final int version, final int access, final String name,
+ final String signature, final String superName,
+ final String[] interfaces) {
this.version = version;
this.access = access;
this.name = name;
@@ -217,21 +213,16 @@ public class ClassNode extends ClassVisitor {
}
@Override
- public void visitOuterClass(
- final String owner,
- final String name,
- final String desc)
- {
+ public void visitOuterClass(final String owner, final String name,
+ final String desc) {
outerClass = owner;
outerMethod = name;
outerMethodDesc = desc;
}
@Override
- public AnnotationVisitor visitAnnotation(
- final String desc,
- final boolean visible)
- {
+ public AnnotationVisitor visitAnnotation(final String desc,
+ final boolean visible) {
AnnotationNode an = new AnnotationNode(desc);
if (visible) {
if (visibleAnnotations == null) {
@@ -256,44 +247,25 @@ public class ClassNode extends ClassVisitor {
}
@Override
- public void visitInnerClass(
- final String name,
- final String outerName,
- final String innerName,
- final int access)
- {
- InnerClassNode icn = new InnerClassNode(name,
- outerName,
- innerName,
+ public void visitInnerClass(final String name, final String outerName,
+ final String innerName, final int access) {
+ InnerClassNode icn = new InnerClassNode(name, outerName, innerName,
access);
innerClasses.add(icn);
}
@Override
- public FieldVisitor visitField(
- final int access,
- final String name,
- final String desc,
- final String signature,
- final Object value)
- {
+ public FieldVisitor visitField(final int access, final String name,
+ final String desc, final String signature, final Object value) {
FieldNode fn = new FieldNode(access, name, desc, signature, value);
fields.add(fn);
return fn;
}
@Override
- public MethodVisitor visitMethod(
- final int access,
- final String name,
- final String desc,
- final String signature,
- final String[] exceptions)
- {
- MethodNode mn = new MethodNode(access,
- name,
- desc,
- signature,
+ public MethodVisitor visitMethod(final int access, final String name,
+ final String desc, final String signature, final String[] exceptions) {
+ MethodNode mn = new MethodNode(access, name, desc, signature,
exceptions);
methods.add(mn);
return mn;
@@ -313,7 +285,8 @@ public class ClassNode extends ClassVisitor {
* contain elements that were introduced in more recent versions of the ASM
* API than the given version.
*
- * @param api an ASM API version. Must be one of {@link Opcodes#ASM4}.
+ * @param api
+ * an ASM API version. Must be one of {@link Opcodes#ASM4}.
*/
public void check(final int api) {
// nothing to do
@@ -322,7 +295,8 @@ public class ClassNode extends ClassVisitor {
/**
* Makes the given class visitor visit this class.
*
- * @param cv a class visitor.
+ * @param cv
+ * a class visitor.
*/
public void accept(final ClassVisitor cv) {
// visits header
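To show the ClassNode fields touched throughout this file in use, a read-only sketch that loads a class from the classpath into the tree model; ClassReader's String constructor and the accept(ClassVisitor, int) entry point are assumed to match upstream ASM.

import java.io.IOException;
import scala.tools.asm.ClassReader;
import scala.tools.asm.tree.ClassNode;
import scala.tools.asm.tree.MethodNode;

public class ClassNodeDemo {
    public static void main(String[] args) throws IOException {
        ClassNode cn = new ClassNode();
        // Populate the tree by replaying the class file events into the node.
        new ClassReader("java.lang.Runnable").accept(cn, 0);

        System.out.println(cn.name + " extends " + cn.superName);
        for (MethodNode mn : cn.methods) {
            System.out.println("  " + mn.name + mn.desc);
        }
    }
}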
diff --git a/src/asm/scala/tools/asm/tree/FieldInsnNode.java b/src/asm/scala/tools/asm/tree/FieldInsnNode.java
index 6b7a6a142a..0c94f18adf 100644
--- a/src/asm/scala/tools/asm/tree/FieldInsnNode.java
+++ b/src/asm/scala/tools/asm/tree/FieldInsnNode.java
@@ -43,7 +43,7 @@ public class FieldInsnNode extends AbstractInsnNode {
/**
* The internal name of the field's owner class (see
- * {@link org.objectweb.asm.Type#getInternalName() getInternalName}).
+ * {@link scala.tools.asm.Type#getInternalName() getInternalName}).
*/
public String owner;
@@ -53,26 +53,27 @@ public class FieldInsnNode extends AbstractInsnNode {
public String name;
/**
- * The field's descriptor (see {@link org.objectweb.asm.Type}).
+ * The field's descriptor (see {@link scala.tools.asm.Type}).
*/
public String desc;
/**
* Constructs a new {@link FieldInsnNode}.
*
- * @param opcode the opcode of the type instruction to be constructed. This
- * opcode must be GETSTATIC, PUTSTATIC, GETFIELD or PUTFIELD.
- * @param owner the internal name of the field's owner class (see
- * {@link org.objectweb.asm.Type#getInternalName() getInternalName}).
- * @param name the field's name.
- * @param desc the field's descriptor (see {@link org.objectweb.asm.Type}).
+ * @param opcode
+ * the opcode of the type instruction to be constructed. This
+ * opcode must be GETSTATIC, PUTSTATIC, GETFIELD or PUTFIELD.
+ * @param owner
+ * the internal name of the field's owner class (see
+ * {@link scala.tools.asm.Type#getInternalName()
+ * getInternalName}).
+ * @param name
+ * the field's name.
+ * @param desc
+ * the field's descriptor (see {@link scala.tools.asm.Type}).
*/
- public FieldInsnNode(
- final int opcode,
- final String owner,
- final String name,
- final String desc)
- {
+ public FieldInsnNode(final int opcode, final String owner,
+ final String name, final String desc) {
super(opcode);
this.owner = owner;
this.name = name;
@@ -82,8 +83,9 @@ public class FieldInsnNode extends AbstractInsnNode {
/**
* Sets the opcode of this instruction.
*
- * @param opcode the new instruction opcode. This opcode must be GETSTATIC,
- * PUTSTATIC, GETFIELD or PUTFIELD.
+ * @param opcode
+ * the new instruction opcode. This opcode must be GETSTATIC,
+ * PUTSTATIC, GETFIELD or PUTFIELD.
*/
public void setOpcode(final int opcode) {
this.opcode = opcode;
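A short sketch of the constructor reformatted above, emitting a GETSTATIC for System.out; the opcode constants come from Opcodes as usual.

import scala.tools.asm.Opcodes;
import scala.tools.asm.tree.FieldInsnNode;

public class FieldInsnDemo {
    public static void main(String[] args) {
        FieldInsnNode getOut = new FieldInsnNode(
                Opcodes.GETSTATIC,        // opcode
                "java/lang/System",       // owner (internal name)
                "out",                    // field name
                "Ljava/io/PrintStream;"); // field descriptor
        System.out.println(getOut.owner + "." + getOut.name + " : " + getOut.desc);
    }
}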
diff --git a/src/asm/scala/tools/asm/tree/FieldNode.java b/src/asm/scala/tools/asm/tree/FieldNode.java
index 9a1e17033c..61b614ec59 100644
--- a/src/asm/scala/tools/asm/tree/FieldNode.java
+++ b/src/asm/scala/tools/asm/tree/FieldNode.java
@@ -46,7 +46,7 @@ import scala.tools.asm.Opcodes;
public class FieldNode extends FieldVisitor {
/**
- * The field's access flags (see {@link org.objectweb.asm.Opcodes}). This
+ * The field's access flags (see {@link scala.tools.asm.Opcodes}). This
* field also indicates if the field is synthetic and/or deprecated.
*/
public int access;
@@ -57,7 +57,7 @@ public class FieldNode extends FieldVisitor {
public String name;
/**
- * The field's descriptor (see {@link org.objectweb.asm.Type}).
+ * The field's descriptor (see {@link scala.tools.asm.Type}).
*/
public String desc;
@@ -67,8 +67,8 @@ public class FieldNode extends FieldVisitor {
public String signature;
/**
- * The field's initial value. This field, which may be <tt>null</tt> if
- * the field does not have an initial value, must be an {@link Integer}, a
+ * The field's initial value. This field, which may be <tt>null</tt> if the
+ * field does not have an initial value, must be an {@link Integer}, a
* {@link Float}, a {@link Long}, a {@link Double} or a {@link String}.
*/
public Object value;
@@ -77,7 +77,7 @@ public class FieldNode extends FieldVisitor {
* The runtime visible annotations of this field. This list is a list of
* {@link AnnotationNode} objects. May be <tt>null</tt>.
*
- * @associates org.objectweb.asm.tree.AnnotationNode
+ * @associates scala.tools.asm.tree.AnnotationNode
* @label visible
*/
public List<AnnotationNode> visibleAnnotations;
@@ -86,7 +86,7 @@ public class FieldNode extends FieldVisitor {
* The runtime invisible annotations of this field. This list is a list of
* {@link AnnotationNode} objects. May be <tt>null</tt>.
*
- * @associates org.objectweb.asm.tree.AnnotationNode
+ * @associates scala.tools.asm.tree.AnnotationNode
* @label invisible
*/
public List<AnnotationNode> invisibleAnnotations;
@@ -95,7 +95,7 @@ public class FieldNode extends FieldVisitor {
* The non standard attributes of this field. This list is a list of
* {@link Attribute} objects. May be <tt>null</tt>.
*
- * @associates org.objectweb.asm.Attribute
+ * @associates scala.tools.asm.Attribute
*/
public List<Attribute> attrs;
@@ -104,25 +104,25 @@ public class FieldNode extends FieldVisitor {
* constructor</i>. Instead, they must use the
* {@link #FieldNode(int, int, String, String, String, Object)} version.
*
- * @param access the field's access flags (see
- * {@link org.objectweb.asm.Opcodes}). This parameter also indicates
- * if the field is synthetic and/or deprecated.
- * @param name the field's name.
- * @param desc the field's descriptor (see {@link org.objectweb.asm.Type
- * Type}).
- * @param signature the field's signature.
- * @param value the field's initial value. This parameter, which may be
- * <tt>null</tt> if the field does not have an initial value, must be
- * an {@link Integer}, a {@link Float}, a {@link Long}, a
- * {@link Double} or a {@link String}.
+ * @param access
+ * the field's access flags (see
+ * {@link scala.tools.asm.Opcodes}). This parameter also
+ * indicates if the field is synthetic and/or deprecated.
+ * @param name
+ * the field's name.
+ * @param desc
+ * the field's descriptor (see {@link scala.tools.asm.Type
+ * Type}).
+ * @param signature
+ * the field's signature.
+ * @param value
+ * the field's initial value. This parameter, which may be
+ * <tt>null</tt> if the field does not have an initial value,
+ * must be an {@link Integer}, a {@link Float}, a {@link Long}, a
+ * {@link Double} or a {@link String}.
*/
- public FieldNode(
- final int access,
- final String name,
- final String desc,
- final String signature,
- final Object value)
- {
+ public FieldNode(final int access, final String name, final String desc,
+ final String signature, final Object value) {
this(Opcodes.ASM4, access, name, desc, signature, value);
}
@@ -131,28 +131,28 @@ public class FieldNode extends FieldVisitor {
* constructor</i>. Instead, they must use the
* {@link #FieldNode(int, int, String, String, String, Object)} version.
*
- * @param api the ASM API version implemented by this visitor. Must be one
- * of {@link Opcodes#ASM4}.
- * @param access the field's access flags (see
- * {@link org.objectweb.asm.Opcodes}). This parameter also indicates
- * if the field is synthetic and/or deprecated.
- * @param name the field's name.
- * @param desc the field's descriptor (see {@link org.objectweb.asm.Type
- * Type}).
- * @param signature the field's signature.
- * @param value the field's initial value. This parameter, which may be
- * <tt>null</tt> if the field does not have an initial value, must be
- * an {@link Integer}, a {@link Float}, a {@link Long}, a
- * {@link Double} or a {@link String}.
+ * @param api
+ * the ASM API version implemented by this visitor. Must be one
+ * of {@link Opcodes#ASM4}.
+ * @param access
+ * the field's access flags (see
+ * {@link scala.tools.asm.Opcodes}). This parameter also
+ * indicates if the field is synthetic and/or deprecated.
+ * @param name
+ * the field's name.
+ * @param desc
+ * the field's descriptor (see {@link scala.tools.asm.Type
+ * Type}).
+ * @param signature
+ * the field's signature.
+ * @param value
+ * the field's initial value. This parameter, which may be
+ * <tt>null</tt> if the field does not have an initial value,
+ * must be an {@link Integer}, a {@link Float}, a {@link Long}, a
+ * {@link Double} or a {@link String}.
*/
- public FieldNode(
- final int api,
- final int access,
- final String name,
- final String desc,
- final String signature,
- final Object value)
- {
+ public FieldNode(final int api, final int access, final String name,
+ final String desc, final String signature, final Object value) {
super(api);
this.access = access;
this.name = name;
@@ -166,10 +166,8 @@ public class FieldNode extends FieldVisitor {
// ------------------------------------------------------------------------
@Override
- public AnnotationVisitor visitAnnotation(
- final String desc,
- final boolean visible)
- {
+ public AnnotationVisitor visitAnnotation(final String desc,
+ final boolean visible) {
AnnotationNode an = new AnnotationNode(desc);
if (visible) {
if (visibleAnnotations == null) {
@@ -207,7 +205,8 @@ public class FieldNode extends FieldVisitor {
* contain elements that were introduced in more recent versions of the ASM
* API than the given version.
*
- * @param api an ASM API version. Must be one of {@link Opcodes#ASM4}.
+ * @param api
+ * an ASM API version. Must be one of {@link Opcodes#ASM4}.
*/
public void check(final int api) {
// nothing to do
@@ -216,7 +215,8 @@ public class FieldNode extends FieldVisitor {
/**
* Makes the given class visitor visit this field.
*
- * @param cv a class visitor.
+ * @param cv
+ * a class visitor.
*/
public void accept(final ClassVisitor cv) {
FieldVisitor fv = cv.visitField(access, name, desc, signature, value);
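A sketch of the five-argument constructor documented above, creating a constant field and attaching it to a ClassNode; the access flags and names are illustrative only.

import scala.tools.asm.Opcodes;
import scala.tools.asm.tree.ClassNode;
import scala.tools.asm.tree.FieldNode;

public class FieldNodeDemo {
    public static void main(String[] args) {
        FieldNode constant = new FieldNode(
                Opcodes.ACC_PUBLIC | Opcodes.ACC_STATIC | Opcodes.ACC_FINAL,
                "MAX_RETRIES",       // field name (illustrative)
                "I",                 // descriptor: int
                null,                // no generic signature
                Integer.valueOf(3)); // initial value
        ClassNode cn = new ClassNode(Opcodes.ASM4);
        cn.fields.add(constant);
        System.out.println(cn.fields.size()); // 1
    }
}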
diff --git a/src/asm/scala/tools/asm/tree/FrameNode.java b/src/asm/scala/tools/asm/tree/FrameNode.java
index 66825de0ac..f13fc66749 100644
--- a/src/asm/scala/tools/asm/tree/FrameNode.java
+++ b/src/asm/scala/tools/asm/tree/FrameNode.java
@@ -45,8 +45,9 @@ import scala.tools.asm.Opcodes;
* the target of a jump instruction, or that starts an exception handler block.
* The stack map frame types must describe the values of the local variables and
* of the operand stack elements <i>just before</i> <b>i</b> is executed. <br>
- * <br> (*) this is mandatory only for classes whose version is greater than or
- * equal to {@link Opcodes#V1_6 V1_6}.
+ * <br>
+ * (*) this is mandatory only for classes whose version is greater than or equal
+ * to {@link Opcodes#V1_6 V1_6}.
*
* @author Eric Bruneton
*/
@@ -83,48 +84,48 @@ public class FrameNode extends AbstractInsnNode {
/**
* Constructs a new {@link FrameNode}.
*
- * @param type the type of this frame. Must be {@link Opcodes#F_NEW} for
- * expanded frames, or {@link Opcodes#F_FULL},
- * {@link Opcodes#F_APPEND}, {@link Opcodes#F_CHOP},
- * {@link Opcodes#F_SAME} or {@link Opcodes#F_APPEND},
- * {@link Opcodes#F_SAME1} for compressed frames.
- * @param nLocal number of local variables of this stack map frame.
- * @param local the types of the local variables of this stack map frame.
- * Elements of this list can be Integer, String or LabelNode objects
- * (for primitive, reference and uninitialized types respectively -
- * see {@link MethodVisitor}).
- * @param nStack number of operand stack elements of this stack map frame.
- * @param stack the types of the operand stack elements of this stack map
- * frame. Elements of this list can be Integer, String or LabelNode
- * objects (for primitive, reference and uninitialized types
- * respectively - see {@link MethodVisitor}).
+ * @param type
+ * the type of this frame. Must be {@link Opcodes#F_NEW} for
+ * expanded frames, or {@link Opcodes#F_FULL},
+ * {@link Opcodes#F_APPEND}, {@link Opcodes#F_CHOP},
+ * {@link Opcodes#F_SAME} or {@link Opcodes#F_SAME1} for
+ * compressed frames.
+ * @param nLocal
+ * number of local variables of this stack map frame.
+ * @param local
+ * the types of the local variables of this stack map frame.
+ * Elements of this list can be Integer, String or LabelNode
+ * objects (for primitive, reference and uninitialized types
+ * respectively - see {@link MethodVisitor}).
+ * @param nStack
+ * number of operand stack elements of this stack map frame.
+ * @param stack
+ * the types of the operand stack elements of this stack map
+ * frame. Elements of this list can be Integer, String or
+ * LabelNode objects (for primitive, reference and uninitialized
+ * types respectively - see {@link MethodVisitor}).
*/
- public FrameNode(
- final int type,
- final int nLocal,
- final Object[] local,
- final int nStack,
- final Object[] stack)
- {
+ public FrameNode(final int type, final int nLocal, final Object[] local,
+ final int nStack, final Object[] stack) {
super(-1);
this.type = type;
switch (type) {
- case Opcodes.F_NEW:
- case Opcodes.F_FULL:
- this.local = asList(nLocal, local);
- this.stack = asList(nStack, stack);
- break;
- case Opcodes.F_APPEND:
- this.local = asList(nLocal, local);
- break;
- case Opcodes.F_CHOP:
- this.local = Arrays.asList(new Object[nLocal]);
- break;
- case Opcodes.F_SAME:
- break;
- case Opcodes.F_SAME1:
- this.stack = asList(1, stack);
- break;
+ case Opcodes.F_NEW:
+ case Opcodes.F_FULL:
+ this.local = asList(nLocal, local);
+ this.stack = asList(nStack, stack);
+ break;
+ case Opcodes.F_APPEND:
+ this.local = asList(nLocal, local);
+ break;
+ case Opcodes.F_CHOP:
+ this.local = Arrays.asList(new Object[nLocal]);
+ break;
+ case Opcodes.F_SAME:
+ break;
+ case Opcodes.F_SAME1:
+ this.stack = asList(1, stack);
+ break;
}
}
@@ -136,31 +137,29 @@ public class FrameNode extends AbstractInsnNode {
/**
* Makes the given visitor visit this stack map frame.
*
- * @param mv a method visitor.
+ * @param mv
+ * a method visitor.
*/
@Override
public void accept(final MethodVisitor mv) {
switch (type) {
- case Opcodes.F_NEW:
- case Opcodes.F_FULL:
- mv.visitFrame(type,
- local.size(),
- asArray(local),
- stack.size(),
- asArray(stack));
- break;
- case Opcodes.F_APPEND:
- mv.visitFrame(type, local.size(), asArray(local), 0, null);
- break;
- case Opcodes.F_CHOP:
- mv.visitFrame(type, local.size(), null, 0, null);
- break;
- case Opcodes.F_SAME:
- mv.visitFrame(type, 0, null, 0, null);
- break;
- case Opcodes.F_SAME1:
- mv.visitFrame(type, 0, null, 1, asArray(stack));
- break;
+ case Opcodes.F_NEW:
+ case Opcodes.F_FULL:
+ mv.visitFrame(type, local.size(), asArray(local), stack.size(),
+ asArray(stack));
+ break;
+ case Opcodes.F_APPEND:
+ mv.visitFrame(type, local.size(), asArray(local), 0, null);
+ break;
+ case Opcodes.F_CHOP:
+ mv.visitFrame(type, local.size(), null, 0, null);
+ break;
+ case Opcodes.F_SAME:
+ mv.visitFrame(type, 0, null, 0, null);
+ break;
+ case Opcodes.F_SAME1:
+ mv.visitFrame(type, 0, null, 1, asArray(stack));
+ break;
}
}
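For the compressed frame kinds handled in the switch above, a sketch constructing an F_APPEND frame (one extra int local) and an F_SAME frame; the Integer/String/LabelNode element encoding follows the MethodVisitor contract referenced in the javadoc.

import scala.tools.asm.Opcodes;
import scala.tools.asm.tree.FrameNode;

public class FrameNodeDemo {
    public static void main(String[] args) {
        // F_APPEND: one additional local of type int, stack unchanged.
        FrameNode append = new FrameNode(Opcodes.F_APPEND, 1,
                new Object[] { Opcodes.INTEGER }, 0, null);

        // F_SAME: same locals as the previous frame, empty stack.
        FrameNode same = new FrameNode(Opcodes.F_SAME, 0, null, 0, null);

        System.out.println(append.local);                // [1] (Opcodes.INTEGER)
        System.out.println(same.type == Opcodes.F_SAME); // true
    }
}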
diff --git a/src/asm/scala/tools/asm/tree/IincInsnNode.java b/src/asm/scala/tools/asm/tree/IincInsnNode.java
index 75ac40884d..f9adf2e38c 100644
--- a/src/asm/scala/tools/asm/tree/IincInsnNode.java
+++ b/src/asm/scala/tools/asm/tree/IincInsnNode.java
@@ -54,8 +54,10 @@ public class IincInsnNode extends AbstractInsnNode {
/**
* Constructs a new {@link IincInsnNode}.
*
- * @param var index of the local variable to be incremented.
- * @param incr increment amount to increment the local variable by.
+ * @param var
+ * index of the local variable to be incremented.
+ * @param incr
+ * increment amount to increment the local variable by.
*/
public IincInsnNode(final int var, final int incr) {
super(Opcodes.IINC);
@@ -77,4 +79,4 @@ public class IincInsnNode extends AbstractInsnNode {
public AbstractInsnNode clone(final Map<LabelNode, LabelNode> labels) {
return new IincInsnNode(var, incr);
}
-}
\ No newline at end of file
+}
diff --git a/src/asm/scala/tools/asm/tree/InnerClassNode.java b/src/asm/scala/tools/asm/tree/InnerClassNode.java
index 4579488921..aa3810c759 100644
--- a/src/asm/scala/tools/asm/tree/InnerClassNode.java
+++ b/src/asm/scala/tools/asm/tree/InnerClassNode.java
@@ -40,14 +40,14 @@ public class InnerClassNode {
/**
* The internal name of an inner class (see
- * {@link org.objectweb.asm.Type#getInternalName() getInternalName}).
+ * {@link scala.tools.asm.Type#getInternalName() getInternalName}).
*/
public String name;
/**
* The internal name of the class to which the inner class belongs (see
- * {@link org.objectweb.asm.Type#getInternalName() getInternalName}). May
- * be <tt>null</tt>.
+ * {@link scala.tools.asm.Type#getInternalName() getInternalName}). May be
+ * <tt>null</tt>.
*/
public String outerName;
@@ -66,24 +66,23 @@ public class InnerClassNode {
/**
* Constructs a new {@link InnerClassNode}.
*
- * @param name the internal name of an inner class (see
- * {@link org.objectweb.asm.Type#getInternalName() getInternalName}).
- * @param outerName the internal name of the class to which the inner class
- * belongs (see
- * {@link org.objectweb.asm.Type#getInternalName() getInternalName}).
- * May be <tt>null</tt>.
- * @param innerName the (simple) name of the inner class inside its
- * enclosing class. May be <tt>null</tt> for anonymous inner
- * classes.
- * @param access the access flags of the inner class as originally declared
- * in the enclosing class.
+ * @param name
+ * the internal name of an inner class (see
+ * {@link scala.tools.asm.Type#getInternalName()
+ * getInternalName}).
+ * @param outerName
+ * the internal name of the class to which the inner class
+ * belongs (see {@link scala.tools.asm.Type#getInternalName()
+ * getInternalName}). May be <tt>null</tt>.
+ * @param innerName
+ * the (simple) name of the inner class inside its enclosing
+ * class. May be <tt>null</tt> for anonymous inner classes.
+ * @param access
+ * the access flags of the inner class as originally declared in
+ * the enclosing class.
*/
- public InnerClassNode(
- final String name,
- final String outerName,
- final String innerName,
- final int access)
- {
+ public InnerClassNode(final String name, final String outerName,
+ final String innerName, final int access) {
this.name = name;
this.outerName = outerName;
this.innerName = innerName;
@@ -93,7 +92,8 @@ public class InnerClassNode {
/**
* Makes the given class visitor visit this inner class.
*
- * @param cv a class visitor.
+ * @param cv
+ * a class visitor.
*/
public void accept(final ClassVisitor cv) {
cv.visitInnerClass(name, outerName, innerName, access);
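A short sketch of the four-argument constructor above, recording a public member class on a ClassNode; the names are illustrative.

import scala.tools.asm.Opcodes;
import scala.tools.asm.tree.ClassNode;
import scala.tools.asm.tree.InnerClassNode;

public class InnerClassNodeDemo {
    public static void main(String[] args) {
        ClassNode cn = new ClassNode(Opcodes.ASM4);
        cn.innerClasses.add(new InnerClassNode(
                "com/example/Outer$Inner", // internal name of the inner class (illustrative)
                "com/example/Outer",       // enclosing class, may be null
                "Inner",                   // simple name, null for anonymous classes
                Opcodes.ACC_PUBLIC));
        System.out.println(cn.innerClasses.size()); // 1
    }
}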
diff --git a/src/asm/scala/tools/asm/tree/InsnList.java b/src/asm/scala/tools/asm/tree/InsnList.java
index dedd3bba73..b1e2d97c6f 100644
--- a/src/asm/scala/tools/asm/tree/InsnList.java
+++ b/src/asm/scala/tools/asm/tree/InsnList.java
@@ -73,8 +73,8 @@ public class InsnList {
/**
* Returns the first instruction in this list.
*
- * @return the first instruction in this list, or <tt>null</tt> if the
- * list is empty.
+ * @return the first instruction in this list, or <tt>null</tt> if the list
+ * is empty.
*/
public AbstractInsnNode getFirst() {
return first;
@@ -96,9 +96,11 @@ public class InsnList {
* time it is called. Once the cache is built, this method runs in constant
* time. This cache is invalidated by all the methods that modify the list.
*
- * @param index the index of the instruction that must be returned.
+ * @param index
+ * the index of the instruction that must be returned.
* @return the instruction whose index is given.
- * @throws IndexOutOfBoundsException if (index < 0 || index >= size()).
+ * @throws IndexOutOfBoundsException
+ * if (index < 0 || index >= size()).
*/
public AbstractInsnNode get(final int index) {
if (index < 0 || index >= size) {
@@ -111,11 +113,12 @@ public class InsnList {
}
/**
- * Returns <tt>true</tt> if the given instruction belongs to this list.
- * This method always scans the instructions of this list until it finds the
+ * Returns <tt>true</tt> if the given instruction belongs to this list. This
+ * method always scans the instructions of this list until it finds the
* given instruction or reaches the end of the list.
*
- * @param insn an instruction.
+ * @param insn
+ * an instruction.
* @return <tt>true</tt> if the given instruction belongs to this list.
*/
public boolean contains(final AbstractInsnNode insn) {
@@ -133,7 +136,8 @@ public class InsnList {
* constant time. The cache is invalidated by all the methods that modify
* the list.
*
- * @param insn an instruction <i>of this list</i>.
+ * @param insn
+ * an instruction <i>of this list</i>.
* @return the index of the given instruction in this list. <i>The result of
* this method is undefined if the given instruction does not belong
* to this list</i>. Use {@link #contains contains} to test if an
@@ -149,7 +153,8 @@ public class InsnList {
/**
* Makes the given visitor visit all of the instructions in this list.
*
- * @param mv the method visitor that must visit the instructions.
+ * @param mv
+ * the method visitor that must visit the instructions.
*/
public void accept(final MethodVisitor mv) {
AbstractInsnNode insn = first;
@@ -198,9 +203,11 @@ public class InsnList {
/**
* Replaces an instruction of this list with another instruction.
*
- * @param location an instruction <i>of this list</i>.
- * @param insn another instruction, <i>which must not belong to any
- * {@link InsnList}</i>.
+ * @param location
+ * an instruction <i>of this list</i>.
+ * @param insn
+ * another instruction, <i>which must not belong to any
+ * {@link InsnList}</i>.
*/
public void set(final AbstractInsnNode location, final AbstractInsnNode insn) {
AbstractInsnNode next = location.next;
@@ -232,10 +239,19 @@ public class InsnList {
/**
* Adds the given instruction to the end of this list.
*
- * @param insn an instruction, <i>which must not belong to any
- * {@link InsnList}</i>.
+ * @param insn
+ * an instruction, <i>which must not belong to any
+ * {@link InsnList}</i>.
*/
public void add(final AbstractInsnNode insn) {
+ if (insn.prev != null || insn.next != null) {
+ // Adding an instruction that still refers to others (in the same or another InsnList) leads to hard-to-debug bugs.
+ // Initially everything may look OK (e.g. iteration follows `next`, so a stale `prev` isn't noticed).
+ // However, a stale link throws the doubly-linked list into disarray, e.g. upon removing an element,
+ // which results in the `next` of a stale `prev` being updated, among other failure scenarios.
+ // Better to fail early.
+ throw new RuntimeException("Instruction " + insn + " already belongs to some InsnList.");
+ }
++size;
if (last == null) {
first = insn;
@@ -252,8 +268,9 @@ public class InsnList {
/**
* Adds the given instructions to the end of this list.
*
- * @param insns an instruction list, which is cleared during the process.
- * This list must be different from 'this'.
+ * @param insns
+ * an instruction list, which is cleared during the process. This
+ * list must be different from 'this'.
*/
public void add(final InsnList insns) {
if (insns.size == 0) {
@@ -276,8 +293,9 @@ public class InsnList {
/**
* Inserts the given instruction at the beginning of this list.
*
- * @param insn an instruction, <i>which must not belong to any
- * {@link InsnList}</i>.
+ * @param insn
+ * an instruction, <i>which must not belong to any
+ * {@link InsnList}</i>.
*/
public void insert(final AbstractInsnNode insn) {
++size;
@@ -296,8 +314,9 @@ public class InsnList {
/**
* Inserts the given instructions at the beginning of this list.
*
- * @param insns an instruction list, which is cleared during the process.
- * This list must be different from 'this'.
+ * @param insns
+ * an instruction list, which is cleared during the process. This
+ * list must be different from 'this'.
*/
public void insert(final InsnList insns) {
if (insns.size == 0) {
@@ -320,12 +339,15 @@ public class InsnList {
/**
* Inserts the given instruction after the specified instruction.
*
- * @param location an instruction <i>of this list</i> after which insn must be
- * inserted.
- * @param insn the instruction to be inserted, <i>which must not belong to
- * any {@link InsnList}</i>.
+ * @param location
+ * an instruction <i>of this list</i> after which insn must be
+ * inserted.
+ * @param insn
+ * the instruction to be inserted, <i>which must not belong to
+ * any {@link InsnList}</i>.
*/
- public void insert(final AbstractInsnNode location, final AbstractInsnNode insn) {
+ public void insert(final AbstractInsnNode location,
+ final AbstractInsnNode insn) {
++size;
AbstractInsnNode next = location.next;
if (next == null) {
@@ -343,10 +365,12 @@ public class InsnList {
/**
* Inserts the given instructions after the specified instruction.
*
- * @param location an instruction <i>of this list</i> after which the
- * instructions must be inserted.
- * @param insns the instruction list to be inserted, which is cleared during
- * the process. This list must be different from 'this'.
+ * @param location
+ * an instruction <i>of this list</i> after which the
+ * instructions must be inserted.
+ * @param insns
+ * the instruction list to be inserted, which is cleared during
+ * the process. This list must be different from 'this'.
*/
public void insert(final AbstractInsnNode location, final InsnList insns) {
if (insns.size == 0) {
@@ -371,12 +395,15 @@ public class InsnList {
/**
* Inserts the given instruction before the specified instruction.
*
- * @param location an instruction <i>of this list</i> before which insn must be
- * inserted.
- * @param insn the instruction to be inserted, <i>which must not belong to
- * any {@link InsnList}</i>.
+ * @param location
+ * an instruction <i>of this list</i> before which insn must be
+ * inserted.
+ * @param insn
+ * the instruction to be inserted, <i>which must not belong to
+ * any {@link InsnList}</i>.
*/
- public void insertBefore(final AbstractInsnNode location, final AbstractInsnNode insn) {
+ public void insertBefore(final AbstractInsnNode location,
+ final AbstractInsnNode insn) {
++size;
AbstractInsnNode prev = location.prev;
if (prev == null) {
@@ -394,37 +421,39 @@ public class InsnList {
/**
* Inserts the given instructions before the specified instruction.
*
- * @param location an instruction <i>of this list</i> before which the instructions
- * must be inserted.
- * @param insns the instruction list to be inserted, which is cleared during
- * the process. This list must be different from 'this'.
+ * @param location
+ * an instruction <i>of this list</i> before which the
+ * instructions must be inserted.
+ * @param insns
+ * the instruction list to be inserted, which is cleared during
+ * the process. This list must be different from 'this'.
*/
- public void insertBefore(final AbstractInsnNode location, final InsnList insns) {
+ public void insertBefore(final AbstractInsnNode location,
+ final InsnList insns) {
if (insns.size == 0) {
return;
}
size += insns.size;
AbstractInsnNode ifirst = insns.first;
AbstractInsnNode ilast = insns.last;
- AbstractInsnNode prev = location .prev;
+ AbstractInsnNode prev = location.prev;
if (prev == null) {
first = ifirst;
} else {
prev.next = ifirst;
}
- location .prev = ilast;
- ilast.next = location ;
+ location.prev = ilast;
+ ilast.next = location;
ifirst.prev = prev;
cache = null;
insns.removeAll(false);
}
-
-
/**
* Removes the given instruction from this list.
*
- * @param insn the instruction <i>of this list</i> that must be removed.
+ * @param insn
+ * the instruction <i>of this list</i> that must be removed.
*/
public void remove(final AbstractInsnNode insn) {
--size;
@@ -456,8 +485,9 @@ public class InsnList {
/**
* Removes all of the instructions of this list.
*
- * @param mark if the instructions must be marked as no longer belonging to
- * any {@link InsnList}.
+ * @param mark
+ * if the instructions must be marked as no longer belonging to
+ * any {@link InsnList}.
*/
void removeAll(final boolean mark) {
if (mark) {
@@ -499,14 +529,14 @@ public class InsnList {
}
// this class is not generified because it will create bridges
- private final class InsnListIterator implements ListIterator/*<AbstractInsnNode>*/ {
+ private final class InsnListIterator implements ListIterator {
AbstractInsnNode next;
AbstractInsnNode prev;
InsnListIterator(int index) {
- if(index==size()) {
+ if (index == size()) {
next = null;
prev = getLast();
} else {
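
To make the InsnList splicing contracts above concrete, a minimal sketch against the repackaged scala.tools.asm tree API; the class name InsnListDemo and the chosen opcodes are illustrative, not part of this commit:

import scala.tools.asm.Opcodes;
import scala.tools.asm.tree.InsnList;
import scala.tools.asm.tree.InsnNode;
import scala.tools.asm.tree.VarInsnNode;

public class InsnListDemo {
    public static void main(String[] args) {
        InsnList list = new InsnList();
        InsnNode ret = new InsnNode(Opcodes.RETURN);
        list.add(ret);

        // insertBefore: 'ret' must already belong to 'list', and the new node
        // must not belong to any InsnList yet.
        list.insertBefore(ret, new VarInsnNode(Opcodes.ALOAD, 0));

        // insert(location, insns): the donor list is spliced in after 'location'
        // and cleared in the process; it must be a different list from 'list'.
        InsnList donor = new InsnList();
        donor.add(new InsnNode(Opcodes.NOP));
        list.insert(list.getFirst(), donor);

        // prints: 3 instructions, donor now holds 0
        System.out.println(list.size() + " instructions, donor now holds " + donor.size());
    }
}
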
diff --git a/src/asm/scala/tools/asm/tree/InsnNode.java b/src/asm/scala/tools/asm/tree/InsnNode.java
index d4664d23c2..4d5288cafa 100644
--- a/src/asm/scala/tools/asm/tree/InsnNode.java
+++ b/src/asm/scala/tools/asm/tree/InsnNode.java
@@ -43,20 +43,22 @@ public class InsnNode extends AbstractInsnNode {
/**
* Constructs a new {@link InsnNode}.
*
- * @param opcode the opcode of the instruction to be constructed. This
- * opcode must be NOP, ACONST_NULL, ICONST_M1, ICONST_0, ICONST_1,
- * ICONST_2, ICONST_3, ICONST_4, ICONST_5, LCONST_0, LCONST_1,
- * FCONST_0, FCONST_1, FCONST_2, DCONST_0, DCONST_1, IALOAD, LALOAD,
- * FALOAD, DALOAD, AALOAD, BALOAD, CALOAD, SALOAD, IASTORE, LASTORE,
- * FASTORE, DASTORE, AASTORE, BASTORE, CASTORE, SASTORE, POP, POP2,
- * DUP, DUP_X1, DUP_X2, DUP2, DUP2_X1, DUP2_X2, SWAP, IADD, LADD,
- * FADD, DADD, ISUB, LSUB, FSUB, DSUB, IMUL, LMUL, FMUL, DMUL, IDIV,
- * LDIV, FDIV, DDIV, IREM, LREM, FREM, DREM, INEG, LNEG, FNEG, DNEG,
- * ISHL, LSHL, ISHR, LSHR, IUSHR, LUSHR, IAND, LAND, IOR, LOR, IXOR,
- * LXOR, I2L, I2F, I2D, L2I, L2F, L2D, F2I, F2L, F2D, D2I, D2L, D2F,
- * I2B, I2C, I2S, LCMP, FCMPL, FCMPG, DCMPL, DCMPG, IRETURN, LRETURN,
- * FRETURN, DRETURN, ARETURN, RETURN, ARRAYLENGTH, ATHROW,
- * MONITORENTER, or MONITOREXIT.
+ * @param opcode
+ * the opcode of the instruction to be constructed. This opcode
+ * must be NOP, ACONST_NULL, ICONST_M1, ICONST_0, ICONST_1,
+ * ICONST_2, ICONST_3, ICONST_4, ICONST_5, LCONST_0, LCONST_1,
+ * FCONST_0, FCONST_1, FCONST_2, DCONST_0, DCONST_1, IALOAD,
+ * LALOAD, FALOAD, DALOAD, AALOAD, BALOAD, CALOAD, SALOAD,
+ * IASTORE, LASTORE, FASTORE, DASTORE, AASTORE, BASTORE, CASTORE,
+ * SASTORE, POP, POP2, DUP, DUP_X1, DUP_X2, DUP2, DUP2_X1,
+ * DUP2_X2, SWAP, IADD, LADD, FADD, DADD, ISUB, LSUB, FSUB, DSUB,
+ * IMUL, LMUL, FMUL, DMUL, IDIV, LDIV, FDIV, DDIV, IREM, LREM,
+ * FREM, DREM, INEG, LNEG, FNEG, DNEG, ISHL, LSHL, ISHR, LSHR,
+ * IUSHR, LUSHR, IAND, LAND, IOR, LOR, IXOR, LXOR, I2L, I2F, I2D,
+ * L2I, L2F, L2D, F2I, F2L, F2D, D2I, D2L, D2F, I2B, I2C, I2S,
+ * LCMP, FCMPL, FCMPG, DCMPL, DCMPG, IRETURN, LRETURN, FRETURN,
+ * DRETURN, ARETURN, RETURN, ARRAYLENGTH, ATHROW, MONITORENTER,
+ * or MONITOREXIT.
*/
public InsnNode(final int opcode) {
super(opcode);
@@ -70,7 +72,8 @@ public class InsnNode extends AbstractInsnNode {
/**
* Makes the given visitor visit this instruction.
*
- * @param mv a method visitor.
+ * @param mv
+ * a method visitor.
*/
@Override
public void accept(final MethodVisitor mv) {
diff --git a/src/asm/scala/tools/asm/tree/IntInsnNode.java b/src/asm/scala/tools/asm/tree/IntInsnNode.java
index b61270c786..e0aeed4bc8 100644
--- a/src/asm/scala/tools/asm/tree/IntInsnNode.java
+++ b/src/asm/scala/tools/asm/tree/IntInsnNode.java
@@ -48,9 +48,11 @@ public class IntInsnNode extends AbstractInsnNode {
/**
* Constructs a new {@link IntInsnNode}.
*
- * @param opcode the opcode of the instruction to be constructed. This
- * opcode must be BIPUSH, SIPUSH or NEWARRAY.
- * @param operand the operand of the instruction to be constructed.
+ * @param opcode
+ * the opcode of the instruction to be constructed. This opcode
+ * must be BIPUSH, SIPUSH or NEWARRAY.
+ * @param operand
+ * the operand of the instruction to be constructed.
*/
public IntInsnNode(final int opcode, final int operand) {
super(opcode);
@@ -60,8 +62,9 @@ public class IntInsnNode extends AbstractInsnNode {
/**
* Sets the opcode of this instruction.
*
- * @param opcode the new instruction opcode. This opcode must be BIPUSH,
- * SIPUSH or NEWARRAY.
+ * @param opcode
+ * the new instruction opcode. This opcode must be BIPUSH, SIPUSH
+ * or NEWARRAY.
*/
public void setOpcode(final int opcode) {
this.opcode = opcode;
diff --git a/src/asm/scala/tools/asm/tree/InvokeDynamicInsnNode.java b/src/asm/scala/tools/asm/tree/InvokeDynamicInsnNode.java
index d993b5a054..7ee84b875b 100644
--- a/src/asm/scala/tools/asm/tree/InvokeDynamicInsnNode.java
+++ b/src/asm/scala/tools/asm/tree/InvokeDynamicInsnNode.java
@@ -65,17 +65,17 @@ public class InvokeDynamicInsnNode extends AbstractInsnNode {
/**
* Constructs a new {@link InvokeDynamicInsnNode}.
*
- * @param name invokedynamic name.
- * @param desc invokedynamic descriptor (see {@link org.objectweb.asm.Type}).
- * @param bsm the bootstrap method.
- * @param bsmArgs the boostrap constant arguments.
+ * @param name
+ * invokedynamic name.
+ * @param desc
+ * invokedynamic descriptor (see {@link scala.tools.asm.Type}).
+ * @param bsm
+ * the bootstrap method.
+ * @param bsmArgs
+     *            the bootstrap constant arguments.
*/
- public InvokeDynamicInsnNode(
- final String name,
- final String desc,
- final Handle bsm,
- final Object... bsmArgs)
- {
+ public InvokeDynamicInsnNode(final String name, final String desc,
+ final Handle bsm, final Object... bsmArgs) {
super(Opcodes.INVOKEDYNAMIC);
this.name = name;
this.desc = desc;
@@ -97,4 +97,4 @@ public class InvokeDynamicInsnNode extends AbstractInsnNode {
public AbstractInsnNode clone(final Map<LabelNode, LabelNode> labels) {
return new InvokeDynamicInsnNode(name, desc, bsm, bsmArgs);
}
-}
\ No newline at end of file
+}
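
A hedged sketch of the reworked InvokeDynamicInsnNode constructor; the bootstrap owner com/example/Bootstraps, the method names, and the descriptors are hypothetical placeholders, and the Handle form assumed here is the four-argument ASM 4 constructor:

import scala.tools.asm.Handle;
import scala.tools.asm.Opcodes;
import scala.tools.asm.tree.InvokeDynamicInsnNode;

public class IndyNodeDemo {
    public static void main(String[] args) {
        // Hypothetical bootstrap method; ASM 4's Handle takes (tag, owner, name, desc).
        Handle bsm = new Handle(
                Opcodes.H_INVOKESTATIC,
                "com/example/Bootstraps",
                "bootstrap",
                "(Ljava/lang/invoke/MethodHandles$Lookup;Ljava/lang/String;"
                        + "Ljava/lang/invoke/MethodType;)Ljava/lang/invoke/CallSite;");
        InvokeDynamicInsnNode indy =
                new InvokeDynamicInsnNode("run", "()Ljava/lang/Runnable;", bsm);
        System.out.println(indy.name + indy.desc + " via " + indy.bsm.getName());
    }
}
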
diff --git a/src/asm/scala/tools/asm/tree/JumpInsnNode.java b/src/asm/scala/tools/asm/tree/JumpInsnNode.java
index 339ebbd2d0..81e1e09deb 100644
--- a/src/asm/scala/tools/asm/tree/JumpInsnNode.java
+++ b/src/asm/scala/tools/asm/tree/JumpInsnNode.java
@@ -50,13 +50,15 @@ public class JumpInsnNode extends AbstractInsnNode {
/**
* Constructs a new {@link JumpInsnNode}.
*
- * @param opcode the opcode of the type instruction to be constructed. This
- * opcode must be IFEQ, IFNE, IFLT, IFGE, IFGT, IFLE, IF_ICMPEQ,
- * IF_ICMPNE, IF_ICMPLT, IF_ICMPGE, IF_ICMPGT, IF_ICMPLE, IF_ACMPEQ,
- * IF_ACMPNE, GOTO, JSR, IFNULL or IFNONNULL.
- * @param label the operand of the instruction to be constructed. This
- * operand is a label that designates the instruction to which the
- * jump instruction may jump.
+ * @param opcode
+ * the opcode of the type instruction to be constructed. This
+ * opcode must be IFEQ, IFNE, IFLT, IFGE, IFGT, IFLE, IF_ICMPEQ,
+ * IF_ICMPNE, IF_ICMPLT, IF_ICMPGE, IF_ICMPGT, IF_ICMPLE,
+ * IF_ACMPEQ, IF_ACMPNE, GOTO, JSR, IFNULL or IFNONNULL.
+ * @param label
+ * the operand of the instruction to be constructed. This operand
+ * is a label that designates the instruction to which the jump
+ * instruction may jump.
*/
public JumpInsnNode(final int opcode, final LabelNode label) {
super(opcode);
@@ -66,10 +68,11 @@ public class JumpInsnNode extends AbstractInsnNode {
/**
* Sets the opcode of this instruction.
*
- * @param opcode the new instruction opcode. This opcode must be IFEQ, IFNE,
- * IFLT, IFGE, IFGT, IFLE, IF_ICMPEQ, IF_ICMPNE, IF_ICMPLT,
- * IF_ICMPGE, IF_ICMPGT, IF_ICMPLE, IF_ACMPEQ, IF_ACMPNE, GOTO, JSR,
- * IFNULL or IFNONNULL.
+ * @param opcode
+ * the new instruction opcode. This opcode must be IFEQ, IFNE,
+ * IFLT, IFGE, IFGT, IFLE, IF_ICMPEQ, IF_ICMPNE, IF_ICMPLT,
+ * IF_ICMPGE, IF_ICMPGT, IF_ICMPLE, IF_ACMPEQ, IF_ACMPNE, GOTO,
+ * JSR, IFNULL or IFNONNULL.
*/
public void setOpcode(final int opcode) {
this.opcode = opcode;
diff --git a/src/asm/scala/tools/asm/tree/LabelNode.java b/src/asm/scala/tools/asm/tree/LabelNode.java
index 523a8d6442..44c48c1160 100644
--- a/src/asm/scala/tools/asm/tree/LabelNode.java
+++ b/src/asm/scala/tools/asm/tree/LabelNode.java
@@ -75,4 +75,4 @@ public class LabelNode extends AbstractInsnNode {
public void resetLabel() {
label = null;
}
-}
\ No newline at end of file
+}
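
JumpInsnNode and LabelNode work together as described above: the label operand is itself an instruction node that must be added to the same list. A small sketch under those assumptions (JumpDemo and the branch layout are invented for illustration):

import scala.tools.asm.Opcodes;
import scala.tools.asm.tree.InsnList;
import scala.tools.asm.tree.InsnNode;
import scala.tools.asm.tree.JumpInsnNode;
import scala.tools.asm.tree.LabelNode;
import scala.tools.asm.tree.VarInsnNode;

public class JumpDemo {
    public static void main(String[] args) {
        InsnList insns = new InsnList();
        LabelNode target = new LabelNode();

        insns.add(new VarInsnNode(Opcodes.ILOAD, 0));
        // IFEQ jumps to 'target' when the int on the stack is zero.
        insns.add(new JumpInsnNode(Opcodes.IFEQ, target));
        insns.add(new InsnNode(Opcodes.ICONST_1));
        insns.add(new InsnNode(Opcodes.IRETURN));
        insns.add(target);                  // a LabelNode is itself an instruction node
        insns.add(new InsnNode(Opcodes.ICONST_0));
        insns.add(new InsnNode(Opcodes.IRETURN));

        System.out.println(insns.size());   // 7
    }
}
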
diff --git a/src/asm/scala/tools/asm/tree/LdcInsnNode.java b/src/asm/scala/tools/asm/tree/LdcInsnNode.java
index f8d115acd5..4e328f9b39 100644
--- a/src/asm/scala/tools/asm/tree/LdcInsnNode.java
+++ b/src/asm/scala/tools/asm/tree/LdcInsnNode.java
@@ -44,16 +44,17 @@ public class LdcInsnNode extends AbstractInsnNode {
/**
* The constant to be loaded on the stack. This parameter must be a non null
* {@link Integer}, a {@link Float}, a {@link Long}, a {@link Double}, a
- * {@link String} or a {@link org.objectweb.asm.Type}.
+ * {@link String} or a {@link scala.tools.asm.Type}.
*/
public Object cst;
/**
* Constructs a new {@link LdcInsnNode}.
*
- * @param cst the constant to be loaded on the stack. This parameter must be
- * a non null {@link Integer}, a {@link Float}, a {@link Long}, a
- * {@link Double} or a {@link String}.
+ * @param cst
+ * the constant to be loaded on the stack. This parameter must be
+ * a non null {@link Integer}, a {@link Float}, a {@link Long}, a
+ * {@link Double} or a {@link String}.
*/
public LdcInsnNode(final Object cst) {
super(Opcodes.LDC);
@@ -74,4 +75,4 @@ public class LdcInsnNode extends AbstractInsnNode {
public AbstractInsnNode clone(final Map<LabelNode, LabelNode> labels) {
return new LdcInsnNode(cst);
}
-}
\ No newline at end of file
+}
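
For the constant kinds enumerated in the LdcInsnNode Javadoc above, a brief sketch; LdcDemo is an illustrative name and the constants are arbitrary:

import scala.tools.asm.Type;
import scala.tools.asm.tree.LdcInsnNode;

public class LdcDemo {
    public static void main(String[] args) {
        // Constant categories the Javadoc allows: Integer, Float, Long, Double,
        // String, or a Type (which loads a Class constant at runtime).
        LdcInsnNode anInt = new LdcInsnNode(Integer.valueOf(42));
        LdcInsnNode aString = new LdcInsnNode("hello");
        LdcInsnNode aClass = new LdcInsnNode(Type.getObjectType("java/lang/String"));
        System.out.println(anInt.cst + " " + aString.cst + " " + aClass.cst);
    }
}
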
diff --git a/src/asm/scala/tools/asm/tree/LineNumberNode.java b/src/asm/scala/tools/asm/tree/LineNumberNode.java
index acc83c8d30..9947aa70a9 100644
--- a/src/asm/scala/tools/asm/tree/LineNumberNode.java
+++ b/src/asm/scala/tools/asm/tree/LineNumberNode.java
@@ -55,9 +55,11 @@ public class LineNumberNode extends AbstractInsnNode {
/**
* Constructs a new {@link LineNumberNode}.
*
- * @param line a line number. This number refers to the source file from
- * which the class was compiled.
- * @param start the first instruction corresponding to this line number.
+ * @param line
+ * a line number. This number refers to the source file from
+ * which the class was compiled.
+ * @param start
+ * the first instruction corresponding to this line number.
*/
public LineNumberNode(final int line, final LabelNode start) {
super(-1);
diff --git a/src/asm/scala/tools/asm/tree/LocalVariableNode.java b/src/asm/scala/tools/asm/tree/LocalVariableNode.java
index 51cbd3ca00..0d8e27356f 100644
--- a/src/asm/scala/tools/asm/tree/LocalVariableNode.java
+++ b/src/asm/scala/tools/asm/tree/LocalVariableNode.java
@@ -73,24 +73,24 @@ public class LocalVariableNode {
/**
* Constructs a new {@link LocalVariableNode}.
*
- * @param name the name of a local variable.
- * @param desc the type descriptor of this local variable.
- * @param signature the signature of this local variable. May be
- * <tt>null</tt>.
- * @param start the first instruction corresponding to the scope of this
- * local variable (inclusive).
- * @param end the last instruction corresponding to the scope of this local
- * variable (exclusive).
- * @param index the local variable's index.
+ * @param name
+ * the name of a local variable.
+ * @param desc
+ * the type descriptor of this local variable.
+ * @param signature
+ * the signature of this local variable. May be <tt>null</tt>.
+ * @param start
+ * the first instruction corresponding to the scope of this local
+ * variable (inclusive).
+ * @param end
+ * the last instruction corresponding to the scope of this local
+ * variable (exclusive).
+ * @param index
+ * the local variable's index.
*/
- public LocalVariableNode(
- final String name,
- final String desc,
- final String signature,
- final LabelNode start,
- final LabelNode end,
- final int index)
- {
+ public LocalVariableNode(final String name, final String desc,
+ final String signature, final LabelNode start, final LabelNode end,
+ final int index) {
this.name = name;
this.desc = desc;
this.signature = signature;
@@ -102,14 +102,11 @@ public class LocalVariableNode {
/**
* Makes the given visitor visit this local variable declaration.
*
- * @param mv a method visitor.
+ * @param mv
+ * a method visitor.
*/
public void accept(final MethodVisitor mv) {
- mv.visitLocalVariable(name,
- desc,
- signature,
- start.getLabel(),
- end.getLabel(),
- index);
+ mv.visitLocalVariable(name, desc, signature, start.getLabel(),
+ end.getLabel(), index);
}
}
diff --git a/src/asm/scala/tools/asm/tree/LookupSwitchInsnNode.java b/src/asm/scala/tools/asm/tree/LookupSwitchInsnNode.java
index 6d0f971c29..d2479b4814 100644
--- a/src/asm/scala/tools/asm/tree/LookupSwitchInsnNode.java
+++ b/src/asm/scala/tools/asm/tree/LookupSwitchInsnNode.java
@@ -64,20 +64,21 @@ public class LookupSwitchInsnNode extends AbstractInsnNode {
/**
* Constructs a new {@link LookupSwitchInsnNode}.
*
- * @param dflt beginning of the default handler block.
- * @param keys the values of the keys.
- * @param labels beginnings of the handler blocks. <tt>labels[i]</tt> is
- * the beginning of the handler block for the <tt>keys[i]</tt> key.
+ * @param dflt
+ * beginning of the default handler block.
+ * @param keys
+ * the values of the keys.
+ * @param labels
+ * beginnings of the handler blocks. <tt>labels[i]</tt> is the
+ * beginning of the handler block for the <tt>keys[i]</tt> key.
*/
- public LookupSwitchInsnNode(
- final LabelNode dflt,
- final int[] keys,
- final LabelNode[] labels)
- {
+ public LookupSwitchInsnNode(final LabelNode dflt, final int[] keys,
+ final LabelNode[] labels) {
super(Opcodes.LOOKUPSWITCH);
this.dflt = dflt;
this.keys = new ArrayList<Integer>(keys == null ? 0 : keys.length);
- this.labels = new ArrayList<LabelNode>(labels == null ? 0 : labels.length);
+ this.labels = new ArrayList<LabelNode>(labels == null ? 0
+ : labels.length);
if (keys != null) {
for (int i = 0; i < keys.length; ++i) {
this.keys.add(new Integer(keys[i]));
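
A short sketch of the reformatted LookupSwitchInsnNode constructor, showing the keys[i]/labels[i] pairing described above (LookupSwitchDemo and the key values are illustrative):

import scala.tools.asm.tree.LabelNode;
import scala.tools.asm.tree.LookupSwitchInsnNode;

public class LookupSwitchDemo {
    public static void main(String[] args) {
        LabelNode dflt = new LabelNode();
        LabelNode caseTen = new LabelNode();
        LabelNode caseTwenty = new LabelNode();

        // labels[i] is the handler for keys[i]; the constructor copies both arrays
        // into the node's keys/labels lists.
        LookupSwitchInsnNode node = new LookupSwitchInsnNode(
                dflt, new int[] { 10, 20 }, new LabelNode[] { caseTen, caseTwenty });

        System.out.println(node.keys + " -> " + node.labels.size() + " handlers");
    }
}
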
diff --git a/src/asm/scala/tools/asm/tree/MethodInsnNode.java b/src/asm/scala/tools/asm/tree/MethodInsnNode.java
index c3036bc6b4..bf09f556d8 100644
--- a/src/asm/scala/tools/asm/tree/MethodInsnNode.java
+++ b/src/asm/scala/tools/asm/tree/MethodInsnNode.java
@@ -43,7 +43,7 @@ public class MethodInsnNode extends AbstractInsnNode {
/**
* The internal name of the method's owner class (see
- * {@link org.objectweb.asm.Type#getInternalName() getInternalName}).
+ * {@link scala.tools.asm.Type#getInternalName() getInternalName}).
*/
public String owner;
@@ -53,27 +53,28 @@ public class MethodInsnNode extends AbstractInsnNode {
public String name;
/**
- * The method's descriptor (see {@link org.objectweb.asm.Type}).
+ * The method's descriptor (see {@link scala.tools.asm.Type}).
*/
public String desc;
/**
* Constructs a new {@link MethodInsnNode}.
*
- * @param opcode the opcode of the type instruction to be constructed. This
- * opcode must be INVOKEVIRTUAL, INVOKESPECIAL, INVOKESTATIC or
- * INVOKEINTERFACE.
- * @param owner the internal name of the method's owner class (see
- * {@link org.objectweb.asm.Type#getInternalName() getInternalName}).
- * @param name the method's name.
- * @param desc the method's descriptor (see {@link org.objectweb.asm.Type}).
+ * @param opcode
+ * the opcode of the type instruction to be constructed. This
+ * opcode must be INVOKEVIRTUAL, INVOKESPECIAL, INVOKESTATIC or
+ * INVOKEINTERFACE.
+ * @param owner
+ * the internal name of the method's owner class (see
+ * {@link scala.tools.asm.Type#getInternalName()
+ * getInternalName}).
+ * @param name
+ * the method's name.
+ * @param desc
+ * the method's descriptor (see {@link scala.tools.asm.Type}).
*/
- public MethodInsnNode(
- final int opcode,
- final String owner,
- final String name,
- final String desc)
- {
+ public MethodInsnNode(final int opcode, final String owner,
+ final String name, final String desc) {
super(opcode);
this.owner = owner;
this.name = name;
@@ -83,8 +84,9 @@ public class MethodInsnNode extends AbstractInsnNode {
/**
* Sets the opcode of this instruction.
*
- * @param opcode the new instruction opcode. This opcode must be
- * INVOKEVIRTUAL, INVOKESPECIAL, INVOKESTATIC or INVOKEINTERFACE.
+ * @param opcode
+ * the new instruction opcode. This opcode must be INVOKEVIRTUAL,
+ * INVOKESPECIAL, INVOKESTATIC or INVOKEINTERFACE.
*/
public void setOpcode(final int opcode) {
this.opcode = opcode;
@@ -104,4 +106,4 @@ public class MethodInsnNode extends AbstractInsnNode {
public AbstractInsnNode clone(final Map<LabelNode, LabelNode> labels) {
return new MethodInsnNode(opcode, owner, name, desc);
}
-}
\ No newline at end of file
+}
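
The MethodInsnNode constructor above takes an internal owner name and a method descriptor; a minimal sketch with an arbitrary call target:

import scala.tools.asm.Opcodes;
import scala.tools.asm.tree.MethodInsnNode;

public class MethodInsnDemo {
    public static void main(String[] args) {
        // owner is an internal name (slashes, not dots); desc is a method descriptor.
        MethodInsnNode call = new MethodInsnNode(
                Opcodes.INVOKEVIRTUAL,
                "java/lang/StringBuilder",
                "append",
                "(Ljava/lang/String;)Ljava/lang/StringBuilder;");
        System.out.println(call.owner + "." + call.name + call.desc);
    }
}
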
diff --git a/src/asm/scala/tools/asm/tree/MethodNode.java b/src/asm/scala/tools/asm/tree/MethodNode.java
index 70ec39e058..5f9c778e0c 100644
--- a/src/asm/scala/tools/asm/tree/MethodNode.java
+++ b/src/asm/scala/tools/asm/tree/MethodNode.java
@@ -81,7 +81,7 @@ public class MethodNode extends MethodVisitor {
* The runtime visible annotations of this method. This list is a list of
* {@link AnnotationNode} objects. May be <tt>null</tt>.
*
- * @associates org.objectweb.asm.tree.AnnotationNode
+ * @associates scala.tools.asm.tree.AnnotationNode
* @label visible
*/
public List<AnnotationNode> visibleAnnotations;
@@ -90,7 +90,7 @@ public class MethodNode extends MethodVisitor {
* The runtime invisible annotations of this method. This list is a list of
* {@link AnnotationNode} objects. May be <tt>null</tt>.
*
- * @associates org.objectweb.asm.tree.AnnotationNode
+ * @associates scala.tools.asm.tree.AnnotationNode
* @label invisible
*/
public List<AnnotationNode> invisibleAnnotations;
@@ -99,7 +99,7 @@ public class MethodNode extends MethodVisitor {
* The non standard attributes of this method. This list is a list of
* {@link Attribute} objects. May be <tt>null</tt>.
*
- * @associates org.objectweb.asm.Attribute
+ * @associates scala.tools.asm.Attribute
*/
public List<Attribute> attrs;
@@ -117,7 +117,7 @@ public class MethodNode extends MethodVisitor {
* The runtime visible parameter annotations of this method. These lists are
* lists of {@link AnnotationNode} objects. May be <tt>null</tt>.
*
- * @associates org.objectweb.asm.tree.AnnotationNode
+ * @associates scala.tools.asm.tree.AnnotationNode
* @label invisible parameters
*/
public List<AnnotationNode>[] visibleParameterAnnotations;
@@ -126,7 +126,7 @@ public class MethodNode extends MethodVisitor {
* The runtime invisible parameter annotations of this method. These lists
* are lists of {@link AnnotationNode} objects. May be <tt>null</tt>.
*
- * @associates org.objectweb.asm.tree.AnnotationNode
+ * @associates scala.tools.asm.tree.AnnotationNode
* @label visible parameters
*/
public List<AnnotationNode>[] invisibleParameterAnnotations;
@@ -135,7 +135,7 @@ public class MethodNode extends MethodVisitor {
* The instructions of this method. This list is a list of
* {@link AbstractInsnNode} objects.
*
- * @associates org.objectweb.asm.tree.AbstractInsnNode
+ * @associates scala.tools.asm.tree.AbstractInsnNode
* @label instructions
*/
public InsnList instructions;
@@ -144,7 +144,7 @@ public class MethodNode extends MethodVisitor {
* The try catch blocks of this method. This list is a list of
* {@link TryCatchBlockNode} objects.
*
- * @associates org.objectweb.asm.tree.TryCatchBlockNode
+ * @associates scala.tools.asm.tree.TryCatchBlockNode
*/
public List<TryCatchBlockNode> tryCatchBlocks;
@@ -162,7 +162,7 @@ public class MethodNode extends MethodVisitor {
* The local variables of this method. This list is a list of
* {@link LocalVariableNode} objects. May be <tt>null</tt>
*
- * @associates org.objectweb.asm.tree.LocalVariableNode
+ * @associates scala.tools.asm.tree.LocalVariableNode
*/
public List<LocalVariableNode> localVariables;
@@ -170,7 +170,7 @@ public class MethodNode extends MethodVisitor {
* If the accept method has been called on this object.
*/
private boolean visited;
-
+
/**
* Constructs an uninitialized {@link MethodNode}. <i>Subclasses must not
* use this constructor</i>. Instead, they must use the
@@ -183,8 +183,9 @@ public class MethodNode extends MethodVisitor {
/**
* Constructs an uninitialized {@link MethodNode}.
*
- * @param api the ASM API version implemented by this visitor. Must be one
- * of {@link Opcodes#ASM4}.
+ * @param api
+ * the ASM API version implemented by this visitor. Must be one
+ * of {@link Opcodes#ASM4}.
*/
public MethodNode(final int api) {
super(api);
@@ -196,56 +197,55 @@ public class MethodNode extends MethodVisitor {
* constructor</i>. Instead, they must use the
* {@link #MethodNode(int, int, String, String, String, String[])} version.
*
- * @param access the method's access flags (see {@link Opcodes}). This
- * parameter also indicates if the method is synthetic and/or
- * deprecated.
- * @param name the method's name.
- * @param desc the method's descriptor (see {@link Type}).
- * @param signature the method's signature. May be <tt>null</tt>.
- * @param exceptions the internal names of the method's exception classes
- * (see {@link Type#getInternalName() getInternalName}). May be
- * <tt>null</tt>.
+ * @param access
+ * the method's access flags (see {@link Opcodes}). This
+ * parameter also indicates if the method is synthetic and/or
+ * deprecated.
+ * @param name
+ * the method's name.
+ * @param desc
+ * the method's descriptor (see {@link Type}).
+ * @param signature
+ * the method's signature. May be <tt>null</tt>.
+ * @param exceptions
+ * the internal names of the method's exception classes (see
+ * {@link Type#getInternalName() getInternalName}). May be
+ * <tt>null</tt>.
*/
- public MethodNode(
- final int access,
- final String name,
- final String desc,
- final String signature,
- final String[] exceptions)
- {
+ public MethodNode(final int access, final String name, final String desc,
+ final String signature, final String[] exceptions) {
this(Opcodes.ASM4, access, name, desc, signature, exceptions);
}
/**
* Constructs a new {@link MethodNode}.
*
- * @param api the ASM API version implemented by this visitor. Must be one
- * of {@link Opcodes#ASM4}.
- * @param access the method's access flags (see {@link Opcodes}). This
- * parameter also indicates if the method is synthetic and/or
- * deprecated.
- * @param name the method's name.
- * @param desc the method's descriptor (see {@link Type}).
- * @param signature the method's signature. May be <tt>null</tt>.
- * @param exceptions the internal names of the method's exception classes
- * (see {@link Type#getInternalName() getInternalName}). May be
- * <tt>null</tt>.
+ * @param api
+ * the ASM API version implemented by this visitor. Must be one
+ * of {@link Opcodes#ASM4}.
+ * @param access
+ * the method's access flags (see {@link Opcodes}). This
+ * parameter also indicates if the method is synthetic and/or
+ * deprecated.
+ * @param name
+ * the method's name.
+ * @param desc
+ * the method's descriptor (see {@link Type}).
+ * @param signature
+ * the method's signature. May be <tt>null</tt>.
+ * @param exceptions
+ * the internal names of the method's exception classes (see
+ * {@link Type#getInternalName() getInternalName}). May be
+ * <tt>null</tt>.
*/
- public MethodNode(
- final int api,
- final int access,
- final String name,
- final String desc,
- final String signature,
- final String[] exceptions)
- {
+ public MethodNode(final int api, final int access, final String name,
+ final String desc, final String signature, final String[] exceptions) {
super(api);
this.access = access;
this.name = name;
this.desc = desc;
this.signature = signature;
- this.exceptions = new ArrayList<String>(exceptions == null
- ? 0
+ this.exceptions = new ArrayList<String>(exceptions == null ? 0
: exceptions.length);
boolean isAbstract = (access & Opcodes.ACC_ABSTRACT) != 0;
if (!isAbstract) {
@@ -274,10 +274,8 @@ public class MethodNode extends MethodVisitor {
}
@Override
- public AnnotationVisitor visitAnnotation(
- final String desc,
- final boolean visible)
- {
+ public AnnotationVisitor visitAnnotation(final String desc,
+ final boolean visible) {
AnnotationNode an = new AnnotationNode(desc);
if (visible) {
if (visibleAnnotations == null) {
@@ -294,28 +292,27 @@ public class MethodNode extends MethodVisitor {
}
@Override
- public AnnotationVisitor visitParameterAnnotation(
- final int parameter,
- final String desc,
- final boolean visible)
- {
+ public AnnotationVisitor visitParameterAnnotation(final int parameter,
+ final String desc, final boolean visible) {
AnnotationNode an = new AnnotationNode(desc);
if (visible) {
if (visibleParameterAnnotations == null) {
int params = Type.getArgumentTypes(this.desc).length;
- visibleParameterAnnotations = (List<AnnotationNode>[])new List<?>[params];
+ visibleParameterAnnotations = (List<AnnotationNode>[]) new List<?>[params];
}
if (visibleParameterAnnotations[parameter] == null) {
- visibleParameterAnnotations[parameter] = new ArrayList<AnnotationNode>(1);
+ visibleParameterAnnotations[parameter] = new ArrayList<AnnotationNode>(
+ 1);
}
visibleParameterAnnotations[parameter].add(an);
} else {
if (invisibleParameterAnnotations == null) {
int params = Type.getArgumentTypes(this.desc).length;
- invisibleParameterAnnotations = (List<AnnotationNode>[])new List<?>[params];
+ invisibleParameterAnnotations = (List<AnnotationNode>[]) new List<?>[params];
}
if (invisibleParameterAnnotations[parameter] == null) {
- invisibleParameterAnnotations[parameter] = new ArrayList<AnnotationNode>(1);
+ invisibleParameterAnnotations[parameter] = new ArrayList<AnnotationNode>(
+ 1);
}
invisibleParameterAnnotations[parameter].add(an);
}
@@ -335,17 +332,10 @@ public class MethodNode extends MethodVisitor {
}
@Override
- public void visitFrame(
- final int type,
- final int nLocal,
- final Object[] local,
- final int nStack,
- final Object[] stack)
- {
- instructions.add(new FrameNode(type, nLocal, local == null
- ? null
- : getLabelNodes(local), nStack, stack == null
- ? null
+ public void visitFrame(final int type, final int nLocal,
+ final Object[] local, final int nStack, final Object[] stack) {
+ instructions.add(new FrameNode(type, nLocal, local == null ? null
+ : getLabelNodes(local), nStack, stack == null ? null
: getLabelNodes(stack)));
}
@@ -370,32 +360,20 @@ public class MethodNode extends MethodVisitor {
}
@Override
- public void visitFieldInsn(
- final int opcode,
- final String owner,
- final String name,
- final String desc)
- {
+ public void visitFieldInsn(final int opcode, final String owner,
+ final String name, final String desc) {
instructions.add(new FieldInsnNode(opcode, owner, name, desc));
}
@Override
- public void visitMethodInsn(
- final int opcode,
- final String owner,
- final String name,
- final String desc)
- {
+ public void visitMethodInsn(final int opcode, final String owner,
+ final String name, final String desc) {
instructions.add(new MethodInsnNode(opcode, owner, name, desc));
}
@Override
- public void visitInvokeDynamicInsn(
- String name,
- String desc,
- Handle bsm,
- Object... bsmArgs)
- {
+ public void visitInvokeDynamicInsn(String name, String desc, Handle bsm,
+ Object... bsmArgs) {
instructions.add(new InvokeDynamicInsnNode(name, desc, bsm, bsmArgs));
}
@@ -420,26 +398,16 @@ public class MethodNode extends MethodVisitor {
}
@Override
- public void visitTableSwitchInsn(
- final int min,
- final int max,
- final Label dflt,
- final Label... labels)
- {
- instructions.add(new TableSwitchInsnNode(min,
- max,
- getLabelNode(dflt),
+ public void visitTableSwitchInsn(final int min, final int max,
+ final Label dflt, final Label... labels) {
+ instructions.add(new TableSwitchInsnNode(min, max, getLabelNode(dflt),
getLabelNodes(labels)));
}
@Override
- public void visitLookupSwitchInsn(
- final Label dflt,
- final int[] keys,
- final Label[] labels)
- {
- instructions.add(new LookupSwitchInsnNode(getLabelNode(dflt),
- keys,
+ public void visitLookupSwitchInsn(final Label dflt, final int[] keys,
+ final Label[] labels) {
+ instructions.add(new LookupSwitchInsnNode(getLabelNode(dflt), keys,
getLabelNodes(labels)));
}
@@ -449,33 +417,18 @@ public class MethodNode extends MethodVisitor {
}
@Override
- public void visitTryCatchBlock(
- final Label start,
- final Label end,
- final Label handler,
- final String type)
- {
+ public void visitTryCatchBlock(final Label start, final Label end,
+ final Label handler, final String type) {
tryCatchBlocks.add(new TryCatchBlockNode(getLabelNode(start),
- getLabelNode(end),
- getLabelNode(handler),
- type));
+ getLabelNode(end), getLabelNode(handler), type));
}
@Override
- public void visitLocalVariable(
- final String name,
- final String desc,
- final String signature,
- final Label start,
- final Label end,
- final int index)
- {
- localVariables.add(new LocalVariableNode(name,
- desc,
- signature,
- getLabelNode(start),
- getLabelNode(end),
- index));
+ public void visitLocalVariable(final String name, final String desc,
+ final String signature, final Label start, final Label end,
+ final int index) {
+ localVariables.add(new LocalVariableNode(name, desc, signature,
+ getLabelNode(start), getLabelNode(end), index));
}
@Override
@@ -499,12 +452,13 @@ public class MethodNode extends MethodVisitor {
* the {@link Label#info} field to store associations between labels and
* label nodes.
*
- * @param l a Label.
+ * @param l
+ * a Label.
* @return the LabelNode corresponding to l.
*/
protected LabelNode getLabelNode(final Label l) {
if (!(l.info instanceof LabelNode)) {
- l.info = new LabelNode(l);
+ l.info = new LabelNode();
}
return (LabelNode) l.info;
}
@@ -539,7 +493,8 @@ public class MethodNode extends MethodVisitor {
* recursively, do not contain elements that were introduced in more recent
* versions of the ASM API than the given version.
*
- * @param api an ASM API version. Must be one of {@link Opcodes#ASM4}.
+ * @param api
+ * an ASM API version. Must be one of {@link Opcodes#ASM4}.
*/
public void check(final int api) {
// nothing to do
@@ -548,15 +503,13 @@ public class MethodNode extends MethodVisitor {
/**
* Makes the given class visitor visit this method.
*
- * @param cv a class visitor.
+ * @param cv
+ * a class visitor.
*/
public void accept(final ClassVisitor cv) {
String[] exceptions = new String[this.exceptions.size()];
this.exceptions.toArray(exceptions);
- MethodVisitor mv = cv.visitMethod(access,
- name,
- desc,
- signature,
+ MethodVisitor mv = cv.visitMethod(access, name, desc, signature,
exceptions);
if (mv != null) {
accept(mv);
@@ -566,7 +519,8 @@ public class MethodNode extends MethodVisitor {
/**
* Makes the given method visitor visit this method.
*
- * @param mv a method visitor.
+ * @param mv
+ * a method visitor.
*/
public void accept(final MethodVisitor mv) {
// visits the method attributes
@@ -588,8 +542,7 @@ public class MethodNode extends MethodVisitor {
AnnotationNode an = invisibleAnnotations.get(i);
an.accept(mv.visitAnnotation(an.desc, false));
}
- n = visibleParameterAnnotations == null
- ? 0
+ n = visibleParameterAnnotations == null ? 0
: visibleParameterAnnotations.length;
for (i = 0; i < n; ++i) {
List<?> l = visibleParameterAnnotations[i];
@@ -601,8 +554,7 @@ public class MethodNode extends MethodVisitor {
an.accept(mv.visitParameterAnnotation(i, an.desc, true));
}
}
- n = invisibleParameterAnnotations == null
- ? 0
+ n = invisibleParameterAnnotations == null ? 0
: invisibleParameterAnnotations.length;
for (i = 0; i < n; ++i) {
List<?> l = invisibleParameterAnnotations[i];
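
Pulling the MethodNode pieces above together, a hedged end-to-end sketch that assembles a tiny static method with the tree API and serializes it through a ClassWriter; the class name Demo, the method answer()I, and the use of COMPUTE_MAXS are illustrative choices, not taken from this commit:

import scala.tools.asm.ClassWriter;
import scala.tools.asm.Opcodes;
import scala.tools.asm.tree.InsnNode;
import scala.tools.asm.tree.IntInsnNode;
import scala.tools.asm.tree.MethodNode;

public class MethodNodeDemo {
    public static void main(String[] args) {
        ClassWriter cw = new ClassWriter(ClassWriter.COMPUTE_MAXS);
        cw.visit(Opcodes.V1_6, Opcodes.ACC_PUBLIC, "Demo", null, "java/lang/Object", null);

        // access, name, desc, signature, exceptions -- as in the constructor above.
        MethodNode mn = new MethodNode(
                Opcodes.ACC_PUBLIC | Opcodes.ACC_STATIC, "answer", "()I", null, null);
        mn.instructions.add(new IntInsnNode(Opcodes.BIPUSH, 42));
        mn.instructions.add(new InsnNode(Opcodes.IRETURN));
        mn.maxStack = 1;    // placeholders; COMPUTE_MAXS recomputes them
        mn.maxLocals = 0;

        mn.accept(cw);      // MethodNode.accept(ClassVisitor), as shown in this diff
        cw.visitEnd();
        System.out.println(cw.toByteArray().length + " bytes of class file");
    }
}
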
diff --git a/src/asm/scala/tools/asm/tree/MultiANewArrayInsnNode.java b/src/asm/scala/tools/asm/tree/MultiANewArrayInsnNode.java
index 9dfba77335..fe5e8832b3 100644
--- a/src/asm/scala/tools/asm/tree/MultiANewArrayInsnNode.java
+++ b/src/asm/scala/tools/asm/tree/MultiANewArrayInsnNode.java
@@ -42,7 +42,7 @@ import scala.tools.asm.Opcodes;
public class MultiANewArrayInsnNode extends AbstractInsnNode {
/**
- * An array type descriptor (see {@link org.objectweb.asm.Type}).
+ * An array type descriptor (see {@link scala.tools.asm.Type}).
*/
public String desc;
@@ -54,8 +54,10 @@ public class MultiANewArrayInsnNode extends AbstractInsnNode {
/**
* Constructs a new {@link MultiANewArrayInsnNode}.
*
- * @param desc an array type descriptor (see {@link org.objectweb.asm.Type}).
- * @param dims number of dimensions of the array to allocate.
+ * @param desc
+ * an array type descriptor (see {@link scala.tools.asm.Type}).
+ * @param dims
+ * number of dimensions of the array to allocate.
*/
public MultiANewArrayInsnNode(final String desc, final int dims) {
super(Opcodes.MULTIANEWARRAY);
@@ -78,4 +80,4 @@ public class MultiANewArrayInsnNode extends AbstractInsnNode {
return new MultiANewArrayInsnNode(desc, dims);
}
-}
\ No newline at end of file
+}
diff --git a/src/asm/scala/tools/asm/tree/TableSwitchInsnNode.java b/src/asm/scala/tools/asm/tree/TableSwitchInsnNode.java
index 929ad9b32b..9b3c2a3437 100644
--- a/src/asm/scala/tools/asm/tree/TableSwitchInsnNode.java
+++ b/src/asm/scala/tools/asm/tree/TableSwitchInsnNode.java
@@ -69,18 +69,18 @@ public class TableSwitchInsnNode extends AbstractInsnNode {
/**
* Constructs a new {@link TableSwitchInsnNode}.
*
- * @param min the minimum key value.
- * @param max the maximum key value.
- * @param dflt beginning of the default handler block.
- * @param labels beginnings of the handler blocks. <tt>labels[i]</tt> is
- * the beginning of the handler block for the <tt>min + i</tt> key.
+ * @param min
+ * the minimum key value.
+ * @param max
+ * the maximum key value.
+ * @param dflt
+ * beginning of the default handler block.
+ * @param labels
+ * beginnings of the handler blocks. <tt>labels[i]</tt> is the
+ * beginning of the handler block for the <tt>min + i</tt> key.
*/
- public TableSwitchInsnNode(
- final int min,
- final int max,
- final LabelNode dflt,
- final LabelNode... labels)
- {
+ public TableSwitchInsnNode(final int min, final int max,
+ final LabelNode dflt, final LabelNode... labels) {
super(Opcodes.TABLESWITCH);
this.min = min;
this.max = max;
@@ -107,9 +107,7 @@ public class TableSwitchInsnNode extends AbstractInsnNode {
@Override
public AbstractInsnNode clone(final Map<LabelNode, LabelNode> labels) {
- return new TableSwitchInsnNode(min,
- max,
- clone(dflt, labels),
- clone(this.labels, labels));
+ return new TableSwitchInsnNode(min, max, clone(dflt, labels), clone(
+ this.labels, labels));
}
-}
\ No newline at end of file
+}
diff --git a/src/asm/scala/tools/asm/tree/TryCatchBlockNode.java b/src/asm/scala/tools/asm/tree/TryCatchBlockNode.java
index 375b4cfcb9..ab4fa97c34 100644
--- a/src/asm/scala/tools/asm/tree/TryCatchBlockNode.java
+++ b/src/asm/scala/tools/asm/tree/TryCatchBlockNode.java
@@ -62,19 +62,19 @@ public class TryCatchBlockNode {
/**
* Constructs a new {@link TryCatchBlockNode}.
*
- * @param start beginning of the exception handler's scope (inclusive).
- * @param end end of the exception handler's scope (exclusive).
- * @param handler beginning of the exception handler's code.
- * @param type internal name of the type of exceptions handled by the
- * handler, or <tt>null</tt> to catch any exceptions (for "finally"
- * blocks).
+ * @param start
+ * beginning of the exception handler's scope (inclusive).
+ * @param end
+ * end of the exception handler's scope (exclusive).
+ * @param handler
+ * beginning of the exception handler's code.
+ * @param type
+ * internal name of the type of exceptions handled by the
+ * handler, or <tt>null</tt> to catch any exceptions (for
+ * "finally" blocks).
*/
- public TryCatchBlockNode(
- final LabelNode start,
- final LabelNode end,
- final LabelNode handler,
- final String type)
- {
+ public TryCatchBlockNode(final LabelNode start, final LabelNode end,
+ final LabelNode handler, final String type) {
this.start = start;
this.end = end;
this.handler = handler;
@@ -84,11 +84,11 @@ public class TryCatchBlockNode {
/**
* Makes the given visitor visit this try catch block.
*
- * @param mv a method visitor.
+ * @param mv
+ * a method visitor.
*/
public void accept(final MethodVisitor mv) {
- mv.visitTryCatchBlock(start.getLabel(), end.getLabel(), handler == null
- ? null
- : handler.getLabel(), type);
+ mv.visitTryCatchBlock(start.getLabel(), end.getLabel(),
+ handler == null ? null : handler.getLabel(), type);
}
}
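
As the constructor Javadoc above notes, a null exception type turns the block into a catch-all, which is how finally blocks are encoded; a brief sketch with invented labels:

import scala.tools.asm.tree.LabelNode;
import scala.tools.asm.tree.TryCatchBlockNode;

public class TryCatchDemo {
    public static void main(String[] args) {
        LabelNode start = new LabelNode();
        LabelNode end = new LabelNode();
        LabelNode handler = new LabelNode();

        // type == null catches everything ("finally"-style handler).
        TryCatchBlockNode finallyBlock =
                new TryCatchBlockNode(start, end, handler, null);
        TryCatchBlockNode ioCatch =
                new TryCatchBlockNode(start, end, handler, "java/io/IOException");

        System.out.println(finallyBlock.type + " / " + ioCatch.type);
    }
}
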
diff --git a/src/asm/scala/tools/asm/tree/TypeInsnNode.java b/src/asm/scala/tools/asm/tree/TypeInsnNode.java
index 0b2666c498..3210dd60e6 100644
--- a/src/asm/scala/tools/asm/tree/TypeInsnNode.java
+++ b/src/asm/scala/tools/asm/tree/TypeInsnNode.java
@@ -43,17 +43,19 @@ public class TypeInsnNode extends AbstractInsnNode {
/**
* The operand of this instruction. This operand is an internal name (see
- * {@link org.objectweb.asm.Type}).
+ * {@link scala.tools.asm.Type}).
*/
public String desc;
/**
* Constructs a new {@link TypeInsnNode}.
*
- * @param opcode the opcode of the type instruction to be constructed. This
- * opcode must be NEW, ANEWARRAY, CHECKCAST or INSTANCEOF.
- * @param desc the operand of the instruction to be constructed. This
- * operand is an internal name (see {@link org.objectweb.asm.Type}).
+ * @param opcode
+ * the opcode of the type instruction to be constructed. This
+ * opcode must be NEW, ANEWARRAY, CHECKCAST or INSTANCEOF.
+ * @param desc
+ * the operand of the instruction to be constructed. This operand
+ * is an internal name (see {@link scala.tools.asm.Type}).
*/
public TypeInsnNode(final int opcode, final String desc) {
super(opcode);
@@ -63,8 +65,9 @@ public class TypeInsnNode extends AbstractInsnNode {
/**
* Sets the opcode of this instruction.
*
- * @param opcode the new instruction opcode. This opcode must be NEW,
- * ANEWARRAY, CHECKCAST or INSTANCEOF.
+ * @param opcode
+ * the new instruction opcode. This opcode must be NEW,
+ * ANEWARRAY, CHECKCAST or INSTANCEOF.
*/
public void setOpcode(final int opcode) {
this.opcode = opcode;
@@ -84,4 +87,4 @@ public class TypeInsnNode extends AbstractInsnNode {
public AbstractInsnNode clone(final Map<LabelNode, LabelNode> labels) {
return new TypeInsnNode(opcode, desc);
}
-}
\ No newline at end of file
+}
diff --git a/src/asm/scala/tools/asm/tree/VarInsnNode.java b/src/asm/scala/tools/asm/tree/VarInsnNode.java
index 89f572db59..5dd9ef6726 100644
--- a/src/asm/scala/tools/asm/tree/VarInsnNode.java
+++ b/src/asm/scala/tools/asm/tree/VarInsnNode.java
@@ -51,11 +51,13 @@ public class VarInsnNode extends AbstractInsnNode {
/**
* Constructs a new {@link VarInsnNode}.
*
- * @param opcode the opcode of the local variable instruction to be
- * constructed. This opcode must be ILOAD, LLOAD, FLOAD, DLOAD,
- * ALOAD, ISTORE, LSTORE, FSTORE, DSTORE, ASTORE or RET.
- * @param var the operand of the instruction to be constructed. This operand
- * is the index of a local variable.
+ * @param opcode
+ * the opcode of the local variable instruction to be
+ * constructed. This opcode must be ILOAD, LLOAD, FLOAD, DLOAD,
+ * ALOAD, ISTORE, LSTORE, FSTORE, DSTORE, ASTORE or RET.
+ * @param var
+ * the operand of the instruction to be constructed. This operand
+ * is the index of a local variable.
*/
public VarInsnNode(final int opcode, final int var) {
super(opcode);
@@ -65,9 +67,10 @@ public class VarInsnNode extends AbstractInsnNode {
/**
* Sets the opcode of this instruction.
*
- * @param opcode the new instruction opcode. This opcode must be ILOAD,
- * LLOAD, FLOAD, DLOAD, ALOAD, ISTORE, LSTORE, FSTORE, DSTORE, ASTORE
- * or RET.
+ * @param opcode
+ * the new instruction opcode. This opcode must be ILOAD, LLOAD,
+ * FLOAD, DLOAD, ALOAD, ISTORE, LSTORE, FSTORE, DSTORE, ASTORE or
+ * RET.
*/
public void setOpcode(final int opcode) {
this.opcode = opcode;
@@ -87,4 +90,4 @@ public class VarInsnNode extends AbstractInsnNode {
public AbstractInsnNode clone(final Map<LabelNode, LabelNode> labels) {
return new VarInsnNode(opcode, var);
}
-}
\ No newline at end of file
+}
diff --git a/src/asm/scala/tools/asm/tree/analysis/Analyzer.java b/src/asm/scala/tools/asm/tree/analysis/Analyzer.java
index df387b0b8e..0134555f10 100644
--- a/src/asm/scala/tools/asm/tree/analysis/Analyzer.java
+++ b/src/asm/scala/tools/asm/tree/analysis/Analyzer.java
@@ -51,9 +51,10 @@ import scala.tools.asm.tree.VarInsnNode;
* A semantic bytecode analyzer. <i>This class does not fully check that JSR and
* RET instructions are valid.</i>
*
- * @param <V> type of the Value used for the analysis.
+ * @param <V>
+ * type of the Value used for the analysis.
*
- * @author Eric Bruneton
+ * @author Eric Bruneton
*/
public class Analyzer<V extends Value> implements Opcodes {
@@ -78,8 +79,9 @@ public class Analyzer<V extends Value> implements Opcodes {
/**
* Constructs a new {@link Analyzer}.
*
- * @param interpreter the interpreter to be used to symbolically interpret
- * the bytecode instructions.
+ * @param interpreter
+ * the interpreter to be used to symbolically interpret the
+ * bytecode instructions.
*/
public Analyzer(final Interpreter<V> interpreter) {
this.interpreter = interpreter;
@@ -88,26 +90,28 @@ public class Analyzer<V extends Value> implements Opcodes {
/**
* Analyzes the given method.
*
- * @param owner the internal name of the class to which the method belongs.
- * @param m the method to be analyzed.
+ * @param owner
+ * the internal name of the class to which the method belongs.
+ * @param m
+ * the method to be analyzed.
* @return the symbolic state of the execution stack frame at each bytecode
* instruction of the method. The size of the returned array is
* equal to the number of instructions (and labels) of the method. A
* given frame is <tt>null</tt> if and only if the corresponding
* instruction cannot be reached (dead code).
- * @throws AnalyzerException if a problem occurs during the analysis.
+ * @throws AnalyzerException
+ * if a problem occurs during the analysis.
*/
public Frame<V>[] analyze(final String owner, final MethodNode m)
- throws AnalyzerException
- {
+ throws AnalyzerException {
if ((m.access & (ACC_ABSTRACT | ACC_NATIVE)) != 0) {
- frames = (Frame<V>[])new Frame<?>[0];
+ frames = (Frame<V>[]) new Frame<?>[0];
return frames;
}
n = m.instructions.size();
insns = m.instructions;
- handlers = (List<TryCatchBlockNode>[])new List<?>[n];
- frames = (Frame<V>[])new Frame<?>[n];
+ handlers = (List<TryCatchBlockNode>[]) new List<?>[n];
+ frames = (Frame<V>[]) new Frame<?>[n];
subroutines = new Subroutine[n];
queued = new boolean[n];
queue = new int[n];
@@ -188,8 +192,7 @@ public class Analyzer<V extends Value> implements Opcodes {
if (insnType == AbstractInsnNode.LABEL
|| insnType == AbstractInsnNode.LINE
- || insnType == AbstractInsnNode.FRAME)
- {
+ || insnType == AbstractInsnNode.FRAME) {
merge(insn + 1, f, subroutine);
newControlFlowEdge(insn, insn + 1);
} else {
@@ -205,8 +208,7 @@ public class Analyzer<V extends Value> implements Opcodes {
int jump = insns.indexOf(j.label);
if (insnOpcode == JSR) {
merge(jump, current, new Subroutine(j.label,
- m.maxLocals,
- j));
+ m.maxLocals, j));
} else {
merge(jump, current, subroutine);
}
@@ -235,31 +237,27 @@ public class Analyzer<V extends Value> implements Opcodes {
}
} else if (insnOpcode == RET) {
if (subroutine == null) {
- throw new AnalyzerException(insnNode, "RET instruction outside of a sub routine");
+ throw new AnalyzerException(insnNode,
+                        "RET instruction outside of a subroutine");
}
for (int i = 0; i < subroutine.callers.size(); ++i) {
JumpInsnNode caller = subroutine.callers.get(i);
int call = insns.indexOf(caller);
if (frames[call] != null) {
- merge(call + 1,
- frames[call],
- current,
- subroutines[call],
- subroutine.access);
+ merge(call + 1, frames[call], current,
+ subroutines[call], subroutine.access);
newControlFlowEdge(insn, call + 1);
}
}
} else if (insnOpcode != ATHROW
- && (insnOpcode < IRETURN || insnOpcode > RETURN))
- {
+ && (insnOpcode < IRETURN || insnOpcode > RETURN)) {
if (subroutine != null) {
if (insnNode instanceof VarInsnNode) {
int var = ((VarInsnNode) insnNode).var;
subroutine.access[var] = true;
if (insnOpcode == LLOAD || insnOpcode == DLOAD
|| insnOpcode == LSTORE
- || insnOpcode == DSTORE)
- {
+ || insnOpcode == DSTORE) {
subroutine.access[var + 1] = true;
}
} else if (insnNode instanceof IincInsnNode) {
@@ -292,23 +290,23 @@ public class Analyzer<V extends Value> implements Opcodes {
}
}
} catch (AnalyzerException e) {
- throw new AnalyzerException(e.node, "Error at instruction " + insn
- + ": " + e.getMessage(), e);
+ throw new AnalyzerException(e.node, "Error at instruction "
+ + insn + ": " + e.getMessage(), e);
} catch (Exception e) {
- throw new AnalyzerException(insnNode, "Error at instruction " + insn
- + ": " + e.getMessage(), e);
+ throw new AnalyzerException(insnNode, "Error at instruction "
+ + insn + ": " + e.getMessage(), e);
}
}
return frames;
}
- private void findSubroutine(int insn, final Subroutine sub, final List<AbstractInsnNode> calls)
- throws AnalyzerException
- {
+ private void findSubroutine(int insn, final Subroutine sub,
+ final List<AbstractInsnNode> calls) throws AnalyzerException {
while (true) {
if (insn < 0 || insn >= n) {
- throw new AnalyzerException(null, "Execution can fall off end of the code");
+ throw new AnalyzerException(null,
+ "Execution can fall off end of the code");
}
if (subroutines[insn] != null) {
return;
@@ -352,18 +350,18 @@ public class Analyzer<V extends Value> implements Opcodes {
// if insn does not falls through to the next instruction, return.
switch (node.getOpcode()) {
- case GOTO:
- case RET:
- case TABLESWITCH:
- case LOOKUPSWITCH:
- case IRETURN:
- case LRETURN:
- case FRETURN:
- case DRETURN:
- case ARETURN:
- case RETURN:
- case ATHROW:
- return;
+ case GOTO:
+ case RET:
+ case TABLESWITCH:
+ case LOOKUPSWITCH:
+ case IRETURN:
+ case LRETURN:
+ case FRETURN:
+ case DRETURN:
+ case ARETURN:
+ case RETURN:
+ case ATHROW:
+ return;
}
insn++;
}
@@ -387,8 +385,9 @@ public class Analyzer<V extends Value> implements Opcodes {
/**
* Returns the exception handlers for the given instruction.
*
- * @param insn the index of an instruction of the last recently analyzed
- * method.
+ * @param insn
+     *            the index of an instruction of the most recently analyzed
+ * method.
* @return a list of {@link TryCatchBlockNode} objects.
*/
public List<TryCatchBlockNode> getHandlers(final int insn) {
@@ -400,9 +399,12 @@ public class Analyzer<V extends Value> implements Opcodes {
* execution of control flow analysis loop in #analyze. The default
* implementation of this method does nothing.
*
- * @param owner the internal name of the class to which the method belongs.
- * @param m the method to be analyzed.
- * @throws AnalyzerException if a problem occurs.
+ * @param owner
+ * the internal name of the class to which the method belongs.
+ * @param m
+ * the method to be analyzed.
+ * @throws AnalyzerException
+ * if a problem occurs.
*/
protected void init(String owner, MethodNode m) throws AnalyzerException {
}
@@ -410,8 +412,10 @@ public class Analyzer<V extends Value> implements Opcodes {
/**
* Constructs a new frame with the given size.
*
- * @param nLocals the maximum number of local variables of the frame.
- * @param nStack the maximum stack size of the frame.
+ * @param nLocals
+ * the maximum number of local variables of the frame.
+ * @param nStack
+ * the maximum stack size of the frame.
* @return the created frame.
*/
protected Frame<V> newFrame(final int nLocals, final int nStack) {
@@ -421,7 +425,8 @@ public class Analyzer<V extends Value> implements Opcodes {
/**
* Constructs a new frame that is identical to the given frame.
*
- * @param src a frame.
+ * @param src
+ * a frame.
* @return the created frame.
*/
protected Frame<V> newFrame(final Frame<? extends V> src) {
@@ -434,8 +439,10 @@ public class Analyzer<V extends Value> implements Opcodes {
* control flow graph of a method (this method is called by the
* {@link #analyze analyze} method during its visit of the method's code).
*
- * @param insn an instruction index.
- * @param successor index of a successor instruction.
+ * @param insn
+ * an instruction index.
+ * @param successor
+ * index of a successor instruction.
*/
protected void newControlFlowEdge(final int insn, final int successor) {
}
@@ -447,16 +454,16 @@ public class Analyzer<V extends Value> implements Opcodes {
* method is called by the {@link #analyze analyze} method during its visit
* of the method's code).
*
- * @param insn an instruction index.
- * @param successor index of a successor instruction.
+ * @param insn
+ * an instruction index.
+ * @param successor
+ * index of a successor instruction.
* @return true if this edge must be considered in the data flow analysis
* performed by this analyzer, or false otherwise. The default
* implementation of this method always returns true.
*/
- protected boolean newControlFlowExceptionEdge(
- final int insn,
- final int successor)
- {
+ protected boolean newControlFlowExceptionEdge(final int insn,
+ final int successor) {
return true;
}
@@ -469,28 +476,25 @@ public class Analyzer<V extends Value> implements Opcodes {
* the {@link #analyze analyze} method during its visit of the method's
* code).
*
- * @param insn an instruction index.
- * @param tcb TryCatchBlockNode corresponding to this edge.
+ * @param insn
+ * an instruction index.
+ * @param tcb
+ * TryCatchBlockNode corresponding to this edge.
* @return true if this edge must be considered in the data flow analysis
* performed by this analyzer, or false otherwise. The default
* implementation of this method delegates to
* {@link #newControlFlowExceptionEdge(int, int)
* newControlFlowExceptionEdge(int, int)}.
*/
- protected boolean newControlFlowExceptionEdge(
- final int insn,
- final TryCatchBlockNode tcb)
- {
+ protected boolean newControlFlowExceptionEdge(final int insn,
+ final TryCatchBlockNode tcb) {
return newControlFlowExceptionEdge(insn, insns.indexOf(tcb.handler));
}
// -------------------------------------------------------------------------
- private void merge(
- final int insn,
- final Frame<V> frame,
- final Subroutine subroutine) throws AnalyzerException
- {
+ private void merge(final int insn, final Frame<V> frame,
+ final Subroutine subroutine) throws AnalyzerException {
Frame<V> oldFrame = frames[insn];
Subroutine oldSubroutine = subroutines[insn];
boolean changes;
@@ -518,13 +522,9 @@ public class Analyzer<V extends Value> implements Opcodes {
}
}
- private void merge(
- final int insn,
- final Frame<V> beforeJSR,
- final Frame<V> afterRET,
- final Subroutine subroutineBeforeJSR,
- final boolean[] access) throws AnalyzerException
- {
+ private void merge(final int insn, final Frame<V> beforeJSR,
+ final Frame<V> afterRET, final Subroutine subroutineBeforeJSR,
+ final boolean[] access) throws AnalyzerException {
Frame<V> oldFrame = frames[insn];
Subroutine oldSubroutine = subroutines[insn];
boolean changes;
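
To show how the reformatted Analyzer entry points above are typically driven, a hedged sketch that pairs Analyzer with the BasicInterpreter changed later in this diff; the tiny static method is built inline so the example is self-contained, and "Demo" stands in for the internal name of the declaring class:

import scala.tools.asm.Opcodes;
import scala.tools.asm.tree.InsnNode;
import scala.tools.asm.tree.MethodNode;
import scala.tools.asm.tree.analysis.Analyzer;
import scala.tools.asm.tree.analysis.AnalyzerException;
import scala.tools.asm.tree.analysis.BasicInterpreter;
import scala.tools.asm.tree.analysis.BasicValue;
import scala.tools.asm.tree.analysis.Frame;

public class AnalyzeDemo {
    public static void main(String[] args) throws AnalyzerException {
        // A trivial static ()I method: push 1, return it.
        MethodNode mn = new MethodNode(
                Opcodes.ACC_PUBLIC | Opcodes.ACC_STATIC, "answer", "()I", null, null);
        mn.instructions.add(new InsnNode(Opcodes.ICONST_1));
        mn.instructions.add(new InsnNode(Opcodes.IRETURN));
        mn.maxStack = 1;
        mn.maxLocals = 0;

        Analyzer<BasicValue> analyzer = new Analyzer<BasicValue>(new BasicInterpreter());
        Frame<BasicValue>[] frames = analyzer.analyze("Demo", mn);
        for (int i = 0; i < frames.length; i++) {
            // A null frame marks an unreachable instruction (dead code).
            System.out.println(i + ": " + (frames[i] == null ? "unreachable" : frames[i]));
        }
    }
}
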
diff --git a/src/asm/scala/tools/asm/tree/analysis/AnalyzerException.java b/src/asm/scala/tools/asm/tree/analysis/AnalyzerException.java
index a89bb3513f..5e3f51f21a 100644
--- a/src/asm/scala/tools/asm/tree/analysis/AnalyzerException.java
+++ b/src/asm/scala/tools/asm/tree/analysis/AnalyzerException.java
@@ -46,17 +46,14 @@ public class AnalyzerException extends Exception {
this.node = node;
}
- public AnalyzerException(final AbstractInsnNode node, final String msg, final Throwable exception) {
+ public AnalyzerException(final AbstractInsnNode node, final String msg,
+ final Throwable exception) {
super(msg, exception);
this.node = node;
}
- public AnalyzerException(
- final AbstractInsnNode node,
- final String msg,
- final Object expected,
- final Value encountered)
- {
+ public AnalyzerException(final AbstractInsnNode node, final String msg,
+ final Object expected, final Value encountered) {
super((msg == null ? "Expected " : msg + ": expected ") + expected
+ ", but found " + encountered);
this.node = node;
diff --git a/src/asm/scala/tools/asm/tree/analysis/BasicInterpreter.java b/src/asm/scala/tools/asm/tree/analysis/BasicInterpreter.java
index 64ddcc11e6..8d6653c1c5 100644
--- a/src/asm/scala/tools/asm/tree/analysis/BasicInterpreter.java
+++ b/src/asm/scala/tools/asm/tree/analysis/BasicInterpreter.java
@@ -50,8 +50,7 @@ import scala.tools.asm.tree.TypeInsnNode;
* @author Bing Ran
*/
public class BasicInterpreter extends Interpreter<BasicValue> implements
- Opcodes
-{
+ Opcodes {
public BasicInterpreter() {
super(ASM4);
@@ -67,292 +66,286 @@ public class BasicInterpreter extends Interpreter<BasicValue> implements
return BasicValue.UNINITIALIZED_VALUE;
}
switch (type.getSort()) {
- case Type.VOID:
- return null;
- case Type.BOOLEAN:
- case Type.CHAR:
- case Type.BYTE:
- case Type.SHORT:
- case Type.INT:
- return BasicValue.INT_VALUE;
- case Type.FLOAT:
- return BasicValue.FLOAT_VALUE;
- case Type.LONG:
- return BasicValue.LONG_VALUE;
- case Type.DOUBLE:
- return BasicValue.DOUBLE_VALUE;
- case Type.ARRAY:
- case Type.OBJECT:
- return BasicValue.REFERENCE_VALUE;
- default:
- throw new Error("Internal error");
+ case Type.VOID:
+ return null;
+ case Type.BOOLEAN:
+ case Type.CHAR:
+ case Type.BYTE:
+ case Type.SHORT:
+ case Type.INT:
+ return BasicValue.INT_VALUE;
+ case Type.FLOAT:
+ return BasicValue.FLOAT_VALUE;
+ case Type.LONG:
+ return BasicValue.LONG_VALUE;
+ case Type.DOUBLE:
+ return BasicValue.DOUBLE_VALUE;
+ case Type.ARRAY:
+ case Type.OBJECT:
+ return BasicValue.REFERENCE_VALUE;
+ default:
+ throw new Error("Internal error");
}
}
@Override
public BasicValue newOperation(final AbstractInsnNode insn)
- throws AnalyzerException
- {
+ throws AnalyzerException {
switch (insn.getOpcode()) {
- case ACONST_NULL:
- return newValue(Type.getObjectType("null"));
- case ICONST_M1:
- case ICONST_0:
- case ICONST_1:
- case ICONST_2:
- case ICONST_3:
- case ICONST_4:
- case ICONST_5:
+ case ACONST_NULL:
+ return newValue(Type.getObjectType("null"));
+ case ICONST_M1:
+ case ICONST_0:
+ case ICONST_1:
+ case ICONST_2:
+ case ICONST_3:
+ case ICONST_4:
+ case ICONST_5:
+ return BasicValue.INT_VALUE;
+ case LCONST_0:
+ case LCONST_1:
+ return BasicValue.LONG_VALUE;
+ case FCONST_0:
+ case FCONST_1:
+ case FCONST_2:
+ return BasicValue.FLOAT_VALUE;
+ case DCONST_0:
+ case DCONST_1:
+ return BasicValue.DOUBLE_VALUE;
+ case BIPUSH:
+ case SIPUSH:
+ return BasicValue.INT_VALUE;
+ case LDC:
+ Object cst = ((LdcInsnNode) insn).cst;
+ if (cst instanceof Integer) {
return BasicValue.INT_VALUE;
- case LCONST_0:
- case LCONST_1:
- return BasicValue.LONG_VALUE;
- case FCONST_0:
- case FCONST_1:
- case FCONST_2:
+ } else if (cst instanceof Float) {
return BasicValue.FLOAT_VALUE;
- case DCONST_0:
- case DCONST_1:
+ } else if (cst instanceof Long) {
+ return BasicValue.LONG_VALUE;
+ } else if (cst instanceof Double) {
return BasicValue.DOUBLE_VALUE;
- case BIPUSH:
- case SIPUSH:
- return BasicValue.INT_VALUE;
- case LDC:
- Object cst = ((LdcInsnNode) insn).cst;
- if (cst instanceof Integer) {
- return BasicValue.INT_VALUE;
- } else if (cst instanceof Float) {
- return BasicValue.FLOAT_VALUE;
- } else if (cst instanceof Long) {
- return BasicValue.LONG_VALUE;
- } else if (cst instanceof Double) {
- return BasicValue.DOUBLE_VALUE;
- } else if (cst instanceof String) {
- return newValue(Type.getObjectType("java/lang/String"));
- } else if (cst instanceof Type) {
- int sort = ((Type) cst).getSort();
- if (sort == Type.OBJECT || sort == Type.ARRAY) {
- return newValue(Type.getObjectType("java/lang/Class"));
- } else if (sort == Type.METHOD) {
- return newValue(Type.getObjectType("java/lang/invoke/MethodType"));
- } else {
- throw new IllegalArgumentException("Illegal LDC constant " + cst);
- }
- } else if (cst instanceof Handle) {
- return newValue(Type.getObjectType("java/lang/invoke/MethodHandle"));
+ } else if (cst instanceof String) {
+ return newValue(Type.getObjectType("java/lang/String"));
+ } else if (cst instanceof Type) {
+ int sort = ((Type) cst).getSort();
+ if (sort == Type.OBJECT || sort == Type.ARRAY) {
+ return newValue(Type.getObjectType("java/lang/Class"));
+ } else if (sort == Type.METHOD) {
+ return newValue(Type
+ .getObjectType("java/lang/invoke/MethodType"));
} else {
- throw new IllegalArgumentException("Illegal LDC constant " + cst);
+ throw new IllegalArgumentException("Illegal LDC constant "
+ + cst);
}
- case JSR:
- return BasicValue.RETURNADDRESS_VALUE;
- case GETSTATIC:
- return newValue(Type.getType(((FieldInsnNode) insn).desc));
- case NEW:
- return newValue(Type.getObjectType(((TypeInsnNode) insn).desc));
- default:
- throw new Error("Internal error.");
+ } else if (cst instanceof Handle) {
+ return newValue(Type
+ .getObjectType("java/lang/invoke/MethodHandle"));
+ } else {
+ throw new IllegalArgumentException("Illegal LDC constant "
+ + cst);
+ }
+ case JSR:
+ return BasicValue.RETURNADDRESS_VALUE;
+ case GETSTATIC:
+ return newValue(Type.getType(((FieldInsnNode) insn).desc));
+ case NEW:
+ return newValue(Type.getObjectType(((TypeInsnNode) insn).desc));
+ default:
+ throw new Error("Internal error.");
}
}
@Override
- public BasicValue copyOperation(final AbstractInsnNode insn, final BasicValue value)
- throws AnalyzerException
- {
+ public BasicValue copyOperation(final AbstractInsnNode insn,
+ final BasicValue value) throws AnalyzerException {
return value;
}
@Override
- public BasicValue unaryOperation(final AbstractInsnNode insn, final BasicValue value)
- throws AnalyzerException
- {
+ public BasicValue unaryOperation(final AbstractInsnNode insn,
+ final BasicValue value) throws AnalyzerException {
switch (insn.getOpcode()) {
- case INEG:
- case IINC:
- case L2I:
- case F2I:
- case D2I:
- case I2B:
- case I2C:
- case I2S:
- return BasicValue.INT_VALUE;
- case FNEG:
- case I2F:
- case L2F:
- case D2F:
- return BasicValue.FLOAT_VALUE;
- case LNEG:
- case I2L:
- case F2L:
- case D2L:
- return BasicValue.LONG_VALUE;
- case DNEG:
- case I2D:
- case L2D:
- case F2D:
- return BasicValue.DOUBLE_VALUE;
- case IFEQ:
- case IFNE:
- case IFLT:
- case IFGE:
- case IFGT:
- case IFLE:
- case TABLESWITCH:
- case LOOKUPSWITCH:
- case IRETURN:
- case LRETURN:
- case FRETURN:
- case DRETURN:
- case ARETURN:
- case PUTSTATIC:
- return null;
- case GETFIELD:
- return newValue(Type.getType(((FieldInsnNode) insn).desc));
- case NEWARRAY:
- switch (((IntInsnNode) insn).operand) {
- case T_BOOLEAN:
- return newValue(Type.getType("[Z"));
- case T_CHAR:
- return newValue(Type.getType("[C"));
- case T_BYTE:
- return newValue(Type.getType("[B"));
- case T_SHORT:
- return newValue(Type.getType("[S"));
- case T_INT:
- return newValue(Type.getType("[I"));
- case T_FLOAT:
- return newValue(Type.getType("[F"));
- case T_DOUBLE:
- return newValue(Type.getType("[D"));
- case T_LONG:
- return newValue(Type.getType("[J"));
- default:
- throw new AnalyzerException(insn, "Invalid array type");
- }
- case ANEWARRAY:
- String desc = ((TypeInsnNode) insn).desc;
- return newValue(Type.getType("[" + Type.getObjectType(desc)));
- case ARRAYLENGTH:
- return BasicValue.INT_VALUE;
- case ATHROW:
- return null;
- case CHECKCAST:
- desc = ((TypeInsnNode) insn).desc;
- return newValue(Type.getObjectType(desc));
- case INSTANCEOF:
- return BasicValue.INT_VALUE;
- case MONITORENTER:
- case MONITOREXIT:
- case IFNULL:
- case IFNONNULL:
- return null;
+ case INEG:
+ case IINC:
+ case L2I:
+ case F2I:
+ case D2I:
+ case I2B:
+ case I2C:
+ case I2S:
+ return BasicValue.INT_VALUE;
+ case FNEG:
+ case I2F:
+ case L2F:
+ case D2F:
+ return BasicValue.FLOAT_VALUE;
+ case LNEG:
+ case I2L:
+ case F2L:
+ case D2L:
+ return BasicValue.LONG_VALUE;
+ case DNEG:
+ case I2D:
+ case L2D:
+ case F2D:
+ return BasicValue.DOUBLE_VALUE;
+ case IFEQ:
+ case IFNE:
+ case IFLT:
+ case IFGE:
+ case IFGT:
+ case IFLE:
+ case TABLESWITCH:
+ case LOOKUPSWITCH:
+ case IRETURN:
+ case LRETURN:
+ case FRETURN:
+ case DRETURN:
+ case ARETURN:
+ case PUTSTATIC:
+ return null;
+ case GETFIELD:
+ return newValue(Type.getType(((FieldInsnNode) insn).desc));
+ case NEWARRAY:
+ switch (((IntInsnNode) insn).operand) {
+ case T_BOOLEAN:
+ return newValue(Type.getType("[Z"));
+ case T_CHAR:
+ return newValue(Type.getType("[C"));
+ case T_BYTE:
+ return newValue(Type.getType("[B"));
+ case T_SHORT:
+ return newValue(Type.getType("[S"));
+ case T_INT:
+ return newValue(Type.getType("[I"));
+ case T_FLOAT:
+ return newValue(Type.getType("[F"));
+ case T_DOUBLE:
+ return newValue(Type.getType("[D"));
+ case T_LONG:
+ return newValue(Type.getType("[J"));
default:
- throw new Error("Internal error.");
+ throw new AnalyzerException(insn, "Invalid array type");
+ }
+ case ANEWARRAY:
+ String desc = ((TypeInsnNode) insn).desc;
+ return newValue(Type.getType("[" + Type.getObjectType(desc)));
+ case ARRAYLENGTH:
+ return BasicValue.INT_VALUE;
+ case ATHROW:
+ return null;
+ case CHECKCAST:
+ desc = ((TypeInsnNode) insn).desc;
+ return newValue(Type.getObjectType(desc));
+ case INSTANCEOF:
+ return BasicValue.INT_VALUE;
+ case MONITORENTER:
+ case MONITOREXIT:
+ case IFNULL:
+ case IFNONNULL:
+ return null;
+ default:
+ throw new Error("Internal error.");
}
}
@Override
- public BasicValue binaryOperation(
- final AbstractInsnNode insn,
- final BasicValue value1,
- final BasicValue value2) throws AnalyzerException
- {
+ public BasicValue binaryOperation(final AbstractInsnNode insn,
+ final BasicValue value1, final BasicValue value2)
+ throws AnalyzerException {
switch (insn.getOpcode()) {
- case IALOAD:
- case BALOAD:
- case CALOAD:
- case SALOAD:
- case IADD:
- case ISUB:
- case IMUL:
- case IDIV:
- case IREM:
- case ISHL:
- case ISHR:
- case IUSHR:
- case IAND:
- case IOR:
- case IXOR:
- return BasicValue.INT_VALUE;
- case FALOAD:
- case FADD:
- case FSUB:
- case FMUL:
- case FDIV:
- case FREM:
- return BasicValue.FLOAT_VALUE;
- case LALOAD:
- case LADD:
- case LSUB:
- case LMUL:
- case LDIV:
- case LREM:
- case LSHL:
- case LSHR:
- case LUSHR:
- case LAND:
- case LOR:
- case LXOR:
- return BasicValue.LONG_VALUE;
- case DALOAD:
- case DADD:
- case DSUB:
- case DMUL:
- case DDIV:
- case DREM:
- return BasicValue.DOUBLE_VALUE;
- case AALOAD:
- return BasicValue.REFERENCE_VALUE;
- case LCMP:
- case FCMPL:
- case FCMPG:
- case DCMPL:
- case DCMPG:
- return BasicValue.INT_VALUE;
- case IF_ICMPEQ:
- case IF_ICMPNE:
- case IF_ICMPLT:
- case IF_ICMPGE:
- case IF_ICMPGT:
- case IF_ICMPLE:
- case IF_ACMPEQ:
- case IF_ACMPNE:
- case PUTFIELD:
- return null;
- default:
- throw new Error("Internal error.");
+ case IALOAD:
+ case BALOAD:
+ case CALOAD:
+ case SALOAD:
+ case IADD:
+ case ISUB:
+ case IMUL:
+ case IDIV:
+ case IREM:
+ case ISHL:
+ case ISHR:
+ case IUSHR:
+ case IAND:
+ case IOR:
+ case IXOR:
+ return BasicValue.INT_VALUE;
+ case FALOAD:
+ case FADD:
+ case FSUB:
+ case FMUL:
+ case FDIV:
+ case FREM:
+ return BasicValue.FLOAT_VALUE;
+ case LALOAD:
+ case LADD:
+ case LSUB:
+ case LMUL:
+ case LDIV:
+ case LREM:
+ case LSHL:
+ case LSHR:
+ case LUSHR:
+ case LAND:
+ case LOR:
+ case LXOR:
+ return BasicValue.LONG_VALUE;
+ case DALOAD:
+ case DADD:
+ case DSUB:
+ case DMUL:
+ case DDIV:
+ case DREM:
+ return BasicValue.DOUBLE_VALUE;
+ case AALOAD:
+ return BasicValue.REFERENCE_VALUE;
+ case LCMP:
+ case FCMPL:
+ case FCMPG:
+ case DCMPL:
+ case DCMPG:
+ return BasicValue.INT_VALUE;
+ case IF_ICMPEQ:
+ case IF_ICMPNE:
+ case IF_ICMPLT:
+ case IF_ICMPGE:
+ case IF_ICMPGT:
+ case IF_ICMPLE:
+ case IF_ACMPEQ:
+ case IF_ACMPNE:
+ case PUTFIELD:
+ return null;
+ default:
+ throw new Error("Internal error.");
}
}
@Override
- public BasicValue ternaryOperation(
- final AbstractInsnNode insn,
- final BasicValue value1,
- final BasicValue value2,
- final BasicValue value3) throws AnalyzerException
- {
+ public BasicValue ternaryOperation(final AbstractInsnNode insn,
+ final BasicValue value1, final BasicValue value2,
+ final BasicValue value3) throws AnalyzerException {
return null;
}
@Override
- public BasicValue naryOperation(final AbstractInsnNode insn, final List<? extends BasicValue> values)
- throws AnalyzerException
- {
+ public BasicValue naryOperation(final AbstractInsnNode insn,
+ final List<? extends BasicValue> values) throws AnalyzerException {
int opcode = insn.getOpcode();
if (opcode == MULTIANEWARRAY) {
return newValue(Type.getType(((MultiANewArrayInsnNode) insn).desc));
- } else if (opcode == INVOKEDYNAMIC){
- return newValue(Type.getReturnType(((InvokeDynamicInsnNode) insn).desc));
+ } else if (opcode == INVOKEDYNAMIC) {
+ return newValue(Type
+ .getReturnType(((InvokeDynamicInsnNode) insn).desc));
} else {
return newValue(Type.getReturnType(((MethodInsnNode) insn).desc));
}
}
@Override
- public void returnOperation(
- final AbstractInsnNode insn,
- final BasicValue value,
- final BasicValue expected) throws AnalyzerException
- {
+ public void returnOperation(final AbstractInsnNode insn,
+ final BasicValue value, final BasicValue expected)
+ throws AnalyzerException {
}
@Override
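The interpreter reindented above is the stock value model of the tree-analysis package. A minimal usage sketch of how it is normally driven, assuming the shaded scala.tools.asm classes touched by this commit and a MethodNode supplied by the caller (class and parameter names below are illustrative, not part of this patch):
import scala.tools.asm.tree.MethodNode;
import scala.tools.asm.tree.analysis.Analyzer;
import scala.tools.asm.tree.analysis.AnalyzerException;
import scala.tools.asm.tree.analysis.BasicInterpreter;
import scala.tools.asm.tree.analysis.BasicValue;
import scala.tools.asm.tree.analysis.Frame;

class BasicInterpreterSketch {
    // Runs the basic interpreter over one method and prints the frame
    // computed before each instruction; null entries mark unreachable code.
    static void dumpFrames(String ownerInternalName, MethodNode method)
            throws AnalyzerException {
        Analyzer<BasicValue> analyzer =
                new Analyzer<BasicValue>(new BasicInterpreter());
        Frame<BasicValue>[] frames = analyzer.analyze(ownerInternalName, method);
        for (int i = 0; i < frames.length; i++) {
            System.out.println(i + ": " + frames[i]);
        }
    }
}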
diff --git a/src/asm/scala/tools/asm/tree/analysis/BasicValue.java b/src/asm/scala/tools/asm/tree/analysis/BasicValue.java
index 6c449db9b0..439941fb9f 100644
--- a/src/asm/scala/tools/asm/tree/analysis/BasicValue.java
+++ b/src/asm/scala/tools/asm/tree/analysis/BasicValue.java
@@ -48,11 +48,14 @@ public class BasicValue implements Value {
public static final BasicValue LONG_VALUE = new BasicValue(Type.LONG_TYPE);
- public static final BasicValue DOUBLE_VALUE = new BasicValue(Type.DOUBLE_TYPE);
+ public static final BasicValue DOUBLE_VALUE = new BasicValue(
+ Type.DOUBLE_TYPE);
- public static final BasicValue REFERENCE_VALUE = new BasicValue(Type.getObjectType("java/lang/Object"));
+ public static final BasicValue REFERENCE_VALUE = new BasicValue(
+ Type.getObjectType("java/lang/Object"));
- public static final BasicValue RETURNADDRESS_VALUE = new BasicValue(Type.VOID_TYPE);
+ public static final BasicValue RETURNADDRESS_VALUE = new BasicValue(
+ Type.VOID_TYPE);
private final Type type;
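For orientation, a small standalone sketch (not part of the patch) of how the constants re-wrapped above behave, assuming the shaded package:
import scala.tools.asm.Type;
import scala.tools.asm.tree.analysis.BasicValue;

class BasicValueSketch {
    public static void main(String[] args) {
        // getSize() mirrors JVM slot counts: 2 for long/double, 1 otherwise.
        System.out.println(BasicValue.INT_VALUE.getSize());    // 1
        System.out.println(BasicValue.DOUBLE_VALUE.getSize()); // 2
        // REFERENCE_VALUE wraps the object type for java/lang/Object.
        System.out.println(BasicValue.REFERENCE_VALUE.getType().getInternalName());
        // RETURNADDRESS_VALUE reuses Type.VOID_TYPE purely as a marker value.
        System.out.println(BasicValue.RETURNADDRESS_VALUE.getType() == Type.VOID_TYPE);
    }
}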
diff --git a/src/asm/scala/tools/asm/tree/analysis/BasicVerifier.java b/src/asm/scala/tools/asm/tree/analysis/BasicVerifier.java
index 9297dd9294..71666edb74 100644
--- a/src/asm/scala/tools/asm/tree/analysis/BasicVerifier.java
+++ b/src/asm/scala/tools/asm/tree/analysis/BasicVerifier.java
@@ -55,47 +55,41 @@ public class BasicVerifier extends BasicInterpreter {
}
@Override
- public BasicValue copyOperation(final AbstractInsnNode insn, final BasicValue value)
- throws AnalyzerException
- {
+ public BasicValue copyOperation(final AbstractInsnNode insn,
+ final BasicValue value) throws AnalyzerException {
Value expected;
switch (insn.getOpcode()) {
- case ILOAD:
- case ISTORE:
- expected = BasicValue.INT_VALUE;
- break;
- case FLOAD:
- case FSTORE:
- expected = BasicValue.FLOAT_VALUE;
- break;
- case LLOAD:
- case LSTORE:
- expected = BasicValue.LONG_VALUE;
- break;
- case DLOAD:
- case DSTORE:
- expected = BasicValue.DOUBLE_VALUE;
- break;
- case ALOAD:
- if (!value.isReference()) {
- throw new AnalyzerException(insn,
- null,
- "an object reference",
- value);
- }
- return value;
- case ASTORE:
- if (!value.isReference()
- && !BasicValue.RETURNADDRESS_VALUE.equals(value))
- {
- throw new AnalyzerException(insn,
- null,
- "an object reference or a return address",
- value);
- }
- return value;
- default:
- return value;
+ case ILOAD:
+ case ISTORE:
+ expected = BasicValue.INT_VALUE;
+ break;
+ case FLOAD:
+ case FSTORE:
+ expected = BasicValue.FLOAT_VALUE;
+ break;
+ case LLOAD:
+ case LSTORE:
+ expected = BasicValue.LONG_VALUE;
+ break;
+ case DLOAD:
+ case DSTORE:
+ expected = BasicValue.DOUBLE_VALUE;
+ break;
+ case ALOAD:
+ if (!value.isReference()) {
+ throw new AnalyzerException(insn, null, "an object reference",
+ value);
+ }
+ return value;
+ case ASTORE:
+ if (!value.isReference()
+ && !BasicValue.RETURNADDRESS_VALUE.equals(value)) {
+ throw new AnalyzerException(insn, null,
+ "an object reference or a return address", value);
+ }
+ return value;
+ default:
+ return value;
}
if (!expected.equals(value)) {
throw new AnalyzerException(insn, null, expected, value);
@@ -104,91 +98,85 @@ public class BasicVerifier extends BasicInterpreter {
}
@Override
- public BasicValue unaryOperation(final AbstractInsnNode insn, final BasicValue value)
- throws AnalyzerException
- {
+ public BasicValue unaryOperation(final AbstractInsnNode insn,
+ final BasicValue value) throws AnalyzerException {
BasicValue expected;
switch (insn.getOpcode()) {
- case INEG:
- case IINC:
- case I2F:
- case I2L:
- case I2D:
- case I2B:
- case I2C:
- case I2S:
- case IFEQ:
- case IFNE:
- case IFLT:
- case IFGE:
- case IFGT:
- case IFLE:
- case TABLESWITCH:
- case LOOKUPSWITCH:
- case IRETURN:
- case NEWARRAY:
- case ANEWARRAY:
- expected = BasicValue.INT_VALUE;
- break;
- case FNEG:
- case F2I:
- case F2L:
- case F2D:
- case FRETURN:
- expected = BasicValue.FLOAT_VALUE;
- break;
- case LNEG:
- case L2I:
- case L2F:
- case L2D:
- case LRETURN:
- expected = BasicValue.LONG_VALUE;
- break;
- case DNEG:
- case D2I:
- case D2F:
- case D2L:
- case DRETURN:
- expected = BasicValue.DOUBLE_VALUE;
- break;
- case GETFIELD:
- expected = newValue(Type.getObjectType(((FieldInsnNode) insn).owner));
- break;
- case CHECKCAST:
- if (!value.isReference()) {
- throw new AnalyzerException(insn,
- null,
- "an object reference",
- value);
- }
- return super.unaryOperation(insn, value);
- case ARRAYLENGTH:
- if (!isArrayValue(value)) {
- throw new AnalyzerException(insn,
- null,
- "an array reference",
- value);
- }
- return super.unaryOperation(insn, value);
- case ARETURN:
- case ATHROW:
- case INSTANCEOF:
- case MONITORENTER:
- case MONITOREXIT:
- case IFNULL:
- case IFNONNULL:
- if (!value.isReference()) {
- throw new AnalyzerException(insn,
- null,
- "an object reference",
- value);
- }
- return super.unaryOperation(insn, value);
- case PUTSTATIC:
- expected = newValue(Type.getType(((FieldInsnNode) insn).desc));
- break;
- default:
- throw new Error("Internal error.");
+ case INEG:
+ case IINC:
+ case I2F:
+ case I2L:
+ case I2D:
+ case I2B:
+ case I2C:
+ case I2S:
+ case IFEQ:
+ case IFNE:
+ case IFLT:
+ case IFGE:
+ case IFGT:
+ case IFLE:
+ case TABLESWITCH:
+ case LOOKUPSWITCH:
+ case IRETURN:
+ case NEWARRAY:
+ case ANEWARRAY:
+ expected = BasicValue.INT_VALUE;
+ break;
+ case FNEG:
+ case F2I:
+ case F2L:
+ case F2D:
+ case FRETURN:
+ expected = BasicValue.FLOAT_VALUE;
+ break;
+ case LNEG:
+ case L2I:
+ case L2F:
+ case L2D:
+ case LRETURN:
+ expected = BasicValue.LONG_VALUE;
+ break;
+ case DNEG:
+ case D2I:
+ case D2F:
+ case D2L:
+ case DRETURN:
+ expected = BasicValue.DOUBLE_VALUE;
+ break;
+ case GETFIELD:
+ expected = newValue(Type
+ .getObjectType(((FieldInsnNode) insn).owner));
+ break;
+ case CHECKCAST:
+ if (!value.isReference()) {
+ throw new AnalyzerException(insn, null, "an object reference",
+ value);
+ }
+ return super.unaryOperation(insn, value);
+ case ARRAYLENGTH:
+ if (!isArrayValue(value)) {
+ throw new AnalyzerException(insn, null, "an array reference",
+ value);
+ }
+ return super.unaryOperation(insn, value);
+ case ARETURN:
+ case ATHROW:
+ case INSTANCEOF:
+ case MONITORENTER:
+ case MONITOREXIT:
+ case IFNULL:
+ case IFNONNULL:
+ if (!value.isReference()) {
+ throw new AnalyzerException(insn, null, "an object reference",
+ value);
+ }
+ return super.unaryOperation(insn, value);
+ case PUTSTATIC:
+ expected = newValue(Type.getType(((FieldInsnNode) insn).desc));
+ break;
+ default:
+ throw new Error("Internal error.");
}
if (!isSubTypeOf(value, expected)) {
throw new AnalyzerException(insn, null, expected, value);
@@ -197,125 +185,125 @@ public class BasicVerifier extends BasicInterpreter {
}
@Override
- public BasicValue binaryOperation(
- final AbstractInsnNode insn,
- final BasicValue value1,
- final BasicValue value2) throws AnalyzerException
- {
+ public BasicValue binaryOperation(final AbstractInsnNode insn,
+ final BasicValue value1, final BasicValue value2)
+ throws AnalyzerException {
BasicValue expected1;
BasicValue expected2;
switch (insn.getOpcode()) {
- case IALOAD:
- expected1 = newValue(Type.getType("[I"));
- expected2 = BasicValue.INT_VALUE;
- break;
- case BALOAD:
- if (isSubTypeOf(value1, newValue(Type.getType("[Z")))) {
- expected1 = newValue(Type.getType("[Z"));
- } else {
- expected1 = newValue(Type.getType("[B"));
- }
- expected2 = BasicValue.INT_VALUE;
- break;
- case CALOAD:
- expected1 = newValue(Type.getType("[C"));
- expected2 = BasicValue.INT_VALUE;
- break;
- case SALOAD:
- expected1 = newValue(Type.getType("[S"));
- expected2 = BasicValue.INT_VALUE;
- break;
- case LALOAD:
- expected1 = newValue(Type.getType("[J"));
- expected2 = BasicValue.INT_VALUE;
- break;
- case FALOAD:
- expected1 = newValue(Type.getType("[F"));
- expected2 = BasicValue.INT_VALUE;
- break;
- case DALOAD:
- expected1 = newValue(Type.getType("[D"));
- expected2 = BasicValue.INT_VALUE;
- break;
- case AALOAD:
- expected1 = newValue(Type.getType("[Ljava/lang/Object;"));
- expected2 = BasicValue.INT_VALUE;
- break;
- case IADD:
- case ISUB:
- case IMUL:
- case IDIV:
- case IREM:
- case ISHL:
- case ISHR:
- case IUSHR:
- case IAND:
- case IOR:
- case IXOR:
- case IF_ICMPEQ:
- case IF_ICMPNE:
- case IF_ICMPLT:
- case IF_ICMPGE:
- case IF_ICMPGT:
- case IF_ICMPLE:
- expected1 = BasicValue.INT_VALUE;
- expected2 = BasicValue.INT_VALUE;
- break;
- case FADD:
- case FSUB:
- case FMUL:
- case FDIV:
- case FREM:
- case FCMPL:
- case FCMPG:
- expected1 = BasicValue.FLOAT_VALUE;
- expected2 = BasicValue.FLOAT_VALUE;
- break;
- case LADD:
- case LSUB:
- case LMUL:
- case LDIV:
- case LREM:
- case LAND:
- case LOR:
- case LXOR:
- case LCMP:
- expected1 = BasicValue.LONG_VALUE;
- expected2 = BasicValue.LONG_VALUE;
- break;
- case LSHL:
- case LSHR:
- case LUSHR:
- expected1 = BasicValue.LONG_VALUE;
- expected2 = BasicValue.INT_VALUE;
- break;
- case DADD:
- case DSUB:
- case DMUL:
- case DDIV:
- case DREM:
- case DCMPL:
- case DCMPG:
- expected1 = BasicValue.DOUBLE_VALUE;
- expected2 = BasicValue.DOUBLE_VALUE;
- break;
- case IF_ACMPEQ:
- case IF_ACMPNE:
- expected1 = BasicValue.REFERENCE_VALUE;
- expected2 = BasicValue.REFERENCE_VALUE;
- break;
- case PUTFIELD:
- FieldInsnNode fin = (FieldInsnNode) insn;
- expected1 = newValue(Type.getObjectType(fin.owner));
- expected2 = newValue(Type.getType(fin.desc));
- break;
- default:
- throw new Error("Internal error.");
+ case IALOAD:
+ expected1 = newValue(Type.getType("[I"));
+ expected2 = BasicValue.INT_VALUE;
+ break;
+ case BALOAD:
+ if (isSubTypeOf(value1, newValue(Type.getType("[Z")))) {
+ expected1 = newValue(Type.getType("[Z"));
+ } else {
+ expected1 = newValue(Type.getType("[B"));
+ }
+ expected2 = BasicValue.INT_VALUE;
+ break;
+ case CALOAD:
+ expected1 = newValue(Type.getType("[C"));
+ expected2 = BasicValue.INT_VALUE;
+ break;
+ case SALOAD:
+ expected1 = newValue(Type.getType("[S"));
+ expected2 = BasicValue.INT_VALUE;
+ break;
+ case LALOAD:
+ expected1 = newValue(Type.getType("[J"));
+ expected2 = BasicValue.INT_VALUE;
+ break;
+ case FALOAD:
+ expected1 = newValue(Type.getType("[F"));
+ expected2 = BasicValue.INT_VALUE;
+ break;
+ case DALOAD:
+ expected1 = newValue(Type.getType("[D"));
+ expected2 = BasicValue.INT_VALUE;
+ break;
+ case AALOAD:
+ expected1 = newValue(Type.getType("[Ljava/lang/Object;"));
+ expected2 = BasicValue.INT_VALUE;
+ break;
+ case IADD:
+ case ISUB:
+ case IMUL:
+ case IDIV:
+ case IREM:
+ case ISHL:
+ case ISHR:
+ case IUSHR:
+ case IAND:
+ case IOR:
+ case IXOR:
+ case IF_ICMPEQ:
+ case IF_ICMPNE:
+ case IF_ICMPLT:
+ case IF_ICMPGE:
+ case IF_ICMPGT:
+ case IF_ICMPLE:
+ expected1 = BasicValue.INT_VALUE;
+ expected2 = BasicValue.INT_VALUE;
+ break;
+ case FADD:
+ case FSUB:
+ case FMUL:
+ case FDIV:
+ case FREM:
+ case FCMPL:
+ case FCMPG:
+ expected1 = BasicValue.FLOAT_VALUE;
+ expected2 = BasicValue.FLOAT_VALUE;
+ break;
+ case LADD:
+ case LSUB:
+ case LMUL:
+ case LDIV:
+ case LREM:
+ case LAND:
+ case LOR:
+ case LXOR:
+ case LCMP:
+ expected1 = BasicValue.LONG_VALUE;
+ expected2 = BasicValue.LONG_VALUE;
+ break;
+ case LSHL:
+ case LSHR:
+ case LUSHR:
+ expected1 = BasicValue.LONG_VALUE;
+ expected2 = BasicValue.INT_VALUE;
+ break;
+ case DADD:
+ case DSUB:
+ case DMUL:
+ case DDIV:
+ case DREM:
+ case DCMPL:
+ case DCMPG:
+ expected1 = BasicValue.DOUBLE_VALUE;
+ expected2 = BasicValue.DOUBLE_VALUE;
+ break;
+ case IF_ACMPEQ:
+ case IF_ACMPNE:
+ expected1 = BasicValue.REFERENCE_VALUE;
+ expected2 = BasicValue.REFERENCE_VALUE;
+ break;
+ case PUTFIELD:
+ FieldInsnNode fin = (FieldInsnNode) insn;
+ expected1 = newValue(Type.getObjectType(fin.owner));
+ expected2 = newValue(Type.getType(fin.desc));
+ break;
+ default:
+ throw new Error("Internal error.");
}
if (!isSubTypeOf(value1, expected1)) {
- throw new AnalyzerException(insn, "First argument", expected1, value1);
+ throw new AnalyzerException(insn, "First argument", expected1,
+ value1);
} else if (!isSubTypeOf(value2, expected2)) {
- throw new AnalyzerException(insn, "Second argument", expected2, value2);
+ throw new AnalyzerException(insn, "Second argument", expected2,
+ value2);
}
if (insn.getOpcode() == AALOAD) {
return getElementValue(value1);
@@ -325,79 +313,73 @@ public class BasicVerifier extends BasicInterpreter {
}
@Override
- public BasicValue ternaryOperation(
- final AbstractInsnNode insn,
- final BasicValue value1,
- final BasicValue value2,
- final BasicValue value3) throws AnalyzerException
- {
+ public BasicValue ternaryOperation(final AbstractInsnNode insn,
+ final BasicValue value1, final BasicValue value2,
+ final BasicValue value3) throws AnalyzerException {
BasicValue expected1;
BasicValue expected3;
switch (insn.getOpcode()) {
- case IASTORE:
- expected1 = newValue(Type.getType("[I"));
- expected3 = BasicValue.INT_VALUE;
- break;
- case BASTORE:
- if (isSubTypeOf(value1, newValue(Type.getType("[Z")))) {
- expected1 = newValue(Type.getType("[Z"));
- } else {
- expected1 = newValue(Type.getType("[B"));
- }
- expected3 = BasicValue.INT_VALUE;
- break;
- case CASTORE:
- expected1 = newValue(Type.getType("[C"));
- expected3 = BasicValue.INT_VALUE;
- break;
- case SASTORE:
- expected1 = newValue(Type.getType("[S"));
- expected3 = BasicValue.INT_VALUE;
- break;
- case LASTORE:
- expected1 = newValue(Type.getType("[J"));
- expected3 = BasicValue.LONG_VALUE;
- break;
- case FASTORE:
- expected1 = newValue(Type.getType("[F"));
- expected3 = BasicValue.FLOAT_VALUE;
- break;
- case DASTORE:
- expected1 = newValue(Type.getType("[D"));
- expected3 = BasicValue.DOUBLE_VALUE;
- break;
- case AASTORE:
- expected1 = value1;
- expected3 = BasicValue.REFERENCE_VALUE;
- break;
- default:
- throw new Error("Internal error.");
+ case IASTORE:
+ expected1 = newValue(Type.getType("[I"));
+ expected3 = BasicValue.INT_VALUE;
+ break;
+ case BASTORE:
+ if (isSubTypeOf(value1, newValue(Type.getType("[Z")))) {
+ expected1 = newValue(Type.getType("[Z"));
+ } else {
+ expected1 = newValue(Type.getType("[B"));
+ }
+ expected3 = BasicValue.INT_VALUE;
+ break;
+ case CASTORE:
+ expected1 = newValue(Type.getType("[C"));
+ expected3 = BasicValue.INT_VALUE;
+ break;
+ case SASTORE:
+ expected1 = newValue(Type.getType("[S"));
+ expected3 = BasicValue.INT_VALUE;
+ break;
+ case LASTORE:
+ expected1 = newValue(Type.getType("[J"));
+ expected3 = BasicValue.LONG_VALUE;
+ break;
+ case FASTORE:
+ expected1 = newValue(Type.getType("[F"));
+ expected3 = BasicValue.FLOAT_VALUE;
+ break;
+ case DASTORE:
+ expected1 = newValue(Type.getType("[D"));
+ expected3 = BasicValue.DOUBLE_VALUE;
+ break;
+ case AASTORE:
+ expected1 = value1;
+ expected3 = BasicValue.REFERENCE_VALUE;
+ break;
+ default:
+ throw new Error("Internal error.");
}
if (!isSubTypeOf(value1, expected1)) {
- throw new AnalyzerException(insn, "First argument", "a " + expected1
- + " array reference", value1);
+ throw new AnalyzerException(insn, "First argument", "a "
+ + expected1 + " array reference", value1);
} else if (!BasicValue.INT_VALUE.equals(value2)) {
throw new AnalyzerException(insn, "Second argument",
- BasicValue.INT_VALUE,
- value2);
+ BasicValue.INT_VALUE, value2);
} else if (!isSubTypeOf(value3, expected3)) {
- throw new AnalyzerException(insn, "Third argument", expected3, value3);
+ throw new AnalyzerException(insn, "Third argument", expected3,
+ value3);
}
return null;
}
@Override
- public BasicValue naryOperation(final AbstractInsnNode insn, final List<? extends BasicValue> values)
- throws AnalyzerException
- {
+ public BasicValue naryOperation(final AbstractInsnNode insn,
+ final List<? extends BasicValue> values) throws AnalyzerException {
int opcode = insn.getOpcode();
if (opcode == MULTIANEWARRAY) {
for (int i = 0; i < values.size(); ++i) {
if (!BasicValue.INT_VALUE.equals(values.get(i))) {
- throw new AnalyzerException(insn,
- null,
- BasicValue.INT_VALUE,
- values.get(i));
+ throw new AnalyzerException(insn, null,
+ BasicValue.INT_VALUE, values.get(i));
}
}
} else {
@@ -407,22 +389,18 @@ public class BasicVerifier extends BasicInterpreter {
Type owner = Type.getObjectType(((MethodInsnNode) insn).owner);
if (!isSubTypeOf(values.get(i++), newValue(owner))) {
throw new AnalyzerException(insn, "Method owner",
- newValue(owner),
- values.get(0));
+ newValue(owner), values.get(0));
}
}
- String desc = (opcode == INVOKEDYNAMIC)?
- ((InvokeDynamicInsnNode) insn).desc:
- ((MethodInsnNode) insn).desc;
+ String desc = (opcode == INVOKEDYNAMIC) ? ((InvokeDynamicInsnNode) insn).desc
+ : ((MethodInsnNode) insn).desc;
Type[] args = Type.getArgumentTypes(desc);
while (i < values.size()) {
BasicValue expected = newValue(args[j++]);
BasicValue encountered = values.get(i++);
if (!isSubTypeOf(encountered, expected)) {
- throw new AnalyzerException(insn,
- "Argument " + j,
- expected,
- encountered);
+ throw new AnalyzerException(insn, "Argument " + j,
+ expected, encountered);
}
}
}
@@ -430,16 +408,12 @@ public class BasicVerifier extends BasicInterpreter {
}
@Override
- public void returnOperation(
- final AbstractInsnNode insn,
- final BasicValue value,
- final BasicValue expected) throws AnalyzerException
- {
+ public void returnOperation(final AbstractInsnNode insn,
+ final BasicValue value, final BasicValue expected)
+ throws AnalyzerException {
if (!isSubTypeOf(value, expected)) {
- throw new AnalyzerException(insn,
- "Incompatible return type",
- expected,
- value);
+ throw new AnalyzerException(insn, "Incompatible return type",
+ expected, value);
}
}
@@ -448,12 +422,12 @@ public class BasicVerifier extends BasicInterpreter {
}
protected BasicValue getElementValue(final BasicValue objectArrayValue)
- throws AnalyzerException
- {
+ throws AnalyzerException {
return BasicValue.REFERENCE_VALUE;
}
- protected boolean isSubTypeOf(final BasicValue value, final BasicValue expected) {
+ protected boolean isSubTypeOf(final BasicValue value,
+ final BasicValue expected) {
return value.equals(expected);
}
}
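BasicVerifier, reformatted above, layers operand type checks on top of BasicInterpreter, so analysis fails on ill-typed bytecode instead of silently inferring frames. A hedged usage sketch, with the owner name and MethodNode assumed to come from the caller:
import scala.tools.asm.tree.MethodNode;
import scala.tools.asm.tree.analysis.Analyzer;
import scala.tools.asm.tree.analysis.AnalyzerException;
import scala.tools.asm.tree.analysis.BasicValue;
import scala.tools.asm.tree.analysis.BasicVerifier;

class BasicVerifierSketch {
    // Returns true if the method body passes the basic type checks; on
    // failure the AnalyzerException identifies the offending instruction
    // and the expected versus encountered value.
    static boolean typeChecks(String ownerInternalName, MethodNode method) {
        try {
            new Analyzer<BasicValue>(new BasicVerifier())
                    .analyze(ownerInternalName, method);
            return true;
        } catch (AnalyzerException e) {
            System.err.println(e.getMessage());
            return false;
        }
    }
}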
diff --git a/src/asm/scala/tools/asm/tree/analysis/Frame.java b/src/asm/scala/tools/asm/tree/analysis/Frame.java
index fe19c2c9ae..0d92edc4d6 100644
--- a/src/asm/scala/tools/asm/tree/analysis/Frame.java
+++ b/src/asm/scala/tools/asm/tree/analysis/Frame.java
@@ -44,10 +44,11 @@ import scala.tools.asm.tree.VarInsnNode;
/**
* A symbolic execution stack frame. A stack frame contains a set of local
* variable slots, and an operand stack. Warning: long and double values are
- * represented by <i>two</i> slots in local variables, and by <i>one</i> slot
- * in the operand stack.
+ * represented by <i>two</i> slots in local variables, and by <i>one</i> slot in
+ * the operand stack.
*
- * @param <V> type of the Value used for the analysis.
+ * @param <V>
+ * type of the Value used for the analysis.
*
* @author Eric Bruneton
*/
@@ -77,8 +78,10 @@ public class Frame<V extends Value> {
/**
* Constructs a new frame with the given size.
*
- * @param nLocals the maximum number of local variables of the frame.
- * @param nStack the maximum stack size of the frame.
+ * @param nLocals
+ * the maximum number of local variables of the frame.
+ * @param nStack
+ * the maximum stack size of the frame.
*/
public Frame(final int nLocals, final int nStack) {
this.values = (V[]) new Value[nLocals + nStack];
@@ -88,7 +91,8 @@ public class Frame<V extends Value> {
/**
* Constructs a new frame that is identical to the given frame.
*
- * @param src a frame.
+ * @param src
+ * a frame.
*/
public Frame(final Frame<? extends V> src) {
this(src.locals, src.values.length - src.locals);
@@ -98,7 +102,8 @@ public class Frame<V extends Value> {
/**
* Copies the state of the given frame into this frame.
*
- * @param src a frame.
+ * @param src
+ * a frame.
* @return this frame.
*/
public Frame<V> init(final Frame<? extends V> src) {
@@ -111,8 +116,9 @@ public class Frame<V extends Value> {
/**
* Sets the expected return type of the analyzed method.
*
- * @param v the expected return type of the analyzed method, or
- * <tt>null</tt> if the method returns void.
+ * @param v
+ * the expected return type of the analyzed method, or
+ * <tt>null</tt> if the method returns void.
*/
public void setReturn(final V v) {
returnValue = v;
@@ -130,13 +136,16 @@ public class Frame<V extends Value> {
/**
* Returns the value of the given local variable.
*
- * @param i a local variable index.
+ * @param i
+ * a local variable index.
* @return the value of the given local variable.
- * @throws IndexOutOfBoundsException if the variable does not exist.
+ * @throws IndexOutOfBoundsException
+ * if the variable does not exist.
*/
public V getLocal(final int i) throws IndexOutOfBoundsException {
if (i >= locals) {
- throw new IndexOutOfBoundsException("Trying to access an inexistant local variable");
+ throw new IndexOutOfBoundsException(
+ "Trying to access an inexistant local variable");
}
return values[i];
}
@@ -144,15 +153,18 @@ public class Frame<V extends Value> {
/**
* Sets the value of the given local variable.
*
- * @param i a local variable index.
- * @param value the new value of this local variable.
- * @throws IndexOutOfBoundsException if the variable does not exist.
+ * @param i
+ * a local variable index.
+ * @param value
+ * the new value of this local variable.
+ * @throws IndexOutOfBoundsException
+ * if the variable does not exist.
*/
public void setLocal(final int i, final V value)
- throws IndexOutOfBoundsException
- {
+ throws IndexOutOfBoundsException {
if (i >= locals) {
- throw new IndexOutOfBoundsException("Trying to access an inexistant local variable "+i);
+ throw new IndexOutOfBoundsException(
+ "Trying to access an inexistant local variable " + i);
}
values[i] = value;
}
@@ -170,10 +182,11 @@ public class Frame<V extends Value> {
/**
* Returns the value of the given operand stack slot.
*
- * @param i the index of an operand stack slot.
+ * @param i
+ * the index of an operand stack slot.
* @return the value of the given operand stack slot.
- * @throws IndexOutOfBoundsException if the operand stack slot does not
- * exist.
+ * @throws IndexOutOfBoundsException
+ * if the operand stack slot does not exist.
*/
public V getStack(final int i) throws IndexOutOfBoundsException {
return values[i + locals];
@@ -190,11 +203,13 @@ public class Frame<V extends Value> {
* Pops a value from the operand stack of this frame.
*
* @return the value that has been popped from the stack.
- * @throws IndexOutOfBoundsException if the operand stack is empty.
+ * @throws IndexOutOfBoundsException
+ * if the operand stack is empty.
*/
public V pop() throws IndexOutOfBoundsException {
if (top == 0) {
- throw new IndexOutOfBoundsException("Cannot pop operand off an empty stack.");
+ throw new IndexOutOfBoundsException(
+ "Cannot pop operand off an empty stack.");
}
return values[--top + locals];
}
@@ -202,466 +217,469 @@ public class Frame<V extends Value> {
/**
* Pushes a value into the operand stack of this frame.
*
- * @param value the value that must be pushed into the stack.
- * @throws IndexOutOfBoundsException if the operand stack is full.
+ * @param value
+ * the value that must be pushed into the stack.
+ * @throws IndexOutOfBoundsException
+ * if the operand stack is full.
*/
public void push(final V value) throws IndexOutOfBoundsException {
if (top + locals >= values.length) {
- throw new IndexOutOfBoundsException("Insufficient maximum stack size.");
+ throw new IndexOutOfBoundsException(
+ "Insufficient maximum stack size.");
}
values[top++ + locals] = value;
}
- public void execute(
- final AbstractInsnNode insn,
- final Interpreter<V> interpreter) throws AnalyzerException
- {
+ public void execute(final AbstractInsnNode insn,
+ final Interpreter<V> interpreter) throws AnalyzerException {
V value1, value2, value3, value4;
List<V> values;
int var;
switch (insn.getOpcode()) {
- case Opcodes.NOP:
- break;
- case Opcodes.ACONST_NULL:
- case Opcodes.ICONST_M1:
- case Opcodes.ICONST_0:
- case Opcodes.ICONST_1:
- case Opcodes.ICONST_2:
- case Opcodes.ICONST_3:
- case Opcodes.ICONST_4:
- case Opcodes.ICONST_5:
- case Opcodes.LCONST_0:
- case Opcodes.LCONST_1:
- case Opcodes.FCONST_0:
- case Opcodes.FCONST_1:
- case Opcodes.FCONST_2:
- case Opcodes.DCONST_0:
- case Opcodes.DCONST_1:
- case Opcodes.BIPUSH:
- case Opcodes.SIPUSH:
- case Opcodes.LDC:
- push(interpreter.newOperation(insn));
- break;
- case Opcodes.ILOAD:
- case Opcodes.LLOAD:
- case Opcodes.FLOAD:
- case Opcodes.DLOAD:
- case Opcodes.ALOAD:
- push(interpreter.copyOperation(insn,
- getLocal(((VarInsnNode) insn).var)));
- break;
- case Opcodes.IALOAD:
- case Opcodes.LALOAD:
- case Opcodes.FALOAD:
- case Opcodes.DALOAD:
- case Opcodes.AALOAD:
- case Opcodes.BALOAD:
- case Opcodes.CALOAD:
- case Opcodes.SALOAD:
- value2 = pop();
- value1 = pop();
- push(interpreter.binaryOperation(insn, value1, value2));
- break;
- case Opcodes.ISTORE:
- case Opcodes.LSTORE:
- case Opcodes.FSTORE:
- case Opcodes.DSTORE:
- case Opcodes.ASTORE:
- value1 = interpreter.copyOperation(insn, pop());
- var = ((VarInsnNode) insn).var;
- setLocal(var, value1);
- if (value1.getSize() == 2) {
- setLocal(var + 1, interpreter.newValue(null));
+ case Opcodes.NOP:
+ break;
+ case Opcodes.ACONST_NULL:
+ case Opcodes.ICONST_M1:
+ case Opcodes.ICONST_0:
+ case Opcodes.ICONST_1:
+ case Opcodes.ICONST_2:
+ case Opcodes.ICONST_3:
+ case Opcodes.ICONST_4:
+ case Opcodes.ICONST_5:
+ case Opcodes.LCONST_0:
+ case Opcodes.LCONST_1:
+ case Opcodes.FCONST_0:
+ case Opcodes.FCONST_1:
+ case Opcodes.FCONST_2:
+ case Opcodes.DCONST_0:
+ case Opcodes.DCONST_1:
+ case Opcodes.BIPUSH:
+ case Opcodes.SIPUSH:
+ case Opcodes.LDC:
+ push(interpreter.newOperation(insn));
+ break;
+ case Opcodes.ILOAD:
+ case Opcodes.LLOAD:
+ case Opcodes.FLOAD:
+ case Opcodes.DLOAD:
+ case Opcodes.ALOAD:
+ push(interpreter.copyOperation(insn,
+ getLocal(((VarInsnNode) insn).var)));
+ break;
+ case Opcodes.IALOAD:
+ case Opcodes.LALOAD:
+ case Opcodes.FALOAD:
+ case Opcodes.DALOAD:
+ case Opcodes.AALOAD:
+ case Opcodes.BALOAD:
+ case Opcodes.CALOAD:
+ case Opcodes.SALOAD:
+ value2 = pop();
+ value1 = pop();
+ push(interpreter.binaryOperation(insn, value1, value2));
+ break;
+ case Opcodes.ISTORE:
+ case Opcodes.LSTORE:
+ case Opcodes.FSTORE:
+ case Opcodes.DSTORE:
+ case Opcodes.ASTORE:
+ value1 = interpreter.copyOperation(insn, pop());
+ var = ((VarInsnNode) insn).var;
+ setLocal(var, value1);
+ if (value1.getSize() == 2) {
+ setLocal(var + 1, interpreter.newValue(null));
+ }
+ if (var > 0) {
+ Value local = getLocal(var - 1);
+ if (local != null && local.getSize() == 2) {
+ setLocal(var - 1, interpreter.newValue(null));
}
- if (var > 0) {
- Value local = getLocal(var - 1);
- if (local != null && local.getSize() == 2) {
- setLocal(var - 1, interpreter.newValue(null));
- }
+ }
+ break;
+ case Opcodes.IASTORE:
+ case Opcodes.LASTORE:
+ case Opcodes.FASTORE:
+ case Opcodes.DASTORE:
+ case Opcodes.AASTORE:
+ case Opcodes.BASTORE:
+ case Opcodes.CASTORE:
+ case Opcodes.SASTORE:
+ value3 = pop();
+ value2 = pop();
+ value1 = pop();
+ interpreter.ternaryOperation(insn, value1, value2, value3);
+ break;
+ case Opcodes.POP:
+ if (pop().getSize() == 2) {
+ throw new AnalyzerException(insn, "Illegal use of POP");
+ }
+ break;
+ case Opcodes.POP2:
+ if (pop().getSize() == 1) {
+ if (pop().getSize() != 1) {
+ throw new AnalyzerException(insn, "Illegal use of POP2");
}
- break;
- case Opcodes.IASTORE:
- case Opcodes.LASTORE:
- case Opcodes.FASTORE:
- case Opcodes.DASTORE:
- case Opcodes.AASTORE:
- case Opcodes.BASTORE:
- case Opcodes.CASTORE:
- case Opcodes.SASTORE:
- value3 = pop();
+ }
+ break;
+ case Opcodes.DUP:
+ value1 = pop();
+ if (value1.getSize() != 1) {
+ throw new AnalyzerException(insn, "Illegal use of DUP");
+ }
+ push(value1);
+ push(interpreter.copyOperation(insn, value1));
+ break;
+ case Opcodes.DUP_X1:
+ value1 = pop();
+ value2 = pop();
+ if (value1.getSize() != 1 || value2.getSize() != 1) {
+ throw new AnalyzerException(insn, "Illegal use of DUP_X1");
+ }
+ push(interpreter.copyOperation(insn, value1));
+ push(value2);
+ push(value1);
+ break;
+ case Opcodes.DUP_X2:
+ value1 = pop();
+ if (value1.getSize() == 1) {
value2 = pop();
- value1 = pop();
- interpreter.ternaryOperation(insn, value1, value2, value3);
- break;
- case Opcodes.POP:
- if (pop().getSize() == 2) {
- throw new AnalyzerException(insn, "Illegal use of POP");
- }
- break;
- case Opcodes.POP2:
- if (pop().getSize() == 1) {
- if (pop().getSize() != 1) {
- throw new AnalyzerException(insn, "Illegal use of POP2");
+ if (value2.getSize() == 1) {
+ value3 = pop();
+ if (value3.getSize() == 1) {
+ push(interpreter.copyOperation(insn, value1));
+ push(value3);
+ push(value2);
+ push(value1);
+ break;
}
+ } else {
+ push(interpreter.copyOperation(insn, value1));
+ push(value2);
+ push(value1);
+ break;
}
- break;
- case Opcodes.DUP:
- value1 = pop();
- if (value1.getSize() != 1) {
- throw new AnalyzerException(insn, "Illegal use of DUP");
+ }
+ throw new AnalyzerException(insn, "Illegal use of DUP_X2");
+ case Opcodes.DUP2:
+ value1 = pop();
+ if (value1.getSize() == 1) {
+ value2 = pop();
+ if (value2.getSize() == 1) {
+ push(value2);
+ push(value1);
+ push(interpreter.copyOperation(insn, value2));
+ push(interpreter.copyOperation(insn, value1));
+ break;
}
+ } else {
push(value1);
push(interpreter.copyOperation(insn, value1));
break;
- case Opcodes.DUP_X1:
- value1 = pop();
+ }
+ throw new AnalyzerException(insn, "Illegal use of DUP2");
+ case Opcodes.DUP2_X1:
+ value1 = pop();
+ if (value1.getSize() == 1) {
value2 = pop();
- if (value1.getSize() != 1 || value2.getSize() != 1) {
- throw new AnalyzerException(insn, "Illegal use of DUP_X1");
- }
- push(interpreter.copyOperation(insn, value1));
- push(value2);
- push(value1);
- break;
- case Opcodes.DUP_X2:
- value1 = pop();
- if (value1.getSize() == 1) {
- value2 = pop();
- if (value2.getSize() == 1) {
- value3 = pop();
- if (value3.getSize() == 1) {
- push(interpreter.copyOperation(insn, value1));
- push(value3);
- push(value2);
- push(value1);
- break;
- }
- } else {
+ if (value2.getSize() == 1) {
+ value3 = pop();
+ if (value3.getSize() == 1) {
+ push(interpreter.copyOperation(insn, value2));
push(interpreter.copyOperation(insn, value1));
+ push(value3);
push(value2);
push(value1);
break;
}
}
- throw new AnalyzerException(insn, "Illegal use of DUP_X2");
- case Opcodes.DUP2:
- value1 = pop();
- if (value1.getSize() == 1) {
- value2 = pop();
- if (value2.getSize() == 1) {
- push(value2);
- push(value1);
- push(interpreter.copyOperation(insn, value2));
- push(interpreter.copyOperation(insn, value1));
- break;
- }
- } else {
- push(value1);
+ } else {
+ value2 = pop();
+ if (value2.getSize() == 1) {
push(interpreter.copyOperation(insn, value1));
+ push(value2);
+ push(value1);
break;
}
- throw new AnalyzerException(insn, "Illegal use of DUP2");
- case Opcodes.DUP2_X1:
- value1 = pop();
- if (value1.getSize() == 1) {
- value2 = pop();
- if (value2.getSize() == 1) {
- value3 = pop();
- if (value3.getSize() == 1) {
+ }
+ throw new AnalyzerException(insn, "Illegal use of DUP2_X1");
+ case Opcodes.DUP2_X2:
+ value1 = pop();
+ if (value1.getSize() == 1) {
+ value2 = pop();
+ if (value2.getSize() == 1) {
+ value3 = pop();
+ if (value3.getSize() == 1) {
+ value4 = pop();
+ if (value4.getSize() == 1) {
push(interpreter.copyOperation(insn, value2));
push(interpreter.copyOperation(insn, value1));
+ push(value4);
push(value3);
push(value2);
push(value1);
break;
}
- }
- } else {
- value2 = pop();
- if (value2.getSize() == 1) {
+ } else {
+ push(interpreter.copyOperation(insn, value2));
push(interpreter.copyOperation(insn, value1));
+ push(value3);
push(value2);
push(value1);
break;
}
}
- throw new AnalyzerException(insn, "Illegal use of DUP2_X1");
- case Opcodes.DUP2_X2:
- value1 = pop();
- if (value1.getSize() == 1) {
- value2 = pop();
- if (value2.getSize() == 1) {
- value3 = pop();
- if (value3.getSize() == 1) {
- value4 = pop();
- if (value4.getSize() == 1) {
- push(interpreter.copyOperation(insn, value2));
- push(interpreter.copyOperation(insn, value1));
- push(value4);
- push(value3);
- push(value2);
- push(value1);
- break;
- }
- } else {
- push(interpreter.copyOperation(insn, value2));
- push(interpreter.copyOperation(insn, value1));
- push(value3);
- push(value2);
- push(value1);
- break;
- }
- }
- } else {
- value2 = pop();
- if (value2.getSize() == 1) {
- value3 = pop();
- if (value3.getSize() == 1) {
- push(interpreter.copyOperation(insn, value1));
- push(value3);
- push(value2);
- push(value1);
- break;
- }
- } else {
+ } else {
+ value2 = pop();
+ if (value2.getSize() == 1) {
+ value3 = pop();
+ if (value3.getSize() == 1) {
push(interpreter.copyOperation(insn, value1));
+ push(value3);
push(value2);
push(value1);
break;
}
- }
- throw new AnalyzerException(insn, "Illegal use of DUP2_X2");
- case Opcodes.SWAP:
- value2 = pop();
- value1 = pop();
- if (value1.getSize() != 1 || value2.getSize() != 1) {
- throw new AnalyzerException(insn, "Illegal use of SWAP");
- }
- push(interpreter.copyOperation(insn, value2));
- push(interpreter.copyOperation(insn, value1));
- break;
- case Opcodes.IADD:
- case Opcodes.LADD:
- case Opcodes.FADD:
- case Opcodes.DADD:
- case Opcodes.ISUB:
- case Opcodes.LSUB:
- case Opcodes.FSUB:
- case Opcodes.DSUB:
- case Opcodes.IMUL:
- case Opcodes.LMUL:
- case Opcodes.FMUL:
- case Opcodes.DMUL:
- case Opcodes.IDIV:
- case Opcodes.LDIV:
- case Opcodes.FDIV:
- case Opcodes.DDIV:
- case Opcodes.IREM:
- case Opcodes.LREM:
- case Opcodes.FREM:
- case Opcodes.DREM:
- value2 = pop();
- value1 = pop();
- push(interpreter.binaryOperation(insn, value1, value2));
- break;
- case Opcodes.INEG:
- case Opcodes.LNEG:
- case Opcodes.FNEG:
- case Opcodes.DNEG:
- push(interpreter.unaryOperation(insn, pop()));
- break;
- case Opcodes.ISHL:
- case Opcodes.LSHL:
- case Opcodes.ISHR:
- case Opcodes.LSHR:
- case Opcodes.IUSHR:
- case Opcodes.LUSHR:
- case Opcodes.IAND:
- case Opcodes.LAND:
- case Opcodes.IOR:
- case Opcodes.LOR:
- case Opcodes.IXOR:
- case Opcodes.LXOR:
- value2 = pop();
- value1 = pop();
- push(interpreter.binaryOperation(insn, value1, value2));
- break;
- case Opcodes.IINC:
- var = ((IincInsnNode) insn).var;
- setLocal(var, interpreter.unaryOperation(insn, getLocal(var)));
- break;
- case Opcodes.I2L:
- case Opcodes.I2F:
- case Opcodes.I2D:
- case Opcodes.L2I:
- case Opcodes.L2F:
- case Opcodes.L2D:
- case Opcodes.F2I:
- case Opcodes.F2L:
- case Opcodes.F2D:
- case Opcodes.D2I:
- case Opcodes.D2L:
- case Opcodes.D2F:
- case Opcodes.I2B:
- case Opcodes.I2C:
- case Opcodes.I2S:
- push(interpreter.unaryOperation(insn, pop()));
- break;
- case Opcodes.LCMP:
- case Opcodes.FCMPL:
- case Opcodes.FCMPG:
- case Opcodes.DCMPL:
- case Opcodes.DCMPG:
- value2 = pop();
- value1 = pop();
- push(interpreter.binaryOperation(insn, value1, value2));
- break;
- case Opcodes.IFEQ:
- case Opcodes.IFNE:
- case Opcodes.IFLT:
- case Opcodes.IFGE:
- case Opcodes.IFGT:
- case Opcodes.IFLE:
- interpreter.unaryOperation(insn, pop());
- break;
- case Opcodes.IF_ICMPEQ:
- case Opcodes.IF_ICMPNE:
- case Opcodes.IF_ICMPLT:
- case Opcodes.IF_ICMPGE:
- case Opcodes.IF_ICMPGT:
- case Opcodes.IF_ICMPLE:
- case Opcodes.IF_ACMPEQ:
- case Opcodes.IF_ACMPNE:
- value2 = pop();
- value1 = pop();
- interpreter.binaryOperation(insn, value1, value2);
- break;
- case Opcodes.GOTO:
- break;
- case Opcodes.JSR:
- push(interpreter.newOperation(insn));
- break;
- case Opcodes.RET:
- break;
- case Opcodes.TABLESWITCH:
- case Opcodes.LOOKUPSWITCH:
- interpreter.unaryOperation(insn, pop());
- break;
- case Opcodes.IRETURN:
- case Opcodes.LRETURN:
- case Opcodes.FRETURN:
- case Opcodes.DRETURN:
- case Opcodes.ARETURN:
- value1 = pop();
- interpreter.unaryOperation(insn, value1);
- interpreter.returnOperation(insn, value1, returnValue);
- break;
- case Opcodes.RETURN:
- if (returnValue != null) {
- throw new AnalyzerException(insn, "Incompatible return type");
- }
- break;
- case Opcodes.GETSTATIC:
- push(interpreter.newOperation(insn));
- break;
- case Opcodes.PUTSTATIC:
- interpreter.unaryOperation(insn, pop());
- break;
- case Opcodes.GETFIELD:
- push(interpreter.unaryOperation(insn, pop()));
- break;
- case Opcodes.PUTFIELD:
- value2 = pop();
- value1 = pop();
- interpreter.binaryOperation(insn, value1, value2);
- break;
- case Opcodes.INVOKEVIRTUAL:
- case Opcodes.INVOKESPECIAL:
- case Opcodes.INVOKESTATIC:
- case Opcodes.INVOKEINTERFACE: {
- values = new ArrayList<V>();
- String desc = ((MethodInsnNode) insn).desc;
- for (int i = Type.getArgumentTypes(desc).length; i > 0; --i) {
- values.add(0, pop());
- }
- if (insn.getOpcode() != Opcodes.INVOKESTATIC) {
- values.add(0, pop());
- }
- if (Type.getReturnType(desc) == Type.VOID_TYPE) {
- interpreter.naryOperation(insn, values);
} else {
- push(interpreter.naryOperation(insn, values));
+ push(interpreter.copyOperation(insn, value1));
+ push(value2);
+ push(value1);
+ break;
}
- break;
}
- case Opcodes.INVOKEDYNAMIC: {
- values = new ArrayList<V>();
- String desc = ((InvokeDynamicInsnNode) insn).desc;
- for (int i = Type.getArgumentTypes(desc).length; i > 0; --i) {
- values.add(0, pop());
- }
- if (Type.getReturnType(desc) == Type.VOID_TYPE) {
- interpreter.naryOperation(insn, values);
- } else {
- push(interpreter.naryOperation(insn, values));
- }
- break;
+ throw new AnalyzerException(insn, "Illegal use of DUP2_X2");
+ case Opcodes.SWAP:
+ value2 = pop();
+ value1 = pop();
+ if (value1.getSize() != 1 || value2.getSize() != 1) {
+ throw new AnalyzerException(insn, "Illegal use of SWAP");
}
- case Opcodes.NEW:
- push(interpreter.newOperation(insn));
- break;
- case Opcodes.NEWARRAY:
- case Opcodes.ANEWARRAY:
- case Opcodes.ARRAYLENGTH:
- push(interpreter.unaryOperation(insn, pop()));
- break;
- case Opcodes.ATHROW:
- interpreter.unaryOperation(insn, pop());
- break;
- case Opcodes.CHECKCAST:
- case Opcodes.INSTANCEOF:
- push(interpreter.unaryOperation(insn, pop()));
- break;
- case Opcodes.MONITORENTER:
- case Opcodes.MONITOREXIT:
- interpreter.unaryOperation(insn, pop());
- break;
- case Opcodes.MULTIANEWARRAY:
- values = new ArrayList<V>();
- for (int i = ((MultiANewArrayInsnNode) insn).dims; i > 0; --i) {
- values.add(0, pop());
- }
+ push(interpreter.copyOperation(insn, value2));
+ push(interpreter.copyOperation(insn, value1));
+ break;
+ case Opcodes.IADD:
+ case Opcodes.LADD:
+ case Opcodes.FADD:
+ case Opcodes.DADD:
+ case Opcodes.ISUB:
+ case Opcodes.LSUB:
+ case Opcodes.FSUB:
+ case Opcodes.DSUB:
+ case Opcodes.IMUL:
+ case Opcodes.LMUL:
+ case Opcodes.FMUL:
+ case Opcodes.DMUL:
+ case Opcodes.IDIV:
+ case Opcodes.LDIV:
+ case Opcodes.FDIV:
+ case Opcodes.DDIV:
+ case Opcodes.IREM:
+ case Opcodes.LREM:
+ case Opcodes.FREM:
+ case Opcodes.DREM:
+ value2 = pop();
+ value1 = pop();
+ push(interpreter.binaryOperation(insn, value1, value2));
+ break;
+ case Opcodes.INEG:
+ case Opcodes.LNEG:
+ case Opcodes.FNEG:
+ case Opcodes.DNEG:
+ push(interpreter.unaryOperation(insn, pop()));
+ break;
+ case Opcodes.ISHL:
+ case Opcodes.LSHL:
+ case Opcodes.ISHR:
+ case Opcodes.LSHR:
+ case Opcodes.IUSHR:
+ case Opcodes.LUSHR:
+ case Opcodes.IAND:
+ case Opcodes.LAND:
+ case Opcodes.IOR:
+ case Opcodes.LOR:
+ case Opcodes.IXOR:
+ case Opcodes.LXOR:
+ value2 = pop();
+ value1 = pop();
+ push(interpreter.binaryOperation(insn, value1, value2));
+ break;
+ case Opcodes.IINC:
+ var = ((IincInsnNode) insn).var;
+ setLocal(var, interpreter.unaryOperation(insn, getLocal(var)));
+ break;
+ case Opcodes.I2L:
+ case Opcodes.I2F:
+ case Opcodes.I2D:
+ case Opcodes.L2I:
+ case Opcodes.L2F:
+ case Opcodes.L2D:
+ case Opcodes.F2I:
+ case Opcodes.F2L:
+ case Opcodes.F2D:
+ case Opcodes.D2I:
+ case Opcodes.D2L:
+ case Opcodes.D2F:
+ case Opcodes.I2B:
+ case Opcodes.I2C:
+ case Opcodes.I2S:
+ push(interpreter.unaryOperation(insn, pop()));
+ break;
+ case Opcodes.LCMP:
+ case Opcodes.FCMPL:
+ case Opcodes.FCMPG:
+ case Opcodes.DCMPL:
+ case Opcodes.DCMPG:
+ value2 = pop();
+ value1 = pop();
+ push(interpreter.binaryOperation(insn, value1, value2));
+ break;
+ case Opcodes.IFEQ:
+ case Opcodes.IFNE:
+ case Opcodes.IFLT:
+ case Opcodes.IFGE:
+ case Opcodes.IFGT:
+ case Opcodes.IFLE:
+ interpreter.unaryOperation(insn, pop());
+ break;
+ case Opcodes.IF_ICMPEQ:
+ case Opcodes.IF_ICMPNE:
+ case Opcodes.IF_ICMPLT:
+ case Opcodes.IF_ICMPGE:
+ case Opcodes.IF_ICMPGT:
+ case Opcodes.IF_ICMPLE:
+ case Opcodes.IF_ACMPEQ:
+ case Opcodes.IF_ACMPNE:
+ value2 = pop();
+ value1 = pop();
+ interpreter.binaryOperation(insn, value1, value2);
+ break;
+ case Opcodes.GOTO:
+ break;
+ case Opcodes.JSR:
+ push(interpreter.newOperation(insn));
+ break;
+ case Opcodes.RET:
+ break;
+ case Opcodes.TABLESWITCH:
+ case Opcodes.LOOKUPSWITCH:
+ interpreter.unaryOperation(insn, pop());
+ break;
+ case Opcodes.IRETURN:
+ case Opcodes.LRETURN:
+ case Opcodes.FRETURN:
+ case Opcodes.DRETURN:
+ case Opcodes.ARETURN:
+ value1 = pop();
+ interpreter.unaryOperation(insn, value1);
+ interpreter.returnOperation(insn, value1, returnValue);
+ break;
+ case Opcodes.RETURN:
+ if (returnValue != null) {
+ throw new AnalyzerException(insn, "Incompatible return type");
+ }
+ break;
+ case Opcodes.GETSTATIC:
+ push(interpreter.newOperation(insn));
+ break;
+ case Opcodes.PUTSTATIC:
+ interpreter.unaryOperation(insn, pop());
+ break;
+ case Opcodes.GETFIELD:
+ push(interpreter.unaryOperation(insn, pop()));
+ break;
+ case Opcodes.PUTFIELD:
+ value2 = pop();
+ value1 = pop();
+ interpreter.binaryOperation(insn, value1, value2);
+ break;
+ case Opcodes.INVOKEVIRTUAL:
+ case Opcodes.INVOKESPECIAL:
+ case Opcodes.INVOKESTATIC:
+ case Opcodes.INVOKEINTERFACE: {
+ values = new ArrayList<V>();
+ String desc = ((MethodInsnNode) insn).desc;
+ for (int i = Type.getArgumentTypes(desc).length; i > 0; --i) {
+ values.add(0, pop());
+ }
+ if (insn.getOpcode() != Opcodes.INVOKESTATIC) {
+ values.add(0, pop());
+ }
+ if (Type.getReturnType(desc) == Type.VOID_TYPE) {
+ interpreter.naryOperation(insn, values);
+ } else {
push(interpreter.naryOperation(insn, values));
- break;
- case Opcodes.IFNULL:
- case Opcodes.IFNONNULL:
- interpreter.unaryOperation(insn, pop());
- break;
- default:
- throw new RuntimeException("Illegal opcode "+insn.getOpcode());
+ }
+ break;
+ }
+ case Opcodes.INVOKEDYNAMIC: {
+ values = new ArrayList<V>();
+ String desc = ((InvokeDynamicInsnNode) insn).desc;
+ for (int i = Type.getArgumentTypes(desc).length; i > 0; --i) {
+ values.add(0, pop());
+ }
+ if (Type.getReturnType(desc) == Type.VOID_TYPE) {
+ interpreter.naryOperation(insn, values);
+ } else {
+ push(interpreter.naryOperation(insn, values));
+ }
+ break;
+ }
+ case Opcodes.NEW:
+ push(interpreter.newOperation(insn));
+ break;
+ case Opcodes.NEWARRAY:
+ case Opcodes.ANEWARRAY:
+ case Opcodes.ARRAYLENGTH:
+ push(interpreter.unaryOperation(insn, pop()));
+ break;
+ case Opcodes.ATHROW:
+ interpreter.unaryOperation(insn, pop());
+ break;
+ case Opcodes.CHECKCAST:
+ case Opcodes.INSTANCEOF:
+ push(interpreter.unaryOperation(insn, pop()));
+ break;
+ case Opcodes.MONITORENTER:
+ case Opcodes.MONITOREXIT:
+ interpreter.unaryOperation(insn, pop());
+ break;
+ case Opcodes.MULTIANEWARRAY:
+ values = new ArrayList<V>();
+ for (int i = ((MultiANewArrayInsnNode) insn).dims; i > 0; --i) {
+ values.add(0, pop());
+ }
+ push(interpreter.naryOperation(insn, values));
+ break;
+ case Opcodes.IFNULL:
+ case Opcodes.IFNONNULL:
+ interpreter.unaryOperation(insn, pop());
+ break;
+ default:
+ throw new RuntimeException("Illegal opcode " + insn.getOpcode());
}
}
/**
* Merges this frame with the given frame.
*
- * @param frame a frame.
- * @param interpreter the interpreter used to merge values.
+ * @param frame
+ * a frame.
+ * @param interpreter
+ * the interpreter used to merge values.
* @return <tt>true</tt> if this frame has been changed as a result of the
* merge operation, or <tt>false</tt> otherwise.
- * @throws AnalyzerException if the frames have incompatible sizes.
+ * @throws AnalyzerException
+ * if the frames have incompatible sizes.
*/
- public boolean merge(final Frame<? extends V> frame, final Interpreter<V> interpreter)
- throws AnalyzerException
- {
+ public boolean merge(final Frame<? extends V> frame,
+ final Interpreter<V> interpreter) throws AnalyzerException {
if (top != frame.top) {
throw new AnalyzerException(null, "Incompatible stack heights");
}
boolean changes = false;
for (int i = 0; i < locals + top; ++i) {
V v = interpreter.merge(values[i], frame.values[i]);
- if (v != values[i]) {
+ if (!v.equals(values[i])) {
values[i] = v;
changes = true;
}
@@ -672,9 +690,11 @@ public class Frame<V extends Value> {
/**
* Merges this frame with the given frame (case of a RET instruction).
*
- * @param frame a frame
- * @param access the local variables that have been accessed by the
- * subroutine to which the RET instruction corresponds.
+ * @param frame
+ * a frame
+ * @param access
+ * the local variables that have been accessed by the subroutine
+ * to which the RET instruction corresponds.
* @return <tt>true</tt> if this frame has been changed as a result of the
* merge operation, or <tt>false</tt> otherwise.
*/
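A small standalone sketch of the Frame API documented above, illustrating the slot-count warning from the class comment (assumes the shaded package; the local and stack sizes chosen are arbitrary):
import scala.tools.asm.tree.analysis.BasicValue;
import scala.tools.asm.tree.analysis.Frame;

class FrameSketch {
    public static void main(String[] args) {
        Frame<BasicValue> frame = new Frame<BasicValue>(3, 2); // 3 locals, max stack 2
        // A long occupies two local variable slots...
        frame.setLocal(0, BasicValue.LONG_VALUE);
        frame.setLocal(1, BasicValue.UNINITIALIZED_VALUE); // filler for the second slot
        frame.setLocal(2, BasicValue.INT_VALUE);
        // ...but only one operand stack slot.
        frame.push(BasicValue.LONG_VALUE);
        System.out.println(frame.getStackSize()); // 1
        System.out.println(frame.pop());          // prints the long value ("J")
    }
}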
diff --git a/src/asm/scala/tools/asm/tree/analysis/Interpreter.java b/src/asm/scala/tools/asm/tree/analysis/Interpreter.java
index 930c8f4af8..56f4bedc00 100644
--- a/src/asm/scala/tools/asm/tree/analysis/Interpreter.java
+++ b/src/asm/scala/tools/asm/tree/analysis/Interpreter.java
@@ -42,7 +42,8 @@ import scala.tools.asm.tree.AbstractInsnNode;
* various semantic interpreters, without needing to duplicate the code to
* simulate the transfer of values.
*
- * @param <V> type of the Value used for the analysis.
+ * @param <V>
+ * type of the Value used for the analysis.
*
* @author Eric Bruneton
*/
@@ -57,12 +58,13 @@ public abstract class Interpreter<V extends Value> {
/**
* Creates a new value that represents the given type.
*
- * Called for method parameters (including <code>this</code>),
- * exception handler variable and with <code>null</code> type
- * for variables reserved by long and double types.
+ * Called for method parameters (including <code>this</code>), exception
+ * handler variable and with <code>null</code> type for variables reserved
+ * by long and double types.
*
- * @param type a primitive or reference type, or <tt>null</tt> to
- * represent an uninitialized value.
+ * @param type
+ * a primitive or reference type, or <tt>null</tt> to represent
+ * an uninitialized value.
* @return a value that represents the given type. The size of the returned
* value must be equal to the size of the given type.
*/
@@ -76,9 +78,11 @@ public abstract class Interpreter<V extends Value> {
* ICONST_5, LCONST_0, LCONST_1, FCONST_0, FCONST_1, FCONST_2, DCONST_0,
* DCONST_1, BIPUSH, SIPUSH, LDC, JSR, GETSTATIC, NEW
*
- * @param insn the bytecode instruction to be interpreted.
+ * @param insn
+ * the bytecode instruction to be interpreted.
* @return the result of the interpretation of the given instruction.
- * @throws AnalyzerException if an error occurred during the interpretation.
+ * @throws AnalyzerException
+ * if an error occurred during the interpretation.
*/
public abstract V newOperation(AbstractInsnNode insn)
throws AnalyzerException;
@@ -90,11 +94,14 @@ public abstract class Interpreter<V extends Value> {
* ILOAD, LLOAD, FLOAD, DLOAD, ALOAD, ISTORE, LSTORE, FSTORE, DSTORE,
* ASTORE, DUP, DUP_X1, DUP_X2, DUP2, DUP2_X1, DUP2_X2, SWAP
*
- * @param insn the bytecode instruction to be interpreted.
- * @param value the value that must be moved by the instruction.
+ * @param insn
+ * the bytecode instruction to be interpreted.
+ * @param value
+ * the value that must be moved by the instruction.
* @return the result of the interpretation of the given instruction. The
* returned value must be <tt>equal</tt> to the given value.
- * @throws AnalyzerException if an error occurred during the interpretation.
+ * @throws AnalyzerException
+ * if an error occurred during the interpretation.
*/
public abstract V copyOperation(AbstractInsnNode insn, V value)
throws AnalyzerException;
@@ -109,10 +116,13 @@ public abstract class Interpreter<V extends Value> {
* PUTSTATIC, GETFIELD, NEWARRAY, ANEWARRAY, ARRAYLENGTH, ATHROW, CHECKCAST,
* INSTANCEOF, MONITORENTER, MONITOREXIT, IFNULL, IFNONNULL
*
- * @param insn the bytecode instruction to be interpreted.
- * @param value the argument of the instruction to be interpreted.
+ * @param insn
+ * the bytecode instruction to be interpreted.
+ * @param value
+ * the argument of the instruction to be interpreted.
* @return the result of the interpretation of the given instruction.
- * @throws AnalyzerException if an error occurred during the interpretation.
+ * @throws AnalyzerException
+ * if an error occurred during the interpretation.
*/
public abstract V unaryOperation(AbstractInsnNode insn, V value)
throws AnalyzerException;
@@ -128,11 +138,15 @@ public abstract class Interpreter<V extends Value> {
* DCMPG, IF_ICMPEQ, IF_ICMPNE, IF_ICMPLT, IF_ICMPGE, IF_ICMPGT, IF_ICMPLE,
* IF_ACMPEQ, IF_ACMPNE, PUTFIELD
*
- * @param insn the bytecode instruction to be interpreted.
- * @param value1 the first argument of the instruction to be interpreted.
- * @param value2 the second argument of the instruction to be interpreted.
+ * @param insn
+ * the bytecode instruction to be interpreted.
+ * @param value1
+ * the first argument of the instruction to be interpreted.
+ * @param value2
+ * the second argument of the instruction to be interpreted.
* @return the result of the interpretation of the given instruction.
- * @throws AnalyzerException if an error occurred during the interpretation.
+ * @throws AnalyzerException
+ * if an error occurred during the interpretation.
*/
public abstract V binaryOperation(AbstractInsnNode insn, V value1, V value2)
throws AnalyzerException;
@@ -143,18 +157,20 @@ public abstract class Interpreter<V extends Value> {
*
* IASTORE, LASTORE, FASTORE, DASTORE, AASTORE, BASTORE, CASTORE, SASTORE
*
- * @param insn the bytecode instruction to be interpreted.
- * @param value1 the first argument of the instruction to be interpreted.
- * @param value2 the second argument of the instruction to be interpreted.
- * @param value3 the third argument of the instruction to be interpreted.
+ * @param insn
+ * the bytecode instruction to be interpreted.
+ * @param value1
+ * the first argument of the instruction to be interpreted.
+ * @param value2
+ * the second argument of the instruction to be interpreted.
+ * @param value3
+ * the third argument of the instruction to be interpreted.
* @return the result of the interpretation of the given instruction.
- * @throws AnalyzerException if an error occured during the interpretation.
+ * @throws AnalyzerException
+ * if an error occurred during the interpretation.
*/
- public abstract V ternaryOperation(
- AbstractInsnNode insn,
- V value1,
- V value2,
- V value3) throws AnalyzerException;
+ public abstract V ternaryOperation(AbstractInsnNode insn, V value1,
+ V value2, V value3) throws AnalyzerException;
/**
* Interprets a bytecode instruction with a variable number of arguments.
@@ -163,14 +179,16 @@ public abstract class Interpreter<V extends Value> {
* INVOKEVIRTUAL, INVOKESPECIAL, INVOKESTATIC, INVOKEINTERFACE,
* MULTIANEWARRAY and INVOKEDYNAMIC
*
- * @param insn the bytecode instruction to be interpreted.
- * @param values the arguments of the instruction to be interpreted.
+ * @param insn
+ * the bytecode instruction to be interpreted.
+ * @param values
+ * the arguments of the instruction to be interpreted.
* @return the result of the interpretation of the given instruction.
- * @throws AnalyzerException if an error occured during the interpretation.
+ * @throws AnalyzerException
+ * if an error occurred during the interpretation.
*/
- public abstract V naryOperation(
- AbstractInsnNode insn,
- List< ? extends V> values) throws AnalyzerException;
+ public abstract V naryOperation(AbstractInsnNode insn,
+ List<? extends V> values) throws AnalyzerException;
/**
* Interprets a bytecode return instruction. This method is called for the
@@ -178,15 +196,17 @@ public abstract class Interpreter<V extends Value> {
*
* IRETURN, LRETURN, FRETURN, DRETURN, ARETURN
*
- * @param insn the bytecode instruction to be interpreted.
- * @param value the argument of the instruction to be interpreted.
- * @param expected the expected return type of the analyzed method.
- * @throws AnalyzerException if an error occured during the interpretation.
+ * @param insn
+ * the bytecode instruction to be interpreted.
+ * @param value
+ * the argument of the instruction to be interpreted.
+ * @param expected
+ * the expected return type of the analyzed method.
+ * @throws AnalyzerException
+ * if an error occurred during the interpretation.
*/
- public abstract void returnOperation(
- AbstractInsnNode insn,
- V value,
- V expected) throws AnalyzerException;
+ public abstract void returnOperation(AbstractInsnNode insn, V value,
+ V expected) throws AnalyzerException;
/**
* Merges two values. The merge operation must return a value that
@@ -195,8 +215,10 @@ public abstract class Interpreter<V extends Value> {
* values are integer intervals, the merged value must be an interval that
* contains the previous ones. Likewise for other types of values).
*
- * @param v a value.
- * @param w another value.
+ * @param v
+ * a value.
+ * @param w
+ * another value.
* @return the merged value. If the merged value is equal to <tt>v</tt>,
* this method <i>must</i> return <tt>v</tt>.
*/
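
A minimal sketch of how the Interpreter contract above is consumed (an illustrative aside, not part of this patch): an Analyzer drives a concrete Interpreter over a method, calling newOperation/unaryOperation/merge and friends per instruction. The owner internal name and MethodNode below are placeholders supplied by the caller; BasicInterpreter is the stock implementation shipped alongside this class.

import scala.tools.asm.tree.MethodNode;
import scala.tools.asm.tree.analysis.Analyzer;
import scala.tools.asm.tree.analysis.AnalyzerException;
import scala.tools.asm.tree.analysis.BasicInterpreter;
import scala.tools.asm.tree.analysis.BasicValue;
import scala.tools.asm.tree.analysis.Frame;

class InterpreterSketch {
    // Runs a data flow analysis over one method; returns one Frame per
    // instruction (null entries mark unreachable instructions).
    static Frame<BasicValue>[] frames(String ownerInternalName, MethodNode method)
            throws AnalyzerException {
        Analyzer<BasicValue> analyzer =
                new Analyzer<BasicValue>(new BasicInterpreter());
        return analyzer.analyze(ownerInternalName, method);
    }
}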
diff --git a/src/asm/scala/tools/asm/tree/analysis/SimpleVerifier.java b/src/asm/scala/tools/asm/tree/analysis/SimpleVerifier.java
index c4f515d328..eaecd057ea 100644
--- a/src/asm/scala/tools/asm/tree/analysis/SimpleVerifier.java
+++ b/src/asm/scala/tools/asm/tree/analysis/SimpleVerifier.java
@@ -79,15 +79,15 @@ public class SimpleVerifier extends BasicVerifier {
* Constructs a new {@link SimpleVerifier} to verify a specific class. This
* class will not be loaded into the JVM since it may be incorrect.
*
- * @param currentClass the class that is verified.
- * @param currentSuperClass the super class of the class that is verified.
- * @param isInterface if the class that is verified is an interface.
+ * @param currentClass
+ * the class that is verified.
+ * @param currentSuperClass
+ * the super class of the class that is verified.
+ * @param isInterface
+ * if the class that is verified is an interface.
*/
- public SimpleVerifier(
- final Type currentClass,
- final Type currentSuperClass,
- final boolean isInterface)
- {
+ public SimpleVerifier(final Type currentClass,
+ final Type currentSuperClass, final boolean isInterface) {
this(currentClass, currentSuperClass, null, isInterface);
}
@@ -95,32 +95,25 @@ public class SimpleVerifier extends BasicVerifier {
* Constructs a new {@link SimpleVerifier} to verify a specific class. This
* class will not be loaded into the JVM since it may be incorrect.
*
- * @param currentClass the class that is verified.
- * @param currentSuperClass the super class of the class that is verified.
- * @param currentClassInterfaces the interfaces implemented by the class
- * that is verified.
- * @param isInterface if the class that is verified is an interface.
+ * @param currentClass
+ * the class that is verified.
+ * @param currentSuperClass
+ * the super class of the class that is verified.
+ * @param currentClassInterfaces
+ * the interfaces implemented by the class that is verified.
+ * @param isInterface
+ * if the class that is verified is an interface.
*/
- public SimpleVerifier(
- final Type currentClass,
- final Type currentSuperClass,
- final List<Type> currentClassInterfaces,
- final boolean isInterface)
- {
- this(ASM4,
- currentClass,
- currentSuperClass,
- currentClassInterfaces,
+ public SimpleVerifier(final Type currentClass,
+ final Type currentSuperClass,
+ final List<Type> currentClassInterfaces, final boolean isInterface) {
+ this(ASM4, currentClass, currentSuperClass, currentClassInterfaces,
isInterface);
}
- protected SimpleVerifier(
- final int api,
- final Type currentClass,
- final Type currentSuperClass,
- final List<Type> currentClassInterfaces,
- final boolean isInterface)
- {
+ protected SimpleVerifier(final int api, final Type currentClass,
+ final Type currentSuperClass,
+ final List<Type> currentClassInterfaces, final boolean isInterface) {
super(api);
this.currentClass = currentClass;
this.currentSuperClass = currentSuperClass;
@@ -133,7 +126,8 @@ public class SimpleVerifier extends BasicVerifier {
* classes. This is useful if you are verifying multiple interdependent
* classes.
*
- * @param loader a <code>ClassLoader</code> to use
+ * @param loader
+ * a <code>ClassLoader</code> to use
*/
public void setClassLoader(final ClassLoader loader) {
this.loader = loader;
@@ -148,11 +142,11 @@ public class SimpleVerifier extends BasicVerifier {
boolean isArray = type.getSort() == Type.ARRAY;
if (isArray) {
switch (type.getElementType().getSort()) {
- case Type.BOOLEAN:
- case Type.CHAR:
- case Type.BYTE:
- case Type.SHORT:
- return new BasicValue(type);
+ case Type.BOOLEAN:
+ case Type.CHAR:
+ case Type.BYTE:
+ case Type.SHORT:
+ return new BasicValue(type);
}
}
@@ -181,8 +175,7 @@ public class SimpleVerifier extends BasicVerifier {
@Override
protected BasicValue getElementValue(final BasicValue objectArrayValue)
- throws AnalyzerException
- {
+ throws AnalyzerException {
Type arrayType = objectArrayValue.getType();
if (arrayType != null) {
if (arrayType.getSort() == Type.ARRAY) {
@@ -196,28 +189,28 @@ public class SimpleVerifier extends BasicVerifier {
}
@Override
- protected boolean isSubTypeOf(final BasicValue value, final BasicValue expected) {
+ protected boolean isSubTypeOf(final BasicValue value,
+ final BasicValue expected) {
Type expectedType = expected.getType();
Type type = value.getType();
switch (expectedType.getSort()) {
- case Type.INT:
- case Type.FLOAT:
- case Type.LONG:
- case Type.DOUBLE:
- return type.equals(expectedType);
- case Type.ARRAY:
- case Type.OBJECT:
- if ("Lnull;".equals(type.getDescriptor())) {
- return true;
- } else if (type.getSort() == Type.OBJECT
- || type.getSort() == Type.ARRAY)
- {
- return isAssignableFrom(expectedType, type);
- } else {
- return false;
- }
- default:
- throw new Error("Internal error");
+ case Type.INT:
+ case Type.FLOAT:
+ case Type.LONG:
+ case Type.DOUBLE:
+ return type.equals(expectedType);
+ case Type.ARRAY:
+ case Type.OBJECT:
+ if ("Lnull;".equals(type.getDescriptor())) {
+ return true;
+ } else if (type.getSort() == Type.OBJECT
+ || type.getSort() == Type.ARRAY) {
+ return isAssignableFrom(expectedType, type);
+ } else {
+ return false;
+ }
+ default:
+ throw new Error("Internal error");
}
}
@@ -227,11 +220,9 @@ public class SimpleVerifier extends BasicVerifier {
Type t = v.getType();
Type u = w.getType();
if (t != null
- && (t.getSort() == Type.OBJECT || t.getSort() == Type.ARRAY))
- {
+ && (t.getSort() == Type.OBJECT || t.getSort() == Type.ARRAY)) {
if (u != null
- && (u.getSort() == Type.OBJECT || u.getSort() == Type.ARRAY))
- {
+ && (u.getSort() == Type.OBJECT || u.getSort() == Type.ARRAY)) {
if ("Lnull;".equals(t.getDescriptor())) {
return w;
}
@@ -288,7 +279,8 @@ public class SimpleVerifier extends BasicVerifier {
return false;
} else {
if (isInterface) {
- return u.getSort() == Type.OBJECT || u.getSort() == Type.ARRAY;
+ return u.getSort() == Type.OBJECT
+ || u.getSort() == Type.ARRAY;
}
return isAssignableFrom(t, getSuperClass(u));
}
@@ -318,8 +310,7 @@ public class SimpleVerifier extends BasicVerifier {
try {
if (t.getSort() == Type.ARRAY) {
return Class.forName(t.getDescriptor().replace('/', '.'),
- false,
- loader);
+ false, loader);
}
return Class.forName(t.getClassName(), false, loader);
} catch (ClassNotFoundException e) {
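
As a hedged sketch of how the constructors and setClassLoader reformatted above are typically used (the class name com/example/Foo and the non-interface assumption are placeholders of this example): a SimpleVerifier is handed to an Analyzer, optionally with an explicit class loader for resolving referenced classes.

import scala.tools.asm.Type;
import scala.tools.asm.tree.MethodNode;
import scala.tools.asm.tree.analysis.Analyzer;
import scala.tools.asm.tree.analysis.AnalyzerException;
import scala.tools.asm.tree.analysis.BasicValue;
import scala.tools.asm.tree.analysis.SimpleVerifier;

class VerifierSketch {
    static void verifyMethod(MethodNode method) throws AnalyzerException {
        // Verify a hypothetical non-interface class com/example/Foo extending Object.
        SimpleVerifier verifier = new SimpleVerifier(
                Type.getObjectType("com/example/Foo"),
                Type.getObjectType("java/lang/Object"),
                false);
        // Optional: resolve referenced classes through a specific loader.
        verifier.setClassLoader(VerifierSketch.class.getClassLoader());
        new Analyzer<BasicValue>(verifier).analyze("com/example/Foo", method);
    }
}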
diff --git a/src/asm/scala/tools/asm/tree/analysis/SourceInterpreter.java b/src/asm/scala/tools/asm/tree/analysis/SourceInterpreter.java
index 067200b51e..a68086c073 100644
--- a/src/asm/scala/tools/asm/tree/analysis/SourceInterpreter.java
+++ b/src/asm/scala/tools/asm/tree/analysis/SourceInterpreter.java
@@ -47,8 +47,7 @@ import scala.tools.asm.tree.MethodInsnNode;
* @author Eric Bruneton
*/
public class SourceInterpreter extends Interpreter<SourceValue> implements
- Opcodes
-{
+ Opcodes {
public SourceInterpreter() {
super(ASM4);
@@ -70,125 +69,118 @@ public class SourceInterpreter extends Interpreter<SourceValue> implements
public SourceValue newOperation(final AbstractInsnNode insn) {
int size;
switch (insn.getOpcode()) {
- case LCONST_0:
- case LCONST_1:
- case DCONST_0:
- case DCONST_1:
- size = 2;
- break;
- case LDC:
- Object cst = ((LdcInsnNode) insn).cst;
- size = cst instanceof Long || cst instanceof Double ? 2 : 1;
- break;
- case GETSTATIC:
- size = Type.getType(((FieldInsnNode) insn).desc).getSize();
- break;
- default:
- size = 1;
+ case LCONST_0:
+ case LCONST_1:
+ case DCONST_0:
+ case DCONST_1:
+ size = 2;
+ break;
+ case LDC:
+ Object cst = ((LdcInsnNode) insn).cst;
+ size = cst instanceof Long || cst instanceof Double ? 2 : 1;
+ break;
+ case GETSTATIC:
+ size = Type.getType(((FieldInsnNode) insn).desc).getSize();
+ break;
+ default:
+ size = 1;
}
return new SourceValue(size, insn);
}
@Override
- public SourceValue copyOperation(final AbstractInsnNode insn, final SourceValue value) {
+ public SourceValue copyOperation(final AbstractInsnNode insn,
+ final SourceValue value) {
return new SourceValue(value.getSize(), insn);
}
@Override
- public SourceValue unaryOperation(final AbstractInsnNode insn, final SourceValue value)
- {
+ public SourceValue unaryOperation(final AbstractInsnNode insn,
+ final SourceValue value) {
int size;
switch (insn.getOpcode()) {
- case LNEG:
- case DNEG:
- case I2L:
- case I2D:
- case L2D:
- case F2L:
- case F2D:
- case D2L:
- size = 2;
- break;
- case GETFIELD:
- size = Type.getType(((FieldInsnNode) insn).desc).getSize();
- break;
- default:
- size = 1;
+ case LNEG:
+ case DNEG:
+ case I2L:
+ case I2D:
+ case L2D:
+ case F2L:
+ case F2D:
+ case D2L:
+ size = 2;
+ break;
+ case GETFIELD:
+ size = Type.getType(((FieldInsnNode) insn).desc).getSize();
+ break;
+ default:
+ size = 1;
}
return new SourceValue(size, insn);
}
@Override
- public SourceValue binaryOperation(
- final AbstractInsnNode insn,
- final SourceValue value1,
- final SourceValue value2)
- {
+ public SourceValue binaryOperation(final AbstractInsnNode insn,
+ final SourceValue value1, final SourceValue value2) {
int size;
switch (insn.getOpcode()) {
- case LALOAD:
- case DALOAD:
- case LADD:
- case DADD:
- case LSUB:
- case DSUB:
- case LMUL:
- case DMUL:
- case LDIV:
- case DDIV:
- case LREM:
- case DREM:
- case LSHL:
- case LSHR:
- case LUSHR:
- case LAND:
- case LOR:
- case LXOR:
- size = 2;
- break;
- default:
- size = 1;
+ case LALOAD:
+ case DALOAD:
+ case LADD:
+ case DADD:
+ case LSUB:
+ case DSUB:
+ case LMUL:
+ case DMUL:
+ case LDIV:
+ case DDIV:
+ case LREM:
+ case DREM:
+ case LSHL:
+ case LSHR:
+ case LUSHR:
+ case LAND:
+ case LOR:
+ case LXOR:
+ size = 2;
+ break;
+ default:
+ size = 1;
}
return new SourceValue(size, insn);
}
@Override
- public SourceValue ternaryOperation(
- final AbstractInsnNode insn,
- final SourceValue value1,
- final SourceValue value2,
- final SourceValue value3)
- {
+ public SourceValue ternaryOperation(final AbstractInsnNode insn,
+ final SourceValue value1, final SourceValue value2,
+ final SourceValue value3) {
return new SourceValue(1, insn);
}
@Override
- public SourceValue naryOperation(final AbstractInsnNode insn, final List<? extends SourceValue> values) {
+ public SourceValue naryOperation(final AbstractInsnNode insn,
+ final List<? extends SourceValue> values) {
int size;
int opcode = insn.getOpcode();
if (opcode == MULTIANEWARRAY) {
size = 1;
} else {
- String desc = (opcode == INVOKEDYNAMIC)?
- ((InvokeDynamicInsnNode) insn).desc:
- ((MethodInsnNode) insn).desc;
+ String desc = (opcode == INVOKEDYNAMIC) ? ((InvokeDynamicInsnNode) insn).desc
+ : ((MethodInsnNode) insn).desc;
size = Type.getReturnType(desc).getSize();
}
return new SourceValue(size, insn);
}
@Override
- public void returnOperation(
- final AbstractInsnNode insn,
- final SourceValue value,
- final SourceValue expected)
- {
+ public void returnOperation(final AbstractInsnNode insn,
+ final SourceValue value, final SourceValue expected) {
}
@Override
public SourceValue merge(final SourceValue d, final SourceValue w) {
if (d.insns instanceof SmallSet && w.insns instanceof SmallSet) {
- Set<AbstractInsnNode> s = ((SmallSet<AbstractInsnNode>) d.insns).union((SmallSet<AbstractInsnNode>) w.insns);
+ Set<AbstractInsnNode> s = ((SmallSet<AbstractInsnNode>) d.insns)
+ .union((SmallSet<AbstractInsnNode>) w.insns);
if (s == d.insns && d.size == w.size) {
return d;
} else {
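
A hedged sketch of what SourceInterpreter is for: each SourceValue on the computed frames records the set of instructions that may have produced it, so a def-use style query can read that set directly. The owner name, method and instruction index below are placeholders supplied by the caller.

import java.util.Set;
import scala.tools.asm.tree.AbstractInsnNode;
import scala.tools.asm.tree.MethodNode;
import scala.tools.asm.tree.analysis.Analyzer;
import scala.tools.asm.tree.analysis.AnalyzerException;
import scala.tools.asm.tree.analysis.Frame;
import scala.tools.asm.tree.analysis.SourceInterpreter;
import scala.tools.asm.tree.analysis.SourceValue;

class SourceSketch {
    // Returns the instructions that can produce the value on top of the stack
    // just before instruction i, or null if i is unreachable or the stack is empty.
    static Set<AbstractInsnNode> producersOfStackTop(String owner, MethodNode method, int i)
            throws AnalyzerException {
        Analyzer<SourceValue> a = new Analyzer<SourceValue>(new SourceInterpreter());
        Frame<SourceValue>[] frames = a.analyze(owner, method);
        Frame<SourceValue> f = frames[i];
        if (f == null || f.getStackSize() == 0) {
            return null;
        }
        return f.getStack(f.getStackSize() - 1).insns;
    }
}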
diff --git a/src/asm/scala/tools/asm/tree/analysis/SourceValue.java b/src/asm/scala/tools/asm/tree/analysis/SourceValue.java
index 57ff212fb2..40d6b68180 100644
--- a/src/asm/scala/tools/asm/tree/analysis/SourceValue.java
+++ b/src/asm/scala/tools/asm/tree/analysis/SourceValue.java
@@ -48,8 +48,8 @@ public class SourceValue implements Value {
/**
* The instructions that can produce this value. For example, for the Java
- * code below, the instructions that can produce the value of <tt>i</tt>
- * at line 5 are the txo ISTORE instructions at line 1 and 3:
+ * code below, the instructions that can produce the value of <tt>i</tt> at
+ * line 5 are the two ISTORE instructions at lines 1 and 3:
*
* <pre>
* 1: i = 0;
@@ -64,7 +64,7 @@ public class SourceValue implements Value {
public final Set<AbstractInsnNode> insns;
public SourceValue(final int size) {
- this(size, SmallSet.<AbstractInsnNode>emptySet());
+ this(size, SmallSet.<AbstractInsnNode> emptySet());
}
public SourceValue(final int size, final AbstractInsnNode insn) {
@@ -84,7 +84,7 @@ public class SourceValue implements Value {
@Override
public boolean equals(final Object value) {
if (!(value instanceof SourceValue)) {
- return false;
+ return false;
}
SourceValue v = (SourceValue) value;
return size == v.size && insns.equals(v.insns);
diff --git a/src/asm/scala/tools/asm/tree/analysis/Subroutine.java b/src/asm/scala/tools/asm/tree/analysis/Subroutine.java
index 038880ddcd..d734bbd499 100644
--- a/src/asm/scala/tools/asm/tree/analysis/Subroutine.java
+++ b/src/asm/scala/tools/asm/tree/analysis/Subroutine.java
@@ -51,11 +51,8 @@ class Subroutine {
private Subroutine() {
}
- Subroutine(
- final LabelNode start,
- final int maxLocals,
- final JumpInsnNode caller)
- {
+ Subroutine(final LabelNode start, final int maxLocals,
+ final JumpInsnNode caller) {
this.start = start;
this.access = new boolean[maxLocals];
this.callers = new ArrayList<JumpInsnNode>();
@@ -90,4 +87,4 @@ class Subroutine {
}
return changes;
}
-}
\ No newline at end of file
+}
diff --git a/src/asm/scala/tools/asm/util/ASMifiable.java b/src/asm/scala/tools/asm/util/ASMifiable.java
index 6a31dd508f..95cc6e3a74 100644
--- a/src/asm/scala/tools/asm/util/ASMifiable.java
+++ b/src/asm/scala/tools/asm/util/ASMifiable.java
@@ -34,7 +34,7 @@ import java.util.Map;
import scala.tools.asm.Label;
/**
- * An {@link org.objectweb.asm.Attribute Attribute} that can print the ASM code
+ * An {@link scala.tools.asm.Attribute Attribute} that can print the ASM code
* to create an equivalent attribute.
*
* @author Eugene Kuleshov
@@ -44,10 +44,13 @@ public interface ASMifiable {
/**
* Prints the ASM code to create an attribute equal to this attribute.
*
- * @param buf a buffer used for printing Java code.
- * @param varName name of the variable in a printed code used to store
- * attribute instance.
- * @param labelNames map of label instances to their names.
+ * @param buf
+ * a buffer used for printing Java code.
+ * @param varName
+ * name of the variable in a printed code used to store attribute
+ * instance.
+ * @param labelNames
+ * map of label instances to their names.
*/
void asmify(StringBuffer buf, String varName, Map<Label, String> labelNames);
}
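
A hedged sketch of the contract documented above: a custom Attribute implements ASMifiable so that ASMifier output can reproduce it. CommentAttribute and its single String payload are assumptions of this example, not part of the library.

import java.util.Map;
import scala.tools.asm.Attribute;
import scala.tools.asm.Label;
import scala.tools.asm.util.ASMifiable;

class CommentAttribute extends Attribute implements ASMifiable {
    final String comment;

    CommentAttribute(final String comment) {
        super("Comment");
        this.comment = comment;
    }

    public void asmify(final StringBuffer buf, final String varName,
            final Map<Label, String> labelNames) {
        // Print Java code that rebuilds an equal attribute into varName.
        buf.append("Attribute ").append(varName)
                .append(" = new CommentAttribute(\"").append(comment)
                .append("\");\n");
    }
}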
diff --git a/src/asm/scala/tools/asm/util/ASMifier.java b/src/asm/scala/tools/asm/util/ASMifier.java
index 5967c877d1..7e6b223853 100644
--- a/src/asm/scala/tools/asm/util/ASMifier.java
+++ b/src/asm/scala/tools/asm/util/ASMifier.java
@@ -91,11 +91,14 @@ public class ASMifier extends Printer {
/**
* Constructs a new {@link ASMifier}.
*
- * @param api the ASM API version implemented by this class. Must be one of
- * {@link Opcodes#ASM4}.
- * @param name the name of the visitor variable in the produced code.
- * @param id identifier of the annotation visitor variable in the produced
- * code.
+ * @param api
+ * the ASM API version implemented by this class. Must be one of
+ * {@link Opcodes#ASM4}.
+ * @param name
+ * the name of the visitor variable in the produced code.
+ * @param id
+ * identifier of the annotation visitor variable in the produced
+ * code.
*/
protected ASMifier(final int api, final String name, final int id) {
super(api);
@@ -105,13 +108,15 @@ public class ASMifier extends Printer {
/**
* Prints the ASM source code to generate the given class to the standard
- * output. <p> Usage: ASMifier [-debug] &lt;binary
- * class name or class file name&gt;
+ * output.
+ * <p>
+ * Usage: ASMifier [-debug] &lt;binary class name or class file name&gt;
*
- * @param args the command line arguments.
+ * @param args
+ * the command line arguments.
*
- * @throws Exception if the class cannot be found, or if an IO exception
- * occurs.
+ * @throws Exception
+ * if the class cannot be found, or if an IO exception occurs.
*/
public static void main(final String[] args) throws Exception {
int i = 0;
@@ -129,22 +134,21 @@ public class ASMifier extends Printer {
}
}
if (!ok) {
- System.err.println("Prints the ASM code to generate the given class.");
+ System.err
+ .println("Prints the ASM code to generate the given class.");
System.err.println("Usage: ASMifier [-debug] "
+ "<fully qualified class name or class file name>");
return;
}
ClassReader cr;
if (args[i].endsWith(".class") || args[i].indexOf('\\') > -1
- || args[i].indexOf('/') > -1)
- {
+ || args[i].indexOf('/') > -1) {
cr = new ClassReader(new FileInputStream(args[i]));
} else {
cr = new ClassReader(args[i]);
}
- cr.accept(new TraceClassVisitor(null,
- new ASMifier(),
- new PrintWriter(System.out)), flags);
+ cr.accept(new TraceClassVisitor(null, new ASMifier(), new PrintWriter(
+ System.out)), flags);
}
// ------------------------------------------------------------------------
@@ -152,14 +156,9 @@ public class ASMifier extends Printer {
// ------------------------------------------------------------------------
@Override
- public void visit(
- final int version,
- final int access,
- final String name,
- final String signature,
- final String superName,
- final String[] interfaces)
- {
+ public void visit(final int version, final int access, final String name,
+ final String signature, final String superName,
+ final String[] interfaces) {
String simpleName;
int n = name.lastIndexOf('/');
if (n == -1) {
@@ -170,8 +169,8 @@ public class ASMifier extends Printer {
simpleName = name.substring(n + 1);
}
text.add("import java.util.*;\n");
- text.add("import org.objectweb.asm.*;\n");
- text.add("import org.objectweb.asm.attrs.*;\n");
+ text.add("import scala.tools.asm.*;\n");
+ text.add("import scala.tools.asm.attrs.*;\n");
text.add("public class " + simpleName + "Dump implements Opcodes {\n\n");
text.add("public static byte[] dump () throws Exception {\n\n");
text.add("ClassWriter cw = new ClassWriter(0);\n");
@@ -182,30 +181,30 @@ public class ASMifier extends Printer {
buf.setLength(0);
buf.append("cw.visit(");
switch (version) {
- case Opcodes.V1_1:
- buf.append("V1_1");
- break;
- case Opcodes.V1_2:
- buf.append("V1_2");
- break;
- case Opcodes.V1_3:
- buf.append("V1_3");
- break;
- case Opcodes.V1_4:
- buf.append("V1_4");
- break;
- case Opcodes.V1_5:
- buf.append("V1_5");
- break;
- case Opcodes.V1_6:
- buf.append("V1_6");
- break;
- case Opcodes.V1_7:
- buf.append("V1_7");
- break;
- default:
- buf.append(version);
- break;
+ case Opcodes.V1_1:
+ buf.append("V1_1");
+ break;
+ case Opcodes.V1_2:
+ buf.append("V1_2");
+ break;
+ case Opcodes.V1_3:
+ buf.append("V1_3");
+ break;
+ case Opcodes.V1_4:
+ buf.append("V1_4");
+ break;
+ case Opcodes.V1_5:
+ buf.append("V1_5");
+ break;
+ case Opcodes.V1_6:
+ buf.append("V1_6");
+ break;
+ case Opcodes.V1_7:
+ buf.append("V1_7");
+ break;
+ default:
+ buf.append(version);
+ break;
}
buf.append(", ");
appendAccess(access | ACCESS_CLASS);
@@ -242,11 +241,8 @@ public class ASMifier extends Printer {
}
@Override
- public void visitOuterClass(
- final String owner,
- final String name,
- final String desc)
- {
+ public void visitOuterClass(final String owner, final String name,
+ final String desc) {
buf.setLength(0);
buf.append("cw.visitOuterClass(");
appendConstant(owner);
@@ -259,10 +255,8 @@ public class ASMifier extends Printer {
}
@Override
- public ASMifier visitClassAnnotation(
- final String desc,
- final boolean visible)
- {
+ public ASMifier visitClassAnnotation(final String desc,
+ final boolean visible) {
return visitAnnotation(desc, visible);
}
@@ -272,12 +266,8 @@ public class ASMifier extends Printer {
}
@Override
- public void visitInnerClass(
- final String name,
- final String outerName,
- final String innerName,
- final int access)
- {
+ public void visitInnerClass(final String name, final String outerName,
+ final String innerName, final int access) {
buf.setLength(0);
buf.append("cw.visitInnerClass(");
appendConstant(name);
@@ -292,13 +282,8 @@ public class ASMifier extends Printer {
}
@Override
- public ASMifier visitField(
- final int access,
- final String name,
- final String desc,
- final String signature,
- final Object value)
- {
+ public ASMifier visitField(final int access, final String name,
+ final String desc, final String signature, final Object value) {
buf.setLength(0);
buf.append("{\n");
buf.append("fv = cw.visitField(");
@@ -320,13 +305,8 @@ public class ASMifier extends Printer {
}
@Override
- public ASMifier visitMethod(
- final int access,
- final String name,
- final String desc,
- final String signature,
- final String[] exceptions)
- {
+ public ASMifier visitMethod(final int access, final String name,
+ final String desc, final String signature, final String[] exceptions) {
buf.setLength(0);
buf.append("{\n");
buf.append("mv = cw.visitMethod(");
@@ -380,11 +360,8 @@ public class ASMifier extends Printer {
}
@Override
- public void visitEnum(
- final String name,
- final String desc,
- final String value)
- {
+ public void visitEnum(final String name, final String desc,
+ final String value) {
buf.setLength(0);
buf.append("av").append(id).append(".visitEnum(");
appendConstant(buf, name);
@@ -397,10 +374,7 @@ public class ASMifier extends Printer {
}
@Override
- public ASMifier visitAnnotation(
- final String name,
- final String desc)
- {
+ public ASMifier visitAnnotation(final String name, final String desc) {
buf.setLength(0);
buf.append("{\n");
buf.append("AnnotationVisitor av").append(id + 1).append(" = av");
@@ -443,10 +417,8 @@ public class ASMifier extends Printer {
// ------------------------------------------------------------------------
@Override
- public ASMifier visitFieldAnnotation(
- final String desc,
- final boolean visible)
- {
+ public ASMifier visitFieldAnnotation(final String desc,
+ final boolean visible) {
return visitAnnotation(desc, visible);
}
@@ -469,9 +441,7 @@ public class ASMifier extends Printer {
@Override
public ASMifier visitAnnotationDefault() {
buf.setLength(0);
- buf.append("{\n")
- .append("av0 = ")
- .append(name)
+ buf.append("{\n").append("av0 = ").append(name)
.append(".visitAnnotationDefault();\n");
text.add(buf.toString());
ASMifier a = createASMifier("av", 0);
@@ -481,23 +451,17 @@ public class ASMifier extends Printer {
}
@Override
- public ASMifier visitMethodAnnotation(
- final String desc,
- final boolean visible)
- {
+ public ASMifier visitMethodAnnotation(final String desc,
+ final boolean visible) {
return visitAnnotation(desc, visible);
}
@Override
- public ASMifier visitParameterAnnotation(
- final int parameter,
- final String desc,
- final boolean visible)
- {
+ public ASMifier visitParameterAnnotation(final int parameter,
+ final String desc, final boolean visible) {
buf.setLength(0);
- buf.append("{\n")
- .append("av0 = ").append(name).append(".visitParameterAnnotation(")
- .append(parameter)
+ buf.append("{\n").append("av0 = ").append(name)
+ .append(".visitParameterAnnotation(").append(parameter)
.append(", ");
appendConstant(desc);
buf.append(", ").append(visible).append(");\n");
@@ -519,52 +483,47 @@ public class ASMifier extends Printer {
}
@Override
- public void visitFrame(
- final int type,
- final int nLocal,
- final Object[] local,
- final int nStack,
- final Object[] stack)
- {
+ public void visitFrame(final int type, final int nLocal,
+ final Object[] local, final int nStack, final Object[] stack) {
buf.setLength(0);
switch (type) {
- case Opcodes.F_NEW:
- case Opcodes.F_FULL:
- declareFrameTypes(nLocal, local);
- declareFrameTypes(nStack, stack);
- if (type == Opcodes.F_NEW) {
- buf.append(name).append(".visitFrame(Opcodes.F_NEW, ");
- } else {
- buf.append(name).append(".visitFrame(Opcodes.F_FULL, ");
- }
- buf.append(nLocal).append(", new Object[] {");
- appendFrameTypes(nLocal, local);
- buf.append("}, ").append(nStack).append(", new Object[] {");
- appendFrameTypes(nStack, stack);
- buf.append('}');
- break;
- case Opcodes.F_APPEND:
- declareFrameTypes(nLocal, local);
- buf.append(name).append(".visitFrame(Opcodes.F_APPEND,")
- .append(nLocal)
- .append(", new Object[] {");
- appendFrameTypes(nLocal, local);
- buf.append("}, 0, null");
- break;
- case Opcodes.F_CHOP:
- buf.append(name).append(".visitFrame(Opcodes.F_CHOP,")
- .append(nLocal)
- .append(", null, 0, null");
- break;
- case Opcodes.F_SAME:
- buf.append(name).append(".visitFrame(Opcodes.F_SAME, 0, null, 0, null");
- break;
- case Opcodes.F_SAME1:
- declareFrameTypes(1, stack);
- buf.append(name).append(".visitFrame(Opcodes.F_SAME1, 0, null, 1, new Object[] {");
- appendFrameTypes(1, stack);
- buf.append('}');
- break;
+ case Opcodes.F_NEW:
+ case Opcodes.F_FULL:
+ declareFrameTypes(nLocal, local);
+ declareFrameTypes(nStack, stack);
+ if (type == Opcodes.F_NEW) {
+ buf.append(name).append(".visitFrame(Opcodes.F_NEW, ");
+ } else {
+ buf.append(name).append(".visitFrame(Opcodes.F_FULL, ");
+ }
+ buf.append(nLocal).append(", new Object[] {");
+ appendFrameTypes(nLocal, local);
+ buf.append("}, ").append(nStack).append(", new Object[] {");
+ appendFrameTypes(nStack, stack);
+ buf.append('}');
+ break;
+ case Opcodes.F_APPEND:
+ declareFrameTypes(nLocal, local);
+ buf.append(name).append(".visitFrame(Opcodes.F_APPEND,")
+ .append(nLocal).append(", new Object[] {");
+ appendFrameTypes(nLocal, local);
+ buf.append("}, 0, null");
+ break;
+ case Opcodes.F_CHOP:
+ buf.append(name).append(".visitFrame(Opcodes.F_CHOP,")
+ .append(nLocal).append(", null, 0, null");
+ break;
+ case Opcodes.F_SAME:
+ buf.append(name).append(
+ ".visitFrame(Opcodes.F_SAME, 0, null, 0, null");
+ break;
+ case Opcodes.F_SAME1:
+ declareFrameTypes(1, stack);
+ buf.append(name).append(
+ ".visitFrame(Opcodes.F_SAME1, 0, null, 1, new Object[] {");
+ appendFrameTypes(1, stack);
+ buf.append('}');
+ break;
}
buf.append(");\n");
text.add(buf.toString());
@@ -573,7 +532,8 @@ public class ASMifier extends Printer {
@Override
public void visitInsn(final int opcode) {
buf.setLength(0);
- buf.append(name).append(".visitInsn(").append(OPCODES[opcode]).append(");\n");
+ buf.append(name).append(".visitInsn(").append(OPCODES[opcode])
+ .append(");\n");
text.add(buf.toString());
}
@@ -584,43 +544,35 @@ public class ASMifier extends Printer {
.append(".visitIntInsn(")
.append(OPCODES[opcode])
.append(", ")
- .append(opcode == Opcodes.NEWARRAY
- ? TYPES[operand]
- : Integer.toString(operand))
- .append(");\n");
+ .append(opcode == Opcodes.NEWARRAY ? TYPES[operand] : Integer
+ .toString(operand)).append(");\n");
text.add(buf.toString());
}
@Override
public void visitVarInsn(final int opcode, final int var) {
buf.setLength(0);
- buf.append(name)
- .append(".visitVarInsn(")
- .append(OPCODES[opcode])
- .append(", ")
- .append(var)
- .append(");\n");
+ buf.append(name).append(".visitVarInsn(").append(OPCODES[opcode])
+ .append(", ").append(var).append(");\n");
text.add(buf.toString());
}
@Override
public void visitTypeInsn(final int opcode, final String type) {
buf.setLength(0);
- buf.append(name).append(".visitTypeInsn(").append(OPCODES[opcode]).append(", ");
+ buf.append(name).append(".visitTypeInsn(").append(OPCODES[opcode])
+ .append(", ");
appendConstant(type);
buf.append(");\n");
text.add(buf.toString());
}
@Override
- public void visitFieldInsn(
- final int opcode,
- final String owner,
- final String name,
- final String desc)
- {
+ public void visitFieldInsn(final int opcode, final String owner,
+ final String name, final String desc) {
buf.setLength(0);
- buf.append(this.name).append(".visitFieldInsn(").append(OPCODES[opcode]).append(", ");
+ buf.append(this.name).append(".visitFieldInsn(")
+ .append(OPCODES[opcode]).append(", ");
appendConstant(owner);
buf.append(", ");
appendConstant(name);
@@ -631,14 +583,11 @@ public class ASMifier extends Printer {
}
@Override
- public void visitMethodInsn(
- final int opcode,
- final String owner,
- final String name,
- final String desc)
- {
+ public void visitMethodInsn(final int opcode, final String owner,
+ final String name, final String desc) {
buf.setLength(0);
- buf.append(this.name).append(".visitMethodInsn(").append(OPCODES[opcode]).append(", ");
+ buf.append(this.name).append(".visitMethodInsn(")
+ .append(OPCODES[opcode]).append(", ");
appendConstant(owner);
buf.append(", ");
appendConstant(name);
@@ -649,12 +598,8 @@ public class ASMifier extends Printer {
}
@Override
- public void visitInvokeDynamicInsn(
- String name,
- String desc,
- Handle bsm,
- Object... bsmArgs)
- {
+ public void visitInvokeDynamicInsn(String name, String desc, Handle bsm,
+ Object... bsmArgs) {
buf.setLength(0);
buf.append(this.name).append(".visitInvokeDynamicInsn(");
appendConstant(name);
@@ -677,7 +622,8 @@ public class ASMifier extends Printer {
public void visitJumpInsn(final int opcode, final Label label) {
buf.setLength(0);
declareLabel(label);
- buf.append(name).append(".visitJumpInsn(").append(OPCODES[opcode]).append(", ");
+ buf.append(name).append(".visitJumpInsn(").append(OPCODES[opcode])
+ .append(", ");
appendLabel(label);
buf.append(");\n");
text.add(buf.toString());
@@ -705,34 +651,22 @@ public class ASMifier extends Printer {
@Override
public void visitIincInsn(final int var, final int increment) {
buf.setLength(0);
- buf.append(name)
- .append(".visitIincInsn(")
- .append(var)
- .append(", ")
- .append(increment)
- .append(");\n");
+ buf.append(name).append(".visitIincInsn(").append(var).append(", ")
+ .append(increment).append(");\n");
text.add(buf.toString());
}
@Override
- public void visitTableSwitchInsn(
- final int min,
- final int max,
- final Label dflt,
- final Label... labels)
- {
+ public void visitTableSwitchInsn(final int min, final int max,
+ final Label dflt, final Label... labels) {
buf.setLength(0);
for (int i = 0; i < labels.length; ++i) {
declareLabel(labels[i]);
}
declareLabel(dflt);
- buf.append(name)
- .append(".visitTableSwitchInsn(")
- .append(min)
- .append(", ")
- .append(max)
- .append(", ");
+ buf.append(name).append(".visitTableSwitchInsn(").append(min)
+ .append(", ").append(max).append(", ");
appendLabel(dflt);
buf.append(", new Label[] {");
for (int i = 0; i < labels.length; ++i) {
@@ -744,11 +678,8 @@ public class ASMifier extends Printer {
}
@Override
- public void visitLookupSwitchInsn(
- final Label dflt,
- final int[] keys,
- final Label[] labels)
- {
+ public void visitLookupSwitchInsn(final Label dflt, final int[] keys,
+ final Label[] labels) {
buf.setLength(0);
for (int i = 0; i < labels.length; ++i) {
declareLabel(labels[i]);
@@ -780,12 +711,8 @@ public class ASMifier extends Printer {
}
@Override
- public void visitTryCatchBlock(
- final Label start,
- final Label end,
- final Label handler,
- final String type)
- {
+ public void visitTryCatchBlock(final Label start, final Label end,
+ final Label handler, final String type) {
buf.setLength(0);
declareLabel(start);
declareLabel(end);
@@ -803,14 +730,9 @@ public class ASMifier extends Printer {
}
@Override
- public void visitLocalVariable(
- final String name,
- final String desc,
- final String signature,
- final Label start,
- final Label end,
- final int index)
- {
+ public void visitLocalVariable(final String name, final String desc,
+ final String signature, final Label start, final Label end,
+ final int index) {
buf.setLength(0);
buf.append(this.name).append(".visitLocalVariable(");
appendConstant(name);
@@ -838,12 +760,8 @@ public class ASMifier extends Printer {
@Override
public void visitMaxs(final int maxStack, final int maxLocals) {
buf.setLength(0);
- buf.append(name)
- .append(".visitMaxs(")
- .append(maxStack)
- .append(", ")
- .append(maxLocals)
- .append(");\n");
+ buf.append(name).append(".visitMaxs(").append(maxStack).append(", ")
+ .append(maxLocals).append(");\n");
text.add(buf.toString());
}
@@ -858,14 +776,9 @@ public class ASMifier extends Printer {
// Common methods
// ------------------------------------------------------------------------
- public ASMifier visitAnnotation(
- final String desc,
- final boolean visible)
- {
+ public ASMifier visitAnnotation(final String desc, final boolean visible) {
buf.setLength(0);
- buf.append("{\n")
- .append("av0 = ")
- .append(name)
+ buf.append("{\n").append("av0 = ").append(name)
.append(".visitAnnotation(");
appendConstant(desc);
buf.append(", ").append(visible).append(");\n");
@@ -895,15 +808,16 @@ public class ASMifier extends Printer {
// Utility methods
// ------------------------------------------------------------------------
- protected ASMifier createASMifier(final String name, final int id) {
+ protected ASMifier createASMifier(final String name, final int id) {
return new ASMifier(Opcodes.ASM4, name, id);
}
/**
- * Appends a string representation of the given access modifiers to {@link
- * #buf buf}.
+ * Appends a string representation of the given access modifiers to
+ * {@link #buf buf}.
*
- * @param access some access modifiers.
+ * @param access
+ * some access modifiers.
*/
void appendAccess(final int access) {
boolean first = true;
@@ -945,8 +859,7 @@ public class ASMifier extends Printer {
first = false;
}
if ((access & Opcodes.ACC_VOLATILE) != 0
- && (access & ACCESS_FIELD) != 0)
- {
+ && (access & ACCESS_FIELD) != 0) {
if (!first) {
buf.append(" + ");
}
@@ -954,8 +867,7 @@ public class ASMifier extends Printer {
first = false;
}
if ((access & Opcodes.ACC_BRIDGE) != 0 && (access & ACCESS_CLASS) == 0
- && (access & ACCESS_FIELD) == 0)
- {
+ && (access & ACCESS_FIELD) == 0) {
if (!first) {
buf.append(" + ");
}
@@ -963,8 +875,7 @@ public class ASMifier extends Printer {
first = false;
}
if ((access & Opcodes.ACC_VARARGS) != 0 && (access & ACCESS_CLASS) == 0
- && (access & ACCESS_FIELD) == 0)
- {
+ && (access & ACCESS_FIELD) == 0) {
if (!first) {
buf.append(" + ");
}
@@ -972,8 +883,7 @@ public class ASMifier extends Printer {
first = false;
}
if ((access & Opcodes.ACC_TRANSIENT) != 0
- && (access & ACCESS_FIELD) != 0)
- {
+ && (access & ACCESS_FIELD) != 0) {
if (!first) {
buf.append(" + ");
}
@@ -981,8 +891,7 @@ public class ASMifier extends Printer {
first = false;
}
if ((access & Opcodes.ACC_NATIVE) != 0 && (access & ACCESS_CLASS) == 0
- && (access & ACCESS_FIELD) == 0)
- {
+ && (access & ACCESS_FIELD) == 0) {
if (!first) {
buf.append(" + ");
}
@@ -991,8 +900,7 @@ public class ASMifier extends Printer {
}
if ((access & Opcodes.ACC_ENUM) != 0
&& ((access & ACCESS_CLASS) != 0
- || (access & ACCESS_FIELD) != 0 || (access & ACCESS_INNER) != 0))
- {
+ || (access & ACCESS_FIELD) != 0 || (access & ACCESS_INNER) != 0)) {
if (!first) {
buf.append(" + ");
}
@@ -1000,8 +908,7 @@ public class ASMifier extends Printer {
first = false;
}
if ((access & Opcodes.ACC_ANNOTATION) != 0
- && ((access & ACCESS_CLASS) != 0 || (access & ACCESS_INNER) != 0))
- {
+ && ((access & ACCESS_CLASS) != 0 || (access & ACCESS_INNER) != 0)) {
if (!first) {
buf.append(" + ");
}
@@ -1052,8 +959,9 @@ public class ASMifier extends Printer {
* Appends a string representation of the given constant to the given
* buffer.
*
- * @param cst an {@link Integer}, {@link Float}, {@link Long},
- * {@link Double} or {@link String} object. May be <tt>null</tt>.
+ * @param cst
+ * an {@link Integer}, {@link Float}, {@link Long},
+ * {@link Double} or {@link String} object. May be <tt>null</tt>.
*/
protected void appendConstant(final Object cst) {
appendConstant(buf, cst);
@@ -1063,9 +971,11 @@ public class ASMifier extends Printer {
* Appends a string representation of the given constant to the given
* buffer.
*
- * @param buf a string buffer.
- * @param cst an {@link Integer}, {@link Float}, {@link Long},
- * {@link Double} or {@link String} object. May be <tt>null</tt>.
+ * @param buf
+ * a string buffer.
+ * @param cst
+ * an {@link Integer}, {@link Float}, {@link Long},
+ * {@link Double} or {@link String} object. May be <tt>null</tt>.
*/
static void appendConstant(final StringBuffer buf, final Object cst) {
if (cst == null) {
@@ -1079,14 +989,16 @@ public class ASMifier extends Printer {
} else if (cst instanceof Handle) {
buf.append("new Handle(");
Handle h = (Handle) cst;
- buf.append("Opcodes.").append(HANDLE_TAG[h.getTag()]).append(", \"");
+ buf.append("Opcodes.").append(HANDLE_TAG[h.getTag()])
+ .append(", \"");
buf.append(h.getOwner()).append("\", \"");
buf.append(h.getName()).append("\", \"");
buf.append(h.getDesc()).append("\")");
} else if (cst instanceof Byte) {
buf.append("new Byte((byte)").append(cst).append(')');
} else if (cst instanceof Boolean) {
- buf.append(((Boolean) cst).booleanValue() ? "Boolean.TRUE" : "Boolean.FALSE");
+ buf.append(((Boolean) cst).booleanValue() ? "Boolean.TRUE"
+ : "Boolean.FALSE");
} else if (cst instanceof Short) {
buf.append("new Short((short)").append(cst).append(')');
} else if (cst instanceof Character) {
@@ -1125,8 +1037,7 @@ public class ASMifier extends Printer {
char[] v = (char[]) cst;
buf.append("new char[] {");
for (int i = 0; i < v.length; i++) {
- buf.append(i == 0 ? "" : ",")
- .append("(char)")
+ buf.append(i == 0 ? "" : ",").append("(char)")
.append((int) v[i]);
}
buf.append('}');
@@ -1178,27 +1089,27 @@ public class ASMifier extends Printer {
appendConstant(o[i]);
} else if (o[i] instanceof Integer) {
switch (((Integer) o[i]).intValue()) {
- case 0:
- buf.append("Opcodes.TOP");
- break;
- case 1:
- buf.append("Opcodes.INTEGER");
- break;
- case 2:
- buf.append("Opcodes.FLOAT");
- break;
- case 3:
- buf.append("Opcodes.DOUBLE");
- break;
- case 4:
- buf.append("Opcodes.LONG");
- break;
- case 5:
- buf.append("Opcodes.NULL");
- break;
- case 6:
- buf.append("Opcodes.UNINITIALIZED_THIS");
- break;
+ case 0:
+ buf.append("Opcodes.TOP");
+ break;
+ case 1:
+ buf.append("Opcodes.INTEGER");
+ break;
+ case 2:
+ buf.append("Opcodes.FLOAT");
+ break;
+ case 3:
+ buf.append("Opcodes.DOUBLE");
+ break;
+ case 4:
+ buf.append("Opcodes.LONG");
+ break;
+ case 5:
+ buf.append("Opcodes.NULL");
+ break;
+ case 6:
+ buf.append("Opcodes.UNINITIALIZED_THIS");
+ break;
}
} else {
appendLabel((Label) o[i]);
@@ -1211,7 +1122,8 @@ public class ASMifier extends Printer {
* declaration is of the form "Label lXXX = new Label();". Does nothing if
* the given label has already been declared.
*
- * @param l a label.
+ * @param l
+ * a label.
*/
protected void declareLabel(final Label l) {
if (labelNames == null) {
@@ -1227,10 +1139,11 @@ public class ASMifier extends Printer {
/**
* Appends the name of the given label to {@link #buf buf}. The given label
- * <i>must</i> already have a name. One way to ensure this is to always
- * call {@link #declareLabel declared} before calling this method.
+ * <i>must</i> already have a name. One way to ensure this is to always call
+ * {@link #declareLabel declared} before calling this method.
*
- * @param l a label.
+ * @param l
+ * a label.
*/
protected void appendLabel(final Label l) {
buf.append(labelNames.get(l));
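
As a hedged companion to the command-line usage documented above, the same ASMifier can be driven programmatically through TraceClassVisitor; java.lang.Runnable is just an arbitrary example class.

import java.io.PrintWriter;
import scala.tools.asm.ClassReader;
import scala.tools.asm.util.ASMifier;
import scala.tools.asm.util.TraceClassVisitor;

class ASMifierSketch {
    public static void main(final String[] args) throws Exception {
        // Prints Java source that, when compiled and run, regenerates the class.
        ClassReader cr = new ClassReader("java.lang.Runnable");
        cr.accept(new TraceClassVisitor(null, new ASMifier(),
                new PrintWriter(System.out)), 0);
    }
}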
diff --git a/src/asm/scala/tools/asm/util/CheckAnnotationAdapter.java b/src/asm/scala/tools/asm/util/CheckAnnotationAdapter.java
index 8030c14f2e..f00a8f04a2 100644
--- a/src/asm/scala/tools/asm/util/CheckAnnotationAdapter.java
+++ b/src/asm/scala/tools/asm/util/CheckAnnotationAdapter.java
@@ -65,8 +65,7 @@ public class CheckAnnotationAdapter extends AnnotationVisitor {
|| value instanceof byte[] || value instanceof boolean[]
|| value instanceof char[] || value instanceof short[]
|| value instanceof int[] || value instanceof long[]
- || value instanceof float[] || value instanceof double[]))
- {
+ || value instanceof float[] || value instanceof double[])) {
throw new IllegalArgumentException("Invalid annotation value");
}
if (value instanceof Type) {
@@ -81,11 +80,8 @@ public class CheckAnnotationAdapter extends AnnotationVisitor {
}
@Override
- public void visitEnum(
- final String name,
- final String desc,
- final String value)
- {
+ public void visitEnum(final String name, final String desc,
+ final String value) {
checkEnd();
checkName(name);
CheckMethodAdapter.checkDesc(desc, false);
@@ -98,15 +94,12 @@ public class CheckAnnotationAdapter extends AnnotationVisitor {
}
@Override
- public AnnotationVisitor visitAnnotation(
- final String name,
- final String desc)
- {
+ public AnnotationVisitor visitAnnotation(final String name,
+ final String desc) {
checkEnd();
checkName(name);
CheckMethodAdapter.checkDesc(desc, false);
- return new CheckAnnotationAdapter(av == null
- ? null
+ return new CheckAnnotationAdapter(av == null ? null
: av.visitAnnotation(name, desc));
}
@@ -114,8 +107,7 @@ public class CheckAnnotationAdapter extends AnnotationVisitor {
public AnnotationVisitor visitArray(final String name) {
checkEnd();
checkName(name);
- return new CheckAnnotationAdapter(av == null
- ? null
+ return new CheckAnnotationAdapter(av == null ? null
: av.visitArray(name), false);
}
@@ -130,13 +122,15 @@ public class CheckAnnotationAdapter extends AnnotationVisitor {
private void checkEnd() {
if (end) {
- throw new IllegalStateException("Cannot call a visit method after visitEnd has been called");
+ throw new IllegalStateException(
+ "Cannot call a visit method after visitEnd has been called");
}
}
private void checkName(final String name) {
if (named && name == null) {
- throw new IllegalArgumentException("Annotation value name must not be null");
+ throw new IllegalArgumentException(
+ "Annotation value name must not be null");
}
}
}
diff --git a/src/asm/scala/tools/asm/util/CheckClassAdapter.java b/src/asm/scala/tools/asm/util/CheckClassAdapter.java
index a455322531..0bfa143a95 100644
--- a/src/asm/scala/tools/asm/util/CheckClassAdapter.java
+++ b/src/asm/scala/tools/asm/util/CheckClassAdapter.java
@@ -59,10 +59,10 @@ import scala.tools.asm.tree.analysis.SimpleVerifier;
* <i>only</i> on its arguments, but does <i>not</i> check the <i>sequence</i>
* of method calls. For example, the invalid sequence
* <tt>visitField(ACC_PUBLIC, "i", "I", null)</tt> <tt>visitField(ACC_PUBLIC,
- * "i", "D", null)</tt>
- * will <i>not</i> be detected by this class adapter.
+ * "i", "D", null)</tt> will <i>not</i> be detected by this class adapter.
*
- * <p><code>CheckClassAdapter</code> can be also used to verify bytecode
+ * <p>
+ * <code>CheckClassAdapter</code> can also be used to verify bytecode
* transformations in order to make sure transformed bytecode is sane. For
* example:
*
@@ -80,19 +80,20 @@ import scala.tools.asm.tree.analysis.SimpleVerifier;
* </pre>
*
* The code above runs the transformed bytecode through the
- * <code>CheckClassAdapter</code>. It won't be exactly the same verification
- * as JVM does, but it run data flow analysis for the code of each method and
+ * <code>CheckClassAdapter</code>. It won't be exactly the same verification as
+ * the JVM does, but it runs a data flow analysis for the code of each method and
* checks that expectations are met for each method instruction.
*
- * <p>If method bytecode has errors, assertion text will show the erroneous
+ * <p>
+ * If method bytecode has errors, assertion text will show the erroneous
* instruction number and dump of the failed method with information about
* locals and stack slot for each instruction. For example (format is -
* insnNumber locals : stack):
*
* <pre>
- * org.objectweb.asm.tree.analysis.AnalyzerException: Error at instruction 71: Expected I, but found .
- * at org.objectweb.asm.tree.analysis.Analyzer.analyze(Analyzer.java:289)
- * at org.objectweb.asm.util.CheckClassAdapter.verify(CheckClassAdapter.java:135)
+ * scala.tools.asm.tree.analysis.AnalyzerException: Error at instruction 71: Expected I, but found .
+ * at scala.tools.asm.tree.analysis.Analyzer.analyze(Analyzer.java:289)
+ * at scala.tools.asm.util.CheckClassAdapter.verify(CheckClassAdapter.java:135)
* ...
* remove()V
* 00000 LinkedBlockingQueue$Itr . . . . . . . . :
@@ -114,8 +115,9 @@ import scala.tools.asm.tree.analysis.SimpleVerifier;
* initialized. You can also see that at the beginning of the method (code
* inserted by the transformation) variable 2 is initialized.
*
- * <p>Note that when used like that, <code>CheckClassAdapter.verify()</code>
- * can trigger additional class loading, because it is using
+ * <p>
+ * Note that when used like that, <code>CheckClassAdapter.verify()</code> can
+ * trigger additional class loading, because it is using
* <code>SimpleVerifier</code>.
*
* @author Eric Bruneton
@@ -159,13 +161,15 @@ public class CheckClassAdapter extends ClassVisitor {
private boolean checkDataFlow;
/**
- * Checks a given class. <p> Usage: CheckClassAdapter &lt;binary
- * class name or class file name&gt;
+ * Checks a given class.
+ * <p>
+ * Usage: CheckClassAdapter &lt;binary class name or class file name&gt;
*
- * @param args the command line arguments.
+ * @param args
+ * the command line arguments.
*
- * @throws Exception if the class cannot be found, or if an IO exception
- * occurs.
+ * @throws Exception
+ * if the class cannot be found, or if an IO exception occurs.
*/
public static void main(final String[] args) throws Exception {
if (args.length != 1) {
@@ -187,27 +191,26 @@ public class CheckClassAdapter extends ClassVisitor {
/**
* Checks a given class.
*
- * @param cr a <code>ClassReader</code> that contains bytecode for the
- * analysis.
- * @param loader a <code>ClassLoader</code> which will be used to load
- * referenced classes. This is useful if you are verifiying multiple
- * interdependent classes.
- * @param dump true if bytecode should be printed out not only when errors
- * are found.
- * @param pw write where results going to be printed
+ * @param cr
+ * a <code>ClassReader</code> that contains bytecode for the
+ * analysis.
+ * @param loader
+ * a <code>ClassLoader</code> which will be used to load
+ * referenced classes. This is useful if you are verifying
+ * multiple interdependent classes.
+ * @param dump
+ * true if bytecode should be printed out even when no errors
+ * are found.
+ * @param pw
+ * the writer where results are going to be printed
*/
- public static void verify(
- final ClassReader cr,
- final ClassLoader loader,
- final boolean dump,
- final PrintWriter pw)
- {
+ public static void verify(final ClassReader cr, final ClassLoader loader,
+ final boolean dump, final PrintWriter pw) {
ClassNode cn = new ClassNode();
cr.accept(new CheckClassAdapter(cn, false), ClassReader.SKIP_DEBUG);
- Type syperType = cn.superName == null
- ? null
- : Type.getObjectType(cn.superName);
+ Type syperType = cn.superName == null ? null : Type
+ .getObjectType(cn.superName);
List<MethodNode> methods = cn.methods;
List<Type> interfaces = new ArrayList<Type>();
@@ -217,9 +220,8 @@ public class CheckClassAdapter extends ClassVisitor {
for (int i = 0; i < methods.size(); ++i) {
MethodNode method = methods.get(i);
- SimpleVerifier verifier = new SimpleVerifier(Type.getObjectType(cn.name),
- syperType,
- interfaces,
+ SimpleVerifier verifier = new SimpleVerifier(
+ Type.getObjectType(cn.name), syperType, interfaces,
(cn.access & Opcodes.ACC_INTERFACE) != 0);
Analyzer<BasicValue> a = new Analyzer<BasicValue>(verifier);
if (loader != null) {
@@ -241,25 +243,22 @@ public class CheckClassAdapter extends ClassVisitor {
/**
* Checks a given class
*
- * @param cr a <code>ClassReader</code> that contains bytecode for the
- * analysis.
- * @param dump true if bytecode should be printed out not only when errors
- * are found.
- * @param pw write where results going to be printed
+ * @param cr
+ * a <code>ClassReader</code> that contains bytecode for the
+ * analysis.
+ * @param dump
+ * true if bytecode should be printed out even when no errors
+ * are found.
+ * @param pw
+ * the writer where results are going to be printed
*/
- public static void verify(
- final ClassReader cr,
- final boolean dump,
- final PrintWriter pw)
- {
+ public static void verify(final ClassReader cr, final boolean dump,
+ final PrintWriter pw) {
verify(cr, null, dump, pw);
}
- static void printAnalyzerResult(
- MethodNode method,
- Analyzer<BasicValue> a,
- final PrintWriter pw)
- {
+ static void printAnalyzerResult(MethodNode method, Analyzer<BasicValue> a,
+ final PrintWriter pw) {
Frame<BasicValue>[] frames = a.getFrames();
Textifier t = new Textifier();
TraceMethodVisitor mv = new TraceMethodVisitor(t);
@@ -310,7 +309,8 @@ public class CheckClassAdapter extends ClassVisitor {
* this constructor</i>. Instead, they must use the
* {@link #CheckClassAdapter(int, ClassVisitor, boolean)} version.
*
- * @param cv the class visitor to which this adapter must delegate calls.
+ * @param cv
+ * the class visitor to which this adapter must delegate calls.
*/
public CheckClassAdapter(final ClassVisitor cv) {
this(cv, true);
@@ -321,33 +321,34 @@ public class CheckClassAdapter extends ClassVisitor {
* this constructor</i>. Instead, they must use the
* {@link #CheckClassAdapter(int, ClassVisitor, boolean)} version.
*
- * @param cv the class visitor to which this adapter must delegate calls.
- * @param checkDataFlow <tt>true</tt> to perform basic data flow checks, or
- * <tt>false</tt> to not perform any data flow check (see
- * {@link CheckMethodAdapter}). This option requires valid maxLocals
- * and maxStack values.
+ * @param cv
+ * the class visitor to which this adapter must delegate calls.
+ * @param checkDataFlow
+ * <tt>true</tt> to perform basic data flow checks, or
+ * <tt>false</tt> to not perform any data flow check (see
+ * {@link CheckMethodAdapter}). This option requires valid
+ * maxLocals and maxStack values.
*/
- public CheckClassAdapter(final ClassVisitor cv, final boolean checkDataFlow)
- {
+ public CheckClassAdapter(final ClassVisitor cv, final boolean checkDataFlow) {
this(Opcodes.ASM4, cv, checkDataFlow);
}
/**
* Constructs a new {@link CheckClassAdapter}.
*
- * @param api the ASM API version implemented by this visitor. Must be one
- * of {@link Opcodes#ASM4}.
- * @param cv the class visitor to which this adapter must delegate calls.
- * @param checkDataFlow <tt>true</tt> to perform basic data flow checks, or
- * <tt>false</tt> to not perform any data flow check (see
- * {@link CheckMethodAdapter}). This option requires valid maxLocals
- * and maxStack values.
+ * @param api
+ * the ASM API version implemented by this visitor. Must be one
+ * of {@link Opcodes#ASM4}.
+ * @param cv
+ * the class visitor to which this adapter must delegate calls.
+ * @param checkDataFlow
+ * <tt>true</tt> to perform basic data flow checks, or
+ * <tt>false</tt> to not perform any data flow check (see
+ * {@link CheckMethodAdapter}). This option requires valid
+ * maxLocals and maxStack values.
*/
- protected CheckClassAdapter(
- final int api,
- final ClassVisitor cv,
- final boolean checkDataFlow)
- {
+ protected CheckClassAdapter(final int api, final ClassVisitor cv,
+ final boolean checkDataFlow) {
super(api, cv);
this.labels = new HashMap<Label, Integer>();
this.checkDataFlow = checkDataFlow;
@@ -358,14 +359,9 @@ public class CheckClassAdapter extends ClassVisitor {
// ------------------------------------------------------------------------
@Override
- public void visit(
- final int version,
- final int access,
- final String name,
- final String signature,
- final String superName,
- final String[] interfaces)
- {
+ public void visit(final int version, final int access, final String name,
+ final String signature, final String superName,
+ final String[] interfaces) {
if (start) {
throw new IllegalStateException("visit must be called only once");
}
@@ -375,24 +371,25 @@ public class CheckClassAdapter extends ClassVisitor {
+ Opcodes.ACC_SUPER + Opcodes.ACC_INTERFACE
+ Opcodes.ACC_ABSTRACT + Opcodes.ACC_SYNTHETIC
+ Opcodes.ACC_ANNOTATION + Opcodes.ACC_ENUM
- + Opcodes.ACC_DEPRECATED
- + 0x40000); // ClassWriter.ACC_SYNTHETIC_ATTRIBUTE
+ + Opcodes.ACC_DEPRECATED + 0x40000); // ClassWriter.ACC_SYNTHETIC_ATTRIBUTE
if (name == null || !name.endsWith("package-info")) {
CheckMethodAdapter.checkInternalName(name, "class name");
}
if ("java/lang/Object".equals(name)) {
if (superName != null) {
- throw new IllegalArgumentException("The super class name of the Object class must be 'null'");
+ throw new IllegalArgumentException(
+ "The super class name of the Object class must be 'null'");
}
} else {
CheckMethodAdapter.checkInternalName(superName, "super class name");
}
if (signature != null) {
- CheckMethodAdapter.checkClassSignature(signature);
+ checkClassSignature(signature);
}
if ((access & Opcodes.ACC_INTERFACE) != 0) {
if (!"java/lang/Object".equals(superName)) {
- throw new IllegalArgumentException("The super class name of interfaces must be 'java/lang/Object'");
+ throw new IllegalArgumentException(
+ "The super class name of interfaces must be 'java/lang/Object'");
}
}
if (interfaces != null) {
@@ -409,21 +406,20 @@ public class CheckClassAdapter extends ClassVisitor {
public void visitSource(final String file, final String debug) {
checkState();
if (source) {
- throw new IllegalStateException("visitSource can be called only once.");
+ throw new IllegalStateException(
+ "visitSource can be called only once.");
}
source = true;
super.visitSource(file, debug);
}
@Override
- public void visitOuterClass(
- final String owner,
- final String name,
- final String desc)
- {
+ public void visitOuterClass(final String owner, final String name,
+ final String desc) {
checkState();
if (outer) {
- throw new IllegalStateException("visitOuterClass can be called only once.");
+ throw new IllegalStateException(
+ "visitOuterClass can be called only once.");
}
outer = true;
if (owner == null) {
@@ -436,12 +432,8 @@ public class CheckClassAdapter extends ClassVisitor {
}
@Override
- public void visitInnerClass(
- final String name,
- final String outerName,
- final String innerName,
- final int access)
- {
+ public void visitInnerClass(final String name, final String outerName,
+ final String innerName, final int access) {
checkState();
CheckMethodAdapter.checkInternalName(name, "class name");
if (outerName != null) {
@@ -459,52 +451,44 @@ public class CheckClassAdapter extends ClassVisitor {
}
@Override
- public FieldVisitor visitField(
- final int access,
- final String name,
- final String desc,
- final String signature,
- final Object value)
- {
+ public FieldVisitor visitField(final int access, final String name,
+ final String desc, final String signature, final Object value) {
checkState();
checkAccess(access, Opcodes.ACC_PUBLIC + Opcodes.ACC_PRIVATE
+ Opcodes.ACC_PROTECTED + Opcodes.ACC_STATIC
+ Opcodes.ACC_FINAL + Opcodes.ACC_VOLATILE
+ Opcodes.ACC_TRANSIENT + Opcodes.ACC_SYNTHETIC
- + Opcodes.ACC_ENUM + Opcodes.ACC_DEPRECATED
- + 0x40000); // ClassWriter.ACC_SYNTHETIC_ATTRIBUTE
+ + Opcodes.ACC_ENUM + Opcodes.ACC_DEPRECATED + 0x40000); // ClassWriter.ACC_SYNTHETIC_ATTRIBUTE
CheckMethodAdapter.checkUnqualifiedName(version, name, "field name");
CheckMethodAdapter.checkDesc(desc, false);
if (signature != null) {
- CheckMethodAdapter.checkFieldSignature(signature);
+ checkFieldSignature(signature);
}
if (value != null) {
CheckMethodAdapter.checkConstant(value);
}
- FieldVisitor av = super.visitField(access, name, desc, signature, value);
+ FieldVisitor av = super
+ .visitField(access, name, desc, signature, value);
return new CheckFieldAdapter(av);
}
@Override
- public MethodVisitor visitMethod(
- final int access,
- final String name,
- final String desc,
- final String signature,
- final String[] exceptions)
- {
+ public MethodVisitor visitMethod(final int access, final String name,
+ final String desc, final String signature, final String[] exceptions) {
checkState();
checkAccess(access, Opcodes.ACC_PUBLIC + Opcodes.ACC_PRIVATE
+ Opcodes.ACC_PROTECTED + Opcodes.ACC_STATIC
+ Opcodes.ACC_FINAL + Opcodes.ACC_SYNCHRONIZED
+ Opcodes.ACC_BRIDGE + Opcodes.ACC_VARARGS + Opcodes.ACC_NATIVE
+ Opcodes.ACC_ABSTRACT + Opcodes.ACC_STRICT
- + Opcodes.ACC_SYNTHETIC + Opcodes.ACC_DEPRECATED
- + 0x40000); // ClassWriter.ACC_SYNTHETIC_ATTRIBUTE
- CheckMethodAdapter.checkMethodIdentifier(version, name, "method name");
+ + Opcodes.ACC_SYNTHETIC + Opcodes.ACC_DEPRECATED + 0x40000); // ClassWriter.ACC_SYNTHETIC_ATTRIBUTE
+ if (!"<init>".equals(name) && !"<clinit>".equals(name)) {
+ CheckMethodAdapter.checkMethodIdentifier(version, name,
+ "method name");
+ }
CheckMethodAdapter.checkMethodDesc(desc);
if (signature != null) {
- CheckMethodAdapter.checkMethodSignature(signature);
+ checkMethodSignature(signature);
}
if (exceptions != null) {
for (int i = 0; i < exceptions.length; ++i) {
@@ -514,27 +498,19 @@ public class CheckClassAdapter extends ClassVisitor {
}
CheckMethodAdapter cma;
if (checkDataFlow) {
- cma = new CheckMethodAdapter(access,
- name,
- desc,
- super.visitMethod(access, name, desc, signature, exceptions),
- labels);
+ cma = new CheckMethodAdapter(access, name, desc, super.visitMethod(
+ access, name, desc, signature, exceptions), labels);
} else {
- cma = new CheckMethodAdapter(super.visitMethod(access,
- name,
- desc,
- signature,
- exceptions), labels);
+ cma = new CheckMethodAdapter(super.visitMethod(access, name, desc,
+ signature, exceptions), labels);
}
cma.version = version;
return cma;
}
@Override
- public AnnotationVisitor visitAnnotation(
- final String desc,
- final boolean visible)
- {
+ public AnnotationVisitor visitAnnotation(final String desc,
+ final boolean visible) {
checkState();
CheckMethodAdapter.checkDesc(desc, false);
return new CheckAnnotationAdapter(super.visitAnnotation(desc, visible));
@@ -544,7 +520,8 @@ public class CheckClassAdapter extends ClassVisitor {
public void visitAttribute(final Attribute attr) {
checkState();
if (attr == null) {
- throw new IllegalArgumentException("Invalid attribute (must not be null)");
+ throw new IllegalArgumentException(
+ "Invalid attribute (must not be null)");
}
super.visitAttribute(attr);
}
@@ -566,10 +543,12 @@ public class CheckClassAdapter extends ClassVisitor {
*/
private void checkState() {
if (!start) {
- throw new IllegalStateException("Cannot visit member before visit has been called.");
+ throw new IllegalStateException(
+ "Cannot visit member before visit has been called.");
}
if (end) {
- throw new IllegalStateException("Cannot visit member after visitEnd has been called.");
+ throw new IllegalStateException(
+ "Cannot visit member after visitEnd has been called.");
}
}
@@ -578,8 +557,10 @@ public class CheckClassAdapter extends ClassVisitor {
* method also checks that mutually incompatible flags are not set
* simultaneously.
*
- * @param access the access flags to be checked
- * @param possibleAccess the valid access flags.
+ * @param access
+ * the access flags to be checked
+ * @param possibleAccess
+ * the valid access flags.
*/
static void checkAccess(final int access, final int possibleAccess) {
if ((access & ~possibleAccess) != 0) {
@@ -590,14 +571,336 @@ public class CheckClassAdapter extends ClassVisitor {
int pri = (access & Opcodes.ACC_PRIVATE) == 0 ? 0 : 1;
int pro = (access & Opcodes.ACC_PROTECTED) == 0 ? 0 : 1;
if (pub + pri + pro > 1) {
- throw new IllegalArgumentException("public private and protected are mutually exclusive: "
- + access);
+ throw new IllegalArgumentException(
+ "public private and protected are mutually exclusive: "
+ + access);
}
int fin = (access & Opcodes.ACC_FINAL) == 0 ? 0 : 1;
int abs = (access & Opcodes.ACC_ABSTRACT) == 0 ? 0 : 1;
if (fin + abs > 1) {
- throw new IllegalArgumentException("final and abstract are mutually exclusive: "
- + access);
+ throw new IllegalArgumentException(
+ "final and abstract are mutually exclusive: " + access);
+ }
+ }
+
+ /**
+ * Checks a class signature.
+ *
+ * @param signature
+ * a string containing the signature that must be checked.
+ */
+ public static void checkClassSignature(final String signature) {
+ // ClassSignature:
+ // FormalTypeParameters? ClassTypeSignature ClassTypeSignature*
+
+ int pos = 0;
+ if (getChar(signature, 0) == '<') {
+ pos = checkFormalTypeParameters(signature, pos);
+ }
+ pos = checkClassTypeSignature(signature, pos);
+ while (getChar(signature, pos) == 'L') {
+ pos = checkClassTypeSignature(signature, pos);
+ }
+ if (pos != signature.length()) {
+ throw new IllegalArgumentException(signature + ": error at index "
+ + pos);
+ }
+ }
+
+ /**
+ * Checks a method signature.
+ *
+ * @param signature
+ * a string containing the signature that must be checked.
+ */
+ public static void checkMethodSignature(final String signature) {
+ // MethodTypeSignature:
+ // FormalTypeParameters? ( TypeSignature* ) ( TypeSignature | V ) (
+ // ^ClassTypeSignature | ^TypeVariableSignature )*
+
+ int pos = 0;
+ if (getChar(signature, 0) == '<') {
+ pos = checkFormalTypeParameters(signature, pos);
+ }
+ pos = checkChar('(', signature, pos);
+ while ("ZCBSIFJDL[T".indexOf(getChar(signature, pos)) != -1) {
+ pos = checkTypeSignature(signature, pos);
+ }
+ pos = checkChar(')', signature, pos);
+ if (getChar(signature, pos) == 'V') {
+ ++pos;
+ } else {
+ pos = checkTypeSignature(signature, pos);
+ }
+ while (getChar(signature, pos) == '^') {
+ ++pos;
+ if (getChar(signature, pos) == 'L') {
+ pos = checkClassTypeSignature(signature, pos);
+ } else {
+ pos = checkTypeVariableSignature(signature, pos);
+ }
+ }
+ if (pos != signature.length()) {
+ throw new IllegalArgumentException(signature + ": error at index "
+ + pos);
+ }
+ }
+
+ /**
+ * Checks a field signature.
+ *
+ * @param signature
+ * a string containing the signature that must be checked.
+ */
+ public static void checkFieldSignature(final String signature) {
+ int pos = checkFieldTypeSignature(signature, 0);
+ if (pos != signature.length()) {
+ throw new IllegalArgumentException(signature + ": error at index "
+ + pos);
+ }
+ }
+
+ /**
+ * Checks the formal type parameters of a class or method signature.
+ *
+ * @param signature
+ * a string containing the signature that must be checked.
+ * @param pos
+ * index of first character to be checked.
+ * @return the index of the first character after the checked part.
+ */
+ private static int checkFormalTypeParameters(final String signature, int pos) {
+ // FormalTypeParameters:
+ // < FormalTypeParameter+ >
+
+ pos = checkChar('<', signature, pos);
+ pos = checkFormalTypeParameter(signature, pos);
+ while (getChar(signature, pos) != '>') {
+ pos = checkFormalTypeParameter(signature, pos);
+ }
+ return pos + 1;
+ }
+
+ /**
+ * Checks a formal type parameter of a class or method signature.
+ *
+ * @param signature
+ * a string containing the signature that must be checked.
+ * @param pos
+ * index of first character to be checked.
+ * @return the index of the first character after the checked part.
+ */
+ private static int checkFormalTypeParameter(final String signature, int pos) {
+ // FormalTypeParameter:
+ // Identifier : FieldTypeSignature? (: FieldTypeSignature)*
+
+ pos = checkIdentifier(signature, pos);
+ pos = checkChar(':', signature, pos);
+ if ("L[T".indexOf(getChar(signature, pos)) != -1) {
+ pos = checkFieldTypeSignature(signature, pos);
+ }
+ while (getChar(signature, pos) == ':') {
+ pos = checkFieldTypeSignature(signature, pos + 1);
+ }
+ return pos;
+ }
+
+ /**
+ * Checks a field type signature.
+ *
+ * @param signature
+ * a string containing the signature that must be checked.
+ * @param pos
+ * index of first character to be checked.
+ * @return the index of the first character after the checked part.
+ */
+ private static int checkFieldTypeSignature(final String signature, int pos) {
+ // FieldTypeSignature:
+ // ClassTypeSignature | ArrayTypeSignature | TypeVariableSignature
+ //
+ // ArrayTypeSignature:
+ // [ TypeSignature
+
+ switch (getChar(signature, pos)) {
+ case 'L':
+ return checkClassTypeSignature(signature, pos);
+ case '[':
+ return checkTypeSignature(signature, pos + 1);
+ default:
+ return checkTypeVariableSignature(signature, pos);
}
}
+
+ /**
+ * Checks a class type signature.
+ *
+ * @param signature
+ * a string containing the signature that must be checked.
+ * @param pos
+ * index of first character to be checked.
+ * @return the index of the first character after the checked part.
+ */
+ private static int checkClassTypeSignature(final String signature, int pos) {
+ // ClassTypeSignature:
+ // L Identifier ( / Identifier )* TypeArguments? ( . Identifier
+ // TypeArguments? )* ;
+
+ pos = checkChar('L', signature, pos);
+ pos = checkIdentifier(signature, pos);
+ while (getChar(signature, pos) == '/') {
+ pos = checkIdentifier(signature, pos + 1);
+ }
+ if (getChar(signature, pos) == '<') {
+ pos = checkTypeArguments(signature, pos);
+ }
+ while (getChar(signature, pos) == '.') {
+ pos = checkIdentifier(signature, pos + 1);
+ if (getChar(signature, pos) == '<') {
+ pos = checkTypeArguments(signature, pos);
+ }
+ }
+ return checkChar(';', signature, pos);
+ }
+
+ /**
+ * Checks the type arguments in a class type signature.
+ *
+ * @param signature
+ * a string containing the signature that must be checked.
+ * @param pos
+ * index of first character to be checked.
+ * @return the index of the first character after the checked part.
+ */
+ private static int checkTypeArguments(final String signature, int pos) {
+ // TypeArguments:
+ // < TypeArgument+ >
+
+ pos = checkChar('<', signature, pos);
+ pos = checkTypeArgument(signature, pos);
+ while (getChar(signature, pos) != '>') {
+ pos = checkTypeArgument(signature, pos);
+ }
+ return pos + 1;
+ }
+
+ /**
+ * Checks a type argument in a class type signature.
+ *
+ * @param signature
+ * a string containing the signature that must be checked.
+ * @param pos
+ * index of first character to be checked.
+ * @return the index of the first character after the checked part.
+ */
+ private static int checkTypeArgument(final String signature, int pos) {
+ // TypeArgument:
+ // * | ( ( + | - )? FieldTypeSignature )
+
+ char c = getChar(signature, pos);
+ if (c == '*') {
+ return pos + 1;
+ } else if (c == '+' || c == '-') {
+ pos++;
+ }
+ return checkFieldTypeSignature(signature, pos);
+ }
+
+ /**
+ * Checks a type variable signature.
+ *
+ * @param signature
+ * a string containing the signature that must be checked.
+ * @param pos
+ * index of first character to be checked.
+ * @return the index of the first character after the checked part.
+ */
+ private static int checkTypeVariableSignature(final String signature,
+ int pos) {
+ // TypeVariableSignature:
+ // T Identifier ;
+
+ pos = checkChar('T', signature, pos);
+ pos = checkIdentifier(signature, pos);
+ return checkChar(';', signature, pos);
+ }
+
+ /**
+ * Checks a type signature.
+ *
+ * @param signature
+ * a string containing the signature that must be checked.
+ * @param pos
+ * index of first character to be checked.
+ * @return the index of the first character after the checked part.
+ */
+ private static int checkTypeSignature(final String signature, int pos) {
+ // TypeSignature:
+ // Z | C | B | S | I | F | J | D | FieldTypeSignature
+
+ switch (getChar(signature, pos)) {
+ case 'Z':
+ case 'C':
+ case 'B':
+ case 'S':
+ case 'I':
+ case 'F':
+ case 'J':
+ case 'D':
+ return pos + 1;
+ default:
+ return checkFieldTypeSignature(signature, pos);
+ }
+ }
+
+ /**
+ * Checks an identifier.
+ *
+ * @param signature
+ * a string containing the signature that must be checked.
+ * @param pos
+ * index of first character to be checked.
+ * @return the index of the first character after the checked part.
+ */
+ private static int checkIdentifier(final String signature, int pos) {
+ if (!Character.isJavaIdentifierStart(getChar(signature, pos))) {
+ throw new IllegalArgumentException(signature
+ + ": identifier expected at index " + pos);
+ }
+ ++pos;
+ while (Character.isJavaIdentifierPart(getChar(signature, pos))) {
+ ++pos;
+ }
+ return pos;
+ }
+
+ /**
+ * Checks a single character.
+ *
+ * @param signature
+ * a string containing the signature that must be checked.
+ * @param pos
+ * index of first character to be checked.
+ * @return the index of the first character after the checked part.
+ */
+ private static int checkChar(final char c, final String signature, int pos) {
+ if (getChar(signature, pos) == c) {
+ return pos + 1;
+ }
+ throw new IllegalArgumentException(signature + ": '" + c
+ + "' expected at index " + pos);
+ }
+
+ /**
+ * Returns the signature character at the given index.
+ *
+ * @param signature
+ * a signature.
+ * @param pos
+ * an index in signature.
+ * @return the character at the given index, or 0 if there is no such
+ * character.
+ */
+ private static char getChar(final String signature, int pos) {
+ return pos < signature.length() ? signature.charAt(pos) : (char) 0;
+ }
}
diff --git a/src/asm/scala/tools/asm/util/CheckFieldAdapter.java b/src/asm/scala/tools/asm/util/CheckFieldAdapter.java
index bdcbe14b16..4657605936 100644
--- a/src/asm/scala/tools/asm/util/CheckFieldAdapter.java
+++ b/src/asm/scala/tools/asm/util/CheckFieldAdapter.java
@@ -46,7 +46,8 @@ public class CheckFieldAdapter extends FieldVisitor {
* this constructor</i>. Instead, they must use the
* {@link #CheckFieldAdapter(int, FieldVisitor)} version.
*
- * @param fv the field visitor to which this adapter must delegate calls.
+ * @param fv
+ * the field visitor to which this adapter must delegate calls.
*/
public CheckFieldAdapter(final FieldVisitor fv) {
this(Opcodes.ASM4, fv);
@@ -55,19 +56,19 @@ public class CheckFieldAdapter extends FieldVisitor {
/**
* Constructs a new {@link CheckFieldAdapter}.
*
- * @param api the ASM API version implemented by this visitor. Must be one
- * of {@link Opcodes#ASM4}.
- * @param fv the field visitor to which this adapter must delegate calls.
+ * @param api
+ * the ASM API version implemented by this visitor. Must be one
+ * of {@link Opcodes#ASM4}.
+ * @param fv
+ * the field visitor to which this adapter must delegate calls.
*/
protected CheckFieldAdapter(final int api, final FieldVisitor fv) {
super(api, fv);
}
@Override
- public AnnotationVisitor visitAnnotation(
- final String desc,
- final boolean visible)
- {
+ public AnnotationVisitor visitAnnotation(final String desc,
+ final boolean visible) {
checkEnd();
CheckMethodAdapter.checkDesc(desc, false);
return new CheckAnnotationAdapter(super.visitAnnotation(desc, visible));
@@ -77,7 +78,8 @@ public class CheckFieldAdapter extends FieldVisitor {
public void visitAttribute(final Attribute attr) {
checkEnd();
if (attr == null) {
- throw new IllegalArgumentException("Invalid attribute (must not be null)");
+ throw new IllegalArgumentException(
+ "Invalid attribute (must not be null)");
}
super.visitAttribute(attr);
}
@@ -91,7 +93,8 @@ public class CheckFieldAdapter extends FieldVisitor {
private void checkEnd() {
if (end) {
- throw new IllegalStateException("Cannot call a visit method after visitEnd has been called");
+ throw new IllegalStateException(
+ "Cannot call a visit method after visitEnd has been called");
}
}
}
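CheckFieldAdapter is layered the same way over the FieldVisitor returned by a writer. A minimal sketch, assuming an illustrative class and field that are not part of the patch:

    import scala.tools.asm.ClassWriter;
    import scala.tools.asm.FieldVisitor;
    import scala.tools.asm.Opcodes;
    import scala.tools.asm.util.CheckFieldAdapter;

    public class FieldCheckDemo {
        public static void main(String[] args) {
            ClassWriter cw = new ClassWriter(0);
            cw.visit(Opcodes.V1_5, Opcodes.ACC_PUBLIC, "demo/C", null,
                    "java/lang/Object", null);
            FieldVisitor fv = cw.visitField(Opcodes.ACC_PRIVATE, "count", "I", null, null);
            FieldVisitor checked = new CheckFieldAdapter(fv);
            // The annotation descriptor is validated before being forwarded.
            checked.visitAnnotation("Ljava/lang/Deprecated;", true).visitEnd();
            checked.visitEnd();
            cw.visitEnd();
        }
    }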
diff --git a/src/asm/scala/tools/asm/util/CheckMethodAdapter.java b/src/asm/scala/tools/asm/util/CheckMethodAdapter.java
index 7549765421..9da01c9d6e 100644
--- a/src/asm/scala/tools/asm/util/CheckMethodAdapter.java
+++ b/src/asm/scala/tools/asm/util/CheckMethodAdapter.java
@@ -58,7 +58,7 @@ import scala.tools.asm.tree.analysis.BasicVerifier;
* arguments - such as the fact that the given opcode is correct for a given
* visit method. This adapter can also perform some basic data flow checks (more
* precisely those that can be performed without the full class hierarchy - see
- * {@link org.objectweb.asm.tree.analysis.BasicVerifier}). For instance in a
+ * {@link scala.tools.asm.tree.analysis.BasicVerifier}). For instance in a
* method whose signature is <tt>void m ()</tt>, the invalid instruction
* IRETURN, or the invalid sequence IADD L2I will be detected if the data flow
* checks are enabled. These checks are enabled by using the
@@ -75,6 +75,11 @@ public class CheckMethodAdapter extends MethodVisitor {
public int version;
/**
+ * The access flags of the method.
+ */
+ private int access;
+
+ /**
* <tt>true</tt> if the visitCode method has been called.
*/
private boolean startCode;
@@ -107,6 +112,21 @@ public class CheckMethodAdapter extends MethodVisitor {
private Set<Label> usedLabels;
/**
+ * Number of visited frames in expanded form.
+ */
+ private int expandedFrames;
+
+ /**
+ * Number of visited frames in compressed form.
+ */
+ private int compressedFrames;
+
+ /**
+ * Number of instructions before the last visited frame.
+ */
+ private int lastFrame = -1;
+
+ /**
* The exception handler ranges. Each pair of list element contains the
* start and end labels of an exception handler block.
*/
@@ -352,7 +372,8 @@ public class CheckMethodAdapter extends MethodVisitor {
* <i>Subclasses must not use this constructor</i>. Instead, they must use
* the {@link #CheckMethodAdapter(int, MethodVisitor, Map)} version.
*
- * @param mv the method visitor to which this adapter must delegate calls.
+ * @param mv
+ * the method visitor to which this adapter must delegate calls.
*/
public CheckMethodAdapter(final MethodVisitor mv) {
this(mv, new HashMap<Label, Integer>());
@@ -365,13 +386,13 @@ public class CheckMethodAdapter extends MethodVisitor {
* <i>Subclasses must not use this constructor</i>. Instead, they must use
* the {@link #CheckMethodAdapter(int, MethodVisitor, Map)} version.
*
- * @param mv the method visitor to which this adapter must delegate calls.
- * @param labels a map of already visited labels (in other methods).
+ * @param mv
+ * the method visitor to which this adapter must delegate calls.
+ * @param labels
+ * a map of already visited labels (in other methods).
*/
- public CheckMethodAdapter(
- final MethodVisitor mv,
- final Map<Label, Integer> labels)
- {
+ public CheckMethodAdapter(final MethodVisitor mv,
+ final Map<Label, Integer> labels) {
this(Opcodes.ASM4, mv, labels);
}
@@ -380,14 +401,13 @@ public class CheckMethodAdapter extends MethodVisitor {
* will not perform any data flow check (see
* {@link #CheckMethodAdapter(int,String,String,MethodVisitor,Map)}).
*
- * @param mv the method visitor to which this adapter must delegate calls.
- * @param labels a map of already visited labels (in other methods).
+ * @param mv
+ * the method visitor to which this adapter must delegate calls.
+ * @param labels
+ * a map of already visited labels (in other methods).
*/
- protected CheckMethodAdapter(
- final int api,
- final MethodVisitor mv,
- final Map<Label, Integer> labels)
- {
+ protected CheckMethodAdapter(final int api, final MethodVisitor mv,
+ final Map<Label, Integer> labels) {
super(api, mv);
this.labels = labels;
this.usedLabels = new HashSet<Label>();
@@ -400,30 +420,32 @@ public class CheckMethodAdapter extends MethodVisitor {
* signature is <tt>void m ()</tt>, the invalid instruction IRETURN, or the
* invalid sequence IADD L2I will be detected.
*
- * @param access the method's access flags.
- * @param name the method's name.
- * @param desc the method's descriptor (see {@link Type Type}).
- * @param cmv the method visitor to which this adapter must delegate calls.
- * @param labels a map of already visited labels (in other methods).
+ * @param access
+ * the method's access flags.
+ * @param name
+ * the method's name.
+ * @param desc
+ * the method's descriptor (see {@link Type Type}).
+ * @param cmv
+ * the method visitor to which this adapter must delegate calls.
+ * @param labels
+ * a map of already visited labels (in other methods).
*/
- public CheckMethodAdapter(
- final int access,
- final String name,
- final String desc,
- final MethodVisitor cmv,
- final Map<Label, Integer> labels)
- {
+ public CheckMethodAdapter(final int access, final String name,
+ final String desc, final MethodVisitor cmv,
+ final Map<Label, Integer> labels) {
this(new MethodNode(access, name, desc, null, null) {
@Override
public void visitEnd() {
- Analyzer<BasicValue> a = new Analyzer<BasicValue>(new BasicVerifier());
+ Analyzer<BasicValue> a = new Analyzer<BasicValue>(
+ new BasicVerifier());
try {
a.analyze("dummy", this);
} catch (Exception e) {
if (e instanceof IndexOutOfBoundsException
- && maxLocals == 0 && maxStack == 0)
- {
- throw new RuntimeException("Data flow checking option requires valid, non zero maxLocals and maxStack values.");
+ && maxLocals == 0 && maxStack == 0) {
+ throw new RuntimeException(
+ "Data flow checking option requires valid, non zero maxLocals and maxStack values.");
}
e.printStackTrace();
StringWriter sw = new StringWriter();
@@ -435,15 +457,13 @@ public class CheckMethodAdapter extends MethodVisitor {
}
accept(cmv);
}
- },
- labels);
+ }, labels);
+ this.access = access;
}
@Override
- public AnnotationVisitor visitAnnotation(
- final String desc,
- final boolean visible)
- {
+ public AnnotationVisitor visitAnnotation(final String desc,
+ final boolean visible) {
checkEndMethod();
checkDesc(desc, false);
return new CheckAnnotationAdapter(super.visitAnnotation(desc, visible));
@@ -456,68 +476,68 @@ public class CheckMethodAdapter extends MethodVisitor {
}
@Override
- public AnnotationVisitor visitParameterAnnotation(
- final int parameter,
- final String desc,
- final boolean visible)
- {
+ public AnnotationVisitor visitParameterAnnotation(final int parameter,
+ final String desc, final boolean visible) {
checkEndMethod();
checkDesc(desc, false);
- return new CheckAnnotationAdapter(super.visitParameterAnnotation(parameter,
- desc,
- visible));
+ return new CheckAnnotationAdapter(super.visitParameterAnnotation(
+ parameter, desc, visible));
}
@Override
public void visitAttribute(final Attribute attr) {
checkEndMethod();
if (attr == null) {
- throw new IllegalArgumentException("Invalid attribute (must not be null)");
+ throw new IllegalArgumentException(
+ "Invalid attribute (must not be null)");
}
super.visitAttribute(attr);
}
@Override
public void visitCode() {
+ if ((access & Opcodes.ACC_ABSTRACT) != 0) {
+ throw new RuntimeException("Abstract methods cannot have code");
+ }
startCode = true;
super.visitCode();
}
@Override
- public void visitFrame(
- final int type,
- final int nLocal,
- final Object[] local,
- final int nStack,
- final Object[] stack)
- {
+ public void visitFrame(final int type, final int nLocal,
+ final Object[] local, final int nStack, final Object[] stack) {
+ if (insnCount == lastFrame) {
+ throw new IllegalStateException(
+ "At most one frame can be visited at a given code location.");
+ }
+ lastFrame = insnCount;
int mLocal;
int mStack;
switch (type) {
- case Opcodes.F_NEW:
- case Opcodes.F_FULL:
- mLocal = Integer.MAX_VALUE;
- mStack = Integer.MAX_VALUE;
- break;
+ case Opcodes.F_NEW:
+ case Opcodes.F_FULL:
+ mLocal = Integer.MAX_VALUE;
+ mStack = Integer.MAX_VALUE;
+ break;
- case Opcodes.F_SAME:
- mLocal = 0;
- mStack = 0;
- break;
+ case Opcodes.F_SAME:
+ mLocal = 0;
+ mStack = 0;
+ break;
- case Opcodes.F_SAME1:
- mLocal = 0;
- mStack = 1;
- break;
+ case Opcodes.F_SAME1:
+ mLocal = 0;
+ mStack = 1;
+ break;
- case Opcodes.F_APPEND:
- case Opcodes.F_CHOP:
- mLocal = 3;
- mStack = 0;
- break;
+ case Opcodes.F_APPEND:
+ case Opcodes.F_CHOP:
+ mLocal = 3;
+ mStack = 0;
+ break;
- default:
- throw new IllegalArgumentException("Invalid frame type " + type);
+ default:
+ throw new IllegalArgumentException("Invalid frame type " + type);
}
if (nLocal > mLocal) {
@@ -531,19 +551,29 @@ public class CheckMethodAdapter extends MethodVisitor {
if (type != Opcodes.F_CHOP) {
if (nLocal > 0 && (local == null || local.length < nLocal)) {
- throw new IllegalArgumentException("Array local[] is shorter than nLocal");
+ throw new IllegalArgumentException(
+ "Array local[] is shorter than nLocal");
}
for (int i = 0; i < nLocal; ++i) {
checkFrameValue(local[i]);
}
}
if (nStack > 0 && (stack == null || stack.length < nStack)) {
- throw new IllegalArgumentException("Array stack[] is shorter than nStack");
+ throw new IllegalArgumentException(
+ "Array stack[] is shorter than nStack");
}
for (int i = 0; i < nStack; ++i) {
checkFrameValue(stack[i]);
}
-
+ if (type == Opcodes.F_NEW) {
+ ++expandedFrames;
+ } else {
+ ++compressedFrames;
+ }
+ if (expandedFrames > 0 && compressedFrames > 0) {
+ throw new RuntimeException(
+ "Expanded and compressed frames must not be mixed.");
+ }
super.visitFrame(type, nLocal, local, nStack, stack);
}
@@ -562,18 +592,19 @@ public class CheckMethodAdapter extends MethodVisitor {
checkEndCode();
checkOpcode(opcode, 1);
switch (opcode) {
- case Opcodes.BIPUSH:
- checkSignedByte(operand, "Invalid operand");
- break;
- case Opcodes.SIPUSH:
- checkSignedShort(operand, "Invalid operand");
- break;
- // case Constants.NEWARRAY:
- default:
- if (operand < Opcodes.T_BOOLEAN || operand > Opcodes.T_LONG) {
- throw new IllegalArgumentException("Invalid operand (must be an array type code T_...): "
- + operand);
- }
+ case Opcodes.BIPUSH:
+ checkSignedByte(operand, "Invalid operand");
+ break;
+ case Opcodes.SIPUSH:
+ checkSignedShort(operand, "Invalid operand");
+ break;
+ // case Constants.NEWARRAY:
+ default:
+ if (operand < Opcodes.T_BOOLEAN || operand > Opcodes.T_LONG) {
+ throw new IllegalArgumentException(
+ "Invalid operand (must be an array type code T_...): "
+ + operand);
+ }
}
super.visitIntInsn(opcode, operand);
++insnCount;
@@ -596,20 +627,16 @@ public class CheckMethodAdapter extends MethodVisitor {
checkOpcode(opcode, 3);
checkInternalName(type, "type");
if (opcode == Opcodes.NEW && type.charAt(0) == '[') {
- throw new IllegalArgumentException("NEW cannot be used to create arrays: "
- + type);
+ throw new IllegalArgumentException(
+ "NEW cannot be used to create arrays: " + type);
}
super.visitTypeInsn(opcode, type);
++insnCount;
}
@Override
- public void visitFieldInsn(
- final int opcode,
- final String owner,
- final String name,
- final String desc)
- {
+ public void visitFieldInsn(final int opcode, final String owner,
+ final String name, final String desc) {
checkStartCode();
checkEndCode();
checkOpcode(opcode, 4);
@@ -621,16 +648,14 @@ public class CheckMethodAdapter extends MethodVisitor {
}
@Override
- public void visitMethodInsn(
- final int opcode,
- final String owner,
- final String name,
- final String desc)
- {
+ public void visitMethodInsn(final int opcode, final String owner,
+ final String name, final String desc) {
checkStartCode();
checkEndCode();
checkOpcode(opcode, 5);
- checkMethodIdentifier(version, name, "name");
+ if (opcode != Opcodes.INVOKESPECIAL || !"<init>".equals(name)) {
+ checkMethodIdentifier(version, name, "name");
+ }
checkInternalName(owner, "owner");
checkMethodDesc(desc);
super.visitMethodInsn(opcode, owner, name, desc);
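This change pairs with the checkMethodIdentifier hunk later in this file, which drops the old '<init>'/'<clinit>' special case: constructor names are now exempt from the identifier check only when invoked through INVOKESPECIAL. A minimal sketch, assuming a null delegate and java/lang/Object as an illustrative owner:

    import scala.tools.asm.Opcodes;
    import scala.tools.asm.util.CheckMethodAdapter;

    public class InitInsnDemo {
        public static void main(String[] args) {
            CheckMethodAdapter cma = new CheckMethodAdapter(null);
            cma.visitCode();
            // Accepted: '<init>' bypasses the identifier check for INVOKESPECIAL.
            cma.visitMethodInsn(Opcodes.INVOKESPECIAL, "java/lang/Object", "<init>", "()V");
            try {
                // Rejected: '<init>' with any other invoke opcode still fails the identifier check.
                cma.visitMethodInsn(Opcodes.INVOKEVIRTUAL, "java/lang/Object", "<init>", "()V");
            } catch (IllegalArgumentException expected) {
                System.out.println(expected.getMessage());
            }
        }
    }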
@@ -638,19 +663,14 @@ public class CheckMethodAdapter extends MethodVisitor {
}
@Override
- public void visitInvokeDynamicInsn(
- String name,
- String desc,
- Handle bsm,
- Object... bsmArgs)
- {
+ public void visitInvokeDynamicInsn(String name, String desc, Handle bsm,
+ Object... bsmArgs) {
checkStartCode();
checkEndCode();
checkMethodIdentifier(version, name, "name");
checkMethodDesc(desc);
if (bsm.getTag() != Opcodes.H_INVOKESTATIC
- && bsm.getTag() != Opcodes.H_NEWINVOKESPECIAL)
- {
+ && bsm.getTag() != Opcodes.H_NEWINVOKESPECIAL) {
throw new IllegalArgumentException("invalid handle tag "
+ bsm.getTag());
}
@@ -705,12 +725,8 @@ public class CheckMethodAdapter extends MethodVisitor {
}
@Override
- public void visitTableSwitchInsn(
- final int min,
- final int max,
- final Label dflt,
- final Label... labels)
- {
+ public void visitTableSwitchInsn(final int min, final int max,
+ final Label dflt, final Label... labels) {
checkStartCode();
checkEndCode();
if (max < min) {
@@ -720,7 +736,8 @@ public class CheckMethodAdapter extends MethodVisitor {
checkLabel(dflt, false, "default label");
checkNonDebugLabel(dflt);
if (labels == null || labels.length != max - min + 1) {
- throw new IllegalArgumentException("There must be max - min + 1 labels");
+ throw new IllegalArgumentException(
+ "There must be max - min + 1 labels");
}
for (int i = 0; i < labels.length; ++i) {
checkLabel(labels[i], false, "label at index " + i);
@@ -734,17 +751,15 @@ public class CheckMethodAdapter extends MethodVisitor {
}
@Override
- public void visitLookupSwitchInsn(
- final Label dflt,
- final int[] keys,
- final Label[] labels)
- {
+ public void visitLookupSwitchInsn(final Label dflt, final int[] keys,
+ final Label[] labels) {
checkEndCode();
checkStartCode();
checkLabel(dflt, false, "default label");
checkNonDebugLabel(dflt);
if (keys == null || labels == null || keys.length != labels.length) {
- throw new IllegalArgumentException("There must be the same number of keys and labels");
+ throw new IllegalArgumentException(
+ "There must be the same number of keys and labels");
}
for (int i = 0; i < labels.length; ++i) {
checkLabel(labels[i], false, "label at index " + i);
@@ -764,28 +779,26 @@ public class CheckMethodAdapter extends MethodVisitor {
checkEndCode();
checkDesc(desc, false);
if (desc.charAt(0) != '[') {
- throw new IllegalArgumentException("Invalid descriptor (must be an array type descriptor): "
- + desc);
+ throw new IllegalArgumentException(
+ "Invalid descriptor (must be an array type descriptor): "
+ + desc);
}
if (dims < 1) {
- throw new IllegalArgumentException("Invalid dimensions (must be greater than 0): "
- + dims);
+ throw new IllegalArgumentException(
+ "Invalid dimensions (must be greater than 0): " + dims);
}
if (dims > desc.lastIndexOf('[') + 1) {
- throw new IllegalArgumentException("Invalid dimensions (must not be greater than dims(desc)): "
- + dims);
+ throw new IllegalArgumentException(
+ "Invalid dimensions (must not be greater than dims(desc)): "
+ + dims);
}
super.visitMultiANewArrayInsn(desc, dims);
++insnCount;
}
@Override
- public void visitTryCatchBlock(
- final Label start,
- final Label end,
- final Label handler,
- final String type)
- {
+ public void visitTryCatchBlock(final Label start, final Label end,
+ final Label handler, final String type) {
checkStartCode();
checkEndCode();
checkLabel(start, false, "start label");
@@ -795,9 +808,9 @@ public class CheckMethodAdapter extends MethodVisitor {
checkNonDebugLabel(end);
checkNonDebugLabel(handler);
if (labels.get(start) != null || labels.get(end) != null
- || labels.get(handler) != null)
- {
- throw new IllegalStateException("Try catch blocks must be visited before their labels");
+ || labels.get(handler) != null) {
+ throw new IllegalStateException(
+ "Try catch blocks must be visited before their labels");
}
if (type != null) {
checkInternalName(type, "type");
@@ -808,14 +821,9 @@ public class CheckMethodAdapter extends MethodVisitor {
}
@Override
- public void visitLocalVariable(
- final String name,
- final String desc,
- final String signature,
- final Label start,
- final Label end,
- final int index)
- {
+ public void visitLocalVariable(final String name, final String desc,
+ final String signature, final Label start, final Label end,
+ final int index) {
checkStartCode();
checkEndCode();
checkUnqualifiedName(version, name, "name");
@@ -826,7 +834,8 @@ public class CheckMethodAdapter extends MethodVisitor {
int s = labels.get(start).intValue();
int e = labels.get(end).intValue();
if (e < s) {
- throw new IllegalArgumentException("Invalid start and end labels (end must be greater than start)");
+ throw new IllegalArgumentException(
+ "Invalid start and end labels (end must be greater than start)");
}
super.visitLocalVariable(name, desc, signature, start, end, index);
}
@@ -850,14 +859,16 @@ public class CheckMethodAdapter extends MethodVisitor {
throw new IllegalStateException("Undefined label used");
}
}
- for (int i = 0; i < handlers.size(); ) {
+ for (int i = 0; i < handlers.size();) {
Integer start = labels.get(handlers.get(i++));
Integer end = labels.get(handlers.get(i++));
if (start == null || end == null) {
- throw new IllegalStateException("Undefined try catch block labels");
+ throw new IllegalStateException(
+ "Undefined try catch block labels");
}
if (end.intValue() <= start.intValue()) {
- throw new IllegalStateException("Emty try catch block handler range");
+ throw new IllegalStateException(
+ "Emty try catch block handler range");
}
}
checkUnsignedShort(maxStack, "Invalid max stack");
@@ -879,7 +890,8 @@ public class CheckMethodAdapter extends MethodVisitor {
*/
void checkStartCode() {
if (!startCode) {
- throw new IllegalStateException("Cannot visit instructions before visitCode has been called.");
+ throw new IllegalStateException(
+ "Cannot visit instructions before visitCode has been called.");
}
}
@@ -888,7 +900,8 @@ public class CheckMethodAdapter extends MethodVisitor {
*/
void checkEndCode() {
if (endCode) {
- throw new IllegalStateException("Cannot visit instructions after visitMaxs has been called.");
+ throw new IllegalStateException(
+ "Cannot visit instructions after visitMaxs has been called.");
}
}
@@ -897,21 +910,22 @@ public class CheckMethodAdapter extends MethodVisitor {
*/
void checkEndMethod() {
if (endMethod) {
- throw new IllegalStateException("Cannot visit elements after visitEnd has been called.");
+ throw new IllegalStateException(
+ "Cannot visit elements after visitEnd has been called.");
}
}
/**
* Checks a stack frame value.
*
- * @param value the value to be checked.
+ * @param value
+ * the value to be checked.
*/
void checkFrameValue(final Object value) {
if (value == Opcodes.TOP || value == Opcodes.INTEGER
|| value == Opcodes.FLOAT || value == Opcodes.LONG
|| value == Opcodes.DOUBLE || value == Opcodes.NULL
- || value == Opcodes.UNINITIALIZED_THIS)
- {
+ || value == Opcodes.UNINITIALIZED_THIS) {
return;
}
if (value instanceof String) {
@@ -929,8 +943,10 @@ public class CheckMethodAdapter extends MethodVisitor {
/**
* Checks that the type of the given opcode is equal to the given type.
*
- * @param opcode the opcode to be checked.
- * @param type the expected opcode type.
+ * @param opcode
+ * the opcode to be checked.
+ * @param type
+ * the expected opcode type.
*/
static void checkOpcode(final int opcode, final int type) {
if (opcode < 0 || opcode > 199 || TYPE[opcode] != type) {
@@ -941,8 +957,10 @@ public class CheckMethodAdapter extends MethodVisitor {
/**
* Checks that the given value is a signed byte.
*
- * @param value the value to be checked.
- * @param msg an message to be used in case of error.
+ * @param value
+ * the value to be checked.
+ * @param msg
+ * a message to be used in case of error.
*/
static void checkSignedByte(final int value, final String msg) {
if (value < Byte.MIN_VALUE || value > Byte.MAX_VALUE) {
@@ -954,8 +972,10 @@ public class CheckMethodAdapter extends MethodVisitor {
/**
* Checks that the given value is a signed short.
*
- * @param value the value to be checked.
- * @param msg an message to be used in case of error.
+ * @param value
+ * the value to be checked.
+ * @param msg
+ * a message to be used in case of error.
*/
static void checkSignedShort(final int value, final String msg) {
if (value < Short.MIN_VALUE || value > Short.MAX_VALUE) {
@@ -967,8 +987,10 @@ public class CheckMethodAdapter extends MethodVisitor {
/**
* Checks that the given value is an unsigned short.
*
- * @param value the value to be checked.
- * @param msg an message to be used in case of error.
+ * @param value
+ * the value to be checked.
+ * @param msg
+ * an message to be used in case of error.
*/
static void checkUnsignedShort(final int value, final String msg) {
if (value < 0 || value > 65535) {
@@ -981,13 +1003,13 @@ public class CheckMethodAdapter extends MethodVisitor {
* Checks that the given value is an {@link Integer}, a {@link Float}, a
* {@link Long}, a {@link Double} or a {@link String}.
*
- * @param cst the value to be checked.
+ * @param cst
+ * the value to be checked.
*/
static void checkConstant(final Object cst) {
if (!(cst instanceof Integer) && !(cst instanceof Float)
&& !(cst instanceof Long) && !(cst instanceof Double)
- && !(cst instanceof String))
- {
+ && !(cst instanceof String)) {
throw new IllegalArgumentException("Invalid constant: " + cst);
}
}
@@ -999,19 +1021,21 @@ public class CheckMethodAdapter extends MethodVisitor {
throw new IllegalArgumentException("Illegal LDC constant value");
}
if (s != Type.METHOD && (version & 0xFFFF) < Opcodes.V1_5) {
- throw new IllegalArgumentException("ldc of a constant class requires at least version 1.5");
+ throw new IllegalArgumentException(
+ "ldc of a constant class requires at least version 1.5");
}
if (s == Type.METHOD && (version & 0xFFFF) < Opcodes.V1_7) {
- throw new IllegalArgumentException("ldc of a method type requires at least version 1.7");
+ throw new IllegalArgumentException(
+ "ldc of a method type requires at least version 1.7");
}
} else if (cst instanceof Handle) {
if ((version & 0xFFFF) < Opcodes.V1_7) {
- throw new IllegalArgumentException("ldc of a handle requires at least version 1.7");
+ throw new IllegalArgumentException(
+ "ldc of a handle requires at least version 1.7");
}
int tag = ((Handle) cst).getTag();
if (tag < Opcodes.H_GETFIELD || tag > Opcodes.H_INVOKEINTERFACE) {
- throw new IllegalArgumentException("invalid handle tag "
- + tag);
+ throw new IllegalArgumentException("invalid handle tag " + tag);
}
} else {
checkConstant(cst);
@@ -1021,15 +1045,15 @@ public class CheckMethodAdapter extends MethodVisitor {
/**
* Checks that the given string is a valid unqualified name.
*
- * @param version the class version.
- * @param name the string to be checked.
- * @param msg a message to be used in case of error.
+ * @param version
+ * the class version.
+ * @param name
+ * the string to be checked.
+ * @param msg
+ * a message to be used in case of error.
*/
- static void checkUnqualifiedName(
- int version,
- final String name,
- final String msg)
- {
+ static void checkUnqualifiedName(int version, final String name,
+ final String msg) {
if ((version & 0xFFFF) < Opcodes.V1_5) {
checkIdentifier(name, msg);
} else {
@@ -1045,8 +1069,10 @@ public class CheckMethodAdapter extends MethodVisitor {
/**
* Checks that the given string is a valid Java identifier.
*
- * @param name the string to be checked.
- * @param msg a message to be used in case of error.
+ * @param name
+ * the string to be checked.
+ * @param msg
+ * a message to be used in case of error.
*/
static void checkIdentifier(final String name, final String msg) {
checkIdentifier(name, 0, -1, msg);
@@ -1055,21 +1081,20 @@ public class CheckMethodAdapter extends MethodVisitor {
/**
* Checks that the given substring is a valid Java identifier.
*
- * @param name the string to be checked.
- * @param start index of the first character of the identifier (inclusive).
- * @param end index of the last character of the identifier (exclusive). -1
- * is equivalent to <tt>name.length()</tt> if name is not
- * <tt>null</tt>.
- * @param msg a message to be used in case of error.
+ * @param name
+ * the string to be checked.
+ * @param start
+ * index of the first character of the identifier (inclusive).
+ * @param end
+ * index of the last character of the identifier (exclusive). -1
+ * is equivalent to <tt>name.length()</tt> if name is not
+ * <tt>null</tt>.
+ * @param msg
+ * a message to be used in case of error.
*/
- static void checkIdentifier(
- final String name,
- final int start,
- final int end,
- final String msg)
- {
- if (name == null || (end == -1 ? name.length() <= start : end <= start))
- {
+ static void checkIdentifier(final String name, final int start,
+ final int end, final String msg) {
+ if (name == null || (end == -1 ? name.length() <= start : end <= start)) {
throw new IllegalArgumentException("Invalid " + msg
+ " (must not be null or empty)");
}
@@ -1087,25 +1112,21 @@ public class CheckMethodAdapter extends MethodVisitor {
}
/**
- * Checks that the given string is a valid Java identifier or is equal to
- * '&lt;init&gt;' or '&lt;clinit&gt;'.
+ * Checks that the given string is a valid Java identifier.
*
- * @param version the class version.
- * @param name the string to be checked.
- * @param msg a message to be used in case of error.
+ * @param version
+ * the class version.
+ * @param name
+ * the string to be checked.
+ * @param msg
+ * a message to be used in case of error.
*/
- static void checkMethodIdentifier(
- int version,
- final String name,
- final String msg)
- {
+ static void checkMethodIdentifier(int version, final String name,
+ final String msg) {
if (name == null || name.length() == 0) {
throw new IllegalArgumentException("Invalid " + msg
+ " (must not be null or empty)");
}
- if ("<init>".equals(name) || "<clinit>".equals(name)) {
- return;
- }
if ((version & 0xFFFF) >= Opcodes.V1_5) {
for (int i = 0; i < name.length(); ++i) {
if (".;[/<>".indexOf(name.charAt(i)) != -1) {
@@ -1116,17 +1137,19 @@ public class CheckMethodAdapter extends MethodVisitor {
return;
}
if (!Character.isJavaIdentifierStart(name.charAt(0))) {
- throw new IllegalArgumentException("Invalid "
- + msg
- + " (must be a '<init>', '<clinit>' or a valid Java identifier): "
- + name);
+ throw new IllegalArgumentException(
+ "Invalid "
+ + msg
+ + " (must be a '<init>', '<clinit>' or a valid Java identifier): "
+ + name);
}
for (int i = 1; i < name.length(); ++i) {
if (!Character.isJavaIdentifierPart(name.charAt(i))) {
- throw new IllegalArgumentException("Invalid "
- + msg
- + " (must be '<init>' or '<clinit>' or a valid Java identifier): "
- + name);
+ throw new IllegalArgumentException(
+ "Invalid "
+ + msg
+ + " (must be '<init>' or '<clinit>' or a valid Java identifier): "
+ + name);
}
}
}
@@ -1134,8 +1157,10 @@ public class CheckMethodAdapter extends MethodVisitor {
/**
* Checks that the given string is a valid internal class name.
*
- * @param name the string to be checked.
- * @param msg a message to be used in case of error.
+ * @param name
+ * the string to be checked.
+ * @param msg
+ * a message to be used in case of error.
*/
static void checkInternalName(final String name, final String msg) {
if (name == null || name.length() == 0) {
@@ -1152,19 +1177,19 @@ public class CheckMethodAdapter extends MethodVisitor {
/**
* Checks that the given substring is a valid internal class name.
*
- * @param name the string to be checked.
- * @param start index of the first character of the identifier (inclusive).
- * @param end index of the last character of the identifier (exclusive). -1
- * is equivalent to <tt>name.length()</tt> if name is not
- * <tt>null</tt>.
- * @param msg a message to be used in case of error.
+ * @param name
+ * the string to be checked.
+ * @param start
+ * index of the first character of the identifier (inclusive).
+ * @param end
+ * index of the last character of the identifier (exclusive). -1
+ * is equivalent to <tt>name.length()</tt> if name is not
+ * <tt>null</tt>.
+ * @param msg
+ * a message to be used in case of error.
*/
- static void checkInternalName(
- final String name,
- final int start,
- final int end,
- final String msg)
- {
+ static void checkInternalName(final String name, final int start,
+ final int end, final String msg) {
int max = end == -1 ? name.length() : end;
try {
int begin = start;
@@ -1178,18 +1203,21 @@ public class CheckMethodAdapter extends MethodVisitor {
begin = slash + 1;
} while (slash != max);
} catch (IllegalArgumentException _) {
- throw new IllegalArgumentException("Invalid "
- + msg
- + " (must be a fully qualified class name in internal form): "
- + name);
+ throw new IllegalArgumentException(
+ "Invalid "
+ + msg
+ + " (must be a fully qualified class name in internal form): "
+ + name);
}
}
/**
* Checks that the given string is a valid type descriptor.
*
- * @param desc the string to be checked.
- * @param canBeVoid <tt>true</tt> if <tt>V</tt> can be considered valid.
+ * @param desc
+ * the string to be checked.
+ * @param canBeVoid
+ * <tt>true</tt> if <tt>V</tt> can be considered valid.
*/
static void checkDesc(final String desc, final boolean canBeVoid) {
int end = checkDesc(desc, 0, canBeVoid);
@@ -1201,75 +1229,77 @@ public class CheckMethodAdapter extends MethodVisitor {
/**
* Checks that the given substring is a valid type descriptor.
*
- * @param desc the string to be checked.
- * @param start index of the first character of the identifier (inclusive).
- * @param canBeVoid <tt>true</tt> if <tt>V</tt> can be considered valid.
+ * @param desc
+ * the string to be checked.
+ * @param start
+ * index of the first character of the identifier (inclusive).
+ * @param canBeVoid
+ * <tt>true</tt> if <tt>V</tt> can be considered valid.
* @return the index of the last character of the type descriptor, plus one.
*/
- static int checkDesc(
- final String desc,
- final int start,
- final boolean canBeVoid)
- {
+ static int checkDesc(final String desc, final int start,
+ final boolean canBeVoid) {
if (desc == null || start >= desc.length()) {
- throw new IllegalArgumentException("Invalid type descriptor (must not be null or empty)");
+ throw new IllegalArgumentException(
+ "Invalid type descriptor (must not be null or empty)");
}
int index;
switch (desc.charAt(start)) {
- case 'V':
- if (canBeVoid) {
- return start + 1;
- } else {
- throw new IllegalArgumentException("Invalid descriptor: "
- + desc);
- }
- case 'Z':
- case 'C':
- case 'B':
- case 'S':
- case 'I':
- case 'F':
- case 'J':
- case 'D':
+ case 'V':
+ if (canBeVoid) {
return start + 1;
- case '[':
- index = start + 1;
- while (index < desc.length() && desc.charAt(index) == '[') {
- ++index;
- }
- if (index < desc.length()) {
- return checkDesc(desc, index, false);
- } else {
- throw new IllegalArgumentException("Invalid descriptor: "
- + desc);
- }
- case 'L':
- index = desc.indexOf(';', start);
- if (index == -1 || index - start < 2) {
- throw new IllegalArgumentException("Invalid descriptor: "
- + desc);
- }
- try {
- checkInternalName(desc, start + 1, index, null);
- } catch (IllegalArgumentException _) {
- throw new IllegalArgumentException("Invalid descriptor: "
- + desc);
- }
- return index + 1;
- default:
+ } else {
+ throw new IllegalArgumentException("Invalid descriptor: "
+ + desc);
+ }
+ case 'Z':
+ case 'C':
+ case 'B':
+ case 'S':
+ case 'I':
+ case 'F':
+ case 'J':
+ case 'D':
+ return start + 1;
+ case '[':
+ index = start + 1;
+ while (index < desc.length() && desc.charAt(index) == '[') {
+ ++index;
+ }
+ if (index < desc.length()) {
+ return checkDesc(desc, index, false);
+ } else {
throw new IllegalArgumentException("Invalid descriptor: "
+ desc);
+ }
+ case 'L':
+ index = desc.indexOf(';', start);
+ if (index == -1 || index - start < 2) {
+ throw new IllegalArgumentException("Invalid descriptor: "
+ + desc);
+ }
+ try {
+ checkInternalName(desc, start + 1, index, null);
+ } catch (IllegalArgumentException _) {
+ throw new IllegalArgumentException("Invalid descriptor: "
+ + desc);
+ }
+ return index + 1;
+ default:
+ throw new IllegalArgumentException("Invalid descriptor: " + desc);
}
}
/**
* Checks that the given string is a valid method descriptor.
*
- * @param desc the string to be checked.
+ * @param desc
+ * the string to be checked.
*/
static void checkMethodDesc(final String desc) {
if (desc == null || desc.length() == 0) {
- throw new IllegalArgumentException("Invalid method descriptor (must not be null or empty)");
+ throw new IllegalArgumentException(
+ "Invalid method descriptor (must not be null or empty)");
}
if (desc.charAt(0) != '(' || desc.length() < 3) {
throw new IllegalArgumentException("Invalid descriptor: " + desc);
@@ -1291,322 +1321,18 @@ public class CheckMethodAdapter extends MethodVisitor {
}
/**
- * Checks a class signature.
- *
- * @param signature a string containing the signature that must be checked.
- */
- static void checkClassSignature(final String signature) {
- // ClassSignature:
- // FormalTypeParameters? ClassTypeSignature ClassTypeSignature*
-
- int pos = 0;
- if (getChar(signature, 0) == '<') {
- pos = checkFormalTypeParameters(signature, pos);
- }
- pos = checkClassTypeSignature(signature, pos);
- while (getChar(signature, pos) == 'L') {
- pos = checkClassTypeSignature(signature, pos);
- }
- if (pos != signature.length()) {
- throw new IllegalArgumentException(signature + ": error at index "
- + pos);
- }
- }
-
- /**
- * Checks a method signature.
- *
- * @param signature a string containing the signature that must be checked.
- */
- static void checkMethodSignature(final String signature) {
- // MethodTypeSignature:
- // FormalTypeParameters? ( TypeSignature* ) ( TypeSignature | V ) (
- // ^ClassTypeSignature | ^TypeVariableSignature )*
-
- int pos = 0;
- if (getChar(signature, 0) == '<') {
- pos = checkFormalTypeParameters(signature, pos);
- }
- pos = checkChar('(', signature, pos);
- while ("ZCBSIFJDL[T".indexOf(getChar(signature, pos)) != -1) {
- pos = checkTypeSignature(signature, pos);
- }
- pos = checkChar(')', signature, pos);
- if (getChar(signature, pos) == 'V') {
- ++pos;
- } else {
- pos = checkTypeSignature(signature, pos);
- }
- while (getChar(signature, pos) == '^') {
- ++pos;
- if (getChar(signature, pos) == 'L') {
- pos = checkClassTypeSignature(signature, pos);
- } else {
- pos = checkTypeVariableSignature(signature, pos);
- }
- }
- if (pos != signature.length()) {
- throw new IllegalArgumentException(signature + ": error at index "
- + pos);
- }
- }
-
- /**
- * Checks a field signature.
- *
- * @param signature a string containing the signature that must be checked.
- */
- static void checkFieldSignature(final String signature) {
- int pos = checkFieldTypeSignature(signature, 0);
- if (pos != signature.length()) {
- throw new IllegalArgumentException(signature + ": error at index "
- + pos);
- }
- }
-
- /**
- * Checks the formal type parameters of a class or method signature.
- *
- * @param signature a string containing the signature that must be checked.
- * @param pos index of first character to be checked.
- * @return the index of the first character after the checked part.
- */
- private static int checkFormalTypeParameters(final String signature, int pos)
- {
- // FormalTypeParameters:
- // < FormalTypeParameter+ >
-
- pos = checkChar('<', signature, pos);
- pos = checkFormalTypeParameter(signature, pos);
- while (getChar(signature, pos) != '>') {
- pos = checkFormalTypeParameter(signature, pos);
- }
- return pos + 1;
- }
-
- /**
- * Checks a formal type parameter of a class or method signature.
- *
- * @param signature a string containing the signature that must be checked.
- * @param pos index of first character to be checked.
- * @return the index of the first character after the checked part.
- */
- private static int checkFormalTypeParameter(final String signature, int pos)
- {
- // FormalTypeParameter:
- // Identifier : FieldTypeSignature? (: FieldTypeSignature)*
-
- pos = checkIdentifier(signature, pos);
- pos = checkChar(':', signature, pos);
- if ("L[T".indexOf(getChar(signature, pos)) != -1) {
- pos = checkFieldTypeSignature(signature, pos);
- }
- while (getChar(signature, pos) == ':') {
- pos = checkFieldTypeSignature(signature, pos + 1);
- }
- return pos;
- }
-
- /**
- * Checks a field type signature.
- *
- * @param signature a string containing the signature that must be checked.
- * @param pos index of first character to be checked.
- * @return the index of the first character after the checked part.
- */
- private static int checkFieldTypeSignature(final String signature, int pos)
- {
- // FieldTypeSignature:
- // ClassTypeSignature | ArrayTypeSignature | TypeVariableSignature
- //
- // ArrayTypeSignature:
- // [ TypeSignature
-
- switch (getChar(signature, pos)) {
- case 'L':
- return checkClassTypeSignature(signature, pos);
- case '[':
- return checkTypeSignature(signature, pos + 1);
- default:
- return checkTypeVariableSignature(signature, pos);
- }
- }
-
- /**
- * Checks a class type signature.
- *
- * @param signature a string containing the signature that must be checked.
- * @param pos index of first character to be checked.
- * @return the index of the first character after the checked part.
- */
- private static int checkClassTypeSignature(final String signature, int pos)
- {
- // ClassTypeSignature:
- // L Identifier ( / Identifier )* TypeArguments? ( . Identifier
- // TypeArguments? )* ;
-
- pos = checkChar('L', signature, pos);
- pos = checkIdentifier(signature, pos);
- while (getChar(signature, pos) == '/') {
- pos = checkIdentifier(signature, pos + 1);
- }
- if (getChar(signature, pos) == '<') {
- pos = checkTypeArguments(signature, pos);
- }
- while (getChar(signature, pos) == '.') {
- pos = checkIdentifier(signature, pos + 1);
- if (getChar(signature, pos) == '<') {
- pos = checkTypeArguments(signature, pos);
- }
- }
- return checkChar(';', signature, pos);
- }
-
- /**
- * Checks the type arguments in a class type signature.
- *
- * @param signature a string containing the signature that must be checked.
- * @param pos index of first character to be checked.
- * @return the index of the first character after the checked part.
- */
- private static int checkTypeArguments(final String signature, int pos) {
- // TypeArguments:
- // < TypeArgument+ >
-
- pos = checkChar('<', signature, pos);
- pos = checkTypeArgument(signature, pos);
- while (getChar(signature, pos) != '>') {
- pos = checkTypeArgument(signature, pos);
- }
- return pos + 1;
- }
-
- /**
- * Checks a type argument in a class type signature.
- *
- * @param signature a string containing the signature that must be checked.
- * @param pos index of first character to be checked.
- * @return the index of the first character after the checked part.
- */
- private static int checkTypeArgument(final String signature, int pos) {
- // TypeArgument:
- // * | ( ( + | - )? FieldTypeSignature )
-
- char c = getChar(signature, pos);
- if (c == '*') {
- return pos + 1;
- } else if (c == '+' || c == '-') {
- pos++;
- }
- return checkFieldTypeSignature(signature, pos);
- }
-
- /**
- * Checks a type variable signature.
- *
- * @param signature a string containing the signature that must be checked.
- * @param pos index of first character to be checked.
- * @return the index of the first character after the checked part.
- */
- private static int checkTypeVariableSignature(
- final String signature,
- int pos)
- {
- // TypeVariableSignature:
- // T Identifier ;
-
- pos = checkChar('T', signature, pos);
- pos = checkIdentifier(signature, pos);
- return checkChar(';', signature, pos);
- }
-
- /**
- * Checks a type signature.
- *
- * @param signature a string containing the signature that must be checked.
- * @param pos index of first character to be checked.
- * @return the index of the first character after the checked part.
- */
- private static int checkTypeSignature(final String signature, int pos) {
- // TypeSignature:
- // Z | C | B | S | I | F | J | D | FieldTypeSignature
-
- switch (getChar(signature, pos)) {
- case 'Z':
- case 'C':
- case 'B':
- case 'S':
- case 'I':
- case 'F':
- case 'J':
- case 'D':
- return pos + 1;
- default:
- return checkFieldTypeSignature(signature, pos);
- }
- }
-
- /**
- * Checks an identifier.
- *
- * @param signature a string containing the signature that must be checked.
- * @param pos index of first character to be checked.
- * @return the index of the first character after the checked part.
- */
- private static int checkIdentifier(final String signature, int pos) {
- if (!Character.isJavaIdentifierStart(getChar(signature, pos))) {
- throw new IllegalArgumentException(signature
- + ": identifier expected at index " + pos);
- }
- ++pos;
- while (Character.isJavaIdentifierPart(getChar(signature, pos))) {
- ++pos;
- }
- return pos;
- }
-
- /**
- * Checks a single character.
- *
- * @param signature a string containing the signature that must be checked.
- * @param pos index of first character to be checked.
- * @return the index of the first character after the checked part.
- */
- private static int checkChar(final char c, final String signature, int pos)
- {
- if (getChar(signature, pos) == c) {
- return pos + 1;
- }
- throw new IllegalArgumentException(signature + ": '" + c
- + "' expected at index " + pos);
- }
-
- /**
- * Returns the signature character at the given index.
- *
- * @param signature a signature.
- * @param pos an index in signature.
- * @return the character at the given index, or 0 if there is no such
- * character.
- */
- private static char getChar(final String signature, int pos) {
- return pos < signature.length() ? signature.charAt(pos) : (char) 0;
- }
-
- /**
* Checks that the given label is not null. This method can also check that
* the label has been visited.
*
- * @param label the label to be checked.
- * @param checkVisited <tt>true</tt> to check that the label has been
- * visited.
- * @param msg a message to be used in case of error.
+ * @param label
+ * the label to be checked.
+ * @param checkVisited
+ * <tt>true</tt> to check that the label has been visited.
+ * @param msg
+ * a message to be used in case of error.
*/
- void checkLabel(
- final Label label,
- final boolean checkVisited,
- final String msg)
- {
+ void checkLabel(final Label label, final boolean checkVisited,
+ final String msg) {
if (label == null) {
throw new IllegalArgumentException("Invalid " + msg
+ " (must not be null)");
@@ -1620,7 +1346,8 @@ public class CheckMethodAdapter extends MethodVisitor {
/**
* Checks that the given label is not a label used only for debug purposes.
*
- * @param label the label to be checked.
+ * @param label
+ * the label to be checked.
*/
private static void checkNonDebugLabel(final Label label) {
Field f = getLabelStatusField();
@@ -1631,7 +1358,8 @@ public class CheckMethodAdapter extends MethodVisitor {
throw new Error("Internal error");
}
if ((status & 0x01) != 0) {
- throw new IllegalArgumentException("Labels used for debug info cannot be reused for control flow");
+ throw new IllegalArgumentException(
+ "Labels used for debug info cannot be reused for control flow");
}
}
@@ -1653,7 +1381,8 @@ public class CheckMethodAdapter extends MethodVisitor {
/**
* Returns the field of the Label class whose name is given.
*
- * @param name a field name.
+ * @param name
+ * a field name.
* @return the field of the Label class whose name is given, or null.
*/
private static Field getLabelField(final String name) {
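Editor's note: the grammar comments in the deleted checkers above (FieldTypeSignature, ClassTypeSignature, TypeArguments, TypeVariableSignature, TypeSignature) are easier to follow next to concrete signature strings. The following Java constants are a hypothetical illustration only, not part of this patch; each string is a well-formed instance of the production named in the comment.

// Hypothetical illustration of the signature grammar checked above.
public class SignatureGrammarExamples {

    // ClassTypeSignature: L Identifier ( / Identifier )* TypeArguments? ( . Identifier TypeArguments? )* ;
    static final String CLASS_TYPE = "Ljava/util/Map<Ljava/lang/String;Ljava/lang/Integer;>;";

    // ArrayTypeSignature: [ TypeSignature  (element type is the primitive 'I')
    static final String ARRAY_TYPE = "[I";

    // TypeVariableSignature: T Identifier ;
    static final String TYPE_VARIABLE = "TT;";

    // TypeArgument: * | ( + | - )? FieldTypeSignature  (covariant wildcard bound)
    static final String WILDCARD_ARGUMENT = "Ljava/util/List<+Ljava/lang/Number;>;";

    // A complete class signature as accepted by checkClassSignature:
    // one formal type parameter, a superclass, and one interface.
    static final String CLASS_SIGNATURE =
            "<T:Ljava/lang/Object;>Ljava/util/ArrayList<TT;>;Ljava/io/Serializable;";
}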
diff --git a/src/asm/scala/tools/asm/util/CheckSignatureAdapter.java b/src/asm/scala/tools/asm/util/CheckSignatureAdapter.java
index 3a6c3e780f..e69302b8a6 100644
--- a/src/asm/scala/tools/asm/util/CheckSignatureAdapter.java
+++ b/src/asm/scala/tools/asm/util/CheckSignatureAdapter.java
@@ -41,19 +41,22 @@ public class CheckSignatureAdapter extends SignatureVisitor {
/**
* Type to be used to check class signatures. See
- * {@link #CheckSignatureAdapter(int, SignatureVisitor) CheckSignatureAdapter}.
+ * {@link #CheckSignatureAdapter(int, SignatureVisitor)
+ * CheckSignatureAdapter}.
*/
public static final int CLASS_SIGNATURE = 0;
/**
* Type to be used to check method signatures. See
- * {@link #CheckSignatureAdapter(int, SignatureVisitor) CheckSignatureAdapter}.
+ * {@link #CheckSignatureAdapter(int, SignatureVisitor)
+ * CheckSignatureAdapter}.
*/
public static final int METHOD_SIGNATURE = 1;
/**
* Type to be used to check type signatures. See
- * {@link #CheckSignatureAdapter(int, SignatureVisitor) CheckSignatureAdapter}.
+ * {@link #CheckSignatureAdapter(int, SignatureVisitor)
+ * CheckSignatureAdapter}.
*/
public static final int TYPE_SIGNATURE = 2;
@@ -101,11 +104,13 @@ public class CheckSignatureAdapter extends SignatureVisitor {
* not use this constructor</i>. Instead, they must use the
* {@link #CheckSignatureAdapter(int, int, SignatureVisitor)} version.
*
- * @param type the type of signature to be checked. See
- * {@link #CLASS_SIGNATURE}, {@link #METHOD_SIGNATURE} and
- * {@link #TYPE_SIGNATURE}.
- * @param sv the visitor to which this adapter must delegate calls. May be
- * <tt>null</tt>.
+ * @param type
+ * the type of signature to be checked. See
+ * {@link #CLASS_SIGNATURE}, {@link #METHOD_SIGNATURE} and
+ * {@link #TYPE_SIGNATURE}.
+ * @param sv
+ * the visitor to which this adapter must delegate calls. May be
+ * <tt>null</tt>.
*/
public CheckSignatureAdapter(final int type, final SignatureVisitor sv) {
this(Opcodes.ASM4, type, sv);
@@ -114,19 +119,19 @@ public class CheckSignatureAdapter extends SignatureVisitor {
/**
* Creates a new {@link CheckSignatureAdapter} object.
*
- * @param api the ASM API version implemented by this visitor. Must be one
- * of {@link Opcodes#ASM4}.
- * @param type the type of signature to be checked. See
- * {@link #CLASS_SIGNATURE}, {@link #METHOD_SIGNATURE} and
- * {@link #TYPE_SIGNATURE}.
- * @param sv the visitor to which this adapter must delegate calls. May be
- * <tt>null</tt>.
+ * @param api
+ * the ASM API version implemented by this visitor. Must be one
+ * of {@link Opcodes#ASM4}.
+ * @param type
+ * the type of signature to be checked. See
+ * {@link #CLASS_SIGNATURE}, {@link #METHOD_SIGNATURE} and
+ * {@link #TYPE_SIGNATURE}.
+ * @param sv
+ * the visitor to which this adapter must delegate calls. May be
+ * <tt>null</tt>.
*/
- protected CheckSignatureAdapter(
- final int api,
- final int type,
- final SignatureVisitor sv)
- {
+ protected CheckSignatureAdapter(final int api, final int type,
+ final SignatureVisitor sv) {
super(api);
this.type = type;
this.state = EMPTY;
@@ -138,8 +143,7 @@ public class CheckSignatureAdapter extends SignatureVisitor {
@Override
public void visitFormalTypeParameter(final String name) {
if (type == TYPE_SIGNATURE
- || (state != EMPTY && state != FORMAL && state != BOUND))
- {
+ || (state != EMPTY && state != FORMAL && state != BOUND)) {
throw new IllegalStateException();
}
CheckMethodAdapter.checkIdentifier(name, "formal type parameter");
@@ -172,8 +176,7 @@ public class CheckSignatureAdapter extends SignatureVisitor {
@Override
public SignatureVisitor visitSuperclass() {
- if (type != CLASS_SIGNATURE || (state & (EMPTY | FORMAL | BOUND)) == 0)
- {
+ if (type != CLASS_SIGNATURE || (state & (EMPTY | FORMAL | BOUND)) == 0) {
throw new IllegalArgumentException();
}
state = SUPER;
@@ -195,8 +198,7 @@ public class CheckSignatureAdapter extends SignatureVisitor {
@Override
public SignatureVisitor visitParameterType() {
if (type != METHOD_SIGNATURE
- || (state & (EMPTY | FORMAL | BOUND | PARAM)) == 0)
- {
+ || (state & (EMPTY | FORMAL | BOUND | PARAM)) == 0) {
throw new IllegalArgumentException();
}
state = PARAM;
@@ -207,8 +209,7 @@ public class CheckSignatureAdapter extends SignatureVisitor {
@Override
public SignatureVisitor visitReturnType() {
if (type != METHOD_SIGNATURE
- || (state & (EMPTY | FORMAL | BOUND | PARAM)) == 0)
- {
+ || (state & (EMPTY | FORMAL | BOUND | PARAM)) == 0) {
throw new IllegalArgumentException();
}
state = RETURN;
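Editor's note: as a usage sketch (grounded in the public constructor and constants shown above, and assuming SignatureReader's accept/acceptType methods), a CheckSignatureAdapter can be driven by a SignatureReader to validate a signature before it is forwarded; an ill-formed signature surfaces as an IllegalStateException or IllegalArgumentException. This is not part of the patch.

import scala.tools.asm.signature.SignatureReader;
import scala.tools.asm.util.CheckSignatureAdapter;

public class ValidateSignature {
    public static void main(String[] args) {
        // Class signature: driven through accept() and checked against CLASS_SIGNATURE.
        String classSig = "<T:Ljava/lang/Object;>Ljava/lang/Object;";
        new SignatureReader(classSig).accept(
                new CheckSignatureAdapter(CheckSignatureAdapter.CLASS_SIGNATURE, null));

        // Field/type signature: driven through acceptType() and checked against TYPE_SIGNATURE.
        String typeSig = "Ljava/util/List<Ljava/lang/String;>;";
        new SignatureReader(typeSig).acceptType(
                new CheckSignatureAdapter(CheckSignatureAdapter.TYPE_SIGNATURE, null));

        System.out.println("both signatures are well formed");
    }
}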
diff --git a/src/asm/scala/tools/asm/util/Printer.java b/src/asm/scala/tools/asm/util/Printer.java
index c39fd548ce..86e0f9e122 100644
--- a/src/asm/scala/tools/asm/util/Printer.java
+++ b/src/asm/scala/tools/asm/util/Printer.java
@@ -52,14 +52,14 @@ public abstract class Printer {
/**
* The names of the for <code>operand</code> parameter values of the
- * {@link org.objectweb.asm.MethodVisitor#visitIntInsn} method when
+ * {@link scala.tools.asm.MethodVisitor#visitIntInsn} method when
* <code>opcode</code> is <code>NEWARRAY</code>.
*/
public static final String[] TYPES;
/**
* The names of the <code>tag</code> field values for
- * {@link org.objectweb.asm.Handle}.
+ * {@link scala.tools.asm.Handle}.
*/
public static final String[] HANDLE_TAG;
@@ -103,8 +103,8 @@ public abstract class Printer {
}
s = "H_GETFIELD,H_GETSTATIC,H_PUTFIELD,H_PUTSTATIC,"
- + "H_INVOKEVIRTUAL,H_INVOKESTATIC,H_INVOKESPECIAL,"
- + "H_NEWINVOKESPECIAL,H_INVOKEINTERFACE,";
+ + "H_INVOKEVIRTUAL,H_INVOKESTATIC,H_INVOKESPECIAL,"
+ + "H_NEWINVOKESPECIAL,H_INVOKEINTERFACE,";
HANDLE_TAG = new String[10];
j = 0;
i = 1;
@@ -149,81 +149,58 @@ public abstract class Printer {
}
/**
- * Class header.
- * See {@link org.objectweb.asm.ClassVisitor#visit}.
+ * Class header. See {@link scala.tools.asm.ClassVisitor#visit}.
*/
- public abstract void visit(
- final int version,
- final int access,
- final String name,
- final String signature,
- final String superName,
- final String[] interfaces);
+ public abstract void visit(final int version, final int access,
+ final String name, final String signature, final String superName,
+ final String[] interfaces);
/**
- * Class source.
- * See {@link org.objectweb.asm.ClassVisitor#visitSource}.
+ * Class source. See {@link scala.tools.asm.ClassVisitor#visitSource}.
*/
public abstract void visitSource(final String file, final String debug);
/**
- * Class outer class.
- * See {@link org.objectweb.asm.ClassVisitor#visitOuterClass}.
+ * Class outer class. See
+ * {@link scala.tools.asm.ClassVisitor#visitOuterClass}.
*/
- public abstract void visitOuterClass(
- final String owner,
- final String name,
- final String desc);
+ public abstract void visitOuterClass(final String owner, final String name,
+ final String desc);
/**
- * Class annotation.
- * See {@link org.objectweb.asm.ClassVisitor#visitAnnotation}.
+ * Class annotation. See
+ * {@link scala.tools.asm.ClassVisitor#visitAnnotation}.
*/
- public abstract Printer visitClassAnnotation(
- final String desc,
- final boolean visible);
+ public abstract Printer visitClassAnnotation(final String desc,
+ final boolean visible);
/**
- * Class attribute.
- * See {@link org.objectweb.asm.ClassVisitor#visitAttribute}.
+ * Class attribute. See
+ * {@link scala.tools.asm.ClassVisitor#visitAttribute}.
*/
public abstract void visitClassAttribute(final Attribute attr);
/**
- * Class inner name.
- * See {@link org.objectweb.asm.ClassVisitor#visitInnerClass}.
+ * Class inner name. See
+ * {@link scala.tools.asm.ClassVisitor#visitInnerClass}.
*/
- public abstract void visitInnerClass(
- final String name,
- final String outerName,
- final String innerName,
- final int access);
+ public abstract void visitInnerClass(final String name,
+ final String outerName, final String innerName, final int access);
/**
- * Class field.
- * See {@link org.objectweb.asm.ClassVisitor#visitField}.
+ * Class field. See {@link scala.tools.asm.ClassVisitor#visitField}.
*/
- public abstract Printer visitField(
- final int access,
- final String name,
- final String desc,
- final String signature,
- final Object value);
+ public abstract Printer visitField(final int access, final String name,
+ final String desc, final String signature, final Object value);
/**
- * Class method.
- * See {@link org.objectweb.asm.ClassVisitor#visitMethod}.
+ * Class method. See {@link scala.tools.asm.ClassVisitor#visitMethod}.
*/
- public abstract Printer visitMethod(
- final int access,
- final String name,
- final String desc,
- final String signature,
- final String[] exceptions);
+ public abstract Printer visitMethod(final int access, final String name,
+ final String desc, final String signature, final String[] exceptions);
/**
- * Class end.
- * See {@link org.objectweb.asm.ClassVisitor#visitEnd}.
+ * Class end. See {@link scala.tools.asm.ClassVisitor#visitEnd}.
*/
public abstract void visitClassEnd();
@@ -232,37 +209,31 @@ public abstract class Printer {
// ------------------------------------------------------------------------
/**
- * Annotation value.
- * See {@link org.objectweb.asm.AnnotationVisitor#visit}.
+ * Annotation value. See {@link scala.tools.asm.AnnotationVisitor#visit}.
*/
public abstract void visit(final String name, final Object value);
/**
- * Annotation enum value.
- * See {@link org.objectweb.asm.AnnotationVisitor#visitEnum}.
+ * Annotation enum value. See
+ * {@link scala.tools.asm.AnnotationVisitor#visitEnum}.
*/
- public abstract void visitEnum(
- final String name,
- final String desc,
- final String value);
+ public abstract void visitEnum(final String name, final String desc,
+ final String value);
/**
- * Nested annotation value.
- * See {@link org.objectweb.asm.AnnotationVisitor#visitAnnotation}.
+ * Nested annotation value. See
+ * {@link scala.tools.asm.AnnotationVisitor#visitAnnotation}.
*/
- public abstract Printer visitAnnotation(
- final String name,
- final String desc);
+ public abstract Printer visitAnnotation(final String name, final String desc);
/**
- * Annotation array value.
- * See {@link org.objectweb.asm.AnnotationVisitor#visitArray}.
+ * Annotation array value. See
+ * {@link scala.tools.asm.AnnotationVisitor#visitArray}.
*/
public abstract Printer visitArray(final String name);
/**
- * Annotation end.
- * See {@link org.objectweb.asm.AnnotationVisitor#visitEnd}.
+ * Annotation end. See {@link scala.tools.asm.AnnotationVisitor#visitEnd}.
*/
public abstract void visitAnnotationEnd();
@@ -271,22 +242,20 @@ public abstract class Printer {
// ------------------------------------------------------------------------
/**
- * Field annotation.
- * See {@link org.objectweb.asm.FieldVisitor#visitAnnotation}.
+ * Field annotation. See
+ * {@link scala.tools.asm.FieldVisitor#visitAnnotation}.
*/
- public abstract Printer visitFieldAnnotation(
- final String desc,
- final boolean visible);
+ public abstract Printer visitFieldAnnotation(final String desc,
+ final boolean visible);
/**
- * Field attribute.
- * See {@link org.objectweb.asm.FieldVisitor#visitAttribute}.
+ * Field attribute. See
+ * {@link scala.tools.asm.FieldVisitor#visitAttribute}.
*/
public abstract void visitFieldAttribute(final Attribute attr);
/**
- * Field end.
- * See {@link org.objectweb.asm.FieldVisitor#visitEnd}.
+ * Field end. See {@link scala.tools.asm.FieldVisitor#visitEnd}.
*/
public abstract void visitFieldEnd();
@@ -295,193 +264,161 @@ public abstract class Printer {
// ------------------------------------------------------------------------
/**
- * Method default annotation.
- * See {@link org.objectweb.asm.MethodVisitor#visitAnnotationDefault}.
+ * Method default annotation. See
+ * {@link scala.tools.asm.MethodVisitor#visitAnnotationDefault}.
*/
public abstract Printer visitAnnotationDefault();
/**
- * Method annotation.
- * See {@link org.objectweb.asm.MethodVisitor#visitAnnotation}.
+ * Method annotation. See
+ * {@link scala.tools.asm.MethodVisitor#visitAnnotation}.
*/
- public abstract Printer visitMethodAnnotation(
- final String desc,
- final boolean visible);
+ public abstract Printer visitMethodAnnotation(final String desc,
+ final boolean visible);
/**
- * Method parameter annotation.
- * See {@link org.objectweb.asm.MethodVisitor#visitParameterAnnotation}.
+ * Method parameter annotation. See
+ * {@link scala.tools.asm.MethodVisitor#visitParameterAnnotation}.
*/
- public abstract Printer visitParameterAnnotation(
- final int parameter,
- final String desc,
- final boolean visible);
+ public abstract Printer visitParameterAnnotation(final int parameter,
+ final String desc, final boolean visible);
/**
- * Method attribute.
- * See {@link org.objectweb.asm.MethodVisitor#visitAttribute}.
+ * Method attribute. See
+ * {@link scala.tools.asm.MethodVisitor#visitAttribute}.
*/
public abstract void visitMethodAttribute(final Attribute attr);
/**
- * Method start.
- * See {@link org.objectweb.asm.MethodVisitor#visitCode}.
+ * Method start. See {@link scala.tools.asm.MethodVisitor#visitCode}.
*/
public abstract void visitCode();
/**
- * Method stack frame.
- * See {@link org.objectweb.asm.MethodVisitor#visitFrame}.
+ * Method stack frame. See
+ * {@link scala.tools.asm.MethodVisitor#visitFrame}.
*/
- public abstract void visitFrame(
- final int type,
- final int nLocal,
- final Object[] local,
- final int nStack,
- final Object[] stack);
+ public abstract void visitFrame(final int type, final int nLocal,
+ final Object[] local, final int nStack, final Object[] stack);
/**
- * Method instruction.
- * See {@link org.objectweb.asm.MethodVisitor#visitInsn}.
+ * Method instruction. See {@link scala.tools.asm.MethodVisitor#visitInsn}
+ * .
*/
public abstract void visitInsn(final int opcode);
/**
- * Method instruction.
- * See {@link org.objectweb.asm.MethodVisitor#visitIntInsn}.
+ * Method instruction. See
+ * {@link scala.tools.asm.MethodVisitor#visitIntInsn}.
*/
public abstract void visitIntInsn(final int opcode, final int operand);
/**
- * Method instruction.
- * See {@link org.objectweb.asm.MethodVisitor#visitVarInsn}.
+ * Method instruction. See
+ * {@link scala.tools.asm.MethodVisitor#visitVarInsn}.
*/
public abstract void visitVarInsn(final int opcode, final int var);
/**
- * Method instruction.
- * See {@link org.objectweb.asm.MethodVisitor#visitTypeInsn}.
+ * Method instruction. See
+ * {@link scala.tools.asm.MethodVisitor#visitTypeInsn}.
*/
public abstract void visitTypeInsn(final int opcode, final String type);
/**
- * Method instruction.
- * See {@link org.objectweb.asm.MethodVisitor#visitFieldInsn}.
+ * Method instruction. See
+ * {@link scala.tools.asm.MethodVisitor#visitFieldInsn}.
*/
- public abstract void visitFieldInsn(
- final int opcode,
- final String owner,
- final String name,
- final String desc);
+ public abstract void visitFieldInsn(final int opcode, final String owner,
+ final String name, final String desc);
/**
- * Method instruction.
- * See {@link org.objectweb.asm.MethodVisitor#visitMethodInsn}.
+ * Method instruction. See
+ * {@link scala.tools.asm.MethodVisitor#visitMethodInsn}.
*/
- public abstract void visitMethodInsn(
- final int opcode,
- final String owner,
- final String name,
- final String desc);
+ public abstract void visitMethodInsn(final int opcode, final String owner,
+ final String name, final String desc);
/**
- * Method instruction.
- * See {@link org.objectweb.asm.MethodVisitor#visitInvokeDynamicInsn}.
+ * Method instruction. See
+ * {@link scala.tools.asm.MethodVisitor#visitInvokeDynamicInsn}.
*/
- public abstract void visitInvokeDynamicInsn(
- String name,
- String desc,
- Handle bsm,
- Object... bsmArgs);
+ public abstract void visitInvokeDynamicInsn(String name, String desc,
+ Handle bsm, Object... bsmArgs);
/**
- * Method instruction.
- * See {@link org.objectweb.asm.MethodVisitor#visitJumpInsn}.
+ * Method instruction. See
+ * {@link scala.tools.asm.MethodVisitor#visitJumpInsn}.
*/
public abstract void visitJumpInsn(final int opcode, final Label label);
/**
- * Method label.
- * See {@link org.objectweb.asm.MethodVisitor#visitLabel}.
+ * Method label. See {@link scala.tools.asm.MethodVisitor#visitLabel}.
*/
public abstract void visitLabel(final Label label);
/**
- * Method instruction.
- * See {@link org.objectweb.asm.MethodVisitor#visitLdcInsn}.
+ * Method instruction. See
+ * {@link scala.tools.asm.MethodVisitor#visitLdcInsn}.
*/
public abstract void visitLdcInsn(final Object cst);
/**
- * Method instruction.
- * See {@link org.objectweb.asm.MethodVisitor#visitIincInsn}.
+ * Method instruction. See
+ * {@link scala.tools.asm.MethodVisitor#visitIincInsn}.
*/
public abstract void visitIincInsn(final int var, final int increment);
/**
- * Method instruction.
- * See {@link org.objectweb.asm.MethodVisitor#visitTableSwitchInsn}.
+ * Method instruction. See
+ * {@link scala.tools.asm.MethodVisitor#visitTableSwitchInsn}.
*/
- public abstract void visitTableSwitchInsn(
- final int min,
- final int max,
- final Label dflt,
- final Label... labels);
+ public abstract void visitTableSwitchInsn(final int min, final int max,
+ final Label dflt, final Label... labels);
/**
- * Method instruction.
- * See {@link org.objectweb.asm.MethodVisitor#visitLookupSwitchInsn}.
+ * Method instruction. See
+ * {@link scala.tools.asm.MethodVisitor#visitLookupSwitchInsn}.
*/
- public abstract void visitLookupSwitchInsn(
- final Label dflt,
- final int[] keys,
- final Label[] labels);
+ public abstract void visitLookupSwitchInsn(final Label dflt,
+ final int[] keys, final Label[] labels);
/**
- * Method instruction.
- * See {@link org.objectweb.asm.MethodVisitor#visitMultiANewArrayInsn}.
+ * Method instruction. See
+ * {@link scala.tools.asm.MethodVisitor#visitMultiANewArrayInsn}.
*/
- public abstract void visitMultiANewArrayInsn(
- final String desc,
- final int dims);
+ public abstract void visitMultiANewArrayInsn(final String desc,
+ final int dims);
/**
- * Method exception handler.
- * See {@link org.objectweb.asm.MethodVisitor#visitTryCatchBlock}.
+ * Method exception handler. See
+ * {@link scala.tools.asm.MethodVisitor#visitTryCatchBlock}.
*/
- public abstract void visitTryCatchBlock(
- final Label start,
- final Label end,
- final Label handler,
- final String type);
+ public abstract void visitTryCatchBlock(final Label start, final Label end,
+ final Label handler, final String type);
/**
- * Method debug info.
- * See {@link org.objectweb.asm.MethodVisitor#visitLocalVariable}.
+ * Method debug info. See
+ * {@link scala.tools.asm.MethodVisitor#visitLocalVariable}.
*/
- public abstract void visitLocalVariable(
- final String name,
- final String desc,
- final String signature,
- final Label start,
- final Label end,
- final int index);
+ public abstract void visitLocalVariable(final String name,
+ final String desc, final String signature, final Label start,
+ final Label end, final int index);
/**
- * Method debug info.
- * See {@link org.objectweb.asm.MethodVisitor#visitLineNumber}.
+ * Method debug info. See
+ * {@link scala.tools.asm.MethodVisitor#visitLineNumber}.
*/
public abstract void visitLineNumber(final int line, final Label start);
/**
- * Method max stack and max locals.
- * See {@link org.objectweb.asm.MethodVisitor#visitMaxs}.
+ * Method max stack and max locals. See
+ * {@link scala.tools.asm.MethodVisitor#visitMaxs}.
*/
public abstract void visitMaxs(final int maxStack, final int maxLocals);
/**
- * Method end.
- * See {@link org.objectweb.asm.MethodVisitor#visitEnd}.
+ * Method end. See {@link scala.tools.asm.MethodVisitor#visitEnd}.
*/
public abstract void visitMethodEnd();
@@ -497,7 +434,8 @@ public abstract class Printer {
/**
* Prints the text constructed by this visitor.
*
- * @param pw the print writer to be used.
+ * @param pw
+ * the print writer to be used.
*/
public void print(final PrintWriter pw) {
printList(pw, text);
@@ -506,8 +444,10 @@ public abstract class Printer {
/**
* Appends a quoted string to a given buffer.
*
- * @param buf the buffer where the string must be added.
- * @param s the string to be added.
+ * @param buf
+ * the buffer where the string must be added.
+ * @param s
+ * the string to be added.
*/
public static void appendString(final StringBuffer buf, final String s) {
buf.append('\"');
@@ -541,9 +481,11 @@ public abstract class Printer {
/**
* Prints the given string tree.
*
- * @param pw the writer to be used to print the tree.
- * @param l a string tree, i.e., a string list that can contain other string
- * lists, and so on recursively.
+ * @param pw
+ * the writer to be used to print the tree.
+ * @param l
+ * a string tree, i.e., a string list that can contain other
+ * string lists, and so on recursively.
*/
static void printList(final PrintWriter pw, final List<?> l) {
for (int i = 0; i < l.size(); ++i) {
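Editor's note: Printer.appendString, shown in the hunk above, quotes and escapes a Java string into a buffer. A minimal sketch, not part of the patch; the commented output assumes the usual escaping of quotes and newlines performed by that method.

import scala.tools.asm.util.Printer;  // abstract class, but appendString is public static

public class AppendStringDemo {
    public static void main(String[] args) {
        StringBuffer buf = new StringBuffer();
        // Appends the string surrounded by double quotes, escaping special characters.
        Printer.appendString(buf, "say \"hello\"\n");
        // Expected output (assumption about the escaping rules): "say \"hello\"\n"
        System.out.println(buf);
    }
}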
diff --git a/src/asm/scala/tools/asm/util/SignatureChecker.java b/src/asm/scala/tools/asm/util/SignatureChecker.java
deleted file mode 100644
index 71f0d80027..0000000000
--- a/src/asm/scala/tools/asm/util/SignatureChecker.java
+++ /dev/null
@@ -1,47 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- */
-
-package scala.tools.asm.util;
-
-import scala.tools.asm.util.CheckMethodAdapter;
-import scala.tools.asm.MethodVisitor;
-
-/**
- * A subclass of ASM's CheckMethodAdapter for the sole purpose of accessing some protected methods there.
- *
- */
-public class SignatureChecker extends CheckMethodAdapter {
-
- public SignatureChecker(final MethodVisitor mv) {
- super(mv);
- }
-
- /**
- * Checks a class signature.
- *
- * @param signature a string containing the signature that must be checked.
- */
- public static void checkClassSignature(final String signature) {
- CheckMethodAdapter.checkClassSignature(signature);
- }
-
- /**
- * Checks a method signature.
- *
- * @param signature a string containing the signature that must be checked.
- */
- public static void checkMethodSignature(final String signature) {
- CheckMethodAdapter.checkMethodSignature(signature);
- }
-
- /**
- * Checks a field signature.
- *
- * @param signature a string containing the signature that must be checked.
- */
- public static void checkFieldSignature(final String signature) {
- CheckMethodAdapter.checkFieldSignature(signature);
- }
-
-}
diff --git a/src/asm/scala/tools/asm/util/Textifiable.java b/src/asm/scala/tools/asm/util/Textifiable.java
index b80d0139db..85e051e2f8 100644
--- a/src/asm/scala/tools/asm/util/Textifiable.java
+++ b/src/asm/scala/tools/asm/util/Textifiable.java
@@ -34,7 +34,7 @@ import java.util.Map;
import scala.tools.asm.Label;
/**
- * An {@link org.objectweb.asm.Attribute Attribute} that can print a readable
+ * An {@link scala.tools.asm.Attribute Attribute} that can print a readable
* representation of itself.
*
* Implementations should construct readable output from an attribute data
@@ -47,8 +47,10 @@ public interface Textifiable {
/**
* Build a human readable representation of this attribute.
*
- * @param buf a buffer used for printing Java code.
- * @param labelNames map of label instances to their names.
+ * @param buf
+ * a buffer used for printing Java code.
+ * @param labelNames
+ * map of label instances to their names.
*/
void textify(StringBuffer buf, Map<Label, String> labelNames);
}
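Editor's note: a minimal sketch of the Textifiable contract, using a hypothetical CommentAttribute that is not part of the patch. It only assumes Attribute's protected String constructor and the textify signature shown above.

import java.util.Map;

import scala.tools.asm.Attribute;
import scala.tools.asm.Label;
import scala.tools.asm.util.Textifiable;

// Hypothetical custom attribute used only to illustrate the Textifiable contract.
public class CommentAttribute extends Attribute implements Textifiable {

    private final String comment;

    public CommentAttribute(final String comment) {
        super("Comment");  // attribute type name
        this.comment = comment;
    }

    @Override
    public void textify(StringBuffer buf, Map<Label, String> labelNames) {
        // Build a readable, single-line representation of the attribute data.
        buf.append("// comment: ").append(comment).append('\n');
    }
}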
diff --git a/src/asm/scala/tools/asm/util/Textifier.java b/src/asm/scala/tools/asm/util/Textifier.java
index 8d40ebd026..a5c4f6779e 100644
--- a/src/asm/scala/tools/asm/util/Textifier.java
+++ b/src/asm/scala/tools/asm/util/Textifier.java
@@ -149,22 +149,24 @@ public class Textifier extends Printer {
/**
* Constructs a new {@link Textifier}.
*
- * @param api the ASM API version implemented by this visitor. Must be one
- * of {@link Opcodes#ASM4}.
+ * @param api
+ * the ASM API version implemented by this visitor. Must be one
+ * of {@link Opcodes#ASM4}.
*/
protected Textifier(final int api) {
super(api);
}
/**
- * Prints a disassembled view of the given class to the standard output. <p>
- * Usage: Textifier [-debug] &lt;binary class name or class
- * file name &gt;
+ * Prints a disassembled view of the given class to the standard output.
+ * <p>
+ * Usage: Textifier [-debug] &lt;binary class name or class file name &gt;
*
- * @param args the command line arguments.
+ * @param args
+ * the command line arguments.
*
- * @throws Exception if the class cannot be found, or if an IO exception
- * occurs.
+ * @throws Exception
+ * if the class cannot be found, or if an IO exception occurs.
*/
public static void main(final String[] args) throws Exception {
int i = 0;
@@ -182,21 +184,20 @@ public class Textifier extends Printer {
}
}
if (!ok) {
- System.err.println("Prints a disassembled view of the given class.");
+ System.err
+ .println("Prints a disassembled view of the given class.");
System.err.println("Usage: Textifier [-debug] "
+ "<fully qualified class name or class file name>");
return;
}
ClassReader cr;
if (args[i].endsWith(".class") || args[i].indexOf('\\') > -1
- || args[i].indexOf('/') > -1)
- {
+ || args[i].indexOf('/') > -1) {
cr = new ClassReader(new FileInputStream(args[i]));
} else {
cr = new ClassReader(args[i]);
}
- cr.accept(new TraceClassVisitor(new PrintWriter(System.out)),
- flags);
+ cr.accept(new TraceClassVisitor(new PrintWriter(System.out)), flags);
}
// ------------------------------------------------------------------------
@@ -204,38 +205,27 @@ public class Textifier extends Printer {
// ------------------------------------------------------------------------
@Override
- public void visit(
- final int version,
- final int access,
- final String name,
- final String signature,
- final String superName,
- final String[] interfaces)
- {
+ public void visit(final int version, final int access, final String name,
+ final String signature, final String superName,
+ final String[] interfaces) {
int major = version & 0xFFFF;
int minor = version >>> 16;
buf.setLength(0);
- buf.append("// class version ")
- .append(major)
- .append('.')
- .append(minor)
- .append(" (")
- .append(version)
- .append(")\n");
+ buf.append("// class version ").append(major).append('.').append(minor)
+ .append(" (").append(version).append(")\n");
if ((access & Opcodes.ACC_DEPRECATED) != 0) {
buf.append("// DEPRECATED\n");
}
- buf.append("// access flags 0x").append(Integer.toHexString(access).toUpperCase()).append('\n');
+ buf.append("// access flags 0x")
+ .append(Integer.toHexString(access).toUpperCase()).append('\n');
appendDescriptor(CLASS_SIGNATURE, signature);
if (signature != null) {
TraceSignatureVisitor sv = new TraceSignatureVisitor(access);
SignatureReader r = new SignatureReader(signature);
r.accept(sv);
- buf.append("// declaration: ")
- .append(name)
- .append(sv.getDeclaration())
- .append('\n');
+ buf.append("// declaration: ").append(name)
+ .append(sv.getDeclaration()).append('\n');
}
appendAccess(access & ~Opcodes.ACC_SUPER);
@@ -269,15 +259,11 @@ public class Textifier extends Printer {
public void visitSource(final String file, final String debug) {
buf.setLength(0);
if (file != null) {
- buf.append(tab)
- .append("// compiled from: ")
- .append(file)
+ buf.append(tab).append("// compiled from: ").append(file)
.append('\n');
}
if (debug != null) {
- buf.append(tab)
- .append("// debug info: ")
- .append(debug)
+ buf.append(tab).append("// debug info: ").append(debug)
.append('\n');
}
if (buf.length() > 0) {
@@ -286,11 +272,8 @@ public class Textifier extends Printer {
}
@Override
- public void visitOuterClass(
- final String owner,
- final String name,
- final String desc)
- {
+ public void visitOuterClass(final String owner, final String name,
+ final String desc) {
buf.setLength(0);
buf.append(tab).append("OUTERCLASS ");
appendDescriptor(INTERNAL_NAME, owner);
@@ -304,10 +287,8 @@ public class Textifier extends Printer {
}
@Override
- public Textifier visitClassAnnotation(
- final String desc,
- final boolean visible)
- {
+ public Textifier visitClassAnnotation(final String desc,
+ final boolean visible) {
text.add("\n");
return visitAnnotation(desc, visible);
}
@@ -319,15 +300,13 @@ public class Textifier extends Printer {
}
@Override
- public void visitInnerClass(
- final String name,
- final String outerName,
- final String innerName,
- final int access)
- {
+ public void visitInnerClass(final String name, final String outerName,
+ final String innerName, final int access) {
buf.setLength(0);
buf.append(tab).append("// access flags 0x");
- buf.append(Integer.toHexString(access & ~Opcodes.ACC_SUPER).toUpperCase()).append('\n');
+ buf.append(
+ Integer.toHexString(access & ~Opcodes.ACC_SUPER).toUpperCase())
+ .append('\n');
buf.append(tab);
appendAccess(access);
buf.append("INNERCLASS ");
@@ -341,19 +320,15 @@ public class Textifier extends Printer {
}
@Override
- public Textifier visitField(
- final int access,
- final String name,
- final String desc,
- final String signature,
- final Object value)
- {
+ public Textifier visitField(final int access, final String name,
+ final String desc, final String signature, final Object value) {
buf.setLength(0);
buf.append('\n');
if ((access & Opcodes.ACC_DEPRECATED) != 0) {
buf.append(tab).append("// DEPRECATED\n");
}
- buf.append(tab).append("// access flags 0x").append(Integer.toHexString(access).toUpperCase()).append('\n');
+ buf.append(tab).append("// access flags 0x")
+ .append(Integer.toHexString(access).toUpperCase()).append('\n');
if (signature != null) {
buf.append(tab);
appendDescriptor(FIELD_SIGNATURE, signature);
@@ -361,10 +336,8 @@ public class Textifier extends Printer {
TraceSignatureVisitor sv = new TraceSignatureVisitor(0);
SignatureReader r = new SignatureReader(signature);
r.acceptType(sv);
- buf.append(tab)
- .append("// declaration: ")
- .append(sv.getDeclaration())
- .append('\n');
+ buf.append(tab).append("// declaration: ")
+ .append(sv.getDeclaration()).append('\n');
}
buf.append(tab);
@@ -390,19 +363,15 @@ public class Textifier extends Printer {
}
@Override
- public Textifier visitMethod(
- final int access,
- final String name,
- final String desc,
- final String signature,
- final String[] exceptions)
- {
+ public Textifier visitMethod(final int access, final String name,
+ final String desc, final String signature, final String[] exceptions) {
buf.setLength(0);
buf.append('\n');
if ((access & Opcodes.ACC_DEPRECATED) != 0) {
buf.append(tab).append("// DEPRECATED\n");
}
- buf.append(tab).append("// access flags 0x").append(Integer.toHexString(access).toUpperCase()).append('\n');
+ buf.append(tab).append("// access flags 0x")
+ .append(Integer.toHexString(access).toUpperCase()).append('\n');
if (signature != null) {
buf.append(tab);
@@ -415,12 +384,8 @@ public class Textifier extends Printer {
String genericReturn = v.getReturnType();
String genericExceptions = v.getExceptions();
- buf.append(tab)
- .append("// declaration: ")
- .append(genericReturn)
- .append(' ')
- .append(name)
- .append(genericDecl);
+ buf.append(tab).append("// declaration: ").append(genericReturn)
+ .append(' ').append(name).append(genericDecl);
if (genericExceptions != null) {
buf.append(" throws ").append(genericExceptions);
}
@@ -593,11 +558,8 @@ public class Textifier extends Printer {
}
@Override
- public void visitEnum(
- final String name,
- final String desc,
- final String value)
- {
+ public void visitEnum(final String name, final String desc,
+ final String value) {
buf.setLength(0);
appendComa(valueNumber++);
if (name != null) {
@@ -609,10 +571,7 @@ public class Textifier extends Printer {
}
@Override
- public Textifier visitAnnotation(
- final String name,
- final String desc)
- {
+ public Textifier visitAnnotation(final String name, final String desc) {
buf.setLength(0);
appendComa(valueNumber++);
if (name != null) {
@@ -629,9 +588,7 @@ public class Textifier extends Printer {
}
@Override
- public Textifier visitArray(
- final String name)
- {
+ public Textifier visitArray(final String name) {
buf.setLength(0);
appendComa(valueNumber++);
if (name != null) {
@@ -654,10 +611,8 @@ public class Textifier extends Printer {
// ------------------------------------------------------------------------
@Override
- public Textifier visitFieldAnnotation(
- final String desc,
- final boolean visible)
- {
+ public Textifier visitFieldAnnotation(final String desc,
+ final boolean visible) {
return visitAnnotation(desc, visible);
}
@@ -684,19 +639,14 @@ public class Textifier extends Printer {
}
@Override
- public Textifier visitMethodAnnotation(
- final String desc,
- final boolean visible)
- {
+ public Textifier visitMethodAnnotation(final String desc,
+ final boolean visible) {
return visitAnnotation(desc, visible);
}
@Override
- public Textifier visitParameterAnnotation(
- final int parameter,
- final String desc,
- final boolean visible)
- {
+ public Textifier visitParameterAnnotation(final int parameter,
+ final String desc, final boolean visible) {
buf.setLength(0);
buf.append(tab2).append('@');
appendDescriptor(FIELD_DESCRIPTOR, desc);
@@ -730,40 +680,35 @@ public class Textifier extends Printer {
}
@Override
- public void visitFrame(
- final int type,
- final int nLocal,
- final Object[] local,
- final int nStack,
- final Object[] stack)
- {
+ public void visitFrame(final int type, final int nLocal,
+ final Object[] local, final int nStack, final Object[] stack) {
buf.setLength(0);
buf.append(ltab);
buf.append("FRAME ");
switch (type) {
- case Opcodes.F_NEW:
- case Opcodes.F_FULL:
- buf.append("FULL [");
- appendFrameTypes(nLocal, local);
- buf.append("] [");
- appendFrameTypes(nStack, stack);
- buf.append(']');
- break;
- case Opcodes.F_APPEND:
- buf.append("APPEND [");
- appendFrameTypes(nLocal, local);
- buf.append(']');
- break;
- case Opcodes.F_CHOP:
- buf.append("CHOP ").append(nLocal);
- break;
- case Opcodes.F_SAME:
- buf.append("SAME");
- break;
- case Opcodes.F_SAME1:
- buf.append("SAME1 ");
- appendFrameTypes(1, stack);
- break;
+ case Opcodes.F_NEW:
+ case Opcodes.F_FULL:
+ buf.append("FULL [");
+ appendFrameTypes(nLocal, local);
+ buf.append("] [");
+ appendFrameTypes(nStack, stack);
+ buf.append(']');
+ break;
+ case Opcodes.F_APPEND:
+ buf.append("APPEND [");
+ appendFrameTypes(nLocal, local);
+ buf.append(']');
+ break;
+ case Opcodes.F_CHOP:
+ buf.append("CHOP ").append(nLocal);
+ break;
+ case Opcodes.F_SAME:
+ buf.append("SAME");
+ break;
+ case Opcodes.F_SAME1:
+ buf.append("SAME1 ");
+ appendFrameTypes(1, stack);
+ break;
}
buf.append('\n');
text.add(buf.toString());
@@ -782,20 +727,15 @@ public class Textifier extends Printer {
buf.append(tab2)
.append(OPCODES[opcode])
.append(' ')
- .append(opcode == Opcodes.NEWARRAY
- ? TYPES[operand]
- : Integer.toString(operand))
- .append('\n');
+ .append(opcode == Opcodes.NEWARRAY ? TYPES[operand] : Integer
+ .toString(operand)).append('\n');
text.add(buf.toString());
}
@Override
public void visitVarInsn(final int opcode, final int var) {
buf.setLength(0);
- buf.append(tab2)
- .append(OPCODES[opcode])
- .append(' ')
- .append(var)
+ buf.append(tab2).append(OPCODES[opcode]).append(' ').append(var)
.append('\n');
text.add(buf.toString());
}
@@ -810,12 +750,8 @@ public class Textifier extends Printer {
}
@Override
- public void visitFieldInsn(
- final int opcode,
- final String owner,
- final String name,
- final String desc)
- {
+ public void visitFieldInsn(final int opcode, final String owner,
+ final String name, final String desc) {
buf.setLength(0);
buf.append(tab2).append(OPCODES[opcode]).append(' ');
appendDescriptor(INTERNAL_NAME, owner);
@@ -826,12 +762,8 @@ public class Textifier extends Printer {
}
@Override
- public void visitMethodInsn(
- final int opcode,
- final String owner,
- final String name,
- final String desc)
- {
+ public void visitMethodInsn(final int opcode, final String owner,
+ final String name, final String desc) {
buf.setLength(0);
buf.append(tab2).append(OPCODES[opcode]).append(' ');
appendDescriptor(INTERNAL_NAME, owner);
@@ -842,12 +774,8 @@ public class Textifier extends Printer {
}
@Override
- public void visitInvokeDynamicInsn(
- String name,
- String desc,
- Handle bsm,
- Object... bsmArgs)
- {
+ public void visitInvokeDynamicInsn(String name, String desc, Handle bsm,
+ Object... bsmArgs) {
buf.setLength(0);
buf.append(tab2).append("INVOKEDYNAMIC").append(' ');
buf.append(name);
@@ -855,11 +783,11 @@ public class Textifier extends Printer {
buf.append(" [");
appendHandle(bsm);
buf.append(tab3).append("// arguments:");
- if(bsmArgs.length == 0) {
+ if (bsmArgs.length == 0) {
buf.append(" none");
} else {
buf.append('\n').append(tab3);
- for(int i = 0; i < bsmArgs.length; i++) {
+ for (int i = 0; i < bsmArgs.length; i++) {
Object cst = bsmArgs[i];
if (cst instanceof String) {
Printer.appendString(buf, (String) cst);
@@ -915,22 +843,14 @@ public class Textifier extends Printer {
@Override
public void visitIincInsn(final int var, final int increment) {
buf.setLength(0);
- buf.append(tab2)
- .append("IINC ")
- .append(var)
- .append(' ')
- .append(increment)
- .append('\n');
+ buf.append(tab2).append("IINC ").append(var).append(' ')
+ .append(increment).append('\n');
text.add(buf.toString());
}
@Override
- public void visitTableSwitchInsn(
- final int min,
- final int max,
- final Label dflt,
- final Label... labels)
- {
+ public void visitTableSwitchInsn(final int min, final int max,
+ final Label dflt, final Label... labels) {
buf.setLength(0);
buf.append(tab2).append("TABLESWITCH\n");
for (int i = 0; i < labels.length; ++i) {
@@ -945,11 +865,8 @@ public class Textifier extends Printer {
}
@Override
- public void visitLookupSwitchInsn(
- final Label dflt,
- final int[] keys,
- final Label[] labels)
- {
+ public void visitLookupSwitchInsn(final Label dflt, final int[] keys,
+ final Label[] labels) {
buf.setLength(0);
buf.append(tab2).append("LOOKUPSWITCH\n");
for (int i = 0; i < labels.length; ++i) {
@@ -973,12 +890,8 @@ public class Textifier extends Printer {
}
@Override
- public void visitTryCatchBlock(
- final Label start,
- final Label end,
- final Label handler,
- final String type)
- {
+ public void visitTryCatchBlock(final Label start, final Label end,
+ final Label handler, final String type) {
buf.setLength(0);
buf.append(tab2).append("TRYCATCHBLOCK ");
appendLabel(start);
@@ -993,14 +906,9 @@ public class Textifier extends Printer {
}
@Override
- public void visitLocalVariable(
- final String name,
- final String desc,
- final String signature,
- final Label start,
- final Label end,
- final int index)
- {
+ public void visitLocalVariable(final String name, final String desc,
+ final String signature, final Label start, final Label end,
+ final int index) {
buf.setLength(0);
buf.append(tab2).append("LOCALVARIABLE ").append(name).append(' ');
appendDescriptor(FIELD_DESCRIPTOR, desc);
@@ -1017,10 +925,8 @@ public class Textifier extends Printer {
TraceSignatureVisitor sv = new TraceSignatureVisitor(0);
SignatureReader r = new SignatureReader(signature);
r.acceptType(sv);
- buf.append(tab2)
- .append("// declaration: ")
- .append(sv.getDeclaration())
- .append('\n');
+ buf.append(tab2).append("// declaration: ")
+ .append(sv.getDeclaration()).append('\n');
}
text.add(buf.toString());
}
@@ -1056,14 +962,13 @@ public class Textifier extends Printer {
/**
* Prints a disassembled view of the given annotation.
*
- * @param desc the class descriptor of the annotation class.
- * @param visible <tt>true</tt> if the annotation is visible at runtime.
+ * @param desc
+ * the class descriptor of the annotation class.
+ * @param visible
+ * <tt>true</tt> if the annotation is visible at runtime.
* @return a visitor to visit the annotation values.
*/
- public Textifier visitAnnotation(
- final String desc,
- final boolean visible)
- {
+ public Textifier visitAnnotation(final String desc, final boolean visible) {
buf.setLength(0);
buf.append(tab).append('@');
appendDescriptor(FIELD_DESCRIPTOR, desc);
@@ -1078,7 +983,8 @@ public class Textifier extends Printer {
/**
* Prints a disassembled view of the given attribute.
*
- * @param attr an attribute.
+ * @param attr
+ * an attribute.
*/
public void visitAttribute(final Attribute attr) {
buf.setLength(0);
@@ -1111,15 +1017,16 @@ public class Textifier extends Printer {
* Appends an internal name, a type descriptor or a type signature to
* {@link #buf buf}.
*
- * @param type indicates if desc is an internal name, a field descriptor, a
- * method descriptor, a class signature, ...
- * @param desc an internal name, type descriptor, or type signature. May be
- * <tt>null</tt>.
+ * @param type
+ * indicates if desc is an internal name, a field descriptor, a
+ * method descriptor, a class signature, ...
+ * @param desc
+ * an internal name, type descriptor, or type signature. May be
+ * <tt>null</tt>.
*/
protected void appendDescriptor(final int type, final String desc) {
if (type == CLASS_SIGNATURE || type == FIELD_SIGNATURE
- || type == METHOD_SIGNATURE)
- {
+ || type == METHOD_SIGNATURE) {
if (desc != null) {
buf.append("// signature ").append(desc).append('\n');
}
@@ -1132,7 +1039,8 @@ public class Textifier extends Printer {
* Appends the name of the given label to {@link #buf buf}. Creates a new
* label name if the given label does not yet have one.
*
- * @param l a label.
+ * @param l
+ * a label.
*/
protected void appendLabel(final Label l) {
if (labelNames == null) {
@@ -1149,40 +1057,42 @@ public class Textifier extends Printer {
/**
* Appends the information about the given handle to {@link #buf buf}.
*
- * @param h a handle, non null.
+ * @param h
+ * a handle, non null.
*/
protected void appendHandle(final Handle h) {
buf.append('\n').append(tab3);
int tag = h.getTag();
- buf.append("// handle kind 0x").append(Integer.toHexString(tag)).append(" : ");
+ buf.append("// handle kind 0x").append(Integer.toHexString(tag))
+ .append(" : ");
switch (tag) {
- case Opcodes.H_GETFIELD:
- buf.append("GETFIELD");
- break;
- case Opcodes.H_GETSTATIC:
- buf.append("GETSTATIC");
- break;
- case Opcodes.H_PUTFIELD:
- buf.append("PUTFIELD");
- break;
- case Opcodes.H_PUTSTATIC:
- buf.append("PUTSTATIC");
- break;
- case Opcodes.H_INVOKEINTERFACE:
- buf.append("INVOKEINTERFACE");
- break;
- case Opcodes.H_INVOKESPECIAL:
- buf.append("INVOKESPECIAL");
- break;
- case Opcodes.H_INVOKESTATIC:
- buf.append("INVOKESTATIC");
- break;
- case Opcodes.H_INVOKEVIRTUAL:
- buf.append("INVOKEVIRTUAL");
- break;
- case Opcodes.H_NEWINVOKESPECIAL:
- buf.append("NEWINVOKESPECIAL");
- break;
+ case Opcodes.H_GETFIELD:
+ buf.append("GETFIELD");
+ break;
+ case Opcodes.H_GETSTATIC:
+ buf.append("GETSTATIC");
+ break;
+ case Opcodes.H_PUTFIELD:
+ buf.append("PUTFIELD");
+ break;
+ case Opcodes.H_PUTSTATIC:
+ buf.append("PUTSTATIC");
+ break;
+ case Opcodes.H_INVOKEINTERFACE:
+ buf.append("INVOKEINTERFACE");
+ break;
+ case Opcodes.H_INVOKESPECIAL:
+ buf.append("INVOKESPECIAL");
+ break;
+ case Opcodes.H_INVOKESTATIC:
+ buf.append("INVOKESTATIC");
+ break;
+ case Opcodes.H_INVOKEVIRTUAL:
+ buf.append("INVOKEVIRTUAL");
+ break;
+ case Opcodes.H_NEWINVOKESPECIAL:
+ buf.append("NEWINVOKESPECIAL");
+ break;
}
buf.append('\n');
buf.append(tab3);
@@ -1195,10 +1105,11 @@ public class Textifier extends Printer {
}
/**
- * Appends a string representation of the given access modifiers to {@link
- * #buf buf}.
+ * Appends a string representation of the given access modifiers to
+ * {@link #buf buf}.
*
- * @param access some access modifiers.
+ * @param access
+ * some access modifiers.
*/
private void appendAccess(final int access) {
if ((access & Opcodes.ACC_PUBLIC) != 0) {
@@ -1231,6 +1142,9 @@ public class Textifier extends Printer {
if ((access & Opcodes.ACC_STRICT) != 0) {
buf.append("strictfp ");
}
+ if ((access & Opcodes.ACC_SYNTHETIC) != 0) {
+ buf.append("synthetic ");
+ }
if ((access & Opcodes.ACC_ENUM) != 0) {
buf.append("enum ");
}
@@ -1256,27 +1170,27 @@ public class Textifier extends Printer {
}
} else if (o[i] instanceof Integer) {
switch (((Integer) o[i]).intValue()) {
- case 0:
- appendDescriptor(FIELD_DESCRIPTOR, "T");
- break;
- case 1:
- appendDescriptor(FIELD_DESCRIPTOR, "I");
- break;
- case 2:
- appendDescriptor(FIELD_DESCRIPTOR, "F");
- break;
- case 3:
- appendDescriptor(FIELD_DESCRIPTOR, "D");
- break;
- case 4:
- appendDescriptor(FIELD_DESCRIPTOR, "J");
- break;
- case 5:
- appendDescriptor(FIELD_DESCRIPTOR, "N");
- break;
- case 6:
- appendDescriptor(FIELD_DESCRIPTOR, "U");
- break;
+ case 0:
+ appendDescriptor(FIELD_DESCRIPTOR, "T");
+ break;
+ case 1:
+ appendDescriptor(FIELD_DESCRIPTOR, "I");
+ break;
+ case 2:
+ appendDescriptor(FIELD_DESCRIPTOR, "F");
+ break;
+ case 3:
+ appendDescriptor(FIELD_DESCRIPTOR, "D");
+ break;
+ case 4:
+ appendDescriptor(FIELD_DESCRIPTOR, "J");
+ break;
+ case 5:
+ appendDescriptor(FIELD_DESCRIPTOR, "N");
+ break;
+ case 6:
+ appendDescriptor(FIELD_DESCRIPTOR, "U");
+ break;
}
} else {
appendLabel((Label) o[i]);
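Editor's note: the Textifier.main method reformatted above boils down to two calls; this sketch reproduces the same wiring programmatically, using only the constructors and methods visible in the hunk (ClassReader by class name, TraceClassVisitor over a PrintWriter).

import java.io.PrintWriter;

import scala.tools.asm.ClassReader;
import scala.tools.asm.util.TraceClassVisitor;

public class Disassemble {
    public static void main(String[] args) throws Exception {
        // Read the class from the classpath and print its disassembly,
        // exactly as Textifier.main does without the -debug handling.
        ClassReader cr = new ClassReader("java.lang.Runnable");
        cr.accept(new TraceClassVisitor(new PrintWriter(System.out)), 0);
    }
}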
diff --git a/src/asm/scala/tools/asm/util/TraceAnnotationVisitor.java b/src/asm/scala/tools/asm/util/TraceAnnotationVisitor.java
index f112609031..33e7cf0b26 100644
--- a/src/asm/scala/tools/asm/util/TraceAnnotationVisitor.java
+++ b/src/asm/scala/tools/asm/util/TraceAnnotationVisitor.java
@@ -58,33 +58,26 @@ public final class TraceAnnotationVisitor extends AnnotationVisitor {
}
@Override
- public void visitEnum(
- final String name,
- final String desc,
- final String value)
- {
+ public void visitEnum(final String name, final String desc,
+ final String value) {
p.visitEnum(name, desc, value);
super.visitEnum(name, desc, value);
}
@Override
- public AnnotationVisitor visitAnnotation(
- final String name,
- final String desc)
- {
+ public AnnotationVisitor visitAnnotation(final String name,
+ final String desc) {
Printer p = this.p.visitAnnotation(name, desc);
- AnnotationVisitor av = this.av == null
- ? null
- : this.av.visitAnnotation(name, desc);
+ AnnotationVisitor av = this.av == null ? null : this.av
+ .visitAnnotation(name, desc);
return new TraceAnnotationVisitor(av, p);
}
@Override
public AnnotationVisitor visitArray(final String name) {
Printer p = this.p.visitArray(name);
- AnnotationVisitor av = this.av == null
- ? null
- : this.av.visitArray(name);
+ AnnotationVisitor av = this.av == null ? null : this.av
+ .visitArray(name);
return new TraceAnnotationVisitor(av, p);
}
diff --git a/src/asm/scala/tools/asm/util/TraceClassVisitor.java b/src/asm/scala/tools/asm/util/TraceClassVisitor.java
index bb830b71ce..ff7a017482 100644
--- a/src/asm/scala/tools/asm/util/TraceClassVisitor.java
+++ b/src/asm/scala/tools/asm/util/TraceClassVisitor.java
@@ -42,30 +42,41 @@ import scala.tools.asm.Opcodes;
* A {@link ClassVisitor} that prints the classes it visits with a
* {@link Printer}. This class visitor can be used in the middle of a class
* visitor chain to trace the class that is visited at a given point in this
- * chain. This may be useful for debugging purposes. <p> The trace printed when
- * visiting the <tt>Hello</tt> class is the following: <p> <blockquote>
- *
- * <pre> // class version 49.0 (49) // access flags 0x21 public class Hello {
- *
+ * chain. This may be useful for debugging purposes.
+ * <p>
+ * The trace printed when visiting the <tt>Hello</tt> class is the following:
+ * <p>
+ * <blockquote>
+ *
+ * <pre>
+ * // class version 49.0 (49) // access flags 0x21 public class Hello {
+ *
* // compiled from: Hello.java
- *
+ *
* // access flags 0x1 public &lt;init&gt; ()V ALOAD 0 INVOKESPECIAL
* java/lang/Object &lt;init&gt; ()V RETURN MAXSTACK = 1 MAXLOCALS = 1
- *
+ *
* // access flags 0x9 public static main ([Ljava/lang/String;)V GETSTATIC
* java/lang/System out Ljava/io/PrintStream; LDC &quot;hello&quot;
* INVOKEVIRTUAL java/io/PrintStream println (Ljava/lang/String;)V RETURN
- * MAXSTACK = 2 MAXLOCALS = 1 } </pre>
- *
- * </blockquote> where <tt>Hello</tt> is defined by: <p> <blockquote>
- *
- * <pre> public class Hello {
- *
- * public static void main(String[] args) {
- * System.out.println(&quot;hello&quot;); } } </pre>
- *
+ * MAXSTACK = 2 MAXLOCALS = 1 }
+ * </pre>
+ *
+ * </blockquote> where <tt>Hello</tt> is defined by:
+ * <p>
+ * <blockquote>
+ *
+ * <pre>
+ * public class Hello {
+ *
+ * public static void main(String[] args) {
+ * System.out.println(&quot;hello&quot;);
+ * }
+ * }
+ * </pre>
+ *
* </blockquote>
- *
+ *
* @author Eric Bruneton
* @author Eugene Kuleshov
*/
@@ -83,8 +94,9 @@ public final class TraceClassVisitor extends ClassVisitor {
/**
* Constructs a new {@link TraceClassVisitor}.
- *
- * @param pw the print writer to be used to print the class.
+ *
+ * @param pw
+ * the print writer to be used to print the class.
*/
public TraceClassVisitor(final PrintWriter pw) {
this(null, pw);
@@ -92,10 +104,12 @@ public final class TraceClassVisitor extends ClassVisitor {
/**
* Constructs a new {@link TraceClassVisitor}.
- *
- * @param cv the {@link ClassVisitor} to which this visitor delegates calls.
- * May be <tt>null</tt>.
- * @param pw the print writer to be used to print the class.
+ *
+ * @param cv
+ * the {@link ClassVisitor} to which this visitor delegates
+ * calls. May be <tt>null</tt>.
+ * @param pw
+ * the print writer to be used to print the class.
*/
public TraceClassVisitor(final ClassVisitor cv, final PrintWriter pw) {
this(cv, new Textifier(), pw);
@@ -103,33 +117,28 @@ public final class TraceClassVisitor extends ClassVisitor {
/**
* Constructs a new {@link TraceClassVisitor}.
- *
- * @param cv the {@link ClassVisitor} to which this visitor delegates calls.
- * May be <tt>null</tt>.
- * @param p the object that actually converts visit events into text.
- * @param pw the print writer to be used to print the class. May be null if
- * you simply want to use the result via
- * {@link Printer#getText()}, instead of printing it.
+ *
+ * @param cv
+ * the {@link ClassVisitor} to which this visitor delegates
+ * calls. May be <tt>null</tt>.
+ * @param p
+ * the object that actually converts visit events into text.
+ * @param pw
+ * the print writer to be used to print the class. May be null if
+ * you simply want to use the result via
+ * {@link Printer#getText()}, instead of printing it.
*/
- public TraceClassVisitor(
- final ClassVisitor cv,
- final Printer p,
- final PrintWriter pw)
- {
+ public TraceClassVisitor(final ClassVisitor cv, final Printer p,
+ final PrintWriter pw) {
super(Opcodes.ASM4, cv);
this.pw = pw;
this.p = p;
}
@Override
- public void visit(
- final int version,
- final int access,
- final String name,
- final String signature,
- final String superName,
- final String[] interfaces)
- {
+ public void visit(final int version, final int access, final String name,
+ final String signature, final String superName,
+ final String[] interfaces) {
p.visit(version, access, name, signature, superName, interfaces);
super.visit(version, access, name, signature, superName, interfaces);
}
@@ -141,20 +150,15 @@ public final class TraceClassVisitor extends ClassVisitor {
}
@Override
- public void visitOuterClass(
- final String owner,
- final String name,
- final String desc)
- {
+ public void visitOuterClass(final String owner, final String name,
+ final String desc) {
p.visitOuterClass(owner, name, desc);
super.visitOuterClass(owner, name, desc);
}
@Override
- public AnnotationVisitor visitAnnotation(
- final String desc,
- final boolean visible)
- {
+ public AnnotationVisitor visitAnnotation(final String desc,
+ final boolean visible) {
Printer p = this.p.visitClassAnnotation(desc, visible);
AnnotationVisitor av = cv == null ? null : cv.visitAnnotation(desc,
visible);
@@ -168,55 +172,28 @@ public final class TraceClassVisitor extends ClassVisitor {
}
@Override
- public void visitInnerClass(
- final String name,
- final String outerName,
- final String innerName,
- final int access)
- {
+ public void visitInnerClass(final String name, final String outerName,
+ final String innerName, final int access) {
p.visitInnerClass(name, outerName, innerName, access);
super.visitInnerClass(name, outerName, innerName, access);
}
@Override
- public FieldVisitor visitField(
- final int access,
- final String name,
- final String desc,
- final String signature,
- final Object value)
- {
- Printer p = this.p.visitField(access,
- name,
- desc,
- signature,
- value);
- FieldVisitor fv = cv == null ? null : cv.visitField(access,
- name,
- desc,
- signature,
- value);
+ public FieldVisitor visitField(final int access, final String name,
+ final String desc, final String signature, final Object value) {
+ Printer p = this.p.visitField(access, name, desc, signature, value);
+ FieldVisitor fv = cv == null ? null : cv.visitField(access, name, desc,
+ signature, value);
return new TraceFieldVisitor(fv, p);
}
@Override
- public MethodVisitor visitMethod(
- final int access,
- final String name,
- final String desc,
- final String signature,
- final String[] exceptions)
- {
- Printer p = this.p.visitMethod(access,
- name,
- desc,
- signature,
- exceptions);
- MethodVisitor mv = cv == null ? null : cv.visitMethod(access,
- name,
- desc,
- signature,
+ public MethodVisitor visitMethod(final int access, final String name,
+ final String desc, final String signature, final String[] exceptions) {
+ Printer p = this.p.visitMethod(access, name, desc, signature,
exceptions);
+ MethodVisitor mv = cv == null ? null : cv.visitMethod(access, name,
+ desc, signature, exceptions);
return new TraceMethodVisitor(mv, p);
}
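For orientation, a minimal usage sketch of the reformatted constructor above (not part of this diff), assuming the stock Textifier printer from the same util package:

    import java.io.PrintWriter
    import scala.tools.asm.ClassReader
    import scala.tools.asm.util.{Textifier, TraceClassVisitor}

    // Print a textual trace of a class from the classpath; the null delegate means
    // we only want the printed output, matching the "May be <tt>null</tt>" contract above.
    val printer = new Textifier
    val tracer  = new TraceClassVisitor(null, printer, new PrintWriter(System.out))
    new ClassReader("scala.Option").accept(tracer, 0)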
diff --git a/src/asm/scala/tools/asm/util/TraceFieldVisitor.java b/src/asm/scala/tools/asm/util/TraceFieldVisitor.java
index f537e83be1..9547a70008 100644
--- a/src/asm/scala/tools/asm/util/TraceFieldVisitor.java
+++ b/src/asm/scala/tools/asm/util/TraceFieldVisitor.java
@@ -37,7 +37,7 @@ import scala.tools.asm.Opcodes;
/**
* A {@link FieldVisitor} that prints the fields it visits with a
* {@link Printer}.
- *
+ *
* @author Eric Bruneton
*/
public final class TraceFieldVisitor extends FieldVisitor {
@@ -52,12 +52,10 @@ public final class TraceFieldVisitor extends FieldVisitor {
super(Opcodes.ASM4, fv);
this.p = p;
}
-
+
@Override
- public AnnotationVisitor visitAnnotation(
- final String desc,
- final boolean visible)
- {
+ public AnnotationVisitor visitAnnotation(final String desc,
+ final boolean visible) {
Printer p = this.p.visitFieldAnnotation(desc, visible);
AnnotationVisitor av = fv == null ? null : fv.visitAnnotation(desc,
visible);
diff --git a/src/asm/scala/tools/asm/util/TraceMethodVisitor.java b/src/asm/scala/tools/asm/util/TraceMethodVisitor.java
index 9aabf2079e..9034567c8f 100644
--- a/src/asm/scala/tools/asm/util/TraceMethodVisitor.java
+++ b/src/asm/scala/tools/asm/util/TraceMethodVisitor.java
@@ -56,10 +56,8 @@ public final class TraceMethodVisitor extends MethodVisitor {
}
@Override
- public AnnotationVisitor visitAnnotation(
- final String desc,
- final boolean visible)
- {
+ public AnnotationVisitor visitAnnotation(final String desc,
+ final boolean visible) {
Printer p = this.p.visitMethodAnnotation(desc, visible);
AnnotationVisitor av = mv == null ? null : mv.visitAnnotation(desc,
visible);
@@ -80,17 +78,11 @@ public final class TraceMethodVisitor extends MethodVisitor {
}
@Override
- public AnnotationVisitor visitParameterAnnotation(
- final int parameter,
- final String desc,
- final boolean visible)
- {
- Printer p = this.p.visitParameterAnnotation(parameter,
- desc,
- visible);
- AnnotationVisitor av = mv == null
- ? null
- : mv.visitParameterAnnotation(parameter, desc, visible);
+ public AnnotationVisitor visitParameterAnnotation(final int parameter,
+ final String desc, final boolean visible) {
+ Printer p = this.p.visitParameterAnnotation(parameter, desc, visible);
+ AnnotationVisitor av = mv == null ? null : mv.visitParameterAnnotation(
+ parameter, desc, visible);
return new TraceAnnotationVisitor(av, p);
}
@@ -101,13 +93,8 @@ public final class TraceMethodVisitor extends MethodVisitor {
}
@Override
- public void visitFrame(
- final int type,
- final int nLocal,
- final Object[] local,
- final int nStack,
- final Object[] stack)
- {
+ public void visitFrame(final int type, final int nLocal,
+ final Object[] local, final int nStack, final Object[] stack) {
p.visitFrame(type, nLocal, local, nStack, stack);
super.visitFrame(type, nLocal, local, nStack, stack);
}
@@ -137,34 +124,22 @@ public final class TraceMethodVisitor extends MethodVisitor {
}
@Override
- public void visitFieldInsn(
- final int opcode,
- final String owner,
- final String name,
- final String desc)
- {
+ public void visitFieldInsn(final int opcode, final String owner,
+ final String name, final String desc) {
p.visitFieldInsn(opcode, owner, name, desc);
super.visitFieldInsn(opcode, owner, name, desc);
}
@Override
- public void visitMethodInsn(
- final int opcode,
- final String owner,
- final String name,
- final String desc)
- {
+ public void visitMethodInsn(final int opcode, final String owner,
+ final String name, final String desc) {
p.visitMethodInsn(opcode, owner, name, desc);
super.visitMethodInsn(opcode, owner, name, desc);
}
@Override
- public void visitInvokeDynamicInsn(
- String name,
- String desc,
- Handle bsm,
- Object... bsmArgs)
- {
+ public void visitInvokeDynamicInsn(String name, String desc, Handle bsm,
+ Object... bsmArgs) {
p.visitInvokeDynamicInsn(name, desc, bsm, bsmArgs);
super.visitInvokeDynamicInsn(name, desc, bsm, bsmArgs);
}
@@ -194,22 +169,15 @@ public final class TraceMethodVisitor extends MethodVisitor {
}
@Override
- public void visitTableSwitchInsn(
- final int min,
- final int max,
- final Label dflt,
- final Label... labels)
- {
+ public void visitTableSwitchInsn(final int min, final int max,
+ final Label dflt, final Label... labels) {
p.visitTableSwitchInsn(min, max, dflt, labels);
super.visitTableSwitchInsn(min, max, dflt, labels);
}
@Override
- public void visitLookupSwitchInsn(
- final Label dflt,
- final int[] keys,
- final Label[] labels)
- {
+ public void visitLookupSwitchInsn(final Label dflt, final int[] keys,
+ final Label[] labels) {
p.visitLookupSwitchInsn(dflt, keys, labels);
super.visitLookupSwitchInsn(dflt, keys, labels);
}
@@ -221,25 +189,16 @@ public final class TraceMethodVisitor extends MethodVisitor {
}
@Override
- public void visitTryCatchBlock(
- final Label start,
- final Label end,
- final Label handler,
- final String type)
- {
+ public void visitTryCatchBlock(final Label start, final Label end,
+ final Label handler, final String type) {
p.visitTryCatchBlock(start, end, handler, type);
super.visitTryCatchBlock(start, end, handler, type);
}
@Override
- public void visitLocalVariable(
- final String name,
- final String desc,
- final String signature,
- final Label start,
- final Label end,
- final int index)
- {
+ public void visitLocalVariable(final String name, final String desc,
+ final String signature, final Label start, final Label end,
+ final int index) {
p.visitLocalVariable(name, desc, signature, start, end, index);
super.visitLocalVariable(name, desc, signature, start, end, index);
}
diff --git a/src/asm/scala/tools/asm/util/TraceSignatureVisitor.java b/src/asm/scala/tools/asm/util/TraceSignatureVisitor.java
index a37b759811..1e23c7ef1a 100644
--- a/src/asm/scala/tools/asm/util/TraceSignatureVisitor.java
+++ b/src/asm/scala/tools/asm/util/TraceSignatureVisitor.java
@@ -117,8 +117,7 @@ public final class TraceSignatureVisitor extends SignatureVisitor {
@Override
public SignatureVisitor visitInterface() {
- separator = seenInterface ? ", " : isInterface
- ? " extends "
+ separator = seenInterface ? ", " : isInterface ? " extends "
: " implements ";
seenInterface = true;
startType();
@@ -165,34 +164,34 @@ public final class TraceSignatureVisitor extends SignatureVisitor {
@Override
public void visitBaseType(final char descriptor) {
switch (descriptor) {
- case 'V':
- declaration.append("void");
- break;
- case 'B':
- declaration.append("byte");
- break;
- case 'J':
- declaration.append("long");
- break;
- case 'Z':
- declaration.append("boolean");
- break;
- case 'I':
- declaration.append("int");
- break;
- case 'S':
- declaration.append("short");
- break;
- case 'C':
- declaration.append("char");
- break;
- case 'F':
- declaration.append("float");
- break;
- // case 'D':
- default:
- declaration.append("double");
- break;
+ case 'V':
+ declaration.append("void");
+ break;
+ case 'B':
+ declaration.append("byte");
+ break;
+ case 'J':
+ declaration.append("long");
+ break;
+ case 'Z':
+ declaration.append("boolean");
+ break;
+ case 'I':
+ declaration.append("int");
+ break;
+ case 'S':
+ declaration.append("short");
+ break;
+ case 'C':
+ declaration.append("char");
+ break;
+ case 'F':
+ declaration.append("float");
+ break;
+ // case 'D':
+ default:
+ declaration.append("double");
+ break;
}
endType();
}
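A small sketch (not part of this diff) of how this visitor is driven in practice, assuming the SignatureReader from the sibling signature package:

    import scala.tools.asm.Opcodes
    import scala.tools.asm.signature.SignatureReader
    import scala.tools.asm.util.TraceSignatureVisitor

    // Decode a generic type signature into Java-like source text.
    val v = new TraceSignatureVisitor(Opcodes.ACC_PUBLIC)
    new SignatureReader("Ljava/util/List<Ljava/lang/String;>;").acceptType(v)
    println(v.getDeclaration)   // typically prints: java.util.List<java.lang.String>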
diff --git a/src/build/genprod.scala b/src/build/genprod.scala
index b9511c1ad2..cd01363cb6 100644
--- a/src/build/genprod.scala
+++ b/src/build/genprod.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -63,7 +63,7 @@ object genprod extends App {
def header = """
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -319,6 +319,7 @@ class Tuple(val i: Int) extends Group("Tuple") with Arity {
* @constructor Create a new tuple with {i} elements.{idiomatic}
{params}
*/
+@deprecatedInheritance("Tuples will be made final in a future version.", "2.11.0")
case class {className}{covariantArgs}({fields})
extends {Product.className(i)}{invariantArgs}
{{
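The practical effect of the new @deprecatedInheritance annotation, sketched with hypothetical user code (not part of this diff): compiling a subclass of a tuple class under -deprecation now emits the annotation's message.

    // Emits a deprecation warning after this change:
    //   "Tuples will be made final in a future version."
    class Pair[A, B](a: A, b: B) extends Tuple2[A, B](a, b)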
diff --git a/src/compiler/scala/reflect/macros/compiler/DefaultMacroCompiler.scala b/src/compiler/scala/reflect/macros/compiler/DefaultMacroCompiler.scala
new file mode 100644
index 0000000000..32c6da8007
--- /dev/null
+++ b/src/compiler/scala/reflect/macros/compiler/DefaultMacroCompiler.scala
@@ -0,0 +1,33 @@
+package scala.reflect.macros
+package compiler
+
+import scala.tools.nsc.Global
+import scala.reflect.macros.contexts.Context
+
+abstract class DefaultMacroCompiler extends Resolvers
+ with Validators
+ with Errors {
+ val global: Global
+ import global._
+
+ val typer: global.analyzer.Typer
+ private implicit val context0 = typer.context
+ val context = typer.context
+
+ val macroDdef: DefDef
+ lazy val macroDef = macroDdef.symbol
+
+ private case class MacroImplResolutionException(pos: Position, msg: String) extends Exception
+ def abort(pos: Position, msg: String) = throw MacroImplResolutionException(pos, msg)
+
+ def resolveMacroImpl: Tree = {
+ try {
+ validateMacroImplRef()
+ macroImplRef
+ } catch {
+ case MacroImplResolutionException(pos, msg) =>
+ context.error(pos, msg)
+ EmptyTree
+ }
+ }
+} \ No newline at end of file
diff --git a/src/compiler/scala/reflect/macros/compiler/Errors.scala b/src/compiler/scala/reflect/macros/compiler/Errors.scala
new file mode 100644
index 0000000000..dd3142127e
--- /dev/null
+++ b/src/compiler/scala/reflect/macros/compiler/Errors.scala
@@ -0,0 +1,113 @@
+package scala.reflect.macros
+package compiler
+
+import scala.compat.Platform.EOL
+import scala.reflect.macros.util.Traces
+
+trait Errors extends Traces {
+ self: DefaultMacroCompiler =>
+
+ import global._
+ import analyzer._
+ import definitions._
+ import typer.TyperErrorGen._
+ import typer.infer.InferErrorGen._
+ def globalSettings = global.settings
+
+ // sanity check errors
+
+ private def implRefError(message: String) = abort(macroDdef.pos, message)
+
+ def MacroImplReferenceWrongShapeError() = implRefError(
+ "macro implementation reference has wrong shape. required:\n"+
+ "macro [<static object>].<method name>[[<type args>]] or\n" +
+ "macro [<macro bundle>].<method name>[[<type args>]]")
+
+ def MacroImplNotPublicError() = implRefError("macro implementation must be public")
+
+ def MacroImplOverloadedError() = implRefError("macro implementation cannot be overloaded")
+
+ def MacroImplWrongNumberOfTypeArgumentsError() = implRefError(TypedApplyWrongNumberOfTpeParametersErrorMessage(macroImplRef))
+
+ // compatibility errors
+
+ // helpers
+
+ private def lengthMsg(flavor: String, violation: String, extra: Symbol) = {
+ val noun = if (flavor == "value") "parameter" else "type parameter"
+ val message = noun + " lists have different length, " + violation + " extra " + noun
+ val suffix = if (extra ne NoSymbol) " " + extra.defString else ""
+ message + suffix
+ }
+
+ private def abbreviateCoreAliases(s: String): String = List("WeakTypeTag", "Expr").foldLeft(s)((res, x) => res.replace("c.universe." + x, "c." + x))
+
+ private def showMeth(pss: List[List[Symbol]], restpe: Type, abbreviate: Boolean) = {
+ var argsPart = (pss map (ps => ps map (_.defString) mkString ("(", ", ", ")"))).mkString
+ if (abbreviate) argsPart = abbreviateCoreAliases(argsPart)
+ var retPart = restpe.toString
+ if (abbreviate || macroDdef.tpt.tpe == null) retPart = abbreviateCoreAliases(retPart)
+ argsPart + ": " + retPart
+ }
+
+ // not exactly an error generator, but very related
+ // and I dearly wanted to push it away from Macros.scala
+ private def checkConforms(slot: String, rtpe: Type, atpe: Type) = {
+ val verbose = macroDebugVerbose || settings.explaintypes.value
+
+ def check(rtpe: Type, atpe: Type): Boolean = {
+ def success() = { if (verbose) println(rtpe + " <: " + atpe + "?" + EOL + "true"); true }
+ (rtpe, atpe) match {
+ case _ if rtpe eq atpe => success()
+ case (TypeRef(_, RepeatedParamClass, rtpe :: Nil), TypeRef(_, RepeatedParamClass, atpe :: Nil)) => check(rtpe, atpe)
+ case (ExprClassOf(_), TreeType()) => success()
+ case (TreeType(), ExprClassOf(_)) => success()
+ case _ => rtpe <:< atpe
+ }
+ }
+
+ val ok =
+ if (verbose) withTypesExplained(check(rtpe, atpe))
+ else check(rtpe, atpe)
+ if (!ok) {
+ if (!macroDebugVerbose)
+ explainTypes(rtpe, atpe)
+ compatibilityError("type mismatch for %s: %s does not conform to %s".format(slot, abbreviateCoreAliases(rtpe.toString), abbreviateCoreAliases(atpe.toString)))
+ }
+ }
+
+ private def compatibilityError(message: String) =
+ implRefError(
+ "macro implementation has wrong shape:"+
+ "\n required: " + showMeth(rparamss, rret, abbreviate = true) +
+ "\n found : " + showMeth(aparamss, aret, abbreviate = false) +
+ "\n" + message)
+
+ def MacroImplNonTagImplicitParameters(params: List[Symbol]) = compatibilityError("macro implementations cannot have implicit parameters other than WeakTypeTag evidences")
+
+ def MacroImplParamssMismatchError() = compatibilityError("number of parameter sections differ")
+
+ def MacroImplExtraParamsError(aparams: List[Symbol], rparams: List[Symbol]) = compatibilityError(lengthMsg("value", "found", aparams(rparams.length)))
+
+ def MacroImplMissingParamsError(aparams: List[Symbol], rparams: List[Symbol]) = compatibilityError(abbreviateCoreAliases(lengthMsg("value", "required", rparams(aparams.length))))
+
+ def checkMacroImplParamTypeMismatch(atpe: Type, rparam: Symbol) = checkConforms("parameter " + rparam.name, rparam.tpe, atpe)
+
+ def checkMacroImplResultTypeMismatch(atpe: Type, rret: Type) = checkConforms("return type", atpe, rret)
+
+ def MacroImplParamNameMismatchError(aparam: Symbol, rparam: Symbol) = compatibilityError("parameter names differ: " + rparam.name + " != " + aparam.name)
+
+ def MacroImplVarargMismatchError(aparam: Symbol, rparam: Symbol) = {
+ def fail(paramName: Name) = compatibilityError("types incompatible for parameter " + paramName + ": corresponding is not a vararg parameter")
+ if (isRepeated(rparam) && !isRepeated(aparam)) fail(rparam.name)
+ if (!isRepeated(rparam) && isRepeated(aparam)) fail(aparam.name)
+ }
+
+ def MacroImplTargMismatchError(atargs: List[Type], atparams: List[Symbol]) =
+ compatibilityError(NotWithinBoundsErrorMessage("", atargs, atparams, macroDebugVerbose || settings.explaintypes.value))
+
+ def MacroImplTparamInstantiationError(atparams: List[Symbol], ex: NoInstance) =
+ compatibilityError(
+ "type parameters "+(atparams map (_.defString) mkString ", ")+" cannot be instantiated\n"+
+ ex.getMessage)
+} \ No newline at end of file
diff --git a/src/compiler/scala/reflect/macros/compiler/Resolvers.scala b/src/compiler/scala/reflect/macros/compiler/Resolvers.scala
new file mode 100644
index 0000000000..3025eb52a1
--- /dev/null
+++ b/src/compiler/scala/reflect/macros/compiler/Resolvers.scala
@@ -0,0 +1,91 @@
+package scala.reflect.macros
+package compiler
+
+import scala.reflect.internal.Flags._
+import scala.reflect.macros.TypecheckException
+
+trait Resolvers {
+ self: DefaultMacroCompiler =>
+
+ import global._
+ import analyzer._
+ import definitions._
+ import treeInfo._
+ import gen._
+
+ /** Determines the type of context implied by the macro def.
+ */
+ val ctxTpe = MacroContextClass.tpe
+
+ /** Resolves a macro impl reference provided in the right-hand side of the given macro definition.
+ *
+ * Acceptable shapes of the right-hand side:
+ * 1) [<static object>].<method name>[[<type args>]] // vanilla macro def
+ * 2) [<macro bundle>].<method name>[[<type args>]] // shiny new macro bundle
+ *
+ * Produces a tree that represents a reference to a macro implementation if everything goes well;
+ * otherwise reports the errors found and returns EmptyTree. The resulting tree should have the following format:
+ *
+ * qualifier.method[targs]
+ *
+ * Qualifier here might be omitted (local macro defs), be a static object (vanilla macro defs)
+ * or be a dummy instance of a macro bundle (e.g. new MyMacro(???).expand).
+ */
+ lazy val macroImplRef: Tree = {
+ val (maybeBundleRef, methName, targs) = macroDdef.rhs match {
+ case Applied(methRef @ Select(bundleRef @ RefTree(qual, bundleName), methName), targs, Nil) =>
+ (RefTree(qual, bundleName.toTypeName), methName, targs)
+ case Applied(Ident(methName), targs, Nil) =>
+ (Ident(context.owner.enclClass), methName, targs)
+ case _ =>
+ (EmptyTree, TermName(""), Nil)
+ }
+
+ val untypedImplRef = typer.silent(_.typedType(maybeBundleRef)) match {
+ case SilentResultValue(result) if isMacroBundleType(result.tpe) =>
+ val bundleClass = result.tpe.typeSymbol
+ if (!bundleClass.owner.isPackageClass) abort(macroDef.pos, "macro bundles can only be defined as top-level classes or traits")
+
+ // synthesize the invoker, i.e. given a top-level `trait Foo extends Macro { def expand = ... } `
+ // create a top-level definition `class FooInvoker(val c: Context) extends Foo` in MACRO_INVOKER_PACKAGE
+ val invokerPid = gen.mkUnattributedRef(nme.MACRO_INVOKER_PACKAGE)
+ val invokerName = TypeName(bundleClass.fullName.split('.').map(_.capitalize).mkString("") + nme.MACRO_INVOKER_SUFFIX)
+ val invokerFullName = TypeName(s"$invokerPid.$invokerName")
+ val existingInvoker = rootMirror.getClassIfDefined(invokerFullName)
+ if (!currentRun.compiles(existingInvoker)) {
+ def mkContextValDef(flags: Long) = ValDef(Modifiers(flags), nme.c, TypeTree(ctxTpe), EmptyTree)
+ val contextField = mkContextValDef(PARAMACCESSOR)
+ val contextParam = mkContextValDef(PARAM | PARAMACCESSOR)
+ val invokerCtor = DefDef(Modifiers(), nme.CONSTRUCTOR, Nil, List(List(contextParam)), TypeTree(), Block(List(pendingSuperCall), Literal(Constant(()))))
+ val invoker = atPos(bundleClass.pos)(ClassDef(NoMods, invokerName, Nil, Template(List(Ident(bundleClass)), emptyValDef, List(contextField, invokerCtor))))
+ currentRun.compileLate(PackageDef(invokerPid, List(invoker)))
+ }
+
+ // synthesize the macro impl reference, which is going to look like:
+ // `new Foo$invoker(???).expand` plus the optional type arguments
+ val instanceOfInvoker = New(Select(invokerPid, invokerName), List(List(Select(scalaDot(nme.Predef), nme.???))))
+ gen.mkTypeApply(Select(instanceOfInvoker, methName), targs)
+ case _ =>
+ macroDdef.rhs
+ }
+
+ val typedImplRef = typer.silent(_.typed(markMacroImplRef(untypedImplRef)), reportAmbiguousErrors = false)
+ typedImplRef match {
+ case SilentResultValue(success) => success
+ case SilentTypeError(err) => abort(err.errPos, err.errMsg)
+ }
+ }
+
+ // FIXME: cannot write this concisely because of SI-7507
+ // lazy val (isImplBundle, macroImplOwner, macroImpl, macroImplTargs) =
+ private lazy val dissectedMacroImplRef =
+ macroImplRef match {
+ case MacroImplReference(isBundle, owner, meth, targs) => (isBundle, owner, meth, targs)
+ case _ => MacroImplReferenceWrongShapeError()
+ }
+ lazy val isImplBundle = dissectedMacroImplRef._1
+ lazy val isImplMethod = !isImplBundle
+ lazy val macroImplOwner = dissectedMacroImplRef._2
+ lazy val macroImpl = dissectedMacroImplRef._3
+ lazy val targs = dissectedMacroImplRef._4
+}
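To make shape 1 above concrete, here is a minimal vanilla macro def/impl pair that resolveMacroImpl would accept (a sketch under the pre-bundle API; names are illustrative and not part of this diff):

    import scala.language.experimental.macros
    import scala.reflect.macros.Context

    object Impls {
      // The right-hand side `Impls.helloImpl` below is the "<static object>.<method name>" shape.
      def helloImpl(c: Context)(): c.Expr[Unit] = {
        import c.universe._
        reify { println("hello") }
      }
    }

    object Macros {
      def hello(): Unit = macro Impls.helloImpl
    }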
diff --git a/src/compiler/scala/reflect/macros/compiler/Validators.scala b/src/compiler/scala/reflect/macros/compiler/Validators.scala
new file mode 100644
index 0000000000..60cfc94a23
--- /dev/null
+++ b/src/compiler/scala/reflect/macros/compiler/Validators.scala
@@ -0,0 +1,193 @@
+package scala.reflect.macros
+package compiler
+
+import java.util.UUID.randomUUID
+import scala.reflect.internal.Flags._
+import scala.reflect.macros.TypecheckException
+
+trait Validators {
+ self: DefaultMacroCompiler =>
+
+ import global._
+ import analyzer._
+ import definitions._
+ import treeInfo._
+ import typer.infer._
+
+ def validateMacroImplRef() = {
+ sanityCheck()
+ if (macroImpl != Predef_???) checkMacroDefMacroImplCorrespondence()
+ }
+
+ private def sanityCheck() = {
+ if (!macroImpl.isMethod) MacroImplReferenceWrongShapeError()
+ if (!macroImpl.isPublic) MacroImplNotPublicError()
+ if (macroImpl.isOverloaded) MacroImplOverloadedError()
+ if (macroImpl.typeParams.length != targs.length) MacroImplWrongNumberOfTypeArgumentsError()
+ val declaredInStaticObject = isImplMethod && (macroImplOwner.isStaticOwner || macroImplOwner.moduleClass.isStaticOwner)
+ val declaredInTopLevelClass = isImplBundle && macroImplOwner.owner.isPackageClass
+ if (!declaredInStaticObject && !declaredInTopLevelClass) MacroImplReferenceWrongShapeError()
+ }
+
+ private def checkMacroDefMacroImplCorrespondence() = {
+ val atvars = atparams map freshVar
+ def atpeToRtpe(atpe: Type) = atpe.substSym(aparamss.flatten, rparamss.flatten).instantiateTypeParams(atparams, atvars)
+
+ // we only check strict correspondence between value parameter lists
+ // type parameters of macro defs and macro impls don't have to coincide with each other
+ val implicitParams = aparamss.flatten filter (_.isImplicit)
+ if (implicitParams.nonEmpty) MacroImplNonTagImplicitParameters(implicitParams)
+ if (aparamss.length != rparamss.length) MacroImplParamssMismatchError()
+ map2(aparamss, rparamss)((aparams, rparams) => {
+ if (aparams.length < rparams.length) MacroImplMissingParamsError(aparams, rparams)
+ if (rparams.length < aparams.length) MacroImplExtraParamsError(aparams, rparams)
+ })
+
+ try {
+ // cannot fuse this map2 and the map2 above because if aparamss.flatten != rparamss.flatten
+ // then `atpeToRtpe` is going to fail with an unsound substitution
+ map2(aparamss.flatten, rparamss.flatten)((aparam, rparam) => {
+ if (aparam.name != rparam.name && !rparam.isSynthetic) MacroImplParamNameMismatchError(aparam, rparam)
+ if (isRepeated(aparam) ^ isRepeated(rparam)) MacroImplVarargMismatchError(aparam, rparam)
+ val aparamtpe = aparam.tpe.dealias match {
+ case RefinedType(List(tpe), Scope(sym)) if tpe =:= ctxTpe && sym.allOverriddenSymbols.contains(MacroContextPrefixType) => tpe
+ case tpe => tpe
+ }
+ checkMacroImplParamTypeMismatch(atpeToRtpe(aparamtpe), rparam)
+ })
+
+ checkMacroImplResultTypeMismatch(atpeToRtpe(aret), rret)
+
+ val maxLubDepth = lubDepth(aparamss.flatten map (_.tpe)) max lubDepth(rparamss.flatten map (_.tpe))
+ val atargs = solvedTypes(atvars, atparams, atparams map varianceInType(aret), upper = false, depth = maxLubDepth)
+ val boundsOk = typer.silent(_.infer.checkBounds(macroDdef, NoPrefix, NoSymbol, atparams, atargs, ""))
+ boundsOk match {
+ case SilentResultValue(true) => // do nothing, success
+ case SilentResultValue(false) | SilentTypeError(_) => MacroImplTargMismatchError(atargs, atparams)
+ }
+ } catch {
+ case ex: NoInstance => MacroImplTparamInstantiationError(atparams, ex)
+ }
+ }
+
+ // aXXX (e.g. aparamss) => characteristics of the actual macro impl signature extracted from the macro impl ("a" stands for "actual")
+ // rXXX (e.g. rparamss) => characteristics of the reference macro impl signature synthesized from the macro def ("r" stands for "reference")
+ // FIXME: cannot write this concisely because of SI-7507
+ //lazy val MacroImplSig(atparams, aparamss, aret) = macroImplSig
+ //lazy val MacroImplSig(_, rparamss, rret) = referenceMacroImplSig
+ lazy val atparams = macroImplSig.tparams
+ lazy val aparamss = macroImplSig.paramss
+ lazy val aret = macroImplSig.ret
+ lazy val rparamss = referenceMacroImplSig.paramss
+ lazy val rret = referenceMacroImplSig.ret
+
+ // Technically this can be just an alias to MethodType, but promoting it to a first-class entity
+ // provides better encapsulation and convenient syntax for pattern matching.
+ private case class MacroImplSig(tparams: List[Symbol], paramss: List[List[Symbol]], ret: Type)
+
+ /** An actual macro implementation signature extracted from a macro implementation method.
+ *
+ * For the following macro impl:
+ * def fooBar[T: c.WeakTypeTag]
+ * (c: scala.reflect.macros.Context)
+ * (xs: c.Expr[List[T]])
+ * : c.Expr[T] = ...
+ *
+ * This function will return:
+ * (c: scala.reflect.macros.Context)(xs: c.Expr[List[T]])c.Expr[T]
+ *
+ * Note that type tag evidence parameters are not included in the result.
+ * Type tag context bounds for macro impl tparams are optional.
+ * Therefore compatibility checks ignore such parameters, and we don't need to bother about them here.
+ *
+ * This method cannot be reduced to just macroImpl.info, because macro implementations might
+ * come in different shapes. If the implementation is an apply method of a Macro-compatible object,
+ * then it won't have (c: Context) in its parameters, but will rather refer to Macro.c.
+ *
+ * @param macroImpl The macro implementation symbol
+ */
+ private lazy val macroImplSig: MacroImplSig = {
+ val tparams = macroImpl.typeParams
+ val paramss = transformTypeTagEvidenceParams(macroImplRef, (param, tparam) => NoSymbol)
+ val ret = macroImpl.info.finalResultType
+ MacroImplSig(tparams, paramss, ret)
+ }
+
+ /** A reference macro implementation signature extracted from a given macro definition.
+ *
+ * For the following macro def:
+ * def foo[T](xs: List[T]): T = macro fooBar
+ *
+ * This function will return:
+ * (c: scala.reflect.macros.Context)(xs: c.Expr[List[T]])c.Expr[T]
+ *
+ * Note that type tag evidence parameters are not included in the result.
+ * Type tag context bounds for macro impl tparams are optional.
+ * Therefore compatibility checks ignore such parameters, and we don't need to bother about them here.
+ *
+ * Also note that we need a DefDef, not the corresponding MethodSymbol, because that symbol would be of no use for us.
+ * Macro signatures are verified when typechecking macro defs, which means that at that moment inspecting macroDef.info
+ * means asking for cyclic reference errors.
+ *
+ * We need the macro implementation symbol as well, because the return type of the macro definition might be omitted,
+ * and in that case we'd need to infer it from the return type of the macro implementation. Luckily for us, we can
+ * use that symbol without a risk of running into cycles.
+ *
+ * @param typer Typechecker of `macroDdef`
+ * @param macroDdef The macro definition tree
+ * @param macroImpl The macro implementation symbol
+ */
+ private lazy val referenceMacroImplSig: MacroImplSig = {
+ // had to move method's body to an object because of the recursive dependencies between sigma and param
+ object SigGenerator {
+ val cache = scala.collection.mutable.Map[Symbol, Symbol]()
+ val ctxPrefix =
+ if (isImplMethod) singleType(NoPrefix, makeParam(nme.macroContext, macroDdef.pos, ctxTpe, SYNTHETIC))
+ else singleType(ThisType(macroImpl.owner), macroImpl.owner.tpe.member(nme.c))
+ var paramss =
+ if (isImplMethod) List(ctxPrefix.termSymbol) :: mmap(macroDdef.vparamss)(param)
+ else mmap(macroDdef.vparamss)(param)
+ val macroDefRet =
+ if (!macroDdef.tpt.isEmpty) typer.typedType(macroDdef.tpt).tpe
+ else computeMacroDefTypeFromMacroImplRef(macroDdef, macroImplRef)
+ val implReturnType = sigma(increaseMetalevel(ctxPrefix, macroDefRet))
+
+ object SigmaTypeMap extends TypeMap {
+ def mapPrefix(pre: Type) = pre match {
+ case ThisType(sym) if sym == macroDef.owner =>
+ singleType(singleType(ctxPrefix, MacroContextPrefix), ExprValue)
+ case SingleType(NoPrefix, sym) =>
+ mfind(macroDdef.vparamss)(_.symbol == sym).fold(pre)(p => singleType(singleType(NoPrefix, param(p)), ExprValue))
+ case _ =>
+ mapOver(pre)
+ }
+ def apply(tp: Type): Type = tp match {
+ case TypeRef(pre, sym, args) =>
+ val pre1 = mapPrefix(pre)
+ val args1 = mapOverArgs(args, sym.typeParams)
+ if ((pre eq pre1) && (args eq args1)) tp
+ else typeRef(pre1, sym, args1)
+ case _ =>
+ mapOver(tp)
+ }
+ }
+ def sigma(tpe: Type): Type = SigmaTypeMap(tpe)
+
+ def makeParam(name: Name, pos: Position, tpe: Type, flags: Long) =
+ macroDef.newValueParameter(name.toTermName, pos, flags) setInfo tpe
+ def param(tree: Tree): Symbol = (
+ cache.getOrElseUpdate(tree.symbol, {
+ val sym = tree.symbol
+ assert(sym.isTerm, s"sym = $sym, tree = $tree")
+ makeParam(sym.name, sym.pos, sigma(increaseMetalevel(ctxPrefix, sym.tpe)), sym.flags)
+ })
+ )
+ }
+
+ import SigGenerator._
+ macroLogVerbose(s"generating macroImplSigs for: $macroDdef")
+ val result = MacroImplSig(macroDdef.tparams map (_.symbol), paramss, implReturnType)
+ macroLogVerbose(s"result is: $result")
+ result
+ }
+}
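Putting the macroImplSig/referenceMacroImplSig correspondence in concrete terms, a def/impl pair that the checks above accept (a sketch mirroring the doc comment's example, not part of this diff):

    import scala.language.experimental.macros
    import scala.reflect.macros.Context

    object FooImpls {
      // Reference signature synthesized from the def below:
      //   (c: scala.reflect.macros.Context)(xs: c.Expr[List[T]])c.Expr[T]
      // The WeakTypeTag evidence parameter is stripped before the comparison.
      def fooBar[T: c.WeakTypeTag](c: Context)(xs: c.Expr[List[T]]): c.Expr[T] = {
        import c.universe._
        reify { xs.splice.head }
      }
    }

    object Foo {
      def foo[T](xs: List[T]): T = macro FooImpls.fooBar[T]
    }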
diff --git a/src/compiler/scala/reflect/macros/runtime/Aliases.scala b/src/compiler/scala/reflect/macros/contexts/Aliases.scala
index 96cf50e498..cc64d97d85 100644
--- a/src/compiler/scala/reflect/macros/runtime/Aliases.scala
+++ b/src/compiler/scala/reflect/macros/contexts/Aliases.scala
@@ -1,5 +1,5 @@
package scala.reflect.macros
-package runtime
+package contexts
trait Aliases {
self: Context =>
@@ -29,8 +29,7 @@ trait Aliases {
override def weakTypeOf[T](implicit attag: WeakTypeTag[T]): Type = attag.tpe
override def typeOf[T](implicit ttag: TypeTag[T]): Type = ttag.tpe
- type ImplicitCandidate = (Type, Tree)
implicit class RichOpenImplicit(oi: universe.analyzer.OpenImplicit) {
- def toImplicitCandidate = (oi.pt, oi.tree)
+ def toImplicitCandidate = ImplicitCandidate(oi.info.pre, oi.info.sym, oi.pt, oi.tree)
}
} \ No newline at end of file
diff --git a/src/compiler/scala/reflect/macros/contexts/Context.scala b/src/compiler/scala/reflect/macros/contexts/Context.scala
new file mode 100644
index 0000000000..bd1d7d5248
--- /dev/null
+++ b/src/compiler/scala/reflect/macros/contexts/Context.scala
@@ -0,0 +1,29 @@
+package scala.reflect.macros
+package contexts
+
+import scala.tools.nsc.Global
+
+abstract class Context extends scala.reflect.macros.Context
+ with Aliases
+ with Enclosures
+ with Names
+ with Reifiers
+ with FrontEnds
+ with Infrastructure
+ with Typers
+ with Parsers
+ with Evals
+ with ExprUtils
+ with Synthetics
+ with Traces {
+
+ val universe: Global
+
+ val mirror: universe.Mirror = universe.rootMirror
+
+ val callsiteTyper: universe.analyzer.Typer
+
+ val prefix: Expr[PrefixType]
+
+ val expandee: Tree
+}
diff --git a/src/compiler/scala/reflect/macros/contexts/Enclosures.scala b/src/compiler/scala/reflect/macros/contexts/Enclosures.scala
new file mode 100644
index 0000000000..bb88c8d5e1
--- /dev/null
+++ b/src/compiler/scala/reflect/macros/contexts/Enclosures.scala
@@ -0,0 +1,36 @@
+package scala.reflect.macros
+package contexts
+
+import scala.reflect.{ClassTag, classTag}
+
+trait Enclosures {
+ self: Context =>
+
+ import universe._
+
+ type MacroRole = analyzer.MacroRole
+ def APPLY_ROLE = analyzer.APPLY_ROLE
+ def macroRole: MacroRole
+
+ private lazy val site = callsiteTyper.context
+ private lazy val enclTrees = site.enclosingContextChain map (_.tree)
+ private lazy val enclPoses = enclosingMacros map (_.macroApplication.pos) filterNot (_ eq NoPosition)
+
+ private def lenientEnclosure[T <: Tree : ClassTag]: Tree = enclTrees collectFirst { case x: T => x } getOrElse EmptyTree
+ private def strictEnclosure[T <: Tree : ClassTag]: T = enclTrees collectFirst { case x: T => x } getOrElse (throw new EnclosureException(classTag[T].runtimeClass, enclTrees))
+
+ // vals are eager to simplify debugging
+ // after all we wouldn't save that much time by making them lazy
+ val macroApplication: Tree = expandee
+ def enclosingPackage: PackageDef = strictEnclosure[PackageDef]
+ val enclosingClass: Tree = lenientEnclosure[ImplDef]
+ def enclosingImpl: ImplDef = strictEnclosure[ImplDef]
+ def enclosingTemplate: Template = strictEnclosure[Template]
+ val enclosingImplicits: List[ImplicitCandidate] = site.openImplicits.map(_.toImplicitCandidate)
+ val enclosingMacros: List[Context] = this :: universe.analyzer.openMacros // include self
+ val enclosingMethod: Tree = lenientEnclosure[DefDef]
+ def enclosingDef: DefDef = strictEnclosure[DefDef]
+ val enclosingPosition: Position = if (enclPoses.isEmpty) NoPosition else enclPoses.head.pos
+ val enclosingUnit: CompilationUnit = universe.currentRun.currentUnit
+ val enclosingRun: Run = universe.currentRun
+}
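A sketch (not part of this diff) of a macro impl inspecting its expansion site through the enclosures exposed above; names are illustrative:

    import scala.language.experimental.macros
    import scala.reflect.macros.Context

    object WhereImpls {
      def whereImpl(c: Context)(): c.Expr[String] = {
        import c.universe._
        // enclosingClass is a lenient enclosure: EmptyTree when there is no enclosing ImplDef.
        val cls = c.enclosingClass match {
          case id: ImplDef => id.name.toString
          case _           => "<no enclosing class>"
        }
        c.Expr[String](Literal(Constant(s"$cls @ ${c.enclosingPosition}")))
      }
    }

    object Where {
      def where(): String = macro WhereImpls.whereImpl
    }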
diff --git a/src/compiler/scala/reflect/macros/runtime/Evals.scala b/src/compiler/scala/reflect/macros/contexts/Evals.scala
index 1f7b5f2ff1..84928ddf86 100644
--- a/src/compiler/scala/reflect/macros/runtime/Evals.scala
+++ b/src/compiler/scala/reflect/macros/contexts/Evals.scala
@@ -1,5 +1,5 @@
package scala.reflect.macros
-package runtime
+package contexts
import scala.reflect.runtime.{universe => ru}
import scala.tools.reflect.ToolBox
@@ -7,7 +7,7 @@ import scala.tools.reflect.ToolBox
trait Evals {
self: Context =>
- private lazy val evalMirror = ru.runtimeMirror(universe.analyzer.macroClassloader)
+ private lazy val evalMirror = ru.runtimeMirror(universe.analyzer.defaultMacroClassloader)
private lazy val evalToolBox = evalMirror.mkToolBox()
private lazy val evalImporter = ru.mkImporter(universe).asInstanceOf[ru.Importer { val from: universe.type }]
diff --git a/src/compiler/scala/reflect/macros/runtime/ExprUtils.scala b/src/compiler/scala/reflect/macros/contexts/ExprUtils.scala
index 672699f00e..4846325d1e 100644
--- a/src/compiler/scala/reflect/macros/runtime/ExprUtils.scala
+++ b/src/compiler/scala/reflect/macros/contexts/ExprUtils.scala
@@ -1,11 +1,10 @@
package scala.reflect.macros
-package runtime
+package contexts
trait ExprUtils {
self: Context =>
import universe._
- import mirror._
def literalNull = Expr[Null](Literal(Constant(null)))(TypeTag.Null)
diff --git a/src/compiler/scala/reflect/macros/runtime/FrontEnds.scala b/src/compiler/scala/reflect/macros/contexts/FrontEnds.scala
index a6a198e1b4..fda05de09c 100644
--- a/src/compiler/scala/reflect/macros/runtime/FrontEnds.scala
+++ b/src/compiler/scala/reflect/macros/contexts/FrontEnds.scala
@@ -1,5 +1,7 @@
package scala.reflect.macros
-package runtime
+package contexts
+
+import scala.reflect.macros.runtime.AbortMacroException
trait FrontEnds {
self: Context =>
diff --git a/src/compiler/scala/reflect/macros/runtime/Infrastructure.scala b/src/compiler/scala/reflect/macros/contexts/Infrastructure.scala
index 7781693822..df7aa4d2be 100644
--- a/src/compiler/scala/reflect/macros/runtime/Infrastructure.scala
+++ b/src/compiler/scala/reflect/macros/contexts/Infrastructure.scala
@@ -1,5 +1,5 @@
package scala.reflect.macros
-package runtime
+package contexts
trait Infrastructure {
self: Context =>
diff --git a/src/compiler/scala/reflect/macros/runtime/Names.scala b/src/compiler/scala/reflect/macros/contexts/Names.scala
index ee9f3a56d3..e535754a98 100644
--- a/src/compiler/scala/reflect/macros/runtime/Names.scala
+++ b/src/compiler/scala/reflect/macros/contexts/Names.scala
@@ -1,5 +1,5 @@
package scala.reflect.macros
-package runtime
+package contexts
trait Names {
self: Context =>
@@ -7,11 +7,20 @@ trait Names {
lazy val freshNameCreator = callsiteTyper.context.unit.fresh
def fresh(): String =
- freshNameCreator.newName()
+ freshName()
def fresh(name: String): String =
- freshNameCreator.newName(name)
+ freshName(name)
def fresh[NameType <: Name](name: NameType): NameType =
+ freshName[NameType](name)
+
+ def freshName(): String =
+ freshNameCreator.newName()
+
+ def freshName(name: String): String =
+ freshNameCreator.newName(name)
+
+ def freshName[NameType <: Name](name: NameType): NameType =
name.mapName(freshNameCreator.newName(_)).asInstanceOf[NameType]
} \ No newline at end of file
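The new freshName overloads delegate to the compilation unit's fresh-name creator. A sketch of typical use inside a macro impl, assuming freshName is exposed on the public Context as the delegation above suggests (identifiers illustrative, not part of this diff):

    import scala.language.experimental.macros
    import scala.reflect.macros.Context

    object OnceImpls {
      // Evaluate the argument exactly once by binding it to a fresh, collision-free name.
      def onceImpl(c: Context)(x: c.Expr[Int]): c.Expr[Int] = {
        import c.universe._
        val tmp = newTermName(c.freshName("tmp"))
        c.Expr[Int](Block(
          List(ValDef(Modifiers(), tmp, TypeTree(), x.tree)),
          Ident(tmp)))
      }
    }

    object Once {
      def once(x: Int): Int = macro OnceImpls.onceImpl
    }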
diff --git a/src/compiler/scala/reflect/macros/runtime/Parsers.scala b/src/compiler/scala/reflect/macros/contexts/Parsers.scala
index 566bcde73d..3dab02beba 100644
--- a/src/compiler/scala/reflect/macros/runtime/Parsers.scala
+++ b/src/compiler/scala/reflect/macros/contexts/Parsers.scala
@@ -1,5 +1,5 @@
package scala.reflect.macros
-package runtime
+package contexts
import scala.language.existentials
import scala.tools.reflect.ToolBox
diff --git a/src/compiler/scala/reflect/macros/runtime/Reifiers.scala b/src/compiler/scala/reflect/macros/contexts/Reifiers.scala
index 8bb388be8f..ecef1c7289 100644
--- a/src/compiler/scala/reflect/macros/runtime/Reifiers.scala
+++ b/src/compiler/scala/reflect/macros/contexts/Reifiers.scala
@@ -4,7 +4,7 @@
*/
package scala.reflect.macros
-package runtime
+package contexts
trait Reifiers {
self: Context =>
@@ -60,15 +60,15 @@ trait Reifiers {
def logFreeVars(symtab: SymbolTable): Unit =
// logging free vars only when they are untyped prevents avalanches of duplicate messages
symtab.syms map (sym => symtab.symDef(sym)) foreach {
- case FreeTermDef(_, _, binding, _, origin) if universe.settings.logFreeTerms.value && binding.tpe == null =>
+ case FreeTermDef(_, _, binding, _, origin) if universe.settings.logFreeTerms && binding.tpe == null =>
reporter.echo(position, "free term: %s %s".format(showRaw(binding), origin))
- case FreeTypeDef(_, _, binding, _, origin) if universe.settings.logFreeTypes.value && binding.tpe == null =>
+ case FreeTypeDef(_, _, binding, _, origin) if universe.settings.logFreeTypes && binding.tpe == null =>
reporter.echo(position, "free type: %s %s".format(showRaw(binding), origin))
case _ =>
// do nothing
}
- if (universe.settings.logFreeTerms.value || universe.settings.logFreeTypes.value)
+ if (universe.settings.logFreeTerms || universe.settings.logFreeTypes)
reification match {
case ReifiedTree(_, _, symtab, _, _, _, _) => logFreeVars(symtab)
case ReifiedType(_, _, symtab, _, _, _) => logFreeVars(symtab)
diff --git a/src/compiler/scala/reflect/macros/contexts/Synthetics.scala b/src/compiler/scala/reflect/macros/contexts/Synthetics.scala
new file mode 100644
index 0000000000..ada16a8113
--- /dev/null
+++ b/src/compiler/scala/reflect/macros/contexts/Synthetics.scala
@@ -0,0 +1,66 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ */
+
+package scala.reflect.macros
+package contexts
+
+import scala.reflect.internal.Flags._
+import scala.reflect.internal.util.BatchSourceFile
+import scala.reflect.io.VirtualFile
+
+trait Synthetics {
+ self: Context =>
+
+ import global._
+ import mirror.wrapMissing
+
+ // getClassIfDefined and getModuleIfDefined cannot be used here
+ // because they don't work for stuff declared in the empty package
+ // (as specified in SLS, code inside non-empty packages cannot see
+ // declarations from the empty package, so compiler internals
+ // default to ignoring contents of the empty package)
+ // in contrast, staticModule and staticClass are designed
+ // to be a part of the reflection API and, therefore, they
+ // correctly resolve all names
+ private def topLevelSymbol(name: Name): Symbol = wrapMissing {
+ if (name.isTermName) mirror.staticModule(name.toString)
+ else mirror.staticClass(name.toString)
+ }
+
+ def topLevelDef(name: Name): Tree =
+ enclosingRun.units.toList.map(_.body).flatMap {
+ // it's okay to check `stat.symbol` here, because currently macros expand strictly after namer
+ // which means that by the earliest time one can call this method all top-level definitions will have already been entered
+ case PackageDef(_, stats) => stats filter (stat => stat.symbol != NoSymbol && stat.symbol == topLevelSymbol(name))
+ case _ => Nil // should never happen, but better be safe than sorry
+ }.headOption getOrElse EmptyTree
+
+ def topLevelRef(name: Name): Tree = {
+ if (topLevelDef(name).nonEmpty) gen.mkUnattributedRef(name)
+ else EmptyTree
+ }
+
+ def introduceTopLevel[T: PackageSpec](packagePrototype: T, definition: universe.ImplDef): RefTree =
+ introduceTopLevel(packagePrototype, List(definition)).head
+
+ def introduceTopLevel[T: PackageSpec](packagePrototype: T, definitions: universe.ImplDef*): List[RefTree] =
+ introduceTopLevel(packagePrototype, definitions.toList)
+
+ private def introduceTopLevel[T: PackageSpec](packagePrototype: T, definitions: List[universe.ImplDef]): List[RefTree] = {
+ val code @ PackageDef(pid, _) = implicitly[PackageSpec[T]].mkPackageDef(packagePrototype, definitions)
+ universe.currentRun.compileLate(code)
+ definitions map (definition => Select(pid, definition.name))
+ }
+
+ protected def mkPackageDef(name: String, stats: List[Tree]) = gen.mkPackageDef(name, stats)
+
+ protected def mkPackageDef(name: TermName, stats: List[Tree]) = gen.mkPackageDef(name.toString, stats)
+
+ protected def mkPackageDef(tree: RefTree, stats: List[Tree]) = PackageDef(tree, stats)
+
+ protected def mkPackageDef(sym: Symbol, stats: List[Tree]) = {
+ assert(sym hasFlag PACKAGE, s"expected a package or package class symbol, found: $sym")
+ gen.mkPackageDef(sym.fullName.toString, stats)
+ }
+}
diff --git a/src/compiler/scala/reflect/macros/runtime/Traces.scala b/src/compiler/scala/reflect/macros/contexts/Traces.scala
index 0238e9f84e..df47f6ba81 100644
--- a/src/compiler/scala/reflect/macros/runtime/Traces.scala
+++ b/src/compiler/scala/reflect/macros/contexts/Traces.scala
@@ -1,5 +1,5 @@
package scala.reflect.macros
-package runtime
+package contexts
trait Traces extends util.Traces {
self: Context =>
diff --git a/src/compiler/scala/reflect/macros/runtime/Typers.scala b/src/compiler/scala/reflect/macros/contexts/Typers.scala
index a51bee0fe8..4a1122b913 100644
--- a/src/compiler/scala/reflect/macros/runtime/Typers.scala
+++ b/src/compiler/scala/reflect/macros/contexts/Typers.scala
@@ -1,5 +1,7 @@
package scala.reflect.macros
-package runtime
+package contexts
+
+import scala.reflect.internal.Mode
trait Typers {
self: Context =>
@@ -9,7 +11,7 @@ trait Typers {
def openImplicits: List[ImplicitCandidate] = callsiteTyper.context.openImplicits.map(_.toImplicitCandidate)
/**
- * @see [[scala.tools.reflect.Toolbox.typeCheck]]
+ * @see [[scala.tools.reflect.ToolBox.typeCheck]]
*/
def typeCheck(tree: Tree, pt: Type = universe.WildcardType, silent: Boolean = false, withImplicitViewsDisabled: Boolean = false, withMacrosDisabled: Boolean = false): Tree = {
macroLogVerbose("typechecking %s with expected type %s, implicit views = %s, macros = %s".format(tree, pt, !withImplicitViewsDisabled, !withMacrosDisabled))
@@ -22,7 +24,7 @@ trait Typers {
// typechecking uses silent anyways (e.g. in typedSelect), so you'll only waste your time
// I'd advise fixing the root cause: finding why the context is not set to report errors
// (also see reflect.runtime.ToolBoxes.typeCheckExpr for a workaround that might work for you)
- wrapper(callsiteTyper.silent(_.typed(tree, universe.analyzer.EXPRmode, pt), reportAmbiguousErrors = false) match {
+ wrapper(callsiteTyper.silent(_.typed(tree, pt), reportAmbiguousErrors = false) match {
case universe.analyzer.SilentResultValue(result) =>
macroLogVerbose(result)
result
diff --git a/src/compiler/scala/reflect/macros/runtime/Context.scala b/src/compiler/scala/reflect/macros/runtime/Context.scala
deleted file mode 100644
index 8e8b0fcea1..0000000000
--- a/src/compiler/scala/reflect/macros/runtime/Context.scala
+++ /dev/null
@@ -1,28 +0,0 @@
-package scala.reflect.macros
-package runtime
-
-import scala.tools.nsc.Global
-
-abstract class Context extends scala.reflect.macros.Context
- with Aliases
- with Enclosures
- with Names
- with Reifiers
- with FrontEnds
- with Infrastructure
- with Typers
- with Parsers
- with Evals
- with ExprUtils
- with Traces {
-
- val universe: Global
-
- val mirror: universe.Mirror = universe.rootMirror
-
- val callsiteTyper: universe.analyzer.Typer
-
- val prefix: Expr[PrefixType]
-
- val expandee: Tree
-}
diff --git a/src/compiler/scala/reflect/macros/runtime/Enclosures.scala b/src/compiler/scala/reflect/macros/runtime/Enclosures.scala
deleted file mode 100644
index 2a4a22f81c..0000000000
--- a/src/compiler/scala/reflect/macros/runtime/Enclosures.scala
+++ /dev/null
@@ -1,24 +0,0 @@
-package scala.reflect.macros
-package runtime
-
-trait Enclosures {
- self: Context =>
-
- import universe._
- import mirror._
-
- private def site = callsiteTyper.context
- private def enclTrees = site.enclosingContextChain map (_.tree)
- private def enclPoses = enclosingMacros map (_.macroApplication.pos) filterNot (_ eq NoPosition)
-
- // vals are eager to simplify debugging
- // after all we wouldn't save that much time by making them lazy
- val macroApplication: Tree = expandee
- val enclosingClass: Tree = enclTrees collectFirst { case x: ImplDef => x } getOrElse EmptyTree
- val enclosingImplicits: List[ImplicitCandidate] = site.openImplicits.map(_.toImplicitCandidate)
- val enclosingMacros: List[Context] = this :: universe.analyzer.openMacros // include self
- val enclosingMethod: Tree = site.enclMethod.tree
- val enclosingPosition: Position = if (enclPoses.isEmpty) NoPosition else enclPoses.head.pos
- val enclosingUnit: CompilationUnit = universe.currentRun.currentUnit
- val enclosingRun: Run = universe.currentRun
-}
diff --git a/src/compiler/scala/reflect/macros/runtime/JavaReflectionRuntimes.scala b/src/compiler/scala/reflect/macros/runtime/JavaReflectionRuntimes.scala
new file mode 100644
index 0000000000..3ef11fad9d
--- /dev/null
+++ b/src/compiler/scala/reflect/macros/runtime/JavaReflectionRuntimes.scala
@@ -0,0 +1,31 @@
+package scala.reflect.macros
+package runtime
+
+import scala.reflect.runtime.ReflectionUtils
+import scala.reflect.macros.{Context => ApiContext}
+
+trait JavaReflectionRuntimes {
+ self: scala.tools.nsc.typechecker.Analyzer =>
+
+ trait JavaReflectionResolvers {
+ self: MacroRuntimeResolver =>
+
+ import global._
+
+ def resolveJavaReflectionRuntime(classLoader: ClassLoader): MacroRuntime = {
+ val implClass = Class.forName(className, true, classLoader)
+ val implMeths = implClass.getDeclaredMethods.find(_.getName == methName)
+ // relies on the fact that macro impls cannot be overloaded
+ // so every methName can resolve to at maximum one method
+ val implMeth = implMeths getOrElse { throw new NoSuchMethodException(s"$className.$methName") }
+ macroLogVerbose(s"successfully loaded macro impl as ($implClass, $implMeth)")
+ args => {
+ val implObj =
+ if (isBundle) implClass.getConstructor(classOf[ApiContext]).newInstance(args.c)
+ else ReflectionUtils.staticSingletonInstance(implClass)
+ val implArgs = if (isBundle) args.others else args.c +: args.others
+ implMeth.invoke(implObj, implArgs.asInstanceOf[Seq[AnyRef]]: _*)
+ }
+ }
+ }
+} \ No newline at end of file
diff --git a/src/compiler/scala/reflect/macros/runtime/MacroRuntimes.scala b/src/compiler/scala/reflect/macros/runtime/MacroRuntimes.scala
new file mode 100644
index 0000000000..0f89163803
--- /dev/null
+++ b/src/compiler/scala/reflect/macros/runtime/MacroRuntimes.scala
@@ -0,0 +1,74 @@
+package scala.reflect.macros
+package runtime
+
+import scala.collection.mutable.{Map => MutableMap}
+import scala.reflect.internal.Flags._
+import scala.reflect.runtime.ReflectionUtils
+import scala.tools.nsc.util.ScalaClassLoader
+import scala.tools.nsc.util.AbstractFileClassLoader
+
+trait MacroRuntimes extends JavaReflectionRuntimes with ScalaReflectionRuntimes {
+ self: scala.tools.nsc.typechecker.Analyzer =>
+
+ import global._
+ import definitions._
+
+ /** Produces a function that can be used to invoke macro implementation for a given macro definition:
+ * 1) Looks up macro implementation symbol in this universe.
+ * 2) Loads its enclosing class from the macro classloader.
+ * 3) Loads the companion of that enclosing class from the macro classloader.
+ * 4) Resolves macro implementation within the loaded companion.
+ *
+ * @return Requested runtime if macro implementation can be loaded successfully from either of the mirrors,
+ * `null` otherwise.
+ */
+ private val macroRuntimesCache = perRunCaches.newWeakMap[Symbol, MacroRuntime]
+ def macroRuntime(macroDef: Symbol): MacroRuntime = {
+ macroLogVerbose(s"looking for macro implementation: $macroDef")
+ if (fastTrack contains macroDef) {
+ macroLogVerbose("macro expansion is serviced by a fast track")
+ fastTrack(macroDef)
+ } else {
+ macroRuntimesCache.getOrElseUpdate(macroDef, new MacroRuntimeResolver(macroDef).resolveRuntime())
+ }
+ }
+
+ /** Macro classloader that is used to resolve and run macro implementations.
+ * Loads classes from -cp (aka the library classpath).
+ * Is also capable of detecting REPL and reusing its classloader.
+ *
+ * When -Xmacro-jit is enabled, we sometimes fall back to on-the-fly compilation of macro implementations,
+ * which compiles implementations into a virtual directory (very much like REPL does) and then conjures
+ * a classloader mapped to that virtual directory.
+ */
+ lazy val defaultMacroClassloader: ClassLoader = findMacroClassLoader()
+
+ /** Abstracts away resolution of macro runtimes.
+ */
+ type MacroRuntime = MacroArgs => Any
+ class MacroRuntimeResolver(val macroDef: Symbol) extends JavaReflectionResolvers
+ with ScalaReflectionResolvers {
+ val binding = loadMacroImplBinding(macroDef)
+ val isBundle = binding.isBundle
+ val className = binding.className
+ val methName = binding.methName
+
+ def resolveRuntime(): MacroRuntime = {
+ if (className == Predef_???.owner.javaClassName && methName == Predef_???.name.encoded) {
+ args => throw new AbortMacroException(args.c.enclosingPosition, "macro implementation is missing")
+ } else {
+ try {
+ macroLogVerbose(s"resolving macro implementation as $className.$methName (isBundle = $isBundle)")
+ macroLogVerbose(s"classloader is: ${ReflectionUtils.show(defaultMacroClassloader)}")
+ // resolveScalaReflectionRuntime(defaultMacroClassloader)
+ resolveJavaReflectionRuntime(defaultMacroClassloader)
+ } catch {
+ case ex: Exception =>
+ macroLogVerbose(s"macro runtime failed to load: ${ex.toString}")
+ macroDef setFlag IS_ERROR
+ null
+ }
+ }
+ }
+ }
+} \ No newline at end of file
diff --git a/src/compiler/scala/reflect/macros/runtime/ScalaReflectionRuntimes.scala b/src/compiler/scala/reflect/macros/runtime/ScalaReflectionRuntimes.scala
new file mode 100644
index 0000000000..1999e525ff
--- /dev/null
+++ b/src/compiler/scala/reflect/macros/runtime/ScalaReflectionRuntimes.scala
@@ -0,0 +1,33 @@
+package scala.reflect.macros
+package runtime
+
+import scala.reflect.runtime.{universe => ru}
+
+trait ScalaReflectionRuntimes {
+ self: scala.tools.nsc.typechecker.Analyzer =>
+
+ trait ScalaReflectionResolvers {
+ self: MacroRuntimeResolver =>
+
+ import global._
+
+ def resolveScalaReflectionRuntime(classLoader: ClassLoader): MacroRuntime = {
+ val macroMirror: ru.JavaMirror = ru.runtimeMirror(classLoader)
+ val implContainerSym = macroMirror.classSymbol(Class.forName(className, true, classLoader))
+ val implMethSym = implContainerSym.typeSignature.member(ru.TermName(methName)).asMethod
+ macroLogVerbose(s"successfully loaded macro impl as ($implContainerSym, $implMethSym)")
+ args => {
+ val implContainer =
+ if (isBundle) {
+ val implCtorSym = implContainerSym.typeSignature.member(ru.nme.CONSTRUCTOR).asMethod
+ macroMirror.reflectClass(implContainerSym).reflectConstructor(implCtorSym)(args.c)
+ } else {
+ macroMirror.reflectModule(implContainerSym.module.asModule).instance
+ }
+ val implMeth = macroMirror.reflect(implContainer).reflectMethod(implMethSym)
+ val implArgs = if (isBundle) args.others else args.c +: args.others
+ implMeth(implArgs: _*)
+ }
+ }
+ }
+}
diff --git a/src/compiler/scala/reflect/macros/runtime/package.scala b/src/compiler/scala/reflect/macros/runtime/package.scala
new file mode 100644
index 0000000000..9ef8200760
--- /dev/null
+++ b/src/compiler/scala/reflect/macros/runtime/package.scala
@@ -0,0 +1,5 @@
+package scala.reflect.macros
+
+package object runtime {
+ type Context = scala.reflect.macros.contexts.Context
+} \ No newline at end of file
diff --git a/src/compiler/scala/reflect/macros/util/Helpers.scala b/src/compiler/scala/reflect/macros/util/Helpers.scala
new file mode 100644
index 0000000000..ada8efa833
--- /dev/null
+++ b/src/compiler/scala/reflect/macros/util/Helpers.scala
@@ -0,0 +1,71 @@
+package scala.reflect.macros
+package util
+
+import scala.tools.nsc.typechecker.Analyzer
+
+trait Helpers {
+ self: Analyzer =>
+
+ import global._
+ import definitions._
+
+ /** Transforms the parameter lists of a macro impl.
+ * The `transform` function is invoked only for WeakTypeTag evidence parameters.
+ *
+ * The transformer takes two arguments: a value parameter from the parameter list
+ * and a type parameter that is witnessed by the value parameter.
+ *
+ * If the transformer returns NoSymbol, the value parameter is not included in the result.
+ * If the transformer returns another symbol, that symbol is included in the result instead of the value parameter.
+ *
+ * Despite being highly esoteric, this function significantly simplifies signature analysis.
+ * For example, it can be used to strip the evidence parameters from macroImpl.paramss (necessary when checking def <-> impl correspondence)
+ * or to streamline creation of the list of macro arguments.
+ */
+ def transformTypeTagEvidenceParams(macroImplRef: Tree, transform: (Symbol, Symbol) => Symbol): List[List[Symbol]] = {
+ val treeInfo.MacroImplReference(isBundle, owner, macroImpl, _) = macroImplRef
+ val paramss = macroImpl.paramss
+ if (paramss.isEmpty || paramss.last.isEmpty) return paramss // no implicit parameters in the signature => nothing to do
+ val rc =
+ if (isBundle) macroImpl.owner.tpe.member(nme.c)
+ else {
+ def cparam = paramss.head.head
+ if (paramss.head.isEmpty || !(cparam.tpe <:< MacroContextClass.tpe)) return paramss // no context parameter in the signature => nothing to do
+ cparam
+ }
+ def transformTag(param: Symbol): Symbol = param.tpe.dealias match {
+ case TypeRef(SingleType(SingleType(_, ac), universe), WeakTypeTagClass, targ :: Nil)
+ if ac == rc && universe == MacroContextUniverse =>
+ transform(param, targ.typeSymbol)
+ case _ =>
+ param
+ }
+ val transformed = paramss.last map transformTag filter (_ ne NoSymbol)
+ if (transformed.isEmpty) paramss.init else paramss.init :+ transformed
+ }
+
+ private def dealiasAndRewrap(tp: Type)(fn: Type => Type): Type = {
+ if (isRepeatedParamType(tp)) scalaRepeatedType(fn(tp.typeArgs.head.dealias))
+ else fn(tp.dealias)
+ }
+
+ /** Increases metalevel of the type, i.e. transforms:
+ * * T to c.Expr[T]
+ *
+ * @see Metalevels.scala for more information and examples about metalevels
+ */
+ def increaseMetalevel(pre: Type, tp: Type): Type = dealiasAndRewrap(tp) {
+ case tp => typeRef(pre, MacroContextExprClass, List(tp))
+ }
+
+ /** Decreases metalevel of the type, i.e. transforms:
+ * * c.Expr[T] to T
+ * * Anything else to Any
+ *
+ * @see Metalevels.scala for more information and examples about metalevels
+ */
+ def decreaseMetalevel(tp: Type): Type = dealiasAndRewrap(tp) {
+ case ExprClassOf(runtimeType) => runtimeType
+ case _ => AnyClass.tpe // so that macro impls with rhs = ??? don't screw up our inference
+ }
+} \ No newline at end of file
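To illustrate the metalevel shift performed by increaseMetalevel (including the repeated-parameter rewrapping in dealiasAndRewrap), here is a sketch of a macro def and the impl signature it corresponds to (not part of this diff; names are illustrative):

    import scala.language.experimental.macros
    import scala.reflect.macros.Context

    object SumImpls {
      // The def's `Int*` is lifted one metalevel to `c.Expr[Int]*`, and the result Int to c.Expr[Int].
      def sumImpl(c: Context)(xs: c.Expr[Int]*): c.Expr[Int] = {
        import c.universe._
        xs.foldLeft(reify(0)) { (acc, x) => reify(acc.splice + x.splice) }
      }
    }

    object Sum {
      def sum(xs: Int*): Int = macro SumImpls.sumImpl
    }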
diff --git a/src/compiler/scala/reflect/reify/Errors.scala b/src/compiler/scala/reflect/reify/Errors.scala
index 2e57bc59a8..860dfd72b2 100644
--- a/src/compiler/scala/reflect/reify/Errors.scala
+++ b/src/compiler/scala/reflect/reify/Errors.scala
@@ -7,7 +7,6 @@ trait Errors {
self: Reifier =>
import global._
- import definitions._
def defaultErrorPosition = {
val stack = currents collect { case t: Tree if t.pos != NoPosition => t.pos }
@@ -22,11 +21,6 @@ trait Errors {
throw new ReificationException(defaultErrorPosition, msg)
}
- def CannotReifySymbol(sym: Symbol) = {
- val msg = "implementation restriction: cannot reify symbol %s (%s)".format(sym, sym.accurateKindString)
- throw new ReificationException(defaultErrorPosition, msg)
- }
-
def CannotReifyCompoundTypeTreeWithNonEmptyBody(ctt: CompoundTypeTree) = {
val msg = "implementation restriction: cannot reify refinement type trees with non-empty bodies"
throw new ReificationException(ctt.pos, msg)
diff --git a/src/compiler/scala/reflect/reify/Phases.scala b/src/compiler/scala/reflect/reify/Phases.scala
index 1710cae2a5..4572caeb36 100644
--- a/src/compiler/scala/reflect/reify/Phases.scala
+++ b/src/compiler/scala/reflect/reify/Phases.scala
@@ -10,7 +10,6 @@ trait Phases extends Reshape
self: Reifier =>
import global._
- import definitions._
private var alreadyRun = false
@@ -26,7 +25,7 @@ trait Phases extends Reshape
if (reifyDebug) println("[reshape phase]")
tree = reshape.transform(tree)
if (reifyDebug) println("[interlude]")
- if (reifyDebug) println("reifee = " + (if (opt.showTrees) "\n" + nodePrinters.nodeToString(tree).trim else tree.toString))
+ if (reifyDebug) println("reifee = " + (if (settings.Xshowtrees || settings.XshowtreesCompact || settings.XshowtreesStringified) "\n" + nodePrinters.nodeToString(tree).trim else tree.toString))
if (reifyDebug) println("[calculate phase]")
calculate.traverse(tree)
@@ -41,4 +40,4 @@ trait Phases extends Reshape
result
}
-} \ No newline at end of file
+}
diff --git a/src/compiler/scala/reflect/reify/Reifier.scala b/src/compiler/scala/reflect/reify/Reifier.scala
index 47669f57b0..74949fce6d 100644
--- a/src/compiler/scala/reflect/reify/Reifier.scala
+++ b/src/compiler/scala/reflect/reify/Reifier.scala
@@ -6,10 +6,11 @@ import scala.reflect.macros.UnexpectedReificationException
import scala.reflect.reify.utils.Utils
/** Given a tree or a type, generate a tree that when executed at runtime produces the original tree or type.
- * See more info in the comments to ``reify'' in scala.reflect.api.Universe.
+ * See more info in the comments to `reify` in scala.reflect.api.Universe.
*
- * @author Martin Odersky
- * @version 2.10
+ * @author Martin Odersky
+ * @version 2.10
+ * @since 2.10
*/
abstract class Reifier extends States
with Phases
@@ -31,20 +32,20 @@ abstract class Reifier extends States
this.asInstanceOf[Reifier { val global: Reifier.this.global.type }]
override def hasReifier = true
- /**
- * For ``reifee'' and other reification parameters, generate a tree of the form
- *
+ /** For `reifee` and other reification parameters, generate a tree of the form
+ * {{{
* {
- * val $u: universe.type = <[ universe ]>
- * val $m: $u.Mirror = <[ mirror ]>
- * $u.Expr[T](rtree) // if data is a Tree
- * $u.TypeTag[T](rtree) // if data is a Type
+ * val \$u: universe.type = <[ universe ]>
+ * val \$m: \$u.Mirror = <[ mirror ]>
+ * \$u.Expr[T](rtree) // if data is a Tree
+ * \$u.TypeTag[T](rtree) // if data is a Type
* }
+ * }}}
*
* where
*
- * - `universe` is the tree that represents the universe the result will be bound to
- * - `mirror` is the tree that represents the mirror the result will be initially bound to
+ * - `universe` is the tree that represents the universe the result will be bound to.
+ * - `mirror` is the tree that represents the mirror the result will be initially bound to.
* - `rtree` is code that generates `reifee` at runtime.
* - `T` is the type that corresponds to `data`.
*
@@ -57,7 +58,7 @@ abstract class Reifier extends States
val result = reifee match {
case tree: Tree =>
- reifyTrace("reifying = ")(if (opt.showTrees) "\n" + nodePrinters.nodeToString(tree).trim else tree.toString)
+ reifyTrace("reifying = ")(if (settings.Xshowtrees || settings.XshowtreesCompact || settings.XshowtreesStringified) "\n" + nodePrinters.nodeToString(tree).trim else tree.toString)
reifyTrace("reifee is located at: ")(tree.pos)
reifyTrace("universe = ")(universe)
reifyTrace("mirror = ")(mirror)
@@ -140,4 +141,4 @@ abstract class Reifier extends States
throw new UnexpectedReificationException(defaultErrorPosition, "reification crashed", ex)
}
}
-} \ No newline at end of file
+}
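One way to see the { val $u = ...; val $m = ...; $u.Expr[T](rtree) } shape described in the doc comment above is to call reify against the runtime universe and inspect the resulting Expr; a small runnable sketch (the object name ReifyShapeDemo is made up):

    import scala.reflect.runtime.universe._

    object ReifyShapeDemo extends App {
      // reify is a macro whose expansion has the shape sketched in the Reifier doc comment.
      val e: Expr[List[Int]] = reify(List(1, 2))
      println(e.tree)           // the reified code: List.apply(1, 2)
      println(showRaw(e.tree))  // its raw Apply/Select/Ident/Literal structure
      println(e.staticType)     // List[Int] -- the T in $u.Expr[T](rtree)
    }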
diff --git a/src/compiler/scala/reflect/reify/States.scala b/src/compiler/scala/reflect/reify/States.scala
index 58455c9f3c..29bfa19845 100644
--- a/src/compiler/scala/reflect/reify/States.scala
+++ b/src/compiler/scala/reflect/reify/States.scala
@@ -4,7 +4,6 @@ trait States {
self: Reifier =>
import global._
- import definitions._
/** Encapsulates reifier state
*
diff --git a/src/compiler/scala/reflect/reify/Taggers.scala b/src/compiler/scala/reflect/reify/Taggers.scala
index cbaee41890..0bffe55403 100644
--- a/src/compiler/scala/reflect/reify/Taggers.scala
+++ b/src/compiler/scala/reflect/reify/Taggers.scala
@@ -1,14 +1,13 @@
package scala.reflect.reify
import scala.reflect.macros.{ReificationException, UnexpectedReificationException, TypecheckException}
-import scala.reflect.macros.runtime.Context
+import scala.reflect.macros.contexts.Context
abstract class Taggers {
val c: Context
import c.universe._
import definitions._
- import treeBuild._
val coreTags = Map(
ByteTpe -> nme.Byte,
@@ -59,7 +58,7 @@ abstract class Taggers {
val result =
tpe match {
case coreTpe if coreTags contains coreTpe =>
- val ref = if (tagModule.owner.isPackageClass) Ident(tagModule) else Select(prefix, tagModule.name)
+ val ref = if (tagModule.isTopLevel) Ident(tagModule) else Select(prefix, tagModule.name)
Select(ref, coreTags(coreTpe))
case _ =>
translatingReificationErrors(materializer)
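The coreTags map above is a shortcut: tags for primitives and a handful of other core types are never materialized from scratch, the macro just emits a reference to the corresponding val on the tag module, while everything else goes through translatingReificationErrors(materializer). A quick way to observe the difference (a sketch; the eq check assumes the emitted Select resolves to the cached TypeTag.Int instance, which is what the code above points it at):

    import scala.reflect.runtime.universe._

    object CoreTagDemo extends App {
      val intTag  = typeTag[Int]        // served by the coreTags shortcut
      val listTag = typeTag[List[Int]]  // materialized by the reifier

      println(intTag eq TypeTag.Int)    // true: a reference to the predefined tag
      println(listTag.tpe)              // List[Int]
    }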
diff --git a/src/compiler/scala/reflect/reify/codegen/GenAnnotationInfos.scala b/src/compiler/scala/reflect/reify/codegen/GenAnnotationInfos.scala
index dec491aabe..5a454e1e07 100644
--- a/src/compiler/scala/reflect/reify/codegen/GenAnnotationInfos.scala
+++ b/src/compiler/scala/reflect/reify/codegen/GenAnnotationInfos.scala
@@ -5,7 +5,6 @@ trait GenAnnotationInfos {
self: Reifier =>
import global._
- import definitions._
// usually annotations are reified as their originals from Modifiers
// however, when reifying free and tough types, we're forced to reify annotation infos as is
@@ -52,4 +51,4 @@ trait GenAnnotationInfos {
val reifiedAssocs = ann.assocs map (assoc => scalaFactoryCall(nme.Tuple2, reify(assoc._1), reifyClassfileAnnotArg(assoc._2)))
mirrorFactoryCall(nme.Annotation, reify(ann.atp), mkList(reifiedArgs), mkListMap(reifiedAssocs))
}
-} \ No newline at end of file
+}
diff --git a/src/compiler/scala/reflect/reify/codegen/GenNames.scala b/src/compiler/scala/reflect/reify/codegen/GenNames.scala
index 4abf88f475..4266c6f8d6 100644
--- a/src/compiler/scala/reflect/reify/codegen/GenNames.scala
+++ b/src/compiler/scala/reflect/reify/codegen/GenNames.scala
@@ -5,10 +5,9 @@ trait GenNames {
self: Reifier =>
import global._
- import definitions._
def reifyName(name: Name) = {
- val factory = if (name.isTypeName) nme.nmeNewTypeName else nme.nmeNewTermName
+ val factory = if (name.isTypeName) nme.TypeName else nme.TermName
mirrorCall(factory, Literal(Constant(name.toString)))
}
-} \ No newline at end of file
+}
diff --git a/src/compiler/scala/reflect/reify/codegen/GenPositions.scala b/src/compiler/scala/reflect/reify/codegen/GenPositions.scala
index 8c5db04454..1d151c5135 100644
--- a/src/compiler/scala/reflect/reify/codegen/GenPositions.scala
+++ b/src/compiler/scala/reflect/reify/codegen/GenPositions.scala
@@ -5,7 +5,6 @@ trait GenPositions {
self: Reifier =>
import global._
- import definitions._
// we do not reify positions because this inflates resulting trees, but doesn't buy us anything
// where would one use positions? right, in error messages
@@ -14,4 +13,4 @@ trait GenPositions {
// however both macros and toolboxes have their own means to report errors in synthetic trees
def reifyPosition(pos: Position): Tree =
reifyMirrorObject(NoPosition)
-} \ No newline at end of file
+}
diff --git a/src/compiler/scala/reflect/reify/codegen/GenSymbols.scala b/src/compiler/scala/reflect/reify/codegen/GenSymbols.scala
index 47c966ea24..90fb41f80b 100644
--- a/src/compiler/scala/reflect/reify/codegen/GenSymbols.scala
+++ b/src/compiler/scala/reflect/reify/codegen/GenSymbols.scala
@@ -7,7 +7,6 @@ trait GenSymbols {
self: Reifier =>
import global._
- import definitions._
/** Symbol table of the reifee.
*
@@ -43,7 +42,7 @@ trait GenSymbols {
else if (sym.isPackage)
mirrorMirrorCall(nme.staticPackage, reify(sym.fullName))
else if (sym.isLocatable) {
- /** This is a fancy conundrum that stems from the fact that Scala allows
+ /* This is a fancy conundrum that stems from the fact that Scala allows
* packageless packages and packageless objects with the same names in the same program.
*
* For more details read the docs to staticModule and staticPackage.
@@ -101,7 +100,7 @@ trait GenSymbols {
def reifyFreeTerm(binding: Tree): Tree =
reifyIntoSymtab(binding.symbol) { sym =>
if (reifyDebug) println("Free term" + (if (sym.isCapturedVariable) " (captured)" else "") + ": " + sym + "(" + sym.accurateKindString + ")")
- val name = newTermName(nme.REIFY_FREE_PREFIX + sym.name + (if (sym.isType) nme.REIFY_FREE_THIS_SUFFIX else ""))
+ val name = newTermName("" + nme.REIFY_FREE_PREFIX + sym.name + (if (sym.isType) nme.REIFY_FREE_THIS_SUFFIX else ""))
// We need to note whether the free value being reified is stable or not to guide subsequent reflective compilation.
// Here's why reflection compilation needs our help.
//
@@ -142,14 +141,14 @@ trait GenSymbols {
reifyIntoSymtab(binding.symbol) { sym =>
if (reifyDebug) println("Free type: %s (%s)".format(sym, sym.accurateKindString))
state.reificationIsConcrete = false
- val name = newTermName(nme.REIFY_FREE_PREFIX + sym.name)
+ val name: TermName = nme.REIFY_FREE_PREFIX append sym.name
Reification(name, binding, mirrorBuildCall(nme.newFreeType, reify(sym.name.toString), mirrorBuildCall(nme.flagsFromBits, reify(sym.flags)), reify(origin(sym))))
}
def reifySymDef(sym: Symbol): Tree =
reifyIntoSymtab(sym) { sym =>
if (reifyDebug) println("Sym def: %s (%s)".format(sym, sym.accurateKindString))
- val name = newTermName(nme.REIFY_SYMDEF_PREFIX + sym.name)
+ val name: TermName = nme.REIFY_SYMDEF_PREFIX append sym.name
def reifiedOwner = if (sym.owner.isLocatable) reify(sym.owner) else reifySymDef(sym.owner)
Reification(name, Ident(sym), mirrorBuildCall(nme.newNestedSymbol, reifiedOwner, reify(sym.name), reify(sym.pos), mirrorBuildCall(nme.flagsFromBits, reify(sym.flags)), reify(sym.isClass)))
}
@@ -173,7 +172,7 @@ trait GenSymbols {
val reification = reificode(sym)
import reification.{name, binding}
val tree = reification.tree updateAttachment ReifyBindingAttachment(binding)
- state.symtab += (sym, name, tree)
+ state.symtab += (sym, name.toTermName, tree)
}
fromSymtab
}
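A free term of the kind reifyFreeTerm produces is what lets reified code close over local values and still be evaluated later: the symbol carries the name (so the tree typechecks) and the captured value (so it runs). A runnable sketch, assuming scala-compiler.jar is on the classpath for the toolbox:

    import scala.reflect.runtime.universe._
    import scala.reflect.runtime.{currentMirror => cm}
    import scala.tools.reflect.ToolBox   // toolbox support lives in scala-compiler.jar

    object FreeTermDemo extends App {
      val x = 42
      // `x` has no stable path the reifier could refer to, so it is recorded
      // as a free term symbol that also remembers the value 42.
      val e: Expr[Int] = reify(x + 1)
      println(e.tree)                    // roughly: x.+(1), with `x` bound to a free term

      val tb = cm.mkToolBox()
      println(tb.eval(e.tree))           // 43: reflective compilation substitutes the captured value
    }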
diff --git a/src/compiler/scala/reflect/reify/codegen/GenTrees.scala b/src/compiler/scala/reflect/reify/codegen/GenTrees.scala
index 9894e359b4..78bdf7e132 100644
--- a/src/compiler/scala/reflect/reify/codegen/GenTrees.scala
+++ b/src/compiler/scala/reflect/reify/codegen/GenTrees.scala
@@ -15,7 +15,7 @@ trait GenTrees {
/**
* Reify a tree.
- * For internal use only, use ``reified'' instead.
+ * For internal use only, use `reified` instead.
*/
def reifyTree(tree: Tree): Tree = {
assert(tree != null, "tree is null")
@@ -29,12 +29,12 @@ trait GenTrees {
// the idea behind the new reincarnation of reifier is a simple maxim:
//
- // never call ``reifyType'' to reify a tree
+ // never call `reifyType` to reify a tree
//
// this works because the stuff we are reifying was once represented with trees only
// and lexical scope information can be fully captured by reifying symbols
//
- // to enable this idyll, we work hard in the ``Reshape'' phase
+ // to enable this idyll, we work hard in the `Reshape` phase
// which replaces all types with equivalent trees and works around non-idempotencies of the typechecker
//
// why bother? because this brings method to the madness
@@ -45,7 +45,9 @@ trait GenTrees {
case global.EmptyTree =>
reifyMirrorObject(EmptyTree)
case global.emptyValDef =>
- mirrorBuildSelect(nme.emptyValDef)
+ mirrorSelect(nme.emptyValDef)
+ case global.pendingSuperCall =>
+ mirrorSelect(nme.pendingSuperCall)
case FreeDef(_, _, _, _, _) =>
reifyNestedFreeDef(tree)
case FreeRef(_, _) =>
@@ -63,8 +65,8 @@ trait GenTrees {
}
// usually we don't reify symbols/types, because they can be re-inferred during subsequent reflective compilation
- // however, reification of AnnotatedTypes is special. see ``reifyType'' to find out why.
- if (reifyTreeSymbols && tree.hasSymbol) {
+ // however, reification of AnnotatedTypes is special. see `reifyType` to find out why.
+ if (reifyTreeSymbols && tree.hasSymbolField) {
if (reifyDebug) println("reifying symbol %s for tree %s".format(tree.symbol, tree))
rtree = mirrorBuildCall(nme.setSymbol, rtree, reify(tree.symbol))
}
@@ -84,13 +86,13 @@ trait GenTrees {
case TreeSplice(splicee) =>
if (reifyDebug) println("splicing " + tree)
- // see ``Metalevels'' for more info about metalevel breaches
+ // see `Metalevels` for more info about metalevel breaches
// and about how we deal with splices that contain them
- val isMetalevelBreach = splicee exists (sub => sub.hasSymbol && sub.symbol != NoSymbol && sub.symbol.metalevel > 0)
- val isRuntimeEval = splicee exists (sub => sub.hasSymbol && sub.symbol == ExprSplice)
+ val isMetalevelBreach = splicee exists (sub => sub.hasSymbolField && sub.symbol != NoSymbol && sub.symbol.metalevel > 0)
+ val isRuntimeEval = splicee exists (sub => sub.hasSymbolField && sub.symbol == ExprSplice)
if (isMetalevelBreach || isRuntimeEval) {
// we used to convert dynamic splices into runtime evals transparently, but we no longer do that
- // why? see comments in ``Metalevels''
+ // why? see comments in `Metalevels`
// if (reifyDebug) println("splicing has failed: cannot splice when facing a metalevel breach")
// EmptyTree
CannotReifyRuntimeSplice(tree)
@@ -100,7 +102,7 @@ trait GenTrees {
// we intentionally don't care about the prefix (the first underscore in the `ReifiedTree` pattern match)
case ReifiedTree(_, _, inlinedSymtab, rtree, _, _, _) =>
if (reifyDebug) println("inlining the splicee")
- // all free vars local to the enclosing reifee should've already been inlined by ``Metalevels''
+ // all free vars local to the enclosing reifee should've already been inlined by `Metalevels`
for (sym <- inlinedSymtab.syms if sym.isLocalToReifee)
abort("local free var, should have already been inlined by Metalevels: " + inlinedSymtab.symDef(sym))
state.symtab ++= inlinedSymtab
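The TreeSplice handling above backs the usual composition pattern: an expression that is already an Expr gets grafted into a larger reify instead of being reified again. A minimal runnable sketch against the runtime universe:

    import scala.reflect.runtime.universe._

    object SpliceDemo extends App {
      val twentyOne: Expr[Int] = reify(21)
      // The splice is not itself reified; the tree held by `twentyOne` is
      // inserted into the result rather than reconstructed.
      val answer: Expr[Int] = reify(twentyOne.splice * 2)
      println(answer.tree)   // roughly: 21 * 2
    }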
diff --git a/src/compiler/scala/reflect/reify/codegen/GenTypes.scala b/src/compiler/scala/reflect/reify/codegen/GenTypes.scala
index bb7e1f9b56..6c94726231 100644
--- a/src/compiler/scala/reflect/reify/codegen/GenTypes.scala
+++ b/src/compiler/scala/reflect/reify/codegen/GenTypes.scala
@@ -9,7 +9,7 @@ trait GenTypes {
/**
* Reify a type.
- * For internal use only, use ``reified'' instead.
+ * For internal use only, use `reified` instead.
*/
def reifyType(tpe: Type): Tree = {
assert(tpe != null, "tpe is null")
@@ -73,7 +73,6 @@ trait GenTypes {
if (reifyDebug) println("splicing " + tpe)
val tagFlavor = if (concrete) tpnme.TypeTag.toString else tpnme.WeakTypeTag.toString
- val key = (tagFlavor, tpe.typeSymbol)
// if this fails, it might produce the dreaded "erroneous or inaccessible type" error
// to find out the whereabouts of the error run scalac with -Ydebug
if (reifyDebug) println("launching implicit search for %s.%s[%s]".format(universe, tagFlavor, tpe))
@@ -156,7 +155,7 @@ trait GenTypes {
*/
private def reifySemiConcreteTypeMember(tpe: Type): Tree = tpe match {
case tpe @ TypeRef(pre @ SingleType(prepre, presym), sym, args) if sym.isAbstractType && !sym.isExistential =>
- return mirrorFactoryCall(nme.TypeRef, reify(pre), mirrorBuildCall(nme.selectType, reify(sym.owner), reify(sym.name.toString)), reify(args))
+ mirrorFactoryCall(nme.TypeRef, reify(pre), mirrorBuildCall(nme.selectType, reify(sym.owner), reify(sym.name.toString)), reify(args))
}
/** Reify an annotated type, i.e. the one that makes us deal with AnnotationInfos */
diff --git a/src/compiler/scala/reflect/reify/codegen/GenUtils.scala b/src/compiler/scala/reflect/reify/codegen/GenUtils.scala
index 49877b4286..e0570d61f2 100644
--- a/src/compiler/scala/reflect/reify/codegen/GenUtils.scala
+++ b/src/compiler/scala/reflect/reify/codegen/GenUtils.scala
@@ -5,7 +5,6 @@ trait GenUtils {
self: Reifier =>
import global._
- import definitions._
def reifyList(xs: List[Any]): Tree =
mkList(xs map reify)
@@ -31,41 +30,32 @@ trait GenUtils {
def call(fname: String, args: Tree*): Tree =
Apply(termPath(fname), args.toList)
- def mirrorSelect(name: String): Tree =
- termPath(nme.UNIVERSE_PREFIX + name)
+ def mirrorSelect(name: String): Tree = termPath(nme.UNIVERSE_PREFIX + name)
+ def mirrorSelect(name: TermName): Tree = mirrorSelect(name.toString)
- def mirrorBuildSelect(name: String): Tree =
- termPath(nme.UNIVERSE_BUILD_PREFIX + name)
-
- def mirrorMirrorSelect(name: String): Tree =
- termPath(nme.MIRROR_PREFIX + name)
+ def mirrorMirrorSelect(name: TermName): Tree =
+ termPath("" + nme.MIRROR_PREFIX + name)
def mirrorCall(name: TermName, args: Tree*): Tree =
- call("" + (nme.UNIVERSE_PREFIX append name), args: _*)
-
- def mirrorCall(name: String, args: Tree*): Tree =
- call(nme.UNIVERSE_PREFIX + name, args: _*)
+ call("" + nme.UNIVERSE_PREFIX + name, args: _*)
def mirrorBuildCall(name: TermName, args: Tree*): Tree =
- call("" + (nme.UNIVERSE_BUILD_PREFIX append name), args: _*)
-
- def mirrorBuildCall(name: String, args: Tree*): Tree =
- call(nme.UNIVERSE_BUILD_PREFIX + name, args: _*)
+ call("" + nme.UNIVERSE_BUILD_PREFIX + name, args: _*)
def mirrorMirrorCall(name: TermName, args: Tree*): Tree =
- call("" + (nme.MIRROR_PREFIX append name), args: _*)
-
- def mirrorMirrorCall(name: String, args: Tree*): Tree =
- call(nme.MIRROR_PREFIX + name, args: _*)
+ call("" + nme.MIRROR_PREFIX + name, args: _*)
def mirrorFactoryCall(value: Product, args: Tree*): Tree =
mirrorFactoryCall(value.productPrefix, args: _*)
- def mirrorFactoryCall(prefix: String, args: Tree*): Tree =
- mirrorCall(prefix, args: _*)
+ def mirrorFactoryCall(prefix: TermName, args: Tree*): Tree =
+ mirrorCall("" + prefix, args: _*)
+
+ def scalaFactoryCall(name: TermName, args: Tree*): Tree =
+ call(s"scala.$name.apply", args: _*)
def scalaFactoryCall(name: String, args: Tree*): Tree =
- call("scala." + name + ".apply", args: _*)
+ scalaFactoryCall(name: TermName, args: _*)
def mkList(args: List[Tree]): Tree =
scalaFactoryCall("collection.immutable.List", args: _*)
@@ -91,22 +81,6 @@ trait GenUtils {
/** An (unreified) path that refers to term definition with given fully qualified name */
def termPath(fullname: String): Tree = path(fullname, newTermName)
- /** An (unreified) path that refers to type definition with given fully qualified name */
- def typePath(fullname: String): Tree = path(fullname, newTypeName)
-
- def isTough(tpe: Type) = {
- def isTough(tpe: Type) = tpe match {
- case _: RefinedType => true
- case _: ExistentialType => true
- case _: ClassInfoType => true
- case _: MethodType => true
- case _: PolyType => true
- case _ => false
- }
-
- tpe != null && (tpe exists isTough)
- }
-
object TypedOrAnnotated {
def unapply(tree: Tree): Option[Tree] = tree match {
case ty @ Typed(_, _) =>
@@ -118,15 +92,6 @@ trait GenUtils {
}
}
- def isAnnotated(tpe: Type) = {
- def isAnnotated(tpe: Type) = tpe match {
- case _: AnnotatedType => true
- case _ => false
- }
-
- tpe != null && (tpe exists isAnnotated)
- }
-
def isSemiConcreteTypeMember(tpe: Type) = tpe match {
case TypeRef(SingleType(_, _), sym, _) if sym.isAbstractType && !sym.isExistential => true
case _ => false
@@ -145,4 +110,4 @@ trait GenUtils {
if (origin == "") origin = "of unknown origin"
origin
}
-} \ No newline at end of file
+}
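The helpers above ultimately bottom out in termPath, which turns a dotted name such as "scala.collection.immutable.List" into a chain of selections. A standalone re-creation of that idea against the runtime universe (the reifier does the same against the compiler's trees; TermName(...) is the 2.11 spelling, newTermName on 2.10):

    import scala.reflect.runtime.universe._

    object TermPathSketch extends App {
      // Fold a fully qualified name into Ident/Select nodes, like GenUtils.termPath.
      def termPath(fullname: String): Tree = {
        val parts = fullname.split('.')
        parts.tail.foldLeft(Ident(TermName(parts.head)): Tree)((qual, part) => Select(qual, TermName(part)))
      }

      println(termPath("scala.collection.immutable.List"))         // scala.collection.immutable.List
      println(showRaw(termPath("scala.reflect.runtime.universe")))  // nested Select(..., TermName("universe"))
    }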
diff --git a/src/compiler/scala/reflect/reify/package.scala b/src/compiler/scala/reflect/reify/package.scala
index 6777bb0a50..d3cae3d123 100644
--- a/src/compiler/scala/reflect/reify/package.scala
+++ b/src/compiler/scala/reflect/reify/package.scala
@@ -1,11 +1,11 @@
-package scala.reflect
+package scala
+package reflect
-import scala.language.implicitConversions
-import scala.reflect.macros.{Context, ReificationException, UnexpectedReificationException}
+import scala.reflect.macros.ReificationException
import scala.tools.nsc.Global
package object reify {
- private def mkReifier(global1: Global)(typer: global1.analyzer.Typer, universe: global1.Tree, mirror: global1.Tree, reifee: Any, concrete: Boolean = false): Reifier { val global: global1.type } = {
+ private def mkReifier(global1: Global)(typer: global1.analyzer.Typer, universe: global1.Tree, mirror: global1.Tree, reifee: Any, concrete: Boolean): Reifier { val global: global1.type } = {
val typer1: typer.type = typer
val universe1: universe.type = universe
val mirror1: mirror.type = mirror
@@ -24,7 +24,8 @@ package object reify {
private[reify] def mkDefaultMirrorRef(global: Global)(universe: global.Tree, typer0: global.analyzer.Typer): global.Tree = {
import global._
- import definitions._
+ import definitions.JavaUniverseClass
+
val enclosingErasure = {
val rClassTree = reifyEnclosingRuntimeClass(global)(typer0)
// HACK around SI-6259
@@ -59,7 +60,7 @@ package object reify {
if (concrete) throw new ReificationException(enclosingMacroPosition, "tpe %s is an unresolved spliceable type".format(tpe))
}
- tpe match {
+ tpe.dealiasWiden match {
case TypeRef(_, ArrayClass, componentTpe :: Nil) =>
val componentErasure = reifyRuntimeClass(global)(typer0, componentTpe, concrete)
gen.mkMethodCall(arrayClassMethod, List(componentErasure))
@@ -74,7 +75,6 @@ package object reify {
// a class/object body, this will return an EmptyTree.
def reifyEnclosingRuntimeClass(global: Global)(typer0: global.analyzer.Typer): global.Tree = {
import global._
- import definitions._
def isThisInScope = typer0.context.enclosingContextChain exists (_.tree.isInstanceOf[ImplDef])
if (isThisInScope) {
val enclosingClasses = typer0.context.enclosingContextChain map (_.tree) collect { case classDef: ClassDef => classDef }
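The ArrayClass branch above computes the runtime class of Array[T] from the erasure of T via arrayClassMethod; in plain Java reflection terms, the generated call boils down to building an array class from its component class. A small sketch of just that computation (not the compiler's code path):

    object ArrayErasureSketch extends App {
      // What arrayClass(componentErasure) amounts to at runtime.
      def arrayClassOf(component: Class[_]): Class[_] =
        java.lang.reflect.Array.newInstance(component, 0).getClass

      println(arrayClassOf(classOf[Int]))     // class [I
      println(arrayClassOf(classOf[String]))  // class [Ljava.lang.String;
    }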
diff --git a/src/compiler/scala/reflect/reify/phases/Calculate.scala b/src/compiler/scala/reflect/reify/phases/Calculate.scala
index 4d1e22abe7..abd179b24b 100644
--- a/src/compiler/scala/reflect/reify/phases/Calculate.scala
+++ b/src/compiler/scala/reflect/reify/phases/Calculate.scala
@@ -5,7 +5,6 @@ trait Calculate {
self: Reifier =>
import global._
- import definitions._
implicit class RichCalculateSymbol(sym: Symbol) {
def metalevel: Int = { assert(sym != null && sym != NoSymbol); localSymbols.getOrElse(sym, 0) }
@@ -30,7 +29,7 @@ trait Calculate {
* Merely traverses the reifiee and records local symbols along with their metalevels.
*/
val calculate = new Traverser {
- // see the explanation of metalevels in ``Metalevels''
+ // see the explanation of metalevels in `Metalevels`
var currMetalevel = 1
override def traverse(tree: Tree): Unit = tree match {
diff --git a/src/compiler/scala/reflect/reify/phases/Metalevels.scala b/src/compiler/scala/reflect/reify/phases/Metalevels.scala
index fbbd12a42f..c69263399f 100644
--- a/src/compiler/scala/reflect/reify/phases/Metalevels.scala
+++ b/src/compiler/scala/reflect/reify/phases/Metalevels.scala
@@ -1,16 +1,17 @@
package scala.reflect.reify
package phases
+import scala.collection.{ mutable }
+
trait Metalevels {
self: Reifier =>
import global._
- import definitions._
/**
* Makes sense of cross-stage bindings.
*
- * ================
+ * ----------------
*
* Analysis of cross-stage bindings becomes convenient if we introduce the notion of metalevels.
* Metalevel of a tree is a number that gets incremented every time you reify something and gets decremented when you splice something.
@@ -32,27 +33,27 @@ trait Metalevels {
*
* 1) symbol.metalevel < curr_metalevel. In this case the reifier will generate a free variable
* that captures both the name of the symbol (to be compiled successfully) and its value (to be run successfully).
- * For example, x in Example 1 will be reified as follows: Ident(newFreeVar("x", IntClass.tpe, x))
+ * For example, x in Example 1 will be reified as follows: Ident(newFreeVar("x", IntTpe, x))
*
* 2) symbol.metalevel > curr_metalevel. This leads to a metalevel breach that violates intuitive perception of splicing.
* As defined in macro spec, splicing takes a tree and inserts it into another tree - as simple as that.
* However, how exactly do we do that in the case of y.splice? In this very scenario we can use dataflow analysis and inline it,
* but what if y were a var, and what if it were calculated randomly at runtime?
*
- * This question has a genuinely simple answer. Sure, we cannot resolve such splices statically (i.e. during macro expansion of ``reify''),
+ * This question has a genuinely simple answer. Sure, we cannot resolve such splices statically (i.e. during macro expansion of `reify`),
* but now we have runtime toolboxes, so no one stops us from picking up that reified tree and evaluating it at runtime
- * (in fact, this is something that ``Expr.splice'' does transparently).
+ * (in fact, this is something that `Expr.splice` does transparently).
*
* This is akin to early vs late binding dilemma.
* The former is faster; plus, the latter (implemented with reflection) might not work because of visibility issues or might not be available on all platforms.
* But the latter still has its uses, so I'm allowing metalevel breaches, but introducing the -Xlog-runtime-evals to log them.
*
- * upd. We no longer do that. In case of a runaway ``splice'' inside a `reify`, one will get a static error.
+ * upd. We no longer do that. In case of a runaway `splice` inside a `reify`, one will get a static error.
* Why? Unfortunately, the cute idea of transparently converting between static and dynamic splices has failed.
* 1) Runtime eval that services dynamic splices requires scala-compiler.jar, which might not be on library classpath
* 2) Runtime eval incurs a severe performance penalty, so it's better to be explicit about it
*
- * ================
+ * ----------------
*
* As we can see, the only problem is the fact that lhs'es of `splice` can be code blocks that can capture variables from the outside.
* Code inside the lhs of a `splice` is not reified, while the code from the enclosing reify is.
@@ -71,7 +72,7 @@ trait Metalevels {
* Since the result of the inner reify is wrapped in a splice, it won't be reified
* together with the other parts of the outer reify, but will be inserted into that result verbatim.
*
- * The inner reify produces an Expr[Int] that wraps Ident(freeVar("x", IntClass.tpe, x)).
+ * The inner reify produces an Expr[Int] that wraps Ident(freeVar("x", IntTpe, x)).
* However the freevar the reification points to will vanish when the compiler processes the outer reify.
* That's why we need to replace that freevar with a regular symbol that will point to reified x.
*
@@ -102,7 +103,7 @@ trait Metalevels {
*/
val metalevels = new Transformer {
var insideSplice = false
- var inlineableBindings = scala.collection.mutable.Map[TermName, Tree]()
+ val inlineableBindings = mutable.Map[TermName, Tree]()
def withinSplice[T](op: => T) = {
val old = insideSplice
@@ -124,7 +125,7 @@ trait Metalevels {
withinSplice { super.transform(TreeSplice(ReifiedTree(universe, mirror, symtab1, rtree, tpe, rtpe, concrete))) }
case TreeSplice(splicee) =>
if (reifyDebug) println("entering splice: " + splicee)
- val breaches = splicee filter (sub => sub.hasSymbol && sub.symbol != NoSymbol && sub.symbol.metalevel > 0)
+ val breaches = splicee filter (sub => sub.hasSymbolField && sub.symbol != NoSymbol && sub.symbol.metalevel > 0)
if (!insideSplice && breaches.nonEmpty) {
// we used to convert dynamic splices into runtime evals transparently, but we no longer do that
// why? see comments above
@@ -135,7 +136,7 @@ trait Metalevels {
} else {
withinSplice { super.transform(tree) }
}
- // todo. also inline usages of ``inlineableBindings'' in the symtab itself
+ // todo. also inline usages of `inlineableBindings` in the symtab itself
// e.g. a free$Foo can well use free$x, if Foo is path-dependent w.r.t x
// FreeRef(_, _) check won't work, because metalevels of symbol table and body are different, hence, freerefs in symbol table look different from freerefs in body
case FreeRef(_, name) if inlineableBindings contains name =>
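The inlineableBindings transformer above is what makes the nested pattern from the comment work: an inner reify whose result is immediately spliced back has its free reference rewired to the outer reification. A sketch of that exact pattern against the runtime universe (the printed tree shape may differ slightly between 2.10 and 2.11):

    import scala.reflect.runtime.universe._

    object NestedReifyDemo extends App {
      // The example from the comment above: the inner reify sees `x` one metalevel up,
      // and the splice hands its result straight back to the enclosing reify, which
      // then repoints the inner free reference at its own reification of x.
      val composed: Expr[Int] = reify {
        val x = 2
        val y = reify(x)
        y.splice
      }
      println(composed.tree)
    }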
diff --git a/src/compiler/scala/reflect/reify/phases/Reify.scala b/src/compiler/scala/reflect/reify/phases/Reify.scala
index 8e13a45cdb..143424dac5 100644
--- a/src/compiler/scala/reflect/reify/phases/Reify.scala
+++ b/src/compiler/scala/reflect/reify/phases/Reify.scala
@@ -2,7 +2,6 @@ package scala.reflect.reify
package phases
import scala.runtime.ScalaRunTime.isAnyVal
-import scala.runtime.ScalaRunTime.isTuple
import scala.reflect.reify.codegen._
trait Reify extends GenSymbols
@@ -16,7 +15,6 @@ trait Reify extends GenSymbols
self: Reifier =>
import global._
- import definitions._
private object reifyStack {
def currents: List[Any] = state.reifyStack
@@ -37,7 +35,7 @@ trait Reify extends GenSymbols
/**
* Reifies any supported value.
- * For internal use only, use ``reified'' instead.
+ * For internal use only, use `reified` instead.
*/
def reify(reifee: Any): Tree = reifyStack.push(reifee)(reifee match {
// before adding some case here, in global scope, please, consider
@@ -59,4 +57,4 @@ trait Reify extends GenSymbols
case _ =>
throw new Error("reifee %s of type %s is not supported".format(reifee, reifee.getClass))
})
-} \ No newline at end of file
+}
diff --git a/src/compiler/scala/reflect/reify/phases/Reshape.scala b/src/compiler/scala/reflect/reify/phases/Reshape.scala
index a320718084..5f53f558b4 100644
--- a/src/compiler/scala/reflect/reify/phases/Reshape.scala
+++ b/src/compiler/scala/reflect/reify/phases/Reshape.scala
@@ -48,13 +48,13 @@ trait Reshape {
val Template(parents, self, body) = impl
var body1 = trimAccessors(classDef, reshapeLazyVals(body))
body1 = trimSyntheticCaseClassMembers(classDef, body1)
- var impl1 = Template(parents, self, body1).copyAttrs(impl)
+ val impl1 = Template(parents, self, body1).copyAttrs(impl)
ClassDef(mods, name, params, impl1).copyAttrs(classDef)
case moduledef @ ModuleDef(mods, name, impl) =>
val Template(parents, self, body) = impl
var body1 = trimAccessors(moduledef, reshapeLazyVals(body))
body1 = trimSyntheticCaseClassMembers(moduledef, body1)
- var impl1 = Template(parents, self, body1).copyAttrs(impl)
+ val impl1 = Template(parents, self, body1).copyAttrs(impl)
ModuleDef(mods, name, impl1).copyAttrs(moduledef)
case template @ Template(parents, self, body) =>
val discardedParents = parents collect { case tt: TypeTree => tt } filter isDiscarded
@@ -89,8 +89,8 @@ trait Reshape {
}
private def undoMacroExpansion(tree: Tree): Tree =
- tree.attachments.get[MacroExpansionAttachment] match {
- case Some(MacroExpansionAttachment(original)) =>
+ tree.attachments.get[analyzer.MacroExpansionAttachment] match {
+ case Some(analyzer.MacroExpansionAttachment(original, _)) =>
original match {
// this hack is necessary until I fix implicit macros
// so far tag materialization is implemented by sneaky macros hidden in scala-compiler.jar
@@ -116,7 +116,6 @@ trait Reshape {
private def toPreTyperModifiers(mods: Modifiers, sym: Symbol) = {
if (!sym.annotations.isEmpty) {
- val Modifiers(flags, privateWithin, annotations) = mods
val postTyper = sym.annotations filter (_.original != EmptyTree)
if (reifyDebug && !postTyper.isEmpty) println("reify symbol annotations for: " + sym)
if (reifyDebug && !postTyper.isEmpty) println("originals are: " + sym.annotations)
@@ -131,8 +130,8 @@ trait Reshape {
*
* NB: This is the trickiest part of reification!
*
- * In most cases, we're perfectly fine to reify a Type itself (see ``reifyType'').
- * However if the type involves a symbol declared inside the quasiquote (i.e. registered in ``boundSyms''),
+ * In most cases, we're perfectly fine to reify a Type itself (see `reifyType`).
+ * However if the type involves a symbol declared inside the quasiquote (i.e. registered in `boundSyms`),
* then we cannot reify it, or otherwise subsequent reflective compilation will fail.
*
* Why will it fail? Because reified deftrees (e.g. ClassDef(...)) will generate fresh symbols during that compilation,
@@ -140,7 +139,7 @@ trait Reshape {
* https://issues.scala-lang.org/browse/SI-5230
*
* To deal with this unpleasant fact, we need to fall back from types to equivalent trees (after all, parser trees don't contain any types, just trees, so it should be possible).
- * Luckily, these original trees get preserved for us in the ``original'' field when Trees get transformed into TypeTrees.
+ * Luckily, these original trees get preserved for us in the `original` field when Trees get transformed into TypeTrees.
* And if an original of a type tree is empty, we can safely assume that this type is non-essential (e.g. was inferred/generated by the compiler).
* In that case the type can be omitted (e.g. reified as an empty TypeTree), since it will be inferred again later on.
*
@@ -157,8 +156,8 @@ trait Reshape {
* upd. There are also problems with CompoundTypeTrees. I had to use attachments to retain necessary information.
*
* upd. Recently I went ahead and started using original for all TypeTrees, regardless of whether they refer to local symbols or not.
- * As a result, ``reifyType'' is never called directly by tree reification (and, wow, it seems to work great!).
- * The only usage of ``reifyType'' now is for servicing typetags, however, I have some ideas how to get rid of that as well.
+ * As a result, `reifyType` is never called directly by tree reification (and, wow, it seems to work great!).
+ * The only usage of `reifyType` now is for servicing typetags, however, I have some ideas how to get rid of that as well.
*/
private def isDiscarded(tt: TypeTree) = tt.original == null
private def toPreTyperTypeTree(tt: TypeTree): Tree = {
@@ -168,7 +167,7 @@ trait Reshape {
// if this assumption fails, please, don't be quick to add postprocessing here (like I did before)
// but rather try to fix this in Typer, so that it produces quality originals (like it's done for typedAnnotated)
if (reifyDebug) println("TypeTree, essential: %s (%s)".format(tt.tpe, tt.tpe.kind))
- if (reifyDebug) println("verdict: rolled back to original %s".format(tt.original))
+ if (reifyDebug) println("verdict: rolled back to original %s".format(tt.original.toString.replaceAll("\\s+", " ")))
transform(tt.original)
} else {
// type is deemed to be non-essential
@@ -255,12 +254,12 @@ trait Reshape {
case _ => rhs // unit or trait case
}
val DefDef(mods0, name0, _, _, tpt0, rhs0) = ddef
- val name1 = nme.dropLocalSuffix(name0)
+ val name1 = name0.dropLocal
val Modifiers(flags0, privateWithin0, annotations0) = mods0
- var flags1 = (flags0 & GetterFlags) & ~(STABLE | ACCESSOR | METHOD)
+ val flags1 = (flags0 & GetterFlags) & ~(STABLE | ACCESSOR | METHOD)
val mods1 = Modifiers(flags1, privateWithin0, annotations0) setPositions mods0.positions
val mods2 = toPreTyperModifiers(mods1, ddef.symbol)
- ValDef(mods2, name1, tpt0, extractRhs(rhs0))
+ ValDef(mods2, name1.toTermName, tpt0, extractRhs(rhs0))
}
private def trimAccessors(deff: Tree, stats: List[Tree]): List[Tree] = {
@@ -272,9 +271,11 @@ trait Reshape {
def detectBeanAccessors(prefix: String): Unit = {
if (defdef.name.startsWith(prefix)) {
- var name = defdef.name.toString.substring(prefix.length)
+ val name = defdef.name.toString.substring(prefix.length)
def uncapitalize(s: String) = if (s.length == 0) "" else { val chars = s.toCharArray; chars(0) = chars(0).toLower; new String(chars) }
- def findValDef(name: String) = (symdefs.values collect { case vdef: ValDef if nme.dropLocalSuffix(vdef.name).toString == name => vdef }).headOption
+ def findValDef(name: String) = symdefs.values collectFirst {
+ case vdef: ValDef if vdef.name.dropLocal string_== name => vdef
+ }
val valdef = findValDef(name).orElse(findValDef(uncapitalize(name))).orNull
if (valdef != null) accessors(valdef) = accessors.getOrElse(valdef, Nil) :+ defdef
}
@@ -282,13 +283,13 @@ trait Reshape {
detectBeanAccessors("get")
detectBeanAccessors("set")
detectBeanAccessors("is")
- });
+ })
- var stats1 = stats flatMap {
+ val stats1 = stats flatMap {
case vdef @ ValDef(mods, name, tpt, rhs) if !mods.isLazy =>
val mods1 = if (accessors.contains(vdef)) {
val ddef = accessors(vdef)(0) // any accessor will do
- val Modifiers(flags, privateWithin, annotations) = mods
+ val Modifiers(flags, _, annotations) = mods
var flags1 = flags & ~LOCAL
if (!ddef.symbol.isPrivate) flags1 = flags1 & ~PRIVATE
val privateWithin1 = ddef.mods.privateWithin
@@ -298,8 +299,8 @@ trait Reshape {
mods
}
val mods2 = toPreTyperModifiers(mods1, vdef.symbol)
- val name1 = nme.dropLocalSuffix(name)
- val vdef1 = ValDef(mods2, name1, tpt, rhs)
+ val name1 = name.dropLocal
+ val vdef1 = ValDef(mods2, name1.toTermName, tpt, rhs)
if (reifyDebug) println("resetting visibility of field: %s => %s".format(vdef, vdef1))
Some(vdef1) // no copyAttrs here, because new ValDef and old symbols are now out of sync
case ddef: DefDef if !ddef.mods.isLazy =>
@@ -331,7 +332,8 @@ trait Reshape {
case Some(ddef) =>
toPreTyperLazyVal(ddef)
case None =>
- CannotReifyInvalidLazyVal(vdef)
+ if (reifyDebug) println("couldn't find corresponding lazy val accessor")
+ vdef
}
if (reifyDebug) println(s"reconstructed lazy val is $vdef1")
vdef1::Nil
diff --git a/src/compiler/scala/reflect/reify/utils/Extractors.scala b/src/compiler/scala/reflect/reify/utils/Extractors.scala
index 59cd4e5047..d5f27dc119 100644
--- a/src/compiler/scala/reflect/reify/utils/Extractors.scala
+++ b/src/compiler/scala/reflect/reify/utils/Extractors.scala
@@ -11,7 +11,7 @@ trait Extractors {
// Example of a reified tree for `reify(List(1, 2))`:
// (also contains an example of a reified type as a third argument to the constructor of Expr)
// {
- // val $u: reflect.runtime.universe.type = scala.reflect.runtime.`package`.universe;
+ // val $u: scala.reflect.runtime.universe.type = scala.reflect.runtime.`package`.universe;
// val $m: $u.Mirror = $u.runtimeMirror(Test.this.getClass().getClassLoader());
// $u.Expr[List[Int]]($m, {
// final class $treecreator1 extends scala.reflect.api.TreeCreator {
@@ -179,7 +179,7 @@ trait Extractors {
Literal(Constant(origin: String))))
if uref1.name == nme.UNIVERSE_SHORT && build1 == nme.build && acceptFreeTermFactory(freeTermFactory) &&
uref2.name == nme.UNIVERSE_SHORT && build2 == nme.build && flagsFromBits == nme.flagsFromBits =>
- Some(uref1, name, reifyBinding(tree), flags, origin)
+ Some((uref1, name, reifyBinding(tree), flags, origin))
case _ =>
None
}
diff --git a/src/compiler/scala/reflect/reify/utils/NodePrinters.scala b/src/compiler/scala/reflect/reify/utils/NodePrinters.scala
index aca18c7df7..0903bc481c 100644
--- a/src/compiler/scala/reflect/reify/utils/NodePrinters.scala
+++ b/src/compiler/scala/reflect/reify/utils/NodePrinters.scala
@@ -11,8 +11,6 @@ trait NodePrinters {
self: Utils =>
import global._
- import definitions._
- import Flag._
object reifiedNodeToString extends (Tree => String) {
def apply(tree: Tree): String = {
@@ -25,8 +23,8 @@ trait NodePrinters {
// Rolling a full-fledged, robust TreePrinter would be several times more code.
// Also as of late we have tests that ensure that UX won't be broken by random changes to the reifier.
val lines = (tree.toString.split(EOL) drop 1 dropRight 1).toList splitAt 2
- var (List(universe, mirror), reification) = lines
- reification = (for (line <- reification) yield {
+ val (List(universe, mirror), reification0) = lines
+ val reification = (for (line <- reification0) yield {
var s = line substring 2
s = s.replace(nme.UNIVERSE_PREFIX.toString, "")
s = s.replace(".apply", "")
@@ -38,8 +36,8 @@ trait NodePrinters {
flagsAreUsed = true
show(m.group(1).toLong)
})
- s = s.replace("Modifiers(0L, newTypeName(\"\"), List())", "Modifiers()")
- s = """Modifiers\((\d+)[lL], newTypeName\("(.*?)"\), List\((.*?)\)\)""".r.replaceAllIn(s, m => {
+ s = s.replace("Modifiers(0L, TypeName(\"\"), List())", "Modifiers()")
+ s = """Modifiers\((\d+)[lL], TypeName\("(.*?)"\), List\((.*?)\)\)""".r.replaceAllIn(s, m => {
val buf = new scala.collection.mutable.ListBuffer[String]
val annotations = m.group(3)
@@ -48,7 +46,7 @@ trait NodePrinters {
val privateWithin = "" + m.group(2)
if (buf.nonEmpty || privateWithin != "")
- buf.append("newTypeName(\"" + privateWithin + "\")")
+ buf.append("TypeName(\"" + privateWithin + "\")")
val bits = m.group(1)
if (buf.nonEmpty || bits != "0L") {
@@ -73,14 +71,14 @@ trait NodePrinters {
s.trim
})
- val printout = scala.collection.mutable.ListBuffer[String]();
+ val printout = scala.collection.mutable.ListBuffer[String]()
printout += universe.trim
if (mirrorIsUsed) printout += mirror.replace("Mirror[", "scala.reflect.api.Mirror[").trim
- val imports = scala.collection.mutable.ListBuffer[String]();
- imports += nme.UNIVERSE_SHORT
+ val imports = scala.collection.mutable.ListBuffer[String]()
+ imports += nme.UNIVERSE_SHORT.toString
// if (buildIsUsed) imports += nme.build
- if (mirrorIsUsed) imports += nme.MIRROR_SHORT
- if (flagsAreUsed) imports += nme.Flag
+ if (mirrorIsUsed) imports += nme.MIRROR_SHORT.toString
+ if (flagsAreUsed) imports += nme.Flag.toString
printout += s"""import ${imports map (_ + "._") mkString ", "}"""
val name = if (isExpr) "tree" else "tpe"
diff --git a/src/compiler/scala/reflect/reify/utils/SymbolTables.scala b/src/compiler/scala/reflect/reify/utils/SymbolTables.scala
index dbb0836e0a..5f8de9894f 100644
--- a/src/compiler/scala/reflect/reify/utils/SymbolTables.scala
+++ b/src/compiler/scala/reflect/reify/utils/SymbolTables.scala
@@ -8,8 +8,6 @@ trait SymbolTables {
self: Utils =>
import global._
- import definitions._
- import Flag._
class SymbolTable private[SymbolTable] (
private[SymbolTable] val symtab: immutable.ListMap[Symbol, Tree] = immutable.ListMap[Symbol, Tree](),
@@ -17,9 +15,6 @@ trait SymbolTables {
private[SymbolTable] val original: Option[List[Tree]] = None) {
def syms: List[Symbol] = symtab.keys.toList
- def isConcrete: Boolean = symtab.values forall (sym => !FreeTypeDef.unapply(sym).isDefined)
-
-// def aliases: Map[Symbol, List[TermName]] = aliases.distinct groupBy (_._1) mapValues (_ map (_._2))
def symDef(sym: Symbol): Tree =
symtab.getOrElse(sym, EmptyTree)
@@ -89,11 +84,6 @@ trait SymbolTables {
add(ValDef(NoMods, freshName(name0), TypeTree(), reification) updateAttachment bindingAttachment)
}
- private def add(sym: Symbol, name: TermName): SymbolTable = {
- if (!(syms contains sym)) error("cannot add an alias to a symbol not in the symbol table")
- add(sym, name, EmptyTree)
- }
-
private def remove(sym: Symbol): SymbolTable = {
val newSymtab = symtab - sym
val newAliases = aliases filter (_._1 != sym)
@@ -107,7 +97,7 @@ trait SymbolTables {
newSymtab = newSymtab map { case ((sym, tree)) =>
val ValDef(mods, primaryName, tpt, rhs) = tree
val tree1 =
- if (!(newAliases contains (sym, primaryName))) {
+ if (!(newAliases contains ((sym, primaryName)))) {
val primaryName1 = newAliases.find(_._1 == sym).get._2
ValDef(mods, primaryName1, tpt, rhs).copyAttrs(tree)
} else tree
@@ -143,7 +133,7 @@ trait SymbolTables {
var result = new SymbolTable(original = Some(encoded))
encoded foreach (entry => (entry.attachments.get[ReifyBindingAttachment], entry.attachments.get[ReifyAliasAttachment]) match {
case (Some(ReifyBindingAttachment(_)), _) => result += entry
- case (_, Some(ReifyAliasAttachment(sym, alias))) => result = new SymbolTable(result.symtab, result.aliases :+ (sym, alias))
+ case (_, Some(ReifyAliasAttachment(sym, alias))) => result = new SymbolTable(result.symtab, result.aliases :+ ((sym, alias)))
case _ => // do nothing, this is boilerplate that can easily be recreated by subsequent `result.encode`
})
result
@@ -214,4 +204,4 @@ trait SymbolTables {
}
}
}
-} \ No newline at end of file
+}
diff --git a/src/compiler/scala/tools/ant/Pack200Task.scala b/src/compiler/scala/tools/ant/Pack200Task.scala
index 255efe55ec..3c1bc8cad9 100644
--- a/src/compiler/scala/tools/ant/Pack200Task.scala
+++ b/src/compiler/scala/tools/ant/Pack200Task.scala
@@ -65,7 +65,7 @@ class Pack200Task extends ScalaMatchingTask {
/** Set the flag to specify if file reordering should be performed. Reordering
* is used to remove empty packages and improve pack200 optimization.
- * @param keep
+ * @param x
* `'''true'''` to retain file ordering.
* `'''false'''` to optimize directory structure (DEFAULT). */
def setKeepFileOrder(x: Boolean) { keepFileOrder = x }
@@ -99,8 +99,8 @@ class Pack200Task extends ScalaMatchingTask {
private def getFileList: List[File] = {
var files: List[File] = Nil
val fs = getImplicitFileSet
- var ds = fs.getDirectoryScanner(getProject())
- var dir = fs.getDir(getProject())
+ val ds = fs.getDirectoryScanner(getProject())
+ val dir = fs.getDir(getProject())
for (filename <- ds.getIncludedFiles()
if filename.toLowerCase.endsWith(".jar")) {
val file = new File(dir, filename)
diff --git a/src/compiler/scala/tools/ant/Same.scala b/src/compiler/scala/tools/ant/Same.scala
index e53679f052..6036b238b6 100644
--- a/src/compiler/scala/tools/ant/Same.scala
+++ b/src/compiler/scala/tools/ant/Same.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.tools.ant
+package scala
+package tools.ant
import java.io.{File, FileInputStream}
@@ -32,7 +33,7 @@ import org.apache.tools.ant.types.Mapper
*
* @author Gilles Dubochet
* @version 1.0 */
-class Same extends ScalaMatchingTask {
+@deprecated("Use diff", "2.11.0") class Same extends ScalaMatchingTask {
/*============================================================================*\
** Ant user-properties **
\*============================================================================*/
@@ -110,7 +111,7 @@ class Same extends ScalaMatchingTask {
\*============================================================================*/
override def execute() = {
- validateAttributes
+ validateAttributes()
val mapper = getMapper
allEqualNow = true
val originNames: Array[String] = getDirectoryScanner(origin.get).getIncludedFiles
diff --git a/src/compiler/scala/tools/ant/ScalaTool.scala b/src/compiler/scala/tools/ant/ScalaTool.scala
index 57d24f6213..e7ac53c8fb 100644
--- a/src/compiler/scala/tools/ant/ScalaTool.scala
+++ b/src/compiler/scala/tools/ant/ScalaTool.scala
@@ -108,7 +108,7 @@ class ScalaTool extends ScalaMatchingTask {
* for general purpose scripts, as this does not assume all elements are
* relative to the Ant `basedir`. Additionally, the platform specific
* demarcation of any script variables (e.g. `${SCALA_HOME}` or
- * `%SCALA_HOME%`) can be specified in a platform independant way (e.g.
+ * `%SCALA_HOME%`) can be specified in a platform independent way (e.g.
* `@SCALA_HOME@`) and automatically translated for you.
*/
def setClassPath(input: String) {
@@ -190,13 +190,13 @@ class ScalaTool extends ScalaMatchingTask {
val builder = new StringBuilder()
while (chars.hasNext) {
- val char = chars.next
+ val char = chars.next()
if (char == '@') {
- var char = chars.next
+ var char = chars.next()
val token = new StringBuilder()
while (chars.hasNext && char != '@') {
token.append(char)
- char = chars.next
+ char = chars.next()
}
if (token.toString == "")
builder.append('@')
@@ -212,13 +212,13 @@ class ScalaTool extends ScalaMatchingTask {
val builder = new StringBuilder()
while (chars.hasNext) {
- val char = chars.next
+ val char = chars.next()
if (char == '@') {
- var char = chars.next
+ var char = chars.next()
val token = new StringBuilder()
while (chars.hasNext && char != '@') {
token.append(char)
- char = chars.next
+ char = chars.next()
}
if (tokens.contains(token.toString))
builder.append(tokens(token.toString))
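The two loops above implement a small @TOKEN@ substitution pass over the tool script templates: @@ escapes a literal @, known tokens are replaced, everything else is copied through. A self-contained sketch of the same algorithm (the keep-unknown-tokens fallback is an illustrative choice; the task's two loops handle that case slightly differently):

    object TokenSubstSketch extends App {
      // Re-creation of the @TOKEN@ replacement loop used for the tool templates.
      def expand(text: String, tokens: Map[String, String]): String = {
        val chars = text.iterator
        val out   = new StringBuilder
        while (chars.hasNext) {
          val c = chars.next()
          if (c == '@') {
            var ch = chars.next()
            val token = new StringBuilder
            while (chars.hasNext && ch != '@') { token.append(ch); ch = chars.next() }
            if (token.length == 0) out.append('@')                         // "@@" -> literal '@'
            else out.append(tokens.getOrElse(token.toString, s"@$token@")) // unknown tokens kept as-is
          } else out.append(c)
        }
        out.toString
      }

      println(expand("exec java -cp @CLASSPATH@ @MAIN@ @@args",
                     Map("CLASSPATH" -> "lib/*", "MAIN" -> "scala.tools.nsc.MainGenericRunner")))
    }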
diff --git a/src/compiler/scala/tools/ant/Scalac.scala b/src/compiler/scala/tools/ant/Scalac.scala
index 73d09e82ba..b2cedf6338 100644
--- a/src/compiler/scala/tools/ant/Scalac.scala
+++ b/src/compiler/scala/tools/ant/Scalac.scala
@@ -55,8 +55,6 @@ import scala.tools.nsc.reporters.{Reporter, ConsoleReporter}
* - `usejavacp`,
* - `failonerror`,
* - `scalacdebugging`,
- * - `assemname`,
- * - `assemrefs`.
*
* It also takes the following parameters as nested elements:
* - `src` (for `srcdir`),
@@ -99,7 +97,7 @@ class Scalac extends ScalaMatchingTask with ScalacShared {
/** Defines valid values for the `target` property. */
object Target extends PermissibleValue {
- val values = List("jvm-1.5", "jvm-1.5-fjbg", "jvm-1.5-asm", "jvm-1.6", "jvm-1.7", "msil")
+ val values = List("jvm-1.5", "jvm-1.6", "jvm-1.7")
}
/** Defines valid values for the `deprecation` and `unchecked` properties. */
@@ -169,11 +167,6 @@ class Scalac extends ScalaMatchingTask with ScalacShared {
/** Indicates whether compilation errors will fail the build; defaults to true. */
protected var failonerror: Boolean = true
- // Name of the output assembly (only relevant with -target:msil)
- protected var assemname: Option[String] = None
- // List of assemblies referenced by the program (only relevant with -target:msil)
- protected var assemrefs: Option[String] = None
-
/** Prints out the files being compiled by the scalac ant task
* (not only the number of files). */
protected var scalacDebugging: Boolean = false
@@ -420,9 +413,6 @@ class Scalac extends ScalaMatchingTask with ScalacShared {
* @param input The specified flag */
def setScalacdebugging(input: Boolean) { scalacDebugging = input }
- def setAssemname(input: String) { assemname = Some(input) }
- def setAssemrefs(input: String) { assemrefs = Some(input) }
-
/** Sets the `compilerarg` as a nested compilerarg Ant parameter.
* @return A compiler argument to be configured. */
def createCompilerArg(): ImplementationSpecificArgument = {
@@ -505,7 +495,7 @@ class Scalac extends ScalaMatchingTask with ScalacShared {
path.map(asString) mkString File.pathSeparator
/** Transforms a file into a Scalac-readable string.
- * @param path A file to convert.
+ * @param file A file to convert.
* @return A string-representation of the file like `/x/k/a.scala`. */
protected def asString(file: File): String =
file.getAbsolutePath()
@@ -518,7 +508,7 @@ class Scalac extends ScalaMatchingTask with ScalacShared {
new Settings(error)
protected def newGlobal(settings: Settings, reporter: Reporter) =
- new Global(settings, reporter)
+ Global(settings, reporter)
/*============================================================================*\
** The big execute method **
@@ -612,9 +602,6 @@ class Scalac extends ScalaMatchingTask with ScalacShared {
if (!unchecked.isEmpty) settings.unchecked.value = unchecked.get
if (!usejavacp.isEmpty) settings.usejavacp.value = usejavacp.get
- if (!assemname.isEmpty) settings.assemname.value = assemname.get
- if (!assemrefs.isEmpty) settings.assemrefs.value = assemrefs.get
-
val jvmargs = scalacCompilerArgs.getArgs filter (_ startsWith "-J")
if (!jvmargs.isEmpty) settings.jvmargs.value = jvmargs.toList
val defines = scalacCompilerArgs.getArgs filter (_ startsWith "-D")
@@ -685,7 +672,7 @@ class Scalac extends ScalaMatchingTask with ScalacShared {
file
}
- val res = execWithArgFiles(java, List(writeSettings.getAbsolutePath))
+ val res = execWithArgFiles(java, List(writeSettings().getAbsolutePath))
if (failonerror && res != 0)
buildError("Compilation failed because of an internal compiler error;"+
" see the error output for details.")
diff --git a/src/compiler/scala/tools/ant/antlib.xml b/src/compiler/scala/tools/ant/antlib.xml
index 78159e6d10..7885534689 100644
--- a/src/compiler/scala/tools/ant/antlib.xml
+++ b/src/compiler/scala/tools/ant/antlib.xml
@@ -11,8 +11,6 @@
classname="scala.tools.ant.Scaladoc"/>
<taskdef name="scalatool"
classname="scala.tools.ant.ScalaTool"/>
- <taskdef name="same"
- classname="scala.tools.ant.Same"/>
<taskdef name="pack200"
classname="scala.tools.ant.Pack200Task"/>
</antlib>
diff --git a/src/compiler/scala/tools/ant/sabbus/Break.scala b/src/compiler/scala/tools/ant/sabbus/Break.scala
index 0b6701b6e9..b170ceaed8 100644
--- a/src/compiler/scala/tools/ant/sabbus/Break.scala
+++ b/src/compiler/scala/tools/ant/sabbus/Break.scala
@@ -7,7 +7,8 @@
\* */
-package scala.tools.ant.sabbus
+package scala
+package tools.ant.sabbus
import org.apache.tools.ant.Task
diff --git a/src/compiler/scala/tools/ant/sabbus/Make.scala b/src/compiler/scala/tools/ant/sabbus/Make.scala
index 5274594f3d..027a828f03 100644
--- a/src/compiler/scala/tools/ant/sabbus/Make.scala
+++ b/src/compiler/scala/tools/ant/sabbus/Make.scala
@@ -7,7 +7,8 @@
\* */
-package scala.tools.ant.sabbus
+package scala
+package tools.ant.sabbus
import java.io.File
import org.apache.tools.ant.Task
diff --git a/src/compiler/scala/tools/ant/sabbus/ScalacFork.scala b/src/compiler/scala/tools/ant/sabbus/ScalacFork.scala
index 9cdf484080..595b45ae51 100644
--- a/src/compiler/scala/tools/ant/sabbus/ScalacFork.scala
+++ b/src/compiler/scala/tools/ant/sabbus/ScalacFork.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.tools.ant
+package scala
+package tools.ant
package sabbus
import java.io.{ File, FileWriter }
@@ -80,7 +81,7 @@ class ScalacFork extends ScalaMatchingTask with ScalacShared with TaskArgs {
private def createMapper() = {
val mapper = new GlobPatternMapper()
- val extension = if (isMSIL) "*.msil" else "*.class"
+ val extension = "*.class"
mapper setTo extension
mapper setFrom "*.scala"
@@ -104,9 +105,6 @@ class ScalacFork extends ScalaMatchingTask with ScalacShared with TaskArgs {
sourcePath foreach (settings.sourcepath = _)
settings.extraParams = extraArgsFlat
- if (isMSIL)
- settings.sourcedir = sourceDir
-
val mapper = createMapper()
val includedFiles: Array[File] =
@@ -117,12 +115,12 @@ class ScalacFork extends ScalaMatchingTask with ScalacShared with TaskArgs {
mapper
) map (x => new File(sourceDir, x))
- /** Nothing to do. */
+ /* Nothing to do. */
if (includedFiles.isEmpty && argfile.isEmpty)
return
if (includedFiles.nonEmpty)
- log("Compiling %d file%s to %s".format(includedFiles.size, plural(includedFiles.size), destinationDir))
+ log("Compiling %d file%s to %s".format(includedFiles.length, plural(includedFiles.length), destinationDir))
argfile foreach (x => log("Using argfile file: @" + x))
diff --git a/src/compiler/scala/tools/ant/sabbus/Settings.scala b/src/compiler/scala/tools/ant/sabbus/Settings.scala
index fde61e9564..d0fefdaa03 100644
--- a/src/compiler/scala/tools/ant/sabbus/Settings.scala
+++ b/src/compiler/scala/tools/ant/sabbus/Settings.scala
@@ -10,7 +10,7 @@ package scala.tools.ant.sabbus
import java.io.File
-import org.apache.tools.ant.types.{Path, Reference}
+import org.apache.tools.ant.types.Path
class Settings {
diff --git a/src/compiler/scala/tools/ant/sabbus/TaskArgs.scala b/src/compiler/scala/tools/ant/sabbus/TaskArgs.scala
index 6bb1aaa306..b061bcf7fb 100644
--- a/src/compiler/scala/tools/ant/sabbus/TaskArgs.scala
+++ b/src/compiler/scala/tools/ant/sabbus/TaskArgs.scala
@@ -98,6 +98,4 @@ trait TaskArgs extends CompilationPathProperty {
val parts = a.getParts
if(parts eq null) Seq[String]() else parts.toSeq
}
-
- def isMSIL = compTarget exists (_ == "msil")
}
diff --git a/src/compiler/scala/tools/ant/sabbus/Use.scala b/src/compiler/scala/tools/ant/sabbus/Use.scala
index 2c97232aec..a8736f228b 100644
--- a/src/compiler/scala/tools/ant/sabbus/Use.scala
+++ b/src/compiler/scala/tools/ant/sabbus/Use.scala
@@ -7,7 +7,8 @@
\* */
-package scala.tools.ant
+package scala
+package tools.ant
package sabbus
import java.io.File
@@ -53,9 +54,9 @@ class Use extends ScalaMatchingTask {
compiler.settings.d,
mapper
) map (new File(sourceDir.get, _))
- if (includedFiles.size > 0)
+ if (includedFiles.length > 0)
try {
- log("Compiling " + includedFiles.size + " file" + (if (includedFiles.size > 1) "s" else "") + " to " + compiler.settings.d.getAbsolutePath)
+ log("Compiling " + includedFiles.length + " file" + (if (includedFiles.length > 1) "s" else "") + " to " + compiler.settings.d.getAbsolutePath)
val (errors, warnings) = compiler.compile(includedFiles)
if (errors > 0)
sys.error("Compilation failed with " + errors + " error" + (if (errors > 1) "s" else "") + ".")
diff --git a/src/compiler/scala/tools/ant/templates/tool-unix.tmpl b/src/compiler/scala/tools/ant/templates/tool-unix.tmpl
index f1c6c52785..84ccaba749 100644
--- a/src/compiler/scala/tools/ant/templates/tool-unix.tmpl
+++ b/src/compiler/scala/tools/ant/templates/tool-unix.tmpl
@@ -102,6 +102,9 @@ if [[ -n "$cygwin" ]]; then
format=windows
fi
SCALA_HOME="$(cygpath --$format "$SCALA_HOME")"
+ if [[ -n "$JAVA_HOME" ]]; then
+ JAVA_HOME="$(cygpath --$format "$JAVA_HOME")"
+ fi
TOOL_CLASSPATH="$(cygpath --path --$format "$TOOL_CLASSPATH")"
elif [[ -n "$mingw" ]]; then
SCALA_HOME="$(cmd //c echo "$SCALA_HOME")"
diff --git a/src/compiler/scala/tools/cmd/CommandLine.scala b/src/compiler/scala/tools/cmd/CommandLine.scala
index 75f96d3c4b..e8ac882ee6 100644
--- a/src/compiler/scala/tools/cmd/CommandLine.scala
+++ b/src/compiler/scala/tools/cmd/CommandLine.scala
@@ -19,7 +19,7 @@ class CommandLine(val spec: Reference, val originalArgs: List[String]) extends C
def this(spec: Reference, line: String) = this(spec, Parser tokenize line)
def this(spec: Reference, args: Array[String]) = this(spec, args.toList)
- import spec.{ isAnyOption, isUnaryOption, isBinaryOption, isExpandOption }
+ import spec.{ isUnaryOption, isBinaryOption, isExpandOption }
val Terminator = "--"
val ValueForUnaryOption = "true" // so if --opt is given, x(--opt) = true
@@ -36,7 +36,7 @@ class CommandLine(val spec: Reference, val originalArgs: List[String]) extends C
def loop(args: List[String]): Map[String, String] = {
def residual(xs: List[String]) = { residualBuffer ++= xs ; Map[String, String]() }
- /** Returns Some(List(args)) if this option expands to an
+ /* Returns Some(List(args)) if this option expands to an
* argument list and it's not returning only the same arg.
*/
def expand(s1: String) = {
@@ -48,7 +48,7 @@ class CommandLine(val spec: Reference, val originalArgs: List[String]) extends C
else None
}
- /** Assumes known options have all been ruled out already. */
+ /* Assumes known options have all been ruled out already. */
def isUnknown(opt: String) =
onlyKnownOptions && (opt startsWith "-") && {
errorFn("Option '%s' not recognized.".format(opt))
diff --git a/src/compiler/scala/tools/cmd/Demo.scala b/src/compiler/scala/tools/cmd/Demo.scala
index af818845bb..fc90140f8f 100644
--- a/src/compiler/scala/tools/cmd/Demo.scala
+++ b/src/compiler/scala/tools/cmd/Demo.scala
@@ -3,7 +3,8 @@
* @author Paul Phillips
*/
-package scala.tools
+package scala
+package tools
package cmd
/** A sample command specification for illustrative purposes.
diff --git a/src/compiler/scala/tools/cmd/FromString.scala b/src/compiler/scala/tools/cmd/FromString.scala
index cba2e99998..433bbb167e 100644
--- a/src/compiler/scala/tools/cmd/FromString.scala
+++ b/src/compiler/scala/tools/cmd/FromString.scala
@@ -6,7 +6,7 @@
package scala.tools
package cmd
-import nsc.io.{ Path, File, Directory }
+import scala.tools.nsc.io.{ File, Directory }
import scala.reflect.runtime.{universe => ru}
import scala.tools.reflect.StdRuntimeTags._
@@ -24,18 +24,11 @@ abstract class FromString[+T](implicit t: ru.TypeTag[T]) extends PartialFunction
}
object FromString {
- // We need these because we clash with the String => Path implicits.
- private def toFile(s: String) = new File(new java.io.File(s))
+ // We need this because we clash with the String => Path implicits.
private def toDir(s: String) = new Directory(new java.io.File(s))
/** Path related stringifiers.
*/
- val ExistingFile: FromString[File] = new FromString[File]()(tagOfFile) {
- override def isDefinedAt(s: String) = toFile(s).isFile
- def apply(s: String): File =
- if (isDefinedAt(s)) toFile(s)
- else cmd.runAndExit(println("'%s' is not an existing file." format s))
- }
val ExistingDir: FromString[Directory] = new FromString[Directory]()(tagOfDirectory) {
override def isDefinedAt(s: String) = toDir(s).isDirectory
def apply(s: String): Directory =
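Both the removed ExistingFile extractor and the kept ExistingDir follow the same shape: a PartialFunction[String, T] that is defined only when the string names a path of the right kind. A stripped-down sketch using plain java.io.File (the real FromString also carries a TypeTag and the compiler's Directory wrapper, omitted here; ExistingDirDemo is an illustrative name):

import java.io.File

// Minimal stand-in for the FromString pattern: defined only for strings
// that denote an existing directory.
object ExistingDirDemo extends PartialFunction[String, File] {
  override def isDefinedAt(s: String) = new File(s).isDirectory
  def apply(s: String): File =
    if (isDefinedAt(s)) new File(s)
    else sys.error(s"'$s' is not an existing directory.")
}

object FromStringDemo {
  def main(args: Array[String]): Unit =
    println(ExistingDirDemo.lift(System.getProperty("java.io.tmpdir")))
}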
diff --git a/src/compiler/scala/tools/cmd/Interpolation.scala b/src/compiler/scala/tools/cmd/Interpolation.scala
index abffd6bb2e..d1c798b621 100644
--- a/src/compiler/scala/tools/cmd/Interpolation.scala
+++ b/src/compiler/scala/tools/cmd/Interpolation.scala
@@ -3,7 +3,8 @@
* @author Paul Phillips
*/
-package scala.tools
+package scala
+package tools
package cmd
/** Interpolation logic for generated files. The idea is to be
diff --git a/src/compiler/scala/tools/cmd/Reference.scala b/src/compiler/scala/tools/cmd/Reference.scala
index bcbb454771..ec2a414065 100644
--- a/src/compiler/scala/tools/cmd/Reference.scala
+++ b/src/compiler/scala/tools/cmd/Reference.scala
@@ -26,7 +26,6 @@ trait Reference extends Spec {
def isUnaryOption(s: String) = unary contains toOpt(s)
def isBinaryOption(s: String) = binary contains toOpt(s)
def isExpandOption(s: String) = expansionMap contains toOpt(s)
- def isAnyOption(s: String) = isUnaryOption(s) || isBinaryOption(s) || isExpandOption(s)
def expandArg(arg: String) = expansionMap.getOrElse(fromOpt(arg), List(arg))
@@ -46,7 +45,7 @@ object Reference {
val MaxLine = 80
class Accumulators() {
- private var _help = new ListBuffer[() => String]
+ private val _help = new ListBuffer[() => String]
private var _unary = List[String]()
private var _binary = List[String]()
private var _expand = Map[String, List[String]]()
diff --git a/src/compiler/scala/tools/cmd/gen/AnyVals.scala b/src/compiler/scala/tools/cmd/gen/AnyVals.scala
index dbd2195938..7e01afac2b 100644
--- a/src/compiler/scala/tools/cmd/gen/AnyVals.scala
+++ b/src/compiler/scala/tools/cmd/gen/AnyVals.scala
@@ -13,7 +13,7 @@ trait AnyValReps {
sealed abstract class AnyValNum(name: String, repr: Option[String], javaEquiv: String) extends AnyValRep(name,repr,javaEquiv) {
- case class Op(val op : String, val doc : String)
+ case class Op(op : String, doc : String)
private def companionCoercions(tos: AnyValRep*) = {
tos.toList map (to =>
@@ -22,8 +22,8 @@ trait AnyValReps {
}
def coercionCommentExtra = ""
def coercionComment = """
- /** Language mandated coercions from @name@ to "wider" types.%s
- */""".format(coercionCommentExtra)
+/** Language mandated coercions from @name@ to "wider" types.%s
+ */""".format(coercionCommentExtra)
def implicitCoercions: List[String] = {
val coercions = this match {
@@ -35,7 +35,7 @@ trait AnyValReps {
case _ => Nil
}
if (coercions.isEmpty) Nil
- else coercionComment :: coercions
+ else coercionComment.lines.toList ++ coercions
}
def isCardinal: Boolean = isIntegerType(this)
@@ -183,7 +183,7 @@ trait AnyValReps {
}
def objectLines = {
val comp = if (isCardinal) cardinalCompanion else floatingCompanion
- (comp + allCompanions + "\n" + nonUnitCompanions).trim.lines.toList ++ implicitCoercions map interpolate
+ interpolate(comp + allCompanions + "\n" + nonUnitCompanions).trim.lines.toList ++ (implicitCoercions map interpolate)
}
/** Makes a set of binary operations based on the given set of ops, args, and resultFn.
@@ -209,11 +209,14 @@ trait AnyValReps {
)
def lcname = name.toLowerCase
+ def boxedSimpleName = this match {
+ case C => "Character"
+ case I => "Integer"
+ case _ => name
+ }
def boxedName = this match {
case U => "scala.runtime.BoxedUnit"
- case C => "java.lang.Character"
- case I => "java.lang.Integer"
- case _ => "java.lang." + name
+ case _ => "java.lang." + boxedSimpleName
}
def zeroRep = this match {
case L => "0L"
@@ -228,7 +231,13 @@ trait AnyValReps {
def indentN(s: String) = s.lines map indent mkString "\n"
def boxUnboxImpls = Map(
+ "@boxRunTimeDoc@" -> """
+ * Runtime implementation determined by `scala.runtime.BoxesRunTime.boxTo%s`. See [[https://github.com/scala/scala src/library/scala/runtime/BoxesRunTime.java]].
+ *""".format(boxedSimpleName),
"@boxImpl@" -> "%s.valueOf(x)".format(boxedName),
+ "@unboxRunTimeDoc@" -> """
+ * Runtime implementation determined by `scala.runtime.BoxesRunTime.unboxTo%s`. See [[https://github.com/scala/scala src/library/scala/runtime/BoxesRunTime.java]].
+ *""".format(name),
"@unboxImpl@" -> "x.asInstanceOf[%s].%sValue()".format(boxedName, lcname),
"@unboxDoc@" -> "the %s resulting from calling %sValue() on `x`".format(name, lcname)
)
@@ -299,7 +308,7 @@ import scala.language.implicitConversions
def allCompanions = """
/** Transform a value type into a boxed reference type.
- *
+ *@boxRunTimeDoc@
* @param x the @name@ to be boxed
* @return a @boxed@ offering `x` as its underlying value.
*/
@@ -308,7 +317,7 @@ def box(x: @name@): @boxed@ = @boxImpl@
/** Transform a boxed type into a value type. Note that this
* method is not typesafe: it accepts any Object, but will throw
* an exception if the argument is not a @boxed@.
- *
+ *@unboxRunTimeDoc@
* @param x the @boxed@ to be unboxed.
* @throws ClassCastException if the argument is not a @boxed@
* @return @unboxDoc@
@@ -471,7 +480,9 @@ override def getClass(): Class[Boolean] = null
def objectLines = interpolate(allCompanions).lines.toList
override def boxUnboxImpls = Map(
+ "@boxRunTimeDoc@" -> "",
"@boxImpl@" -> "scala.runtime.BoxedUnit.UNIT",
+ "@unboxRunTimeDoc@" -> "",
"@unboxImpl@" -> "()",
"@unboxDoc@" -> "the Unit value ()"
)
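The new boxedSimpleName centralises the two irregular boxed names (Char boxes to Character, Int to Integer) so that boxedName and the new @boxRunTimeDoc@/@unboxRunTimeDoc@ templates can share it. The same mapping as a free-standing sketch (BoxedNames is an illustrative object, not part of the generator):

object BoxedNames {
  // Irregular simple names first; everything else boxes to java.lang.<Name>.
  def boxedSimpleName(valueType: String): String = valueType match {
    case "Char" => "Character"
    case "Int"  => "Integer"
    case other  => other
  }

  def boxedName(valueType: String): String = valueType match {
    case "Unit" => "scala.runtime.BoxedUnit"
    case other  => "java.lang." + boxedSimpleName(other)
  }

  def main(args: Array[String]): Unit =
    List("Char", "Int", "Double", "Unit") foreach (t => println(s"$t -> ${boxedName(t)}"))
}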
diff --git a/src/compiler/scala/tools/cmd/gen/Codegen.scala b/src/compiler/scala/tools/cmd/gen/Codegen.scala
index 4ca9b6cac7..b49322ab4a 100644
--- a/src/compiler/scala/tools/cmd/gen/Codegen.scala
+++ b/src/compiler/scala/tools/cmd/gen/Codegen.scala
@@ -23,7 +23,7 @@ object Codegen {
return println (CodegenSpec.helpMsg)
val out = outDir getOrElse { return println("--out is required.") }
- val all = genall || (!anyvals && !products)
+ val all = genall || !anyvals
echo("Generating sources into " + out)
diff --git a/src/compiler/scala/tools/cmd/gen/CodegenSpec.scala b/src/compiler/scala/tools/cmd/gen/CodegenSpec.scala
index 903517c5b4..4b4a1e482d 100644
--- a/src/compiler/scala/tools/cmd/gen/CodegenSpec.scala
+++ b/src/compiler/scala/tools/cmd/gen/CodegenSpec.scala
@@ -12,17 +12,11 @@ trait CodegenSpec extends Spec with Meta.StdOpts with Interpolation {
def referenceSpec = CodegenSpec
def programInfo = Spec.Info("codegen", "", "scala.tools.cmd.gen.Codegen")
- import FromString.ExistingDir
-
help("Usage: codegen [<options>]")
- // val inDir = "in" / "directory containing templates" --^ ExistingDir
val outDir = "out" / "directory for generated files" --^ ExistingDir
- // val install = "install" / "write source files directly to src/library/scala"
val anyvals = "anyvals" / "generate sources for AnyVal types" --?
- val products = "products" / "generate sources for ProductN, FunctionN, etc." --?
val genall = "all" / "generate sources for everything" --?
- val stamp = "stamp" / "add a timestamp to the generated files" --?
}
object CodegenSpec extends CodegenSpec with Reference {
diff --git a/src/compiler/scala/tools/cmd/package.scala b/src/compiler/scala/tools/cmd/package.scala
index d605ecae8f..c62a977950 100644
--- a/src/compiler/scala/tools/cmd/package.scala
+++ b/src/compiler/scala/tools/cmd/package.scala
@@ -3,7 +3,8 @@
* @author Paul Phillips
*/
-package scala.tools
+package scala
+package tools
package object cmd {
def returning[T](x: T)(f: T => Unit): T = { f(x) ; x }
diff --git a/src/compiler/scala/tools/nsc/CompilationUnits.scala b/src/compiler/scala/tools/nsc/CompilationUnits.scala
index 355a1fd262..15d365ab8c 100644
--- a/src/compiler/scala/tools/nsc/CompilationUnits.scala
+++ b/src/compiler/scala/tools/nsc/CompilationUnits.scala
@@ -6,7 +6,7 @@
package scala.tools.nsc
import util.FreshNameCreator
-import scala.reflect.internal.util.{ Position, NoPosition, BatchSourceFile, SourceFile, NoSourceFile }
+import scala.reflect.internal.util.{ SourceFile, NoSourceFile }
import scala.collection.mutable
import scala.collection.mutable.{ LinkedHashSet, ListBuffer }
@@ -26,7 +26,7 @@ trait CompilationUnits { self: Global =>
class CompilationUnit(val source: SourceFile) extends CompilationUnitContextApi { self =>
/** the fresh name creator */
- var fresh: FreshNameCreator = new FreshNameCreator.Default
+ val fresh: FreshNameCreator = new FreshNameCreator.Default
def freshTermName(prefix: String): TermName = newTermName(fresh.newName(prefix))
def freshTypeName(prefix: String): TypeName = newTypeName(fresh.newName(prefix))
@@ -36,24 +36,34 @@ trait CompilationUnits { self: Global =>
def exists = source != NoSourceFile && source != null
-// def parseSettings() = {
-// val argsmarker = "SCALAC_ARGS"
-// if(comments nonEmpty) {
-// val pragmas = comments find (_.text.startsWith("//#")) // only parse first one
-// pragmas foreach { p =>
-// val i = p.text.indexOf(argsmarker)
-// if(i > 0)
-// }
-// }
-// }
/** Note: depends now contains toplevel classes.
* To get their sourcefiles, you need to dereference with .sourcefile
*/
- val depends = mutable.HashSet[Symbol]()
+ private[this] val _depends = mutable.HashSet[Symbol]()
+ // SBT compatibility (SI-6875)
+ //
+ // imagine we have a file named A.scala, which defines a trait named Foo and a module named Main
+ // Main contains a call to a macro, which calls c.introduceTopLevel to define a mock for Foo
+ // c.introduceTopLevel creates a virtual file Virt35af32.scala, which contains a class named FooMock extending Foo,
+ // and macro expansion instantiates FooMock. the stage is now set. let's see what happens next.
+ //
+ // without this workaround in scalac or without being patched itself, sbt will think that
+ // * Virt35af32 depends on A (because it extends Foo from A)
+ // * A depends on Virt35af32 (because it contains a macro expansion referring to FooMock from Virt35af32)
+ //
+ // after compiling A.scala, SBT will notice that it has a new source file named Virt35af32.
+ // it will also think that this file hasn't yet been compiled and since A depends on it
+ // it will think that A needs to be recompiled.
+ //
+ // recompilation will lead to another macro expansion. that another macro expansion might choose to create a fresh mock,
+ // producing another virtual file, say, Virtee509a, which will again trick SBT into thinking that A needs a recompile,
+ // which will lead to another macro expansion, which will produce another virtual file and so on
+ def depends = if (exists && !source.file.isVirtual) _depends else mutable.HashSet[Symbol]()
/** so we can relink
*/
- val defined = mutable.HashSet[Symbol]()
+ private[this] val _defined = mutable.HashSet[Symbol]()
+ def defined = if (exists && !source.file.isVirtual) _defined else mutable.HashSet[Symbol]()
/** Synthetic definitions generated by namer, eliminated by typer.
*/
@@ -123,18 +133,5 @@ trait CompilationUnits { self: Global =>
lazy val isJava = source.file.name.endsWith(".java")
override def toString() = source.toString()
-
- def clear() {
- fresh = new FreshNameCreator.Default
- body = EmptyTree
- depends.clear()
- defined.clear()
- synthetics.clear()
- toCheck.clear()
- checkedFeatures = Set()
- icode.clear()
- }
}
}
-
-
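The long SI-6875 comment explains why depends and defined must look empty for units whose source is a macro-generated virtual file: otherwise sbt records a dependency cycle between the real source and the generated one and keeps triggering recompilation. The guard boils down to the pattern below (a sketch; the isVirtual test and the mutable set mirror the diff, the UnitDeps class and String symbols are illustrative):

import scala.collection.mutable

// Simplified stand-in for the compilation-unit state in the diff.
class UnitDeps(sourceExists: Boolean, sourceIsVirtual: Boolean) {
  private[this] val _depends = mutable.HashSet[String]()

  // Record dependencies internally, but publish them only for real,
  // on-disk sources, so an sbt-like tool never sees the virtual file.
  def recordDependency(sym: String): Unit = _depends += sym
  def depends: mutable.HashSet[String] =
    if (sourceExists && !sourceIsVirtual) _depends else mutable.HashSet[String]()
}

object UnitDepsDemo {
  def main(args: Array[String]): Unit = {
    val real = new UnitDeps(sourceExists = true, sourceIsVirtual = false)
    val virt = new UnitDeps(sourceExists = true, sourceIsVirtual = true)
    real.recordDependency("Foo"); virt.recordDependency("Foo")
    println(real.depends) // HashSet(Foo)
    println(virt.depends) // HashSet() -- hidden from the build tool
  }
}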
diff --git a/src/compiler/scala/tools/nsc/CompileClient.scala b/src/compiler/scala/tools/nsc/CompileClient.scala
index 731f6926f0..3017d8c9cc 100644
--- a/src/compiler/scala/tools/nsc/CompileClient.scala
+++ b/src/compiler/scala/tools/nsc/CompileClient.scala
@@ -3,12 +3,12 @@
* @author Martin Odersky
*/
-package scala.tools.nsc
+package scala
+package tools.nsc
-import java.io.{ BufferedReader, File, InputStreamReader, PrintWriter }
import settings.FscSettings
import scala.tools.util.CompileOutputCommon
-import sys.SystemProperties.preferIPv4Stack
+import scala.sys.SystemProperties.preferIPv4Stack
/** The client part of the fsc offline compiler. Instead of compiling
* things itself, it sends requests to a CompileServer.

@@ -27,12 +27,12 @@ class StandardCompileClient extends HasCompileSocket with CompileOutputCommon {
val settings = new FscSettings(Console.println)
val command = new OfflineCompilerCommand(args.toList, settings)
val shutdown = settings.shutdown.value
- val extraVmArgs = if (settings.preferIPv4.value) List("-D%s=true".format(preferIPv4Stack.key)) else Nil
+ val extraVmArgs = if (settings.preferIPv4) List("-D%s=true".format(preferIPv4Stack.key)) else Nil
val vmArgs = settings.jvmargs.unparse ++ settings.defines.unparse ++ extraVmArgs
val fscArgs = args.toList ++ command.extraFscArgs
- if (settings.version.value) {
+ if (settings.version) {
Console println versionMsg
return true
}
diff --git a/src/compiler/scala/tools/nsc/CompileServer.scala b/src/compiler/scala/tools/nsc/CompileServer.scala
index 7a0a072bb8..c5366566d9 100644
--- a/src/compiler/scala/tools/nsc/CompileServer.scala
+++ b/src/compiler/scala/tools/nsc/CompileServer.scala
@@ -5,7 +5,7 @@
package scala.tools.nsc
-import java.io.{ BufferedOutputStream, FileOutputStream, PrintStream }
+import java.io.PrintStream
import scala.tools.nsc.reporters.{Reporter, ConsoleReporter}
import scala.reflect.internal.util.FakePos //Position
import scala.tools.util.SocketServer
@@ -29,8 +29,6 @@ class StandardCompileServer extends SocketServer {
var shutdown = false
var verbose = false
- val versionMsg = "Fast " + Properties.versionMsg
-
val MaxCharge = 0.8
private val runtime = Runtime.getRuntime()
@@ -58,9 +56,6 @@ class StandardCompileServer extends SocketServer {
(totalMemory - freeMemory).toDouble / maxMemory.toDouble > MaxCharge
}
- protected def newOfflineCompilerCommand(arguments: List[String], settings: FscSettings): OfflineCompilerCommand =
- new OfflineCompilerCommand(arguments, settings)
-
/** Problematically, Settings are only considered equal if every setting
* is exactly equal. In fsc this immediately breaks down because the randomly
* chosen temporary outdirs differ between client and server. Among other
@@ -92,7 +87,7 @@ class StandardCompileServer extends SocketServer {
val args = input.split("\0", -1).toList
val newSettings = new FscSettings(fscError)
- val command = newOfflineCompilerCommand(args, newSettings)
+ val command = new OfflineCompilerCommand(args, newSettings)
this.verbose = newSettings.verbose.value
info("Settings after normalizing paths: " + newSettings)
@@ -120,7 +115,7 @@ class StandardCompileServer extends SocketServer {
reporter = new ConsoleReporter(newSettings, in, out) {
// disable prompts, so that compile server cannot block
- override def displayPrompt = ()
+ override def displayPrompt() = ()
}
def isCompilerReusable: Boolean = {
if (compiler == null) {
@@ -162,7 +157,7 @@ class StandardCompileServer extends SocketServer {
}
}
reporter.printSummary()
- if (isMemoryFullEnough) {
+ if (isMemoryFullEnough()) {
info("Nulling out compiler due to memory utilization.")
clearCompiler()
}
@@ -177,9 +172,9 @@ object CompileServer extends StandardCompileServer {
private def createRedirect(filename: String) =
new PrintStream((redirectDir / filename).createFile().bufferedOutput())
- def main(args: Array[String]) =
+ def main(args: Array[String]) =
execute(() => (), args)
-
+
/**
* Used for internal testing. The callback is called upon
* server start, notifying the caller that the server is
@@ -204,7 +199,7 @@ object CompileServer extends StandardCompileServer {
compileSocket setPort port
startupCallback()
run()
-
+
compileSocket deletePort port
}
}
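isMemoryFullEnough() (now called with explicit parens) compares used heap against maxMemory and, past the MaxCharge threshold of 0.8, the server drops its cached compiler instance. A small sketch of that check against the standard Runtime API (MemoryCheckDemo is illustrative):

object MemoryCheckDemo {
  private val MaxCharge = 0.8
  private val runtime = Runtime.getRuntime()

  // Used fraction of the maximum heap; above MaxCharge the fsc server
  // in the diff nulls out its resident compiler.
  def isMemoryFullEnough(): Boolean = {
    val used = runtime.totalMemory() - runtime.freeMemory()
    used.toDouble / runtime.maxMemory().toDouble > MaxCharge
  }

  def main(args: Array[String]): Unit =
    println(s"memory full enough: ${isMemoryFullEnough()}")
}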
diff --git a/src/compiler/scala/tools/nsc/CompileSocket.scala b/src/compiler/scala/tools/nsc/CompileSocket.scala
index 4051bda914..88d5c31b5d 100644
--- a/src/compiler/scala/tools/nsc/CompileSocket.scala
+++ b/src/compiler/scala/tools/nsc/CompileSocket.scala
@@ -5,13 +5,9 @@
package scala.tools.nsc
-import java.io.{ IOException, FileNotFoundException, PrintWriter, FileOutputStream }
-import java.io.{ BufferedReader, FileReader }
-import java.util.regex.Pattern
-import java.net._
+import java.io.{ FileNotFoundException, PrintWriter, FileOutputStream }
import java.security.SecureRandom
import io.{ File, Path, Directory, Socket }
-import scala.util.control.Exception.catching
import scala.tools.util.CompileOutputCommon
import scala.reflect.internal.util.StringOps.splitWhere
import scala.sys.process._
@@ -117,7 +113,7 @@ class CompileSocket extends CompileOutputCommon {
*/
def getPort(vmArgs: String): Int = {
val maxPolls = 300
- val sleepTime = 25
+ val sleepTime = 25L
var attempts = 0
var port = pollPort()
@@ -156,9 +152,9 @@ class CompileSocket extends CompileOutputCommon {
* cannot be established.
*/
def getOrCreateSocket(vmArgs: String, create: Boolean = true): Option[Socket] = {
- val maxMillis = 10 * 1000 // try for 10 seconds
- val retryDelay = 50
- val maxAttempts = maxMillis / retryDelay
+ val maxMillis = 10L * 1000 // try for 10 seconds
+ val retryDelay = 50L
+ val maxAttempts = (maxMillis / retryDelay).toInt
def getsock(attempts: Int): Option[Socket] = attempts match {
case 0 => warn("Unable to establish connection to compilation daemon") ; None
@@ -190,7 +186,7 @@ class CompileSocket extends CompileOutputCommon {
catch { case _: NumberFormatException => None }
def getSocket(serverAdr: String): Socket = (
- for ((name, portStr) <- splitWhere(serverAdr, _ == ':', true) ; port <- parseInt(portStr)) yield
+ for ((name, portStr) <- splitWhere(serverAdr, _ == ':', doDropIndex = true) ; port <- parseInt(portStr)) yield
getSocket(name, port)
) getOrElse fatal("Malformed server address: %s; exiting" format serverAdr)
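The changes here promote the poll and retry constants to Long and derive maxAttempts from maxMillis / retryDelay, keeping the arithmetic in one unit. The retry structure, reduced to a sketch (the countdown over attempts mirrors getsock; RetryDemo and the simulated flaky operation are illustrative):

object RetryDemo {
  // Keep the timing constants as Longs so the division below is exact
  // and there is no accidental Int/Long mixing.
  val maxMillis   = 10L * 1000
  val retryDelay  = 50L
  val maxAttempts = (maxMillis / retryDelay).toInt

  // Retry a flaky operation a bounded number of times, sleeping between tries.
  def retry[T](attempts: Int)(op: => Option[T]): Option[T] =
    if (attempts == 0) None
    else op orElse { Thread.sleep(retryDelay); retry(attempts - 1)(op) }

  def main(args: Array[String]): Unit = {
    var calls = 0
    val result = retry(maxAttempts) { calls += 1; if (calls == 3) Some("connected") else None }
    println(s"$result after $calls calls")
  }
}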
diff --git a/src/compiler/scala/tools/nsc/CompilerCommand.scala b/src/compiler/scala/tools/nsc/CompilerCommand.scala
index e994150f6f..f1f5130fb8 100644
--- a/src/compiler/scala/tools/nsc/CompilerCommand.scala
+++ b/src/compiler/scala/tools/nsc/CompilerCommand.scala
@@ -5,7 +5,6 @@
package scala.tools.nsc
-import scala.collection.mutable.ListBuffer
import io.File
/** A class representing command line info for scalac */
@@ -15,9 +14,6 @@ class CompilerCommand(arguments: List[String], val settings: Settings) {
type Setting = Settings#Setting
- /** file extensions of files that the compiler can process */
- lazy val fileEndings = Properties.fileEndings
-
private val processArgumentsResult =
if (shouldProcessArguments) processArguments
else (true, Nil)
@@ -41,8 +37,6 @@ class CompilerCommand(arguments: List[String], val settings: Settings) {
""".stripMargin.trim + "\n"
def shortUsage = "Usage: %s <options> <source files>" format cmdName
- def createUsagePreface(shouldExplain: Boolean) =
- if (shouldExplain) shortUsage + "\n" + explainAdvanced else ""
/** Creates a help message for a subset of options based on cond */
def createUsageMsg(cond: Setting => Boolean): String = {
@@ -82,9 +76,9 @@ class CompilerCommand(arguments: List[String], val settings: Settings) {
}
/** Messages explaining usage and options */
- def usageMsg = createUsageMsg("where possible standard", false, _.isStandard)
- def xusageMsg = createUsageMsg("Possible advanced", true, _.isAdvanced)
- def yusageMsg = createUsageMsg("Possible private", true, _.isPrivate)
+ def usageMsg = createUsageMsg("where possible standard", shouldExplain = false, _.isStandard)
+ def xusageMsg = createUsageMsg("Possible advanced", shouldExplain = true, _.isAdvanced)
+ def yusageMsg = createUsageMsg("Possible private", shouldExplain = true, _.isPrivate)
// If any of these settings is set, the compiler shouldn't start;
// an informative message of some sort should be printed instead.
@@ -128,6 +122,6 @@ class CompilerCommand(arguments: List[String], val settings: Settings) {
case x => List(x)
}
- settings.processArguments(expandedArguments, true)
+ settings.processArguments(expandedArguments, processAll = true)
}
}
diff --git a/src/compiler/scala/tools/nsc/CompilerRun.scala b/src/compiler/scala/tools/nsc/CompilerRun.scala
deleted file mode 100644
index 6746b08155..0000000000
--- a/src/compiler/scala/tools/nsc/CompilerRun.scala
+++ /dev/null
@@ -1,21 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Martin Odersky
- */
-
-package scala.tools.nsc
-
-class CompilerRun {
- def firstPhase: Phase = NoPhase
- def terminalPhase: Phase = NoPhase
- def namerPhase: Phase = NoPhase
- def typerPhase: Phase = NoPhase
- def refchecksPhase: Phase = NoPhase
- def explicitouterPhase: Phase = NoPhase
- def erasurePhase: Phase = NoPhase
- def flattenPhase: Phase = NoPhase
- def mixinPhase: Phase = NoPhase
- def icodePhase: Phase = NoPhase
- def phaseNamed(name: String): Phase = NoPhase
-}
-
diff --git a/src/compiler/scala/tools/nsc/ConsoleWriter.scala b/src/compiler/scala/tools/nsc/ConsoleWriter.scala
index 5c5606e98b..6c16d19d2c 100644
--- a/src/compiler/scala/tools/nsc/ConsoleWriter.scala
+++ b/src/compiler/scala/tools/nsc/ConsoleWriter.scala
@@ -13,9 +13,9 @@ import java.io.Writer
* @version 1.0
*/
class ConsoleWriter extends Writer {
- def close = flush
+ def close() = flush()
- def flush = Console.flush
+ def flush() = Console.flush()
def write(cbuf: Array[Char], off: Int, len: Int) {
if (len > 0)
diff --git a/src/compiler/scala/tools/nsc/Driver.scala b/src/compiler/scala/tools/nsc/Driver.scala
index 814bd58a66..3ac27a42e8 100644
--- a/src/compiler/scala/tools/nsc/Driver.scala
+++ b/src/compiler/scala/tools/nsc/Driver.scala
@@ -1,11 +1,12 @@
-package scala.tools.nsc
+package scala
+package tools.nsc
-import scala.tools.nsc.reporters.{Reporter, ConsoleReporter}
+import scala.tools.nsc.reporters.ConsoleReporter
import Properties.{ versionString, copyrightString, residentPromptString }
-import scala.reflect.internal.util.{ BatchSourceFile, FakePos }
+import scala.reflect.internal.util.FakePos
abstract class Driver {
-
+
val prompt = residentPromptString
val versionMsg = "Scala compiler " +
@@ -41,7 +42,7 @@ abstract class Driver {
command = new CompilerCommand(args.toList, ss)
settings = command.settings
- if (settings.version.value) {
+ if (settings.version) {
reporter.echo(versionMsg)
} else if (processSettingsHook()) {
val compiler = newCompiler()
@@ -68,4 +69,4 @@ abstract class Driver {
sys.exit(if (reporter.hasErrors) 1 else 0)
}
-}
\ No newline at end of file
+}
diff --git a/src/compiler/scala/tools/nsc/EvalLoop.scala b/src/compiler/scala/tools/nsc/EvalLoop.scala
index c4147fad4c..15a296c836 100644
--- a/src/compiler/scala/tools/nsc/EvalLoop.scala
+++ b/src/compiler/scala/tools/nsc/EvalLoop.scala
@@ -14,7 +14,7 @@ trait EvalLoop {
def loop(action: (String) => Unit) {
@tailrec def inner() {
Console.print(prompt)
- val line = try Console.readLine catch { case _: EOFException => null }
+ val line = try Console.readLine() catch { case _: EOFException => null }
if (line != null && line != "") {
action(line)
inner()
diff --git a/src/compiler/scala/tools/nsc/GenericRunnerCommand.scala b/src/compiler/scala/tools/nsc/GenericRunnerCommand.scala
index c8fd5985c6..e710222285 100644
--- a/src/compiler/scala/tools/nsc/GenericRunnerCommand.scala
+++ b/src/compiler/scala/tools/nsc/GenericRunnerCommand.scala
@@ -26,7 +26,7 @@ extends CompilerCommand(args, settings) {
// change CompilerCommand behavior
override def shouldProcessArguments: Boolean = false
- private lazy val (_ok, targetAndArguments) = settings.processArguments(args, false)
+ private lazy val (_ok, targetAndArguments) = settings.processArguments(args, processAll = false)
override def ok = _ok
private def guessHowToRun(target: String): GenericRunnerCommand.HowToRun = {
if (!ok) Error
diff --git a/src/compiler/scala/tools/nsc/GenericRunnerSettings.scala b/src/compiler/scala/tools/nsc/GenericRunnerSettings.scala
index 9c2db11a56..ad75d02bff 100644
--- a/src/compiler/scala/tools/nsc/GenericRunnerSettings.scala
+++ b/src/compiler/scala/tools/nsc/GenericRunnerSettings.scala
@@ -39,7 +39,4 @@ class GenericRunnerSettings(error: String => Unit) extends Settings(error) {
val nc = BooleanSetting(
"-nc",
"do not use the fsc compilation daemon") withAbbreviation "-nocompdaemon"
-
- @deprecated("Use `nc` instead", "2.9.0") def nocompdaemon = nc
- @deprecated("Use `save` instead", "2.9.0") def savecompiled = save
}
diff --git a/src/compiler/scala/tools/nsc/Global.scala b/src/compiler/scala/tools/nsc/Global.scala
index aea3e0d930..a08633ffc8 100644
--- a/src/compiler/scala/tools/nsc/Global.scala
+++ b/src/compiler/scala/tools/nsc/Global.scala
@@ -3,22 +3,23 @@
* @author Martin Odersky
*/
-package scala.tools.nsc
+package scala
+package tools
+package nsc
import java.io.{ File, FileOutputStream, PrintWriter, IOException, FileNotFoundException }
import java.nio.charset.{ Charset, CharsetDecoder, IllegalCharsetNameException, UnsupportedCharsetException }
+import java.util.UUID._
import scala.compat.Platform.currentTime
-import scala.tools.util.PathResolver
import scala.collection.{ mutable, immutable }
import io.{ SourceReader, AbstractFile, Path }
import reporters.{ Reporter, ConsoleReporter }
-import util.{ Exceptional, ClassPath, MergedClassPath, StatisticsInfo, ScalaClassLoader, returning }
-import scala.reflect.internal.util.{ NoPosition, OffsetPosition, SourceFile, NoSourceFile, BatchSourceFile, ScriptSourceFile }
+import util.{ ClassPath, MergedClassPath, StatisticsInfo, returning, stackTraceString, stackTraceHeadString }
+import scala.reflect.internal.util.{ OffsetPosition, SourceFile, NoSourceFile, BatchSourceFile, ScriptSourceFile }
import scala.reflect.internal.pickling.{ PickleBuffer, PickleFormat }
-import settings.{ AestheticSettings }
+import scala.reflect.io.VirtualFile
import symtab.{ Flags, SymbolTable, SymbolLoaders, SymbolTrackers }
import symtab.classfile.Pickler
-import dependencies.DependencyAnalysis
import plugins.Plugins
import ast._
import ast.parser._
@@ -26,13 +27,11 @@ import typechecker._
import transform.patmat.PatternMatching
import transform._
import backend.icode.{ ICodes, GenICode, ICodeCheckers }
-import backend.{ ScalaPrimitives, Platform, MSILPlatform, JavaPlatform }
-import backend.jvm.{GenJVM, GenASM}
-import backend.opt.{ Inliners, InlineExceptionHandlers, ClosureElimination, DeadCodeElimination }
+import backend.{ ScalaPrimitives, Platform, JavaPlatform }
+import backend.jvm.GenASM
+import backend.opt.{ Inliners, InlineExceptionHandlers, ConstantOptimization, ClosureElimination, DeadCodeElimination }
import backend.icode.analysis._
import scala.language.postfixOps
-import scala.reflect.internal.StdAttachments
-import scala.reflect.ClassTag
class Global(var currentSettings: Settings, var reporter: Reporter)
extends SymbolTable
@@ -47,6 +46,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
// the mirror --------------------------------------------------
override def isCompilerUniverse = true
+ override val useOffsetPositions = !currentSettings.Yrangepos.value
class GlobalMirror extends Roots(NoSymbol) {
val universe: self.type = self
@@ -75,8 +75,6 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
def this(settings: Settings) =
this(settings, new ConsoleReporter(settings))
- def mkAttributedQualifier(tpe: Type, termSym: Symbol): Tree = gen.mkAttributedQualifier(tpe, termSym)
-
def picklerPhase: Phase = if (currentRun.isDefined) currentRun.picklerPhase else NoPhase
// platform specific elements
@@ -84,8 +82,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
type ThisPlatform = Platform { val global: Global.this.type }
lazy val platform: ThisPlatform =
- if (forMSIL) new { val global: Global.this.type = Global.this } with MSILPlatform
- else new { val global: Global.this.type = Global.this } with JavaPlatform
+ new { val global: Global.this.type = Global.this } with JavaPlatform
type PlatformClassPath = ClassPath[platform.BinaryRepr]
type OptClassPath = Option[PlatformClassPath]
@@ -173,7 +170,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
if (lastPrintedSource == source)
println(": tree is unchanged since " + lastPrintedPhase)
else {
- lastPrintedPhase = phase.prev // since we're running inside "afterPhase"
+ lastPrintedPhase = phase.prev // since we're running inside "exitingPhase"
lastPrintedSource = source
println("")
println(source)
@@ -225,10 +222,10 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
// not deprecated yet, but a method called "error" imported into
// nearly every trait really must go. For now using globalError.
def error(msg: String) = globalError(msg)
- def inform(msg: String) = reporter.echo(msg)
+ override def inform(msg: String) = reporter.echo(msg)
override def globalError(msg: String) = reporter.error(NoPosition, msg)
override def warning(msg: String) =
- if (settings.fatalWarnings.value) globalError(msg)
+ if (settings.fatalWarnings) globalError(msg)
else reporter.warning(NoPosition, msg)
// Getting in front of Predef's asserts to supplement with more info.
@@ -236,13 +233,17 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
// of assert and require (but for now I've reproduced them here,
// because there are a million to fix.)
@inline final def assert(assertion: Boolean, message: => Any) {
- Predef.assert(assertion, supplementErrorMessage("" + message))
+ // calling Predef.assert would send a freshly allocated closure wrapping the one received as argument.
+ if (!assertion)
+ throw new java.lang.AssertionError("assertion failed: "+ supplementErrorMessage("" + message))
}
@inline final def assert(assertion: Boolean) {
assert(assertion, "")
}
@inline final def require(requirement: Boolean, message: => Any) {
- Predef.require(requirement, supplementErrorMessage("" + message))
+ // calling Predef.require would send a freshly allocated closure wrapping the one received as argument.
+ if (!requirement)
+ throw new IllegalArgumentException("requirement failed: "+ supplementErrorMessage("" + message))
}
@inline final def require(requirement: Boolean) {
require(requirement, "")
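The rewritten assert/require avoid delegating to Predef because forwarding the by-name message would allocate a fresh wrapper closure on every call, even when the condition holds; instead they test the condition inline and only force the message on failure. The essential shape as a sketch (CheapAssert and the supplement helper stand in for the compiler's supplementErrorMessage):

object CheapAssert {
  // Stand-in for the compiler's supplementErrorMessage.
  private def supplement(msg: String): String = "supplemented: " + msg

  // The by-name message is only forced on the failure path, and no extra
  // closure is created just to forward it to Predef.assert.
  @inline def assertOr(assertion: Boolean, message: => Any): Unit =
    if (!assertion)
      throw new java.lang.AssertionError("assertion failed: " + supplement("" + message))

  def main(args: Array[String]): Unit = {
    assertOr(1 + 1 == 2, { println("not evaluated"); "math is broken" })
    println("ok") // the message block above was never run
  }
}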
@@ -255,30 +256,31 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
}
@inline final def ifDebug(body: => Unit) {
- if (settings.debug.value)
+ if (settings.debug)
body
}
- // Warnings issued only under -Ydebug. For messages which should reach
- // developer ears, but are not adequately actionable by users.
- @inline final override def debugwarn(msg: => String) {
- if (settings.debug.value)
- warning(msg)
+ /** This is for WARNINGS which should reach the ears of scala developers
+ * whenever they occur, but are not useful for normal users. They should
+ * be precise, explanatory, and infrequent. Please don't use this as a
+ * logging mechanism. !!! is prefixed to all messages issued via this route
+ * to make them visually distinct.
+ */
+ @inline final override def devWarning(msg: => String) {
+ if (settings.developer || settings.debug)
+ warning("!!! " + msg)
+ else
+ log("!!! " + msg) // such warnings always at least logged
}
private def elapsedMessage(msg: String, start: Long) =
msg + " in " + (currentTime - start) + "ms"
def informComplete(msg: String): Unit = reporter.withoutTruncating(inform(msg))
- def informProgress(msg: String) = if (opt.verbose) inform("[" + msg + "]")
- def inform[T](msg: String, value: T): T = returning(value)(x => inform(msg + x))
+ def informProgress(msg: String) = if (settings.verbose) inform("[" + msg + "]")
def informTime(msg: String, start: Long) = informProgress(elapsedMessage(msg, start))
def logError(msg: String, t: Throwable): Unit = ()
- def logAfterEveryPhase[T](msg: String)(op: => T) {
- log("Running operation '%s' after every phase.\n".format(msg) + describeAfterEveryPhase(op))
- }
-
override def shouldLogAtThisPhase = settings.log.isSetByUser && (
(settings.log containsPhase globalPhase) || (settings.log containsPhase phase)
)
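debugwarn becomes devWarning: messages meant for compiler developers are shown as warnings under the developer or debug settings and otherwise only logged, always carrying the "!!!" prefix so they stand out. A sketch of that routing, with two plain booleans standing in for the settings object (DevWarnings is an illustrative class):

// Minimal sketch of the devWarning routing: warn loudly for developers,
// quietly log for everyone else, always with the "!!!" prefix.
class DevWarnings(developer: Boolean, debug: Boolean) {
  private def warning(msg: String): Unit = Console.err.println("warning: " + msg)
  private def log(msg: String): Unit     = println("[log] " + msg)

  def devWarning(msg: => String): Unit =
    if (developer || debug) warning("!!! " + msg)
    else log("!!! " + msg) // such warnings are always at least logged
}

object DevWarningsDemo {
  def main(args: Array[String]): Unit = {
    new DevWarnings(developer = true,  debug = false).devWarning("symbol has no owner")
    new DevWarnings(developer = false, debug = false).devWarning("symbol has no owner")
  }
}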
@@ -289,7 +291,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
}
@inline final override def debuglog(msg: => String) {
- if (settings.debug.value)
+ if (settings.debug)
log(msg)
}
@@ -302,7 +304,6 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
private val reader: SourceReader = {
val defaultEncoding = Properties.sourceEncoding
- val defaultReader = Properties.sourceReader
def loadCharset(name: String) =
try Some(Charset.forName(name))
@@ -315,7 +316,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
None
}
- val charset = opt.encoding flatMap loadCharset getOrElse {
+ val charset = ( if (settings.encoding.isSetByUser) Some(settings.encoding.value) else None ) flatMap loadCharset getOrElse {
settings.encoding.value = defaultEncoding // A mandatory charset
Charset.forName(defaultEncoding)
}
@@ -330,62 +331,17 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
}
}
- opt.sourceReader flatMap loadReader getOrElse {
+ ( if (settings.sourceReader.isSetByUser) Some(settings.sourceReader.value) else None ) flatMap loadReader getOrElse {
new SourceReader(charset.newDecoder(), reporter)
}
}
- if (!dependencyAnalysis.off)
- dependencyAnalysis.loadDependencyAnalysis()
-
- if (opt.verbose || opt.logClasspath) {
+ if (settings.verbose || settings.Ylogcp) {
// Uses the "do not truncate" inform
informComplete("[search path for source files: " + classPath.sourcepaths.mkString(",") + "]")
informComplete("[search path for class files: " + classPath.asClasspathString + "]")
}
- object opt extends AestheticSettings {
- def settings = Global.this.settings
-
- // protected implicit lazy val globalPhaseOrdering: Ordering[Phase] = Ordering[Int] on (_.id)
- def isActive(ph: Settings#PhasesSetting) = ph containsPhase globalPhase
- def wasActive(ph: Settings#PhasesSetting) = ph containsPhase globalPhase.prev
-
- // Allows for syntax like scalac -Xshow-class Random@erasure,typer
- private def splitClassAndPhase(str: String, term: Boolean): Name = {
- def mkName(s: String) = if (term) newTermName(s) else newTypeName(s)
- (str indexOf '@') match {
- case -1 => mkName(str)
- case idx =>
- val phasePart = str drop (idx + 1)
- settings.Yshow.tryToSetColon(phasePart split ',' toList)
- mkName(str take idx)
- }
- }
-
- // behavior
-
- // debugging
- def checkPhase = wasActive(settings.check)
- def logPhase = isActive(settings.log)
-
- // Write *.icode files right after GenICode when -Xprint-icode was given.
- def writeICodeAtICode = settings.writeICode.isSetByUser && isActive(settings.writeICode)
-
- // showing/printing things
- def browsePhase = isActive(settings.browse)
- def echoFilenames = opt.debug && (opt.verbose || currentRun.size < 5)
- def noShow = settings.Yshow.isDefault
- def printLate = settings.printLate.value
- def printPhase = isActive(settings.Xprint)
- def showNames = List(showClass, showObject).flatten
- def showPhase = isActive(settings.Yshow)
- def showSymbols = settings.Yshowsyms.value
- def showTrees = settings.Xshowtrees.value || settings.XshowtreesCompact.value || settings.XshowtreesStringified.value
- val showClass = optSetting[String](settings.Xshowcls) map (x => splitClassAndPhase(x, false))
- val showObject = optSetting[String](settings.Xshowobj) map (x => splitClassAndPhase(x, true))
- }
-
// The current division between scala.reflect.* and scala.tools.nsc.* is pretty
// clunky. It is often difficult to have a setting influence something without having
// to create it on that side. For this one my strategy is a constant def at the file
@@ -394,11 +350,8 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
// Here comes another one...
override protected val enableTypeVarExperimentals = settings.Xexperimental.value
- // True if -Xscript has been set, indicating a script run.
- def isScriptRun = opt.script.isDefined
-
def getSourceFile(f: AbstractFile): BatchSourceFile =
- if (isScriptRun) ScriptSourceFile(f, reader read f)
+ if (settings.script.isSetByUser) ScriptSourceFile(f, reader read f)
else new BatchSourceFile(f, reader read f)
def getSourceFile(name: String): SourceFile = {
@@ -453,7 +406,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
if ((unit ne null) && unit.exists)
lastSeenSourceFile = unit.source
- if (opt.echoFilenames)
+ if (settings.debug && (settings.verbose || currentRun.size < 5))
inform("[running phase " + name + " on " + unit + "]")
val unit0 = currentUnit
@@ -463,7 +416,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
currentRun.informUnitStarting(this, unit)
apply(unit)
}
- currentRun.advanceUnit
+ currentRun.advanceUnit()
} finally {
//assert(currentUnit == unit)
currentRun.currentUnit = unit0
@@ -472,16 +425,18 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
}
/** Switch to turn on detailed type logs */
- var printTypings = settings.Ytyperdebug.value
- var printInfers = settings.Yinferdebug.value
+ val printTypings = settings.Ytyperdebug.value
+ val printInfers = settings.Yinferdebug.value
// phaseName = "parser"
- object syntaxAnalyzer extends {
+ lazy val syntaxAnalyzer = new {
val global: Global.this.type = Global.this
val runsAfter = List[String]()
val runsRightAfter = None
} with SyntaxAnalyzer
+ import syntaxAnalyzer.{ UnitScanner, UnitParser }
+
// !!! I think we're overdue for all these phase objects being lazy vals.
// There's no way for a Global subclass to provide a custom typer
// despite the existence of a "def newTyper(context: Context): Typer"
@@ -629,7 +584,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
val runsRightAfter = None
} with Inliners
- // phaseName = "inlineExceptionHandlers"
+ // phaseName = "inlinehandlers"
object inlineExceptionHandlers extends {
val global: Global.this.type = Global.this
val runsAfter = List("inliner")
@@ -639,23 +594,23 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
// phaseName = "closelim"
object closureElimination extends {
val global: Global.this.type = Global.this
- val runsAfter = List("inlineExceptionHandlers")
+ val runsAfter = List("inlinehandlers")
val runsRightAfter = None
} with ClosureElimination
- // phaseName = "dce"
- object deadCode extends {
+ // phaseName = "constopt"
+ object constantOptimization extends {
val global: Global.this.type = Global.this
val runsAfter = List("closelim")
val runsRightAfter = None
- } with DeadCodeElimination
+ } with ConstantOptimization
- // phaseName = "jvm", FJBG-based version
- object genJVM extends {
+ // phaseName = "dce"
+ object deadCode extends {
val global: Global.this.type = Global.this
- val runsAfter = List("dce")
+ val runsAfter = List("closelim")
val runsRightAfter = None
- } with GenJVM
+ } with DeadCodeElimination
// phaseName = "jvm", ASM-based version
object genASM extends {
@@ -664,19 +619,11 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
val runsRightAfter = None
} with GenASM
- // This phase is optional: only added if settings.make option is given.
- // phaseName = "dependencyAnalysis"
- object dependencyAnalysis extends {
- val global: Global.this.type = Global.this
- val runsAfter = List("jvm")
- val runsRightAfter = None
- } with DependencyAnalysis
-
// phaseName = "terminal"
object terminal extends {
val global: Global.this.type = Global.this
val phaseName = "terminal"
- val runsAfter = List("jvm", "msil")
+ val runsAfter = List("jvm")
val runsRightAfter = None
} with SubComponent {
private var cache: Option[GlobalPhase] = None
@@ -691,13 +638,6 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
}
}
- // phaseName = "SAMPLE PHASE"
- object sampleTransform extends {
- val global: Global.this.type = Global.this
- val runsAfter = List[String]()
- val runsRightAfter = None
- } with SampleTransform
-
/** The checkers are for validating the compiler data structures
* at phase boundaries.
*/
@@ -749,6 +689,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
inliner -> "optimization: do inlining",
inlineExceptionHandlers -> "optimization: inline exception handlers",
closureElimination -> "optimization: eliminate uncalled closures",
+ constantOptimization -> "optimization: optimize null and other constants",
deadCode -> "optimization: eliminate dead code",
terminal -> "The last phase in the compiler chain"
)
@@ -794,13 +735,41 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
/** A description of the phases that will run */
def phaseDescriptions: String = {
- val width = phaseNames map (_.length) max
- val fmt = "%" + width + "s %2s %s\n"
+ val Limit = 16 // phase names should not be absurdly long
+ val MaxCol = 80 // because some of us edit on green screens
+ val maxName = (0 /: phaseNames)(_ max _.length)
+ val width = maxName min Limit
+ val maxDesc = MaxCol - (width + 6) // descriptions not novels
+ val fmt = if (settings.verbose) s"%${maxName}s %2s %s%n"
+ else s"%${width}.${width}s %2s %.${maxDesc}s%n"
val line1 = fmt.format("phase name", "id", "description")
val line2 = fmt.format("----------", "--", "-----------")
+
+ // built-in string precision merely truncates
+ import java.util.{ Formattable, FormattableFlags, Formatter }
+ def fmtable(s: String) = new Formattable {
+ override def formatTo(formatter: Formatter, flags: Int, width: Int, precision: Int) {
+ val p = elliptically(s, precision)
+ val w = if (width > 0 && p.length < width) {
+ import FormattableFlags.LEFT_JUSTIFY
+ val leftly = (flags & LEFT_JUSTIFY) == LEFT_JUSTIFY
+ val sb = new StringBuilder
+ def pad() = 1 to width - p.length foreach (_ => sb.append(' '))
+ if (!leftly) pad()
+ sb.append(p)
+ if (leftly) pad()
+ sb.toString
+ } else p
+ formatter.out.append(w)
+ }
+ }
+ def elliptically(s: String, max: Int) =
+ if (max < 0 || s.length <= max) s
+ else if (max < 4) s.take(max)
+ else s.take(max - 3) + "..."
val descs = phaseDescriptors.zipWithIndex map {
- case (ph, idx) => fmt.format(ph.phaseName, idx + 1, phasesDescMap(ph))
+ case (ph, idx) => fmt.format(fmtable(ph.phaseName), idx + 1, fmtable(phasesDescMap(ph)))
}
line1 :: line2 :: descs mkString
}
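The new phaseDescriptions table clamps both columns: names wider than 16 characters and descriptions that would push past column 80 are cut with a trailing "...", because the built-in %.Ns precision on a plain String merely truncates. The truncation helper, plus the padding it pairs with, as a runnable sketch (elliptically is copied from the diff; EllipsisDemo, cell, and the sample rows are illustrative):

object EllipsisDemo {
  // Truncate s to at most max characters, marking the cut with "..."
  // whenever there is room for the marker.
  def elliptically(s: String, max: Int): String =
    if (max < 0 || s.length <= max) s
    else if (max < 4) s.take(max)
    else s.take(max - 3) + "..."

  // Left-justified, fixed-width cell built on top of elliptically.
  def cell(s: String, width: Int): String = {
    val p = elliptically(s, width)
    p + (" " * (width - p.length))
  }

  def main(args: Array[String]): Unit = {
    println(cell("parser", 16) + " 1  " + elliptically("parse source into ASTs, perform simple desugaring", 40))
    println(cell("inlineExceptionHandlers", 16) + " 12 " + elliptically("optimization: inline exception handlers", 40))
  }
}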
@@ -830,48 +799,14 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
/** Returns List of (phase, value) pairs, including only those
* where the value compares unequal to the previous phase's value.
*/
- def afterEachPhase[T](op: => T): List[(Phase, T)] = {
+ def afterEachPhase[T](op: => T): List[(Phase, T)] = { // used in tests
phaseDescriptors.map(_.ownPhase).filterNot(_ eq NoPhase).foldLeft(List[(Phase, T)]()) { (res, ph) =>
- val value = afterPhase(ph)(op)
+ val value = exitingPhase(ph)(op)
if (res.nonEmpty && res.head._2 == value) res
else ((ph, value)) :: res
} reverse
}
- /** Returns List of ChangeAfterPhase objects, encapsulating those
- * phase transitions where the result of the operation gave a different
- * list than it had when run during the previous phase.
- */
- def changesAfterEachPhase[T](op: => List[T]): List[ChangeAfterPhase[T]] = {
- val ops = ((NoPhase, Nil)) :: afterEachPhase(op)
-
- ops sliding 2 map {
- case (_, before) :: (ph, after) :: Nil =>
- val lost = before filterNot (after contains _)
- val gained = after filterNot (before contains _)
- ChangeAfterPhase(ph, lost, gained)
- case _ => ???
- } toList
- }
- private def numberedPhase(ph: Phase) = "%2d/%s".format(ph.id, ph.name)
-
- case class ChangeAfterPhase[+T](ph: Phase, lost: List[T], gained: List[T]) {
- private def mkStr(what: String, xs: List[_]) = (
- if (xs.isEmpty) ""
- else xs.mkString(what + " after " + numberedPhase(ph) + " {\n ", "\n ", "\n}\n")
- )
- override def toString = mkStr("Lost", lost) + mkStr("Gained", gained)
- }
-
- def describeAfterEachPhase[T](op: => T): List[String] =
- afterEachPhase(op) map { case (ph, t) => "[after %-15s] %s".format(numberedPhase(ph), t) }
-
- def describeAfterEveryPhase[T](op: => T): String =
- describeAfterEachPhase(op) map (" " + _ + "\n") mkString
-
- def printAfterEachPhase[T](op: => T): Unit =
- describeAfterEachPhase(op) foreach (m => println(" " + m))
-
// ------------ Invalidations ---------------------------------
/** Is given package class a system package class that cannot be invalidated?
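afterEachPhase now routes through exitingPhase but keeps its shape: evaluate the operation after every phase and record a (phase, value) pair only when the value differs from the previously recorded one. The de-duplicating fold in isolation, with phases reduced to plain strings for the sketch (AfterEachPhaseDemo and changesOnly are illustrative names):

object AfterEachPhaseDemo {
  // Keep (label, value) only where the value changed compared to the
  // previously recorded entry -- the same fold-and-reverse used in the diff.
  def changesOnly[T](pairs: List[(String, T)]): List[(String, T)] =
    pairs.foldLeft(List[(String, T)]()) { (res, pair) =>
      if (res.nonEmpty && res.head._2 == pair._2) res
      else pair :: res
    }.reverse

  def main(args: Array[String]): Unit = {
    val observed = List("parser" -> 3, "namer" -> 3, "typer" -> 5, "erasure" -> 5, "jvm" -> 7)
    println(changesOnly(observed)) // List((parser,3), (typer,5), (jvm,7))
  }
}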
@@ -885,8 +820,8 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
/** Invalidates packages that contain classes defined in a classpath entry, and
* rescans that entry.
- * @param path A fully qualified name that refers to a directory or jar file that's
- * an entry on the classpath.
+ * @param paths Fully qualified names that refer to directories or jar files that are
+ * entries on the classpath.
* First, causes the classpath entry referred to by `path` to be rescanned, so that
* any new files or deleted files or changes in subpackages are picked up.
* Second, invalidates any packages for which one of the following conditions is met:
@@ -1074,7 +1009,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
object typeDeconstruct extends {
val global: Global.this.type = Global.this
- } with interpreter.StructuredTypeStrings
+ } with typechecker.StructuredTypeStrings
/** There are common error conditions where when the exception hits
* here, currentRun.currentUnit is null. This robs us of the knowledge
@@ -1094,40 +1029,37 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
def currentUnit: CompilationUnit = if (currentRun eq null) NoCompilationUnit else currentRun.currentUnit
def currentSource: SourceFile = if (currentUnit.exists) currentUnit.source else lastSeenSourceFile
- // TODO - trim these to the absolute minimum.
- @inline final def afterErasure[T](op: => T): T = afterPhase(currentRun.erasurePhase)(op)
- @inline final def afterPostErasure[T](op: => T): T = afterPhase(currentRun.posterasurePhase)(op)
- @inline final def afterExplicitOuter[T](op: => T): T = afterPhase(currentRun.explicitouterPhase)(op)
- @inline final def afterFlatten[T](op: => T): T = afterPhase(currentRun.flattenPhase)(op)
- @inline final def afterIcode[T](op: => T): T = afterPhase(currentRun.icodePhase)(op)
- @inline final def afterMixin[T](op: => T): T = afterPhase(currentRun.mixinPhase)(op)
- @inline final def afterPickler[T](op: => T): T = afterPhase(currentRun.picklerPhase)(op)
- @inline final def afterRefchecks[T](op: => T): T = afterPhase(currentRun.refchecksPhase)(op)
- @inline final def afterSpecialize[T](op: => T): T = afterPhase(currentRun.specializePhase)(op)
- @inline final def afterTyper[T](op: => T): T = afterPhase(currentRun.typerPhase)(op)
- @inline final def afterUncurry[T](op: => T): T = afterPhase(currentRun.uncurryPhase)(op)
- @inline final def beforeErasure[T](op: => T): T = beforePhase(currentRun.erasurePhase)(op)
- @inline final def beforeExplicitOuter[T](op: => T): T = beforePhase(currentRun.explicitouterPhase)(op)
- @inline final def beforeFlatten[T](op: => T): T = beforePhase(currentRun.flattenPhase)(op)
- @inline final def beforeIcode[T](op: => T): T = beforePhase(currentRun.icodePhase)(op)
- @inline final def beforeMixin[T](op: => T): T = beforePhase(currentRun.mixinPhase)(op)
- @inline final def beforePickler[T](op: => T): T = beforePhase(currentRun.picklerPhase)(op)
- @inline final def beforeRefchecks[T](op: => T): T = beforePhase(currentRun.refchecksPhase)(op)
- @inline final def beforeSpecialize[T](op: => T): T = beforePhase(currentRun.specializePhase)(op)
- @inline final def beforeTyper[T](op: => T): T = beforePhase(currentRun.typerPhase)(op)
- @inline final def beforeUncurry[T](op: => T): T = beforePhase(currentRun.uncurryPhase)(op)
-
- def explainContext(c: analyzer.Context): String = (
- if (c == null) "" else (
- """| context owners: %s
- |
- |Enclosing block or template:
- |%s""".format(
- c.owner.ownerChain.takeWhile(!_.isPackageClass).mkString(" -> "),
- nodePrinters.nodeToString(c.enclClassOrMethod.tree)
- )
- )
+ def isGlobalInitialized = (
+ definitions.isDefinitionsInitialized
+ && rootMirror.isMirrorInitialized
)
+ override def isPastTyper = (
+ (curRun ne null)
+ && isGlobalInitialized // defense against init order issues
+ && (globalPhase.id > currentRun.typerPhase.id)
+ )
+
+ // TODO - trim these to the absolute minimum.
+ @inline final def exitingErasure[T](op: => T): T = exitingPhase(currentRun.erasurePhase)(op)
+ @inline final def exitingPostErasure[T](op: => T): T = exitingPhase(currentRun.posterasurePhase)(op)
+ @inline final def exitingExplicitOuter[T](op: => T): T = exitingPhase(currentRun.explicitouterPhase)(op)
+ @inline final def exitingFlatten[T](op: => T): T = exitingPhase(currentRun.flattenPhase)(op)
+ @inline final def exitingMixin[T](op: => T): T = exitingPhase(currentRun.mixinPhase)(op)
+ @inline final def exitingPickler[T](op: => T): T = exitingPhase(currentRun.picklerPhase)(op)
+ @inline final def exitingRefchecks[T](op: => T): T = exitingPhase(currentRun.refchecksPhase)(op)
+ @inline final def exitingSpecialize[T](op: => T): T = exitingPhase(currentRun.specializePhase)(op)
+ @inline final def exitingTyper[T](op: => T): T = exitingPhase(currentRun.typerPhase)(op)
+ @inline final def exitingUncurry[T](op: => T): T = exitingPhase(currentRun.uncurryPhase)(op)
+ @inline final def enteringErasure[T](op: => T): T = enteringPhase(currentRun.erasurePhase)(op)
+ @inline final def enteringExplicitOuter[T](op: => T): T = enteringPhase(currentRun.explicitouterPhase)(op)
+ @inline final def enteringFlatten[T](op: => T): T = enteringPhase(currentRun.flattenPhase)(op)
+ @inline final def enteringIcode[T](op: => T): T = enteringPhase(currentRun.icodePhase)(op)
+ @inline final def enteringMixin[T](op: => T): T = enteringPhase(currentRun.mixinPhase)(op)
+ @inline final def enteringPickler[T](op: => T): T = enteringPhase(currentRun.picklerPhase)(op)
+ @inline final def enteringRefchecks[T](op: => T): T = enteringPhase(currentRun.refchecksPhase)(op)
+ @inline final def enteringTyper[T](op: => T): T = enteringPhase(currentRun.typerPhase)(op)
+ @inline final def enteringUncurry[T](op: => T): T = enteringPhase(currentRun.uncurryPhase)(op)
+
// Owners up to and including the first package class.
private def ownerChainString(sym: Symbol): String = (
if (sym == null) ""
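The afterX/beforeX helpers are renamed to exitingX/enteringX, which reads as "the view of the world on entering (or exiting) that phase" instead of an ambiguous before/after, and each alias remains a thin wrapper over a phase-scoped evaluator. A toy sketch of that save-and-restore pattern, assuming a fake phase represented as a String (PhaseScopeDemo and its "ph + .next" convention are illustrative, not the compiler's actual phase model):

object PhaseScopeDemo {
  // A toy "current phase" with a save/restore evaluator, standing in for
  // the compiler's enteringPhase/exitingPhase machinery.
  private var current: String = "namer"

  def enteringPhase[T](ph: String)(op: => T): T = {
    val saved = current
    current = ph
    try op finally current = saved
  }

  // "Exiting" a phase is modelled here as entering the one right after it.
  def exitingPhase[T](ph: String)(op: => T): T = enteringPhase(ph + ".next")(op)

  // Thin aliases in the style of enteringTyper / exitingErasure.
  def enteringTyper[T](op: => T): T  = enteringPhase("typer")(op)
  def exitingErasure[T](op: => T): T = exitingPhase("erasure")(op)

  def main(args: Array[String]): Unit = {
    println(enteringTyper(current))  // typer
    println(exitingErasure(current)) // erasure.next
    println(current)                 // namer (restored)
  }
}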
@@ -1140,9 +1072,6 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
pairs.toList collect { case (k, v) if v != null => "%20s: %s".format(k, v) } mkString "\n"
)
- def explainTree(t: Tree): String = formatExplain(
- )
-
/** Don't want to introduce new errors trying to report errors,
* so swallow exceptions.
*/
@@ -1156,7 +1085,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
val info1 = formatExplain(
"while compiling" -> currentSource.path,
- "during phase" -> ( if (globalPhase eq phase) phase else "global=%s, atPhase=%s".format(globalPhase, phase) ),
+ "during phase" -> ( if (globalPhase eq phase) phase else "global=%s, enteringPhase=%s".format(globalPhase, phase) ),
"library version" -> scala.util.Properties.versionString,
"compiler version" -> Properties.versionString,
"reconstructed args" -> settings.recreateArgs.mkString(" ")
@@ -1172,7 +1101,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
val info3: List[String] = (
( List("== Enclosing template or block ==", nodePrinters.nodeToString(enclosing).trim) )
++ ( if (tpe eq null) Nil else List("== Expanded type of tree ==", typeDeconstruct.show(tpe)) )
- ++ ( if (!opt.debug) Nil else List("== Current unit body ==", nodePrinters.nodeToString(currentUnit.body)) )
+ ++ ( if (!settings.debug) Nil else List("== Current unit body ==", nodePrinters.nodeToString(currentUnit.body)) )
++ ( List(errorMessage) )
)
@@ -1187,8 +1116,8 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
override def currentRunId = curRunId
def echoPhaseSummary(ph: Phase) = {
- /** Only output a summary message under debug if we aren't echoing each file. */
- if (opt.debug && !opt.echoFilenames)
+ /* Only output a summary message under debug if we aren't echoing each file. */
+ if (settings.debug && !(settings.verbose || currentRun.size < 5))
inform("[running phase " + ph.name + " on " + currentRun.size + " compilation units]")
}
@@ -1196,17 +1125,18 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
class ConditionalWarning(what: String, option: Settings#BooleanSetting) {
val warnings = mutable.LinkedHashMap[Position, String]()
def warn(pos: Position, msg: String) =
- if (option.value) reporter.warning(pos, msg)
+ if (option) reporter.warning(pos, msg)
else if (!(warnings contains pos)) warnings += ((pos, msg))
def summarize() =
- if (option.isDefault && warnings.nonEmpty)
- reporter.warning(NoPosition, "there were %d %s warning(s); re-run with %s for details".format(warnings.size, what, option.name))
+ if (warnings.nonEmpty && (option.isDefault || settings.fatalWarnings))
+ warning("there were %d %s warning(s); re-run with %s for details".format(warnings.size, what, option.name))
}
- def newUnitParser(code: String) = new syntaxAnalyzer.UnitParser(newCompilationUnit(code))
- def newUnitScanner(code: String) = new syntaxAnalyzer.UnitScanner(newCompilationUnit(code))
- def newCompilationUnit(code: String) = new CompilationUnit(newSourceFile(code))
- def newSourceFile(code: String) = new BatchSourceFile("<console>", code)
+ def newCompilationUnit(code: String) = new CompilationUnit(newSourceFile(code))
+ def newSourceFile(code: String) = new BatchSourceFile("<console>", code)
+ def newUnitScanner(unit: CompilationUnit): UnitScanner = new UnitScanner(unit)
+ def newUnitParser(unit: CompilationUnit): UnitParser = new UnitParser(unit)
+ def newUnitParser(code: String): UnitParser = newUnitParser(newCompilationUnit(code))
/** A Run is a single execution of the compiler on a set of units
*/
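ConditionalWarning either reports a warning immediately (when its option, e.g. -deprecation, is enabled) or collects one message per position, and summarize() now also escalates the summary when -Xfatal-warnings is set. The accumulate-or-report pattern as a sketch (ConditionalWarningDemo, Int positions, and the printed output are illustrative):

import scala.collection.mutable

// Sketch of the accumulate-or-report pattern behind ConditionalWarning:
// warn right away when the option is on, otherwise remember one message
// per position and emit a single summary at the end.
class ConditionalWarningDemo(what: String, optionOn: Boolean, optionName: String) {
  val warnings = mutable.LinkedHashMap[Int, String]() // position -> message

  def warn(pos: Int, msg: String): Unit =
    if (optionOn) println(s"warning: $msg")
    else if (!(warnings contains pos)) warnings += ((pos, msg))

  def summarize(): Unit =
    if (warnings.nonEmpty)
      println(s"there were ${warnings.size} $what warning(s); re-run with $optionName for details")
}

object ConditionalWarningDemoMain {
  def main(args: Array[String]): Unit = {
    val dep = new ConditionalWarningDemo("deprecation", optionOn = false, "-deprecation")
    dep.warn(10, "method x is deprecated")
    dep.warn(10, "method x is deprecated") // same position, not double-counted
    dep.warn(42, "method y is deprecated")
    dep.summarize() // there were 2 deprecation warning(s); ...
  }
}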
@@ -1227,9 +1157,8 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
val inlinerWarnings = new ConditionalWarning("inliner", settings.YinlinerWarnings)
val allConditionalWarnings = List(deprecationWarnings0, uncheckedWarnings0, featureWarnings, inlinerWarnings)
- // for sbt's benefit
- def uncheckedWarnings: List[(Position, String)] = uncheckedWarnings0.warnings.toList
- def deprecationWarnings: List[(Position, String)] = deprecationWarnings0.warnings.toList
+ def uncheckedWarnings: List[(Position, String)] = uncheckedWarnings0.warnings.toList // used in sbt
+ def deprecationWarnings: List[(Position, String)] = deprecationWarnings0.warnings.toList // used in sbt
var reportedFeature = Set[Symbol]()
@@ -1239,9 +1168,6 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
/** Have we already supplemented the error message of a compiler crash? */
private[nsc] final var supplementedError = false
- /** To be initialized from firstPhase. */
- private var terminalPhase: Phase = NoPhase
-
private val unitbuf = new mutable.ListBuffer[CompilationUnit]
val compiledFiles = new mutable.HashSet[String]
@@ -1284,14 +1210,14 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
curRunId += 1
curRun = this
- /** Set phase to a newly created syntaxAnalyzer and call definitions.init. */
+ /* Set phase to a newly created syntaxAnalyzer and call definitions.init. */
val parserPhase: Phase = syntaxAnalyzer.newPhase(NoPhase)
phase = parserPhase
definitions.init()
// Flush the cache in the terminal phase: the chain could have been built
// before without being used. (This happens in the interpreter.)
- terminal.reset
+ terminal.reset()
// Each subcomponent supplies a phase, which are chained together.
// If -Ystop:phase is given, neither that phase nor any beyond it is added.
@@ -1318,7 +1244,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
def resetProjectClasses(root: Symbol): Unit = try {
def unlink(sym: Symbol) =
if (sym != NoSymbol) root.info.decls.unlink(sym)
- if (settings.verbose.value) inform("[reset] recursing in "+root)
+ if (settings.verbose) inform("[reset] recursing in "+root)
val toReload = mutable.Set[String]()
for (sym <- root.info.decls) {
if (sym.isInitialized && clearOnNextRun(sym))
@@ -1338,7 +1264,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
for (fullname <- toReload)
classPath.findClass(fullname) match {
case Some(classRep) =>
- if (settings.verbose.value) inform("[reset] reinit "+fullname)
+ if (settings.verbose) inform("[reset] reinit "+fullname)
loaders.initializeFromClassPath(root, classRep)
case _ =>
}
@@ -1347,8 +1273,8 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
// this handler should not be necessary, but it seems that `fsc`
// eats exceptions if they appear here. Need to find out the cause for
// this and fix it.
- inform("[reset] exception happened: "+ex);
- ex.printStackTrace();
+ inform("[reset] exception happened: "+ex)
+ ex.printStackTrace()
throw ex
}
@@ -1374,14 +1300,14 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
def advancePhase() {
unitc = 0
phasec += 1
- refreshProgress
+ refreshProgress()
}
/** take note that a phase on a unit is completed
* (for progress reporting)
*/
def advanceUnit() {
unitc += 1
- refreshProgress
+ refreshProgress()
}
def cancel() { reporter.cancelled = true }
@@ -1402,7 +1328,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
val namerPhase = phaseNamed("namer")
// val packageobjectsPhase = phaseNamed("packageobjects")
val typerPhase = phaseNamed("typer")
- val inlineclassesPhase = phaseNamed("inlineclasses")
+ // val inlineclassesPhase = phaseNamed("inlineclasses")
// val superaccessorsPhase = phaseNamed("superaccessors")
val picklerPhase = phaseNamed("pickler")
val refchecksPhase = phaseNamed("refchecks")
@@ -1415,22 +1341,19 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
val erasurePhase = phaseNamed("erasure")
val posterasurePhase = phaseNamed("posterasure")
// val lazyvalsPhase = phaseNamed("lazyvals")
- val lambdaliftPhase = phaseNamed("lambdalift")
+ // val lambdaliftPhase = phaseNamed("lambdalift")
// val constructorsPhase = phaseNamed("constructors")
val flattenPhase = phaseNamed("flatten")
val mixinPhase = phaseNamed("mixin")
val cleanupPhase = phaseNamed("cleanup")
val icodePhase = phaseNamed("icode")
val inlinerPhase = phaseNamed("inliner")
- val inlineExceptionHandlersPhase = phaseNamed("inlineExceptionHandlers")
+ val inlineExceptionHandlersPhase = phaseNamed("inlinehandlers")
val closelimPhase = phaseNamed("closelim")
val dcePhase = phaseNamed("dce")
- val jvmPhase = phaseNamed("jvm")
- // val msilPhase = phaseNamed("msil")
+ // val jvmPhase = phaseNamed("jvm")
def runIsAt(ph: Phase) = globalPhase.id == ph.id
- def runIsPast(ph: Phase) = globalPhase.id > ph.id
- // def runIsAtBytecodeGen = (runIsAt(jvmPhase) || runIsAt(msilPhase))
def runIsAtOptimiz = {
runIsAt(inlinerPhase) || // listing phases in full for robustness when -Ystop-after has been given.
runIsAt(inlineExceptionHandlersPhase) ||
@@ -1473,7 +1396,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
def compiles(sym: Symbol): Boolean =
if (sym == NoSymbol) false
else if (symSource.isDefinedAt(sym)) true
- else if (!sym.owner.isPackageClass) compiles(sym.enclosingTopLevelClass)
+ else if (!sym.isTopLevel) compiles(sym.enclosingTopLevelClass)
else if (sym.isModuleClass) compiles(sym.sourceModule)
else false
@@ -1494,13 +1417,29 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
if (canCheck) {
phase = globalPhase
- if (globalPhase.id >= icodePhase.id) icodeChecker.checkICodes
- else treeChecker.checkTrees
+ if (globalPhase.id >= icodePhase.id) icodeChecker.checkICodes()
+ else treeChecker.checkTrees()
}
}
- private def showMembers() =
- opt.showNames foreach (x => showDef(x, opt.declsOnly, globalPhase))
+ private def showMembers() = {
+ // Allows for syntax like scalac -Xshow-class Random@erasure,typer
+ def splitClassAndPhase(str: String, term: Boolean): Name = {
+ def mkName(s: String) = if (term) newTermName(s) else newTypeName(s)
+ (str indexOf '@') match {
+ case -1 => mkName(str)
+ case idx =>
+ val phasePart = str drop (idx + 1)
+ settings.Yshow.tryToSetColon(phasePart split ',' toList)
+ mkName(str take idx)
+ }
+ }
+ if (settings.Xshowcls.isSetByUser)
+ showDef(splitClassAndPhase(settings.Xshowcls.value, term = false), declsOnly = false, globalPhase)
+
+ if (settings.Xshowobj.isSetByUser)
+ showDef(splitClassAndPhase(settings.Xshowobj.value, term = true), declsOnly = false, globalPhase)
+ }
// Similarly, this will only be created under -Yshow-syms.
object trackerFactory extends SymbolTrackers {
@@ -1508,7 +1447,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
lazy val trackers = currentRun.units.toList map (x => SymbolTracker(x))
def snapshot() = {
inform("\n[[symbol layout at end of " + phase + "]]")
- afterPhase(phase) {
+ exitingPhase(phase) {
trackers foreach { t =>
t.snapshot()
inform(t.show("Heading from " + phase.prev.name + " to " + phase.name))
@@ -1518,6 +1457,9 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
}
def reportCompileErrors() {
+ if (!reporter.hasErrors && reporter.hasWarnings && settings.fatalWarnings)
+ globalError("No warnings can be incurred under -Xfatal-warnings.")
+
if (reporter.hasErrors) {
for ((sym, file) <- symSource.iterator) {
sym.reset(new loaders.SourcefileLoader(file))
@@ -1526,7 +1468,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
}
}
else {
- allConditionalWarnings foreach (_.summarize)
+ allConditionalWarnings foreach (_.summarize())
if (seenMacroExpansionsFallingBack)
warning("some macros could not be expanded and code fell back to overridden methods;"+
@@ -1536,9 +1478,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
}
/** Compile a list of source files */
- def compileSources(_sources: List[SourceFile]) {
- val depSources = dependencyAnalysis calculateFiles _sources.distinct
- val sources = coreClassesFirst(depSources)
+ def compileSources(sources: List[SourceFile]) {
// there is a problem already, e.g. a plugin was passed a bad option
if (reporter.hasErrors)
return
@@ -1556,12 +1496,11 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
def compileUnits(units: List[CompilationUnit], fromPhase: Phase) {
try compileUnitsInternal(units, fromPhase)
catch { case ex: Throwable =>
- val shown = if (settings.verbose.value) {
- val pw = new java.io.PrintWriter(new java.io.StringWriter)
- ex.printStackTrace(pw)
- pw.toString
- } else ex.getClass.getName
- // ex.printStackTrace(Console.out) // DEBUG for fsc, note that error stacktraces do not print in fsc
+ val shown = if (settings.verbose)
+ stackTraceString(ex)
+ else
+ stackTraceHeadString(ex) // note that error stacktraces do not print in fsc
+
globalError(supplementErrorMessage("uncaught exception during compilation: " + shown))
throw ex
}
@@ -1580,67 +1519,66 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
while (globalPhase.hasNext && !reporter.hasErrors) {
val startTime = currentTime
phase = globalPhase
- globalPhase.run
+ globalPhase.run()
// progress update
informTime(globalPhase.description, startTime)
-
- if (opt.writeICodeAtICode || (opt.printPhase && runIsAtOptimiz)) {
+ val shouldWriteIcode = (
+ (settings.writeICode.isSetByUser && (settings.writeICode containsPhase globalPhase))
+ || (!settings.Xprint.doAllPhases && (settings.Xprint containsPhase globalPhase) && runIsAtOptimiz)
+ )
+ if (shouldWriteIcode) {
// Write *.icode files when -Xprint-icode or -Xprint:<some-optimiz-phase> was given.
writeICode()
- } else if (opt.printPhase || opt.printLate && runIsAt(cleanupPhase)) {
+ } else if ((settings.Xprint containsPhase globalPhase) || settings.printLate && runIsAt(cleanupPhase)) {
// print trees
- if (opt.showTrees) nodePrinters.printAll()
+ if (settings.Xshowtrees || settings.XshowtreesCompact || settings.XshowtreesStringified) nodePrinters.printAll()
else printAllUnits()
}
// print the symbols presently attached to AST nodes
- if (opt.showSymbols)
+ if (settings.Yshowsyms)
trackerFactory.snapshot()
// print members
- if (opt.showPhase)
+ if (settings.Yshow containsPhase globalPhase)
showMembers()
// browse trees with swing tree viewer
- if (opt.browsePhase)
+ if (settings.browse containsPhase globalPhase)
treeBrowser browse (phase.name, units)
// move the pointer
globalPhase = globalPhase.next
// run tree/icode checkers
- if (opt.checkPhase)
+ if (settings.check containsPhase globalPhase.prev)
runCheckers()
// output collected statistics
- if (opt.printStats)
+ if (settings.Ystatistics)
statistics.print(phase)
- advancePhase
+ advancePhase()
}
if (traceSymbolActivity)
units map (_.body) foreach (traceSymbols recordSymbolsInTree _)
// In case no phase was specified for -Xshow-class/object, show it now for sure.
- if (opt.noShow)
+ if (settings.Yshow.isDefault)
showMembers()
reportCompileErrors()
symSource.keys foreach (x => resetPackageClass(x.owner))
informTime("total", startTime)
- // record dependency data
- if (!dependencyAnalysis.off)
- dependencyAnalysis.saveDependencyAnalysis()
-
// Clear any sets or maps created via perRunCaches.
perRunCaches.clearAll()
// Reset project
if (!stopPhase("namer")) {
- atPhase(namerPhase) {
+ enteringPhase(namerPhase) {
resetProjectClasses(RootClass)
}
}
@@ -1656,7 +1594,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
def compile(filenames: List[String]) {
try {
val sources: List[SourceFile] =
- if (isScriptRun && filenames.size > 1) returning(Nil)(_ => globalError("can only compile one script at a time"))
+ if (settings.script.isSetByUser && filenames.size > 1) returning(Nil)(_ => globalError("can only compile one script at a time"))
else filenames map getSourceFile
compileSources(sources)
@@ -1680,65 +1618,44 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
if (firstPhase ne null) { // we might get here during initialization, if a source is newer than the binary
val maxId = math.max(globalPhase.id, typerPhase.id)
firstPhase.iterator takeWhile (_.id < maxId) foreach (ph =>
- atPhase(ph)(ph.asInstanceOf[GlobalPhase] applyPhase unit))
- refreshProgress
+ enteringPhase(ph)(ph.asInstanceOf[GlobalPhase] applyPhase unit))
+ refreshProgress()
}
}
+ // TODO: provide a way to specify a pretty name for debugging purposes
+ private def randomFileName() = (
+ "compileLateSynthetic-" + randomUUID().toString.replace("-", "") + ".scala"
+ )
+
+ def compileLate(code: PackageDef) {
+ // compatibility with SBT
+ // on the one hand, we need to specify some jfile here, otherwise sbt crashes with an NPE (SI-6870)
+ // on the other hand, we can't specify the obvious enclosingUnit, because then sbt somehow fails to run tests using type macros
+ // so specify a file that is guaranteed not to exist, in an existing directory (so that we don't run into permission problems)
+ val syntheticFileName = randomFileName()
+ val fakeJfile = new java.io.File(syntheticFileName)
+ val virtualFile = new VirtualFile(syntheticFileName) { override def file = fakeJfile }
+ val sourceFile = new BatchSourceFile(virtualFile, code.toString)
+ val unit = new CompilationUnit(sourceFile)
+ unit.body = code
+ compileLate(unit)
+ }
+
/** Reset package class to state at typer (not sure what this
* is needed for?)
*/
private def resetPackageClass(pclazz: Symbol) {
- atPhase(firstPhase) {
- pclazz.setInfo(atPhase(typerPhase)(pclazz.info))
+ enteringPhase(firstPhase) {
+ pclazz.setInfo(enteringPhase(typerPhase)(pclazz.info))
}
if (!pclazz.isRoot) resetPackageClass(pclazz.owner)
}
-
- /**
- * Re-orders the source files to
- * 1. This Space Intentionally Left Blank
- * 2. LowPriorityImplicits / EmbeddedControls (i.e. parents of Predef)
- * 3. the rest
- *
- * 1 is to avoid cyclic reference errors.
- * 2 is due to the following. When completing "Predef" (*), typedIdent is called
- * for its parents (e.g. "LowPriorityImplicits"). typedIdent checks whether
- * the symbol reallyExists, which tests if the type of the symbol after running
- * its completer is != NoType.
- * If the "namer" phase has not yet run for "LowPriorityImplicits", the symbol
- * has a SourcefileLoader as type. Calling "doComplete" on it does nothing at
- * all, because the source file is part of the files to be compiled anyway.
- * So the "reallyExists" test will return "false".
- * Only after the namer, the symbol has a lazy type which actually computes
- * the info, and "reallyExists" behaves as expected.
- * So we need to make sure that the "namer" phase is run on predef's parents
- * before running it on predef.
- *
- * (*) Predef is completed early when calling "mkAttributedRef" during the
- * addition of "import Predef._" to sourcefiles. So this situation can't
- * happen for user classes.
- *
- */
- private def coreClassesFirst(files: List[SourceFile]) = {
- val goLast = 4
- def rank(f: SourceFile) = {
- if (f.file.container.name != "scala") goLast
- else f.file.name match {
- case "LowPriorityImplicits.scala" => 2
- case "StandardEmbeddings.scala" => 2
- case "EmbeddedControls.scala" => 2
- case "Predef.scala" => 3 /* Predef.scala before Any.scala, etc. */
- case _ => goLast
- }
- }
- files sortBy rank
- }
} // class Run
def printAllUnits() {
print("[[syntax trees at end of %25s]]".format(phase))
- afterPhase(phase)(currentRun.units foreach { unit =>
+ exitingPhase(phase)(currentRun.units foreach { unit =>
nodePrinters showUnit unit
})
}
@@ -1747,7 +1664,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
*/
def showDef(fullName: Name, declsOnly: Boolean, ph: Phase) = {
val boringOwners = Set[Symbol](definitions.AnyClass, definitions.AnyRefClass, definitions.ObjectClass)
- def phased[T](body: => T): T = afterPhase(ph)(body)
+ def phased[T](body: => T): T = exitingPhase(ph)(body)
def boringMember(sym: Symbol) = boringOwners(sym.owner)
def symString(sym: Symbol) = if (sym.isTerm) sym.defString else sym.toString
@@ -1793,7 +1710,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
val printer = new icodes.TextPrinter(null, icodes.linearizer)
icodes.classes.values.foreach((cls) => {
val suffix = if (cls.symbol.hasModuleFlag) "$.icode" else ".icode"
- var file = getFile(cls.symbol, suffix)
+ val file = getFile(cls.symbol, suffix)
// if (file.exists())
// file = new File(file.getParentFile(), file.getName() + "1")
try {
@@ -1803,25 +1720,12 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
informProgress("wrote " + file)
} catch {
case ex: IOException =>
- if (opt.debug) ex.printStackTrace()
+ if (settings.debug) ex.printStackTrace()
globalError("could not write file " + file)
}
})
}
- // In order to not outright break code which overrides onlyPresentation (like sbt 0.7.5.RC0)
- // I restored and deprecated it. That would be enough to avoid the compilation
- // failure, but the override wouldn't accomplish anything. So now forInteractive
- // and forScaladoc default to onlyPresentation, which is the same as defaulting
- // to false except in old code. The downside is that this leaves us calling a
- // deprecated method: but I see no simple way out, so I leave it for now.
- def forJVM = opt.jvm
- override def forMSIL = opt.msil
- def forInteractive = onlyPresentation
- def forScaladoc = onlyPresentation
def createJavadoc = false
-
- @deprecated("Use forInteractive or forScaladoc, depending on what you're after", "2.9.0")
- def onlyPresentation = false
}
object Global {
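The new showMembers above accepts arguments of the form `Random@erasure,typer` for -Xshow-class/-Xshow-object: everything before '@' names the symbol, everything after it is a comma-separated phase list. For reference, a minimal standalone sketch of that splitting (illustrative object and method names, not compiler API):

object ShowClassArg {
  // Split "Name@phase1,phase2" into the name and its phase list; no '@' means no phases.
  def split(arg: String): (String, List[String]) =
    arg.indexOf('@') match {
      case -1  => (arg, Nil)
      case idx => (arg.take(idx), arg.drop(idx + 1).split(',').toList.filter(_.nonEmpty))
    }

  def main(args: Array[String]): Unit = {
    assert(split("Random@erasure,typer") == ("Random", List("erasure", "typer")))
    assert(split("Random") == ("Random", Nil))
    println("ok")
  }
}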
diff --git a/src/compiler/scala/tools/nsc/Main.scala b/src/compiler/scala/tools/nsc/Main.scala
index 7d112dfb3e..a66ee572a9 100644
--- a/src/compiler/scala/tools/nsc/Main.scala
+++ b/src/compiler/scala/tools/nsc/Main.scala
@@ -2,80 +2,26 @@
* Copyright 2005-2013 LAMP/EPFL
* @author Martin Odersky
*/
+package scala.tools
+package nsc
-package scala.tools.nsc
-
-import java.io.File
-import File.pathSeparator
-
-import scala.tools.nsc.interactive.{ RefinedBuildManager, SimpleBuildManager }
-import scala.tools.nsc.io.AbstractFile
-import scala.tools.nsc.reporters.{Reporter, ConsoleReporter}
-import scala.reflect.internal.util.{ BatchSourceFile, FakePos } //{Position}
-import Properties.msilLibPath
+import scala.language.postfixOps
/** The main class for NSC, a compiler for the programming
- * language Scala.
+ * language Scala.
*/
-object Main extends Driver with EvalLoop {
-
- def resident(compiler: Global) {
- loop { line =>
- val args = line.split(' ').toList
- val command = new CompilerCommand(args, new Settings(scalacError))
- compiler.reporter.reset()
- new compiler.Run() compile command.files
- }
+class MainClass extends Driver with EvalLoop {
+ def resident(compiler: Global): Unit = loop { line =>
+ val command = new CompilerCommand(line split "\\s+" toList, new Settings(scalacError))
+ compiler.reporter.reset()
+ new compiler.Run() compile command.files
}
- override def processSettingsHook(): Boolean =
- if (settings.Yidedebug.value) {
- settings.Xprintpos.value = true
- settings.Yrangepos.value = true
- val compiler = new interactive.Global(settings, reporter)
- import compiler.{ reporter => _, _ }
-
- val sfs = command.files map getSourceFile
- val reloaded = new interactive.Response[Unit]
- askReload(sfs, reloaded)
-
- reloaded.get.right.toOption match {
- case Some(ex) => reporter.cancelled = true // Causes exit code to be non-0
- case None => reporter.reset() // Causes other compiler errors to be ignored
- }
- askShutdown
- false
- }
- else if (settings.Ybuilderdebug.value != "none") {
- def fileSet(files : List[String]) = Set.empty ++ (files map AbstractFile.getFile)
-
- val buildManager = settings.Ybuilderdebug.value match {
- case "simple" => new SimpleBuildManager(settings)
- case _ => new RefinedBuildManager(settings)
- }
- buildManager.addSourceFiles(fileSet(command.files))
-
- // enter resident mode
- loop { line =>
- val args = line.split(' ').toList
- val command = new CompilerCommand(args.toList, settings)
- buildManager.update(fileSet(command.files), Set.empty)
- }
- false
- }
- else {
- if (settings.target.value == "msil")
- msilLibPath foreach (x => settings.assemrefs.value += (pathSeparator + x))
- true
- }
-
- override def newCompiler(): Global =
- if (settings.Yrangepos.value) new Global(settings, reporter) with interactive.RangePositions
- else Global(settings, reporter)
-
+ override def newCompiler(): Global = Global(settings, reporter)
override def doCompile(compiler: Global) {
- if (settings.resident.value)
- resident(compiler)
+ if (settings.resident) resident(compiler)
else super.doCompile(compiler)
}
}
+
+object Main extends MainClass { }
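MainClass.resident above re-parses each input line as a compiler command by splitting on whitespace. A self-contained sketch of that resident-loop shape, with placeholder names and a println standing in for the actual compile call:

import scala.io.StdIn

object ResidentLoop {
  // Keep prompting for command lines; each line is split on whitespace and
  // handed to `action` as an argument list, mirroring the resident loop above.
  def loop(prompt: String)(action: List[String] => Unit): Unit = {
    var line = StdIn.readLine(prompt)
    while (line != null && line.trim.nonEmpty) {
      action(line.trim.split("\\s+").toList)
      line = StdIn.readLine(prompt)
    }
  }

  def main(args: Array[String]): Unit =
    loop("nsc> ")(argList => println(s"would compile: ${argList.mkString(" ")}"))
}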
diff --git a/src/compiler/scala/tools/nsc/MainBench.scala b/src/compiler/scala/tools/nsc/MainBench.scala
index f18ff19d7d..03190a63f3 100644
--- a/src/compiler/scala/tools/nsc/MainBench.scala
+++ b/src/compiler/scala/tools/nsc/MainBench.scala
@@ -5,28 +5,20 @@
package scala.tools.nsc
-import java.io.File
-import File.pathSeparator
-
-import scala.tools.nsc.interactive.{ RefinedBuildManager, SimpleBuildManager }
-import scala.tools.nsc.io.AbstractFile
-import scala.tools.nsc.reporters.{Reporter, ConsoleReporter}
-import scala.reflect.internal.util.{ BatchSourceFile, FakePos } //{Position}
-import Properties.{ versionString, copyrightString, residentPromptString, msilLibPath }
import scala.reflect.internal.util.Statistics
/** The main class for NSC, a compiler for the programming
* language Scala.
*/
object MainBench extends Driver with EvalLoop {
-
+
lazy val theCompiler = Global(settings, reporter)
-
+
override def newCompiler() = theCompiler
-
+
val NIter = 50
val NBest = 10
-
+
override def main(args: Array[String]) = {
val times = new Array[Long](NIter)
var start = System.nanoTime()
diff --git a/src/compiler/scala/tools/nsc/MainTokenMetric.scala b/src/compiler/scala/tools/nsc/MainTokenMetric.scala
index 50cd51d486..84eb688b63 100644
--- a/src/compiler/scala/tools/nsc/MainTokenMetric.scala
+++ b/src/compiler/scala/tools/nsc/MainTokenMetric.scala
@@ -3,7 +3,8 @@
* @author Martin Odersky
*/
-package scala.tools.nsc
+package scala
+package tools.nsc
import scala.tools.nsc.reporters.ConsoleReporter
@@ -21,11 +22,11 @@ object MainTokenMetric {
var totale = 0
for (source <- fnames) {
val s = new UnitScanner(new CompilationUnit(compiler.getSourceFile(source)))
- s.nextToken
+ s.nextToken()
var i = 0
while (s.token != EOF) {
i += 1
- s.nextToken
+ s.nextToken()
}
Console.println(i.toString + " " + source.toString())
totale += i
@@ -42,9 +43,9 @@ object MainTokenMetric {
tokenMetric(compiler, command.files)
} catch {
case ex @ FatalError(msg) =>
- if (command.settings.debug.value)
- ex.printStackTrace();
- reporter.error(null, "fatal error: " + msg)
+ if (command.settings.debug)
+ ex.printStackTrace()
+ reporter.error(null, "fatal error: " + msg)
}
}
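The token metric above simply advances the scanner until EOF and counts iterations. A toy, self-contained version of that counting loop, using a whitespace splitter as a stand-in for UnitScanner:

object TokenCount {
  // Stand-in scanner: `token` is null once the input is exhausted (the EOF case above).
  final class SimpleScanner(input: String) {
    private val it = input.split("\\s+").iterator.filter(_.nonEmpty)
    var token: String = _
    def nextToken(): Unit = token = if (it.hasNext) it.next() else null
  }

  def count(source: String): Int = {
    val s = new SimpleScanner(source)
    s.nextToken()
    var i = 0
    while (s.token != null) { i += 1; s.nextToken() }
    i
  }

  def main(args: Array[String]): Unit =
    println(count("object A { def f = 1 }")) // prints 8
}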
diff --git a/src/compiler/scala/tools/nsc/ObjectRunner.scala b/src/compiler/scala/tools/nsc/ObjectRunner.scala
index f5123513c4..95264aeda6 100644
--- a/src/compiler/scala/tools/nsc/ObjectRunner.scala
+++ b/src/compiler/scala/tools/nsc/ObjectRunner.scala
@@ -8,15 +8,9 @@ package scala.tools.nsc
import java.net.URL
import util.ScalaClassLoader
-import java.lang.reflect.InvocationTargetException
import util.Exceptional.unwrap
trait CommonRunner {
- /** Check whether a class with the specified name
- * exists on the specified class path. */
- def classExists(urls: List[URL], objectName: String): Boolean =
- ScalaClassLoader.classExists(urls, objectName)
-
/** Run a given object, specified by name, using a
* specified classpath and argument list.
*
diff --git a/src/compiler/scala/tools/nsc/OfflineCompilerCommand.scala b/src/compiler/scala/tools/nsc/OfflineCompilerCommand.scala
index caf6ad14cf..2ce2fb3eaa 100644
--- a/src/compiler/scala/tools/nsc/OfflineCompilerCommand.scala
+++ b/src/compiler/scala/tools/nsc/OfflineCompilerCommand.scala
@@ -26,8 +26,8 @@ class OfflineCompilerCommand(arguments: List[String], settings: FscSettings) ext
// instead of whatever it's supposed to be doing.
val baseDirectory = {
val pwd = System.getenv("PWD")
- if (pwd != null && !isWin) Directory(pwd)
- else Directory.Current getOrElse Directory("/")
+ if (pwd == null || isWin) Directory.Current getOrElse Directory("/")
+ else Directory(pwd)
}
currentDir.value = baseDirectory.path
}
@@ -39,7 +39,7 @@ class OfflineCompilerCommand(arguments: List[String], settings: FscSettings) ext
override def cmdName = "fsc"
override def usageMsg = (
- createUsageMsg("where possible fsc", false, x => x.isStandard && settings.isFscSpecific(x.name)) +
+ createUsageMsg("where possible fsc", shouldExplain = false, x => x.isStandard && settings.isFscSpecific(x.name)) +
"\n\nStandard scalac options also available:" +
createUsageMsg(x => x.isStandard && !settings.isFscSpecific(x.name))
)
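The rewritten baseDirectory above prefers the shell's $PWD and falls back to the current working directory when PWD is unset or the platform is Windows. A small sketch of that selection using only the standard library (path handling simplified relative to scala.tools.nsc.io.Directory):

import java.nio.file.{ Path, Paths }

object BaseDir {
  def baseDirectory: Path = {
    val pwd   = System.getenv("PWD")
    val isWin = System.getProperty("os.name", "").toLowerCase.contains("windows")
    // Mirror the order above: fall back first, otherwise trust $PWD.
    if (pwd == null || isWin) Paths.get("").toAbsolutePath
    else Paths.get(pwd)
  }

  def main(args: Array[String]): Unit = println(baseDirectory)
}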
diff --git a/src/compiler/scala/tools/nsc/PhaseAssembly.scala b/src/compiler/scala/tools/nsc/PhaseAssembly.scala
index cff3590b3f..ae71eb7255 100644
--- a/src/compiler/scala/tools/nsc/PhaseAssembly.scala
+++ b/src/compiler/scala/tools/nsc/PhaseAssembly.scala
@@ -55,7 +55,7 @@ trait PhaseAssembly {
* node object does not exist, then create it.
*/
def getNodeByPhase(phs: SubComponent): Node = {
- var node: Node = getNodeByPhase(phs.phaseName)
+ val node: Node = getNodeByPhase(phs.phaseName)
node.phaseobj match {
case None =>
node.phaseobj = Some(List[SubComponent](phs))
@@ -75,7 +75,7 @@ trait PhaseAssembly {
* list of the nodes
*/
def softConnectNodes(frm: Node, to: Node) {
- var e = new Edge(frm, to, false)
+ val e = new Edge(frm, to, false)
this.edges += e
frm.after += e
@@ -87,7 +87,7 @@ trait PhaseAssembly {
* list of the nodes
*/
def hardConnectNodes(frm: Node, to: Node) {
- var e = new Edge(frm, to, true)
+ val e = new Edge(frm, to, true)
this.edges += e
frm.after += e
@@ -164,8 +164,8 @@ trait PhaseAssembly {
} else {
- var promote = hl.to.before.filter(e => (!e.hard))
- hl.to.before.clear
+ val promote = hl.to.before.filter(e => (!e.hard))
+ hl.to.before.clear()
sanity foreach (edge => hl.to.before += edge)
for (edge <- promote) {
rerun = true
@@ -182,7 +182,7 @@ trait PhaseAssembly {
/** Remove all nodes in the given graph that have no phase object.
* Make sure to clean up all edges when removing the node object
- * <code>Inform</code> with warnings, if an external phase has a
+ * `Inform` with warnings, if an external phase has a
* dependency on something that is dropped.
*/
def removeDanglingNodes() {
@@ -245,7 +245,7 @@ trait PhaseAssembly {
for (phs <- phsSet) {
- var fromnode = graph.getNodeByPhase(phs)
+ val fromnode = graph.getNodeByPhase(phs)
phs.runsRightAfter match {
case None =>
@@ -306,7 +306,7 @@ trait PhaseAssembly {
sbuf.append("\"" + node.allPhaseNames + "(" + node.level + ")" + "\" [color=\"#0000ff\"]\n")
}
sbuf.append("}\n")
- var out = new BufferedWriter(new FileWriter(filename))
+ val out = new BufferedWriter(new FileWriter(filename))
out.write(sbuf.toString)
out.flush()
out.close()
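The connect methods above record one Edge per dependency and register it on both endpoints, with `hard` distinguishing runsRightAfter from plain runsAfter constraints. A schematic, self-contained model of that bookkeeping (names illustrative, not the real dependency graph):

import scala.collection.mutable

object PhaseGraphSketch {
  final class Node(val name: String) {
    val before = mutable.Set.empty[Edge] // edges pointing at this node
    val after  = mutable.Set.empty[Edge] // edges leaving this node
  }
  final class Edge(val frm: Node, val to: Node, val hard: Boolean)

  // One edge per constraint, indexed from both ends, as in the connect methods above.
  def connect(frm: Node, to: Node, hard: Boolean): Edge = {
    val e = new Edge(frm, to, hard)
    frm.after += e
    to.before += e
    e
  }

  def main(args: Array[String]): Unit = {
    val a = new Node("erasure")
    val b = new Node("posterasure")
    connect(b, a, hard = true)
    println(b.after.map(e => s"${e.frm.name} -> ${e.to.name} (hard=${e.hard})").mkString)
  }
}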
diff --git a/src/compiler/scala/tools/nsc/Phases.scala b/src/compiler/scala/tools/nsc/Phases.scala
index 0901ade2d7..e379afce9b 100644
--- a/src/compiler/scala/tools/nsc/Phases.scala
+++ b/src/compiler/scala/tools/nsc/Phases.scala
@@ -5,7 +5,6 @@
package scala.tools.nsc
-import symtab.Flags
import scala.reflect.internal.util.TableDef
import scala.language.postfixOps
@@ -22,7 +21,6 @@ object Phases {
}
val values = new Array[Cell](MaxPhases + 1)
def results = values filterNot (_ == null)
- def apply(ph: Phase): T = values(ph.id).value
def update(ph: Phase, value: T): Unit = values(ph.id) = Cell(ph, value)
}
/** A class for recording the elapsed time of each phase in the
@@ -40,7 +38,6 @@ object Phases {
>> ("ms" -> (_.value)) >+ " "
<< ("share" -> (_.value.toDouble * 100 / total formatted "%.2f"))
}
- def formatted = "" + table()
}
}
diff --git a/src/compiler/scala/tools/nsc/Properties.scala b/src/compiler/scala/tools/nsc/Properties.scala
index 55fd196716..feb4ded2f2 100644
--- a/src/compiler/scala/tools/nsc/Properties.scala
+++ b/src/compiler/scala/tools/nsc/Properties.scala
@@ -16,10 +16,6 @@ object Properties extends scala.util.PropertiesTrait {
def residentPromptString = scalaPropOrElse("resident.prompt", "\nnsc> ")
def shellPromptString = scalaPropOrElse("shell.prompt", "\nscala> ")
- // settings based on system properties
- def msilLibPath = propOrNone("msil.libpath")
-
// derived values
def isEmacsShell = propOrEmpty("env.emacs") != ""
- def fileEndings = fileEndingString.split("""\|""").toList
}
diff --git a/src/compiler/scala/tools/nsc/ScriptRunner.scala b/src/compiler/scala/tools/nsc/ScriptRunner.scala
index 107c4b3df3..d553d71bf5 100644
--- a/src/compiler/scala/tools/nsc/ScriptRunner.scala
+++ b/src/compiler/scala/tools/nsc/ScriptRunner.scala
@@ -3,11 +3,11 @@
* @author Martin Odersky
*/
-package scala.tools.nsc
+package scala
+package tools.nsc
import io.{ Directory, File, Path }
import java.io.IOException
-import java.net.URL
import scala.tools.nsc.reporters.{Reporter,ConsoleReporter}
import util.Exceptional.unwrap
@@ -49,25 +49,12 @@ class ScriptRunner extends HasCompileSocket {
case x => x
}
- def isScript(settings: Settings) = settings.script.value != ""
-
/** Choose a jar filename to hold the compiled version of a script. */
private def jarFileFor(scriptFile: String)= File(
if (scriptFile endsWith ".jar") scriptFile
else scriptFile.stripSuffix(".scala") + ".jar"
)
- /** Read the entire contents of a file as a String. */
- private def contentsOfFile(filename: String) = File(filename).slurp()
-
- /** Split a fully qualified object name into a
- * package and an unqualified object name */
- private def splitObjectName(fullname: String): (Option[String], String) =
- (fullname lastIndexOf '.') match {
- case -1 => (None, fullname)
- case idx => (Some(fullname take idx), fullname drop (idx + 1))
- }
-
/** Compile a script using the fsc compilation daemon.
*/
private def compileWithDaemon(settings: GenericRunnerSettings, scriptFileIn: String) = {
@@ -98,8 +85,8 @@ class ScriptRunner extends HasCompileSocket {
{
def mainClass = scriptMain(settings)
- /** Compiles the script file, and returns the directory with the compiled
- * class files, if the compilation succeeded.
+ /* Compiles the script file, and returns the directory with the compiled
+ * class files, if the compilation succeeded.
*/
def compile: Option[Directory] = {
val compiledPath = Directory makeTemp "scalascript"
@@ -109,9 +96,9 @@ class ScriptRunner extends HasCompileSocket {
settings.outdir.value = compiledPath.path
- if (settings.nc.value) {
- /** Setting settings.script.value informs the compiler this is not a
- * self contained compilation unit.
+ if (settings.nc) {
+ /* Setting settings.script.value informs the compiler this is not a
+ * self contained compilation unit.
*/
settings.script.value = mainClass
val reporter = new ConsoleReporter(settings)
@@ -124,11 +111,11 @@ class ScriptRunner extends HasCompileSocket {
else None
}
- /** The script runner calls sys.exit to communicate a return value, but this must
- * not take place until there are no non-daemon threads running. Tickets #1955, #2006.
+ /* The script runner calls sys.exit to communicate a return value, but this must
+ * not take place until there are no non-daemon threads running. Tickets #1955, #2006.
*/
util.waitingForThreads {
- if (settings.save.value) {
+ if (settings.save) {
val jarFile = jarFileFor(scriptFile)
def jarOK = jarFile.canRead && (jarFile isFresher File(scriptFile))
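jarFileFor above derives the jar name from the script name. A tiny standalone sketch of that rule, kept as plain strings instead of scala.tools.nsc.io.File:

object JarNameSketch {
  // "foo.scala" compiles into "foo.jar"; a name already ending in ".jar" is kept as-is.
  def jarFileFor(scriptFile: String): String =
    if (scriptFile endsWith ".jar") scriptFile
    else scriptFile.stripSuffix(".scala") + ".jar"

  def main(args: Array[String]): Unit = {
    assert(jarFileFor("hello.scala") == "hello.jar")
    assert(jarFileFor("hello.jar") == "hello.jar")
    println("ok")
  }
}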
diff --git a/src/compiler/scala/tools/nsc/SubComponent.scala b/src/compiler/scala/tools/nsc/SubComponent.scala
index a0468a22b9..9b8582ae02 100644
--- a/src/compiler/scala/tools/nsc/SubComponent.scala
+++ b/src/compiler/scala/tools/nsc/SubComponent.scala
@@ -47,8 +47,8 @@ abstract class SubComponent {
private var ownPhaseCache: WeakReference[Phase] = new WeakReference(null)
private var ownPhaseRunId = global.NoRunId
- @inline final def beforeOwnPhase[T](op: => T) = global.beforePhase(ownPhase)(op)
- @inline final def afterOwnPhase[T](op: => T) = global.afterPhase(ownPhase)(op)
+ @inline final def beforeOwnPhase[T](op: => T) = global.enteringPhase(ownPhase)(op)
+ @inline final def afterOwnPhase[T](op: => T) = global.exitingPhase(ownPhase)(op)
/** The phase corresponding to this subcomponent in the current compiler run */
def ownPhase: Phase = {
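This commit switches the phase-travel helpers to enteringPhase/exitingPhase throughout. A minimal sketch of the save/run/restore pattern those helpers follow, using stand-in Phase and currentPhase definitions rather than the compiler's own:

object PhaseTravel {
  final case class Phase(id: Int, name: String) { def next: Phase = Phase(id + 1, s"after-$name") }

  private var currentPhase: Phase = Phase(0, "parser")

  // Run `op` as if the compiler were at phase `ph`, then restore the previous phase.
  def enteringPhase[T](ph: Phase)(op: => T): T = {
    val saved = currentPhase
    currentPhase = ph
    try op finally currentPhase = saved
  }

  // In the real compiler, exiting a phase means entering its successor; mirrored here.
  def exitingPhase[T](ph: Phase)(op: => T): T = enteringPhase(ph.next)(op)

  def main(args: Array[String]): Unit = {
    val typer = Phase(4, "typer")
    println(enteringPhase(typer)(currentPhase.name)) // typer
    println(exitingPhase(typer)(currentPhase.name))  // after-typer
    println(currentPhase.name)                       // parser (restored)
  }
}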
diff --git a/src/compiler/scala/tools/nsc/ast/DocComments.scala b/src/compiler/scala/tools/nsc/ast/DocComments.scala
index 6e39fc9aa1..f2e5c9b1eb 100755
--- a/src/compiler/scala/tools/nsc/ast/DocComments.scala
+++ b/src/compiler/scala/tools/nsc/ast/DocComments.scala
@@ -7,10 +7,7 @@ package scala.tools.nsc
package ast
import symtab._
-import reporters._
-import scala.reflect.internal.util.{Position, NoPosition}
import util.DocStrings._
-import scala.reflect.internal.Chars._
import scala.collection.mutable
/*
@@ -129,8 +126,6 @@ trait DocComments { self: Global =>
getDocComment(sym) map getUseCases getOrElse List()
}
- def useCases(sym: Symbol): List[(Symbol, String, Position)] = useCases(sym, sym.enclClass)
-
/** Returns the javadoc format of doc comment string `s`, including wiki expansion
*/
def toJavaDoc(s: String): String = expandWiki(s)
@@ -275,7 +270,7 @@ trait DocComments { self: Global =>
cleanupSectionText(parent.substring(sectionTextBounds._1, sectionTextBounds._2))
case None =>
reporter.info(sym.pos, "The \"" + getSectionHeader + "\" annotation of the " + sym +
- " comment contains @inheritdoc, but the corresponding section in the parent is not defined.", true)
+ " comment contains @inheritdoc, but the corresponding section in the parent is not defined.", force = true)
"<invalid inheritdoc annotation>"
}
@@ -313,7 +308,6 @@ trait DocComments { self: Global =>
/** Lookup definition of variable.
*
* @param vble The variable for which a definition is searched
- * @param owner The current owner in which variable definitions are searched.
* @param site The class for which doc comments are generated
*/
def lookupVariable(vble: String, site: Symbol): Option[String] = site match {
@@ -330,12 +324,12 @@ trait DocComments { self: Global =>
}
/** Expand variable occurrences in string `str`, until a fix point is reached or
- * a expandLimit is exceeded.
+ * an expandLimit is exceeded.
*
- * @param str The string to be expanded
- * @param sym The symbol for which doc comments are generated
- * @param site The class for which doc comments are generated
- * @return Expanded string
+ * @param initialStr The string to be expanded
+ * @param sym The symbol for which doc comments are generated
+ * @param site The class for which doc comments are generated
+ * @return Expanded string
*/
protected def expandVariables(initialStr: String, sym: Symbol, site: Symbol): String = {
val expandLimit = 10
@@ -372,7 +366,10 @@ trait DocComments { self: Global =>
case vname =>
lookupVariable(vname, site) match {
case Some(replacement) => replaceWith(replacement)
- case None => reporter.warning(sym.pos, "Variable " + vname + " undefined in comment for " + sym + " in " + site)
+ case None =>
+ val pos = docCommentPos(sym)
+ val loc = pos withPoint (pos.start + vstart + 1)
+ reporter.warning(loc, s"Variable $vname undefined in comment for $sym in $site")
}
}
}
@@ -470,7 +467,7 @@ trait DocComments { self: Global =>
//val (classes, pkgs) = site.ownerChain.span(!_.isPackageClass)
//val sites = (classes ::: List(pkgs.head, rootMirror.RootClass)))
//findIn(sites)
- findIn(site.ownerChain ::: List(definitions.EmptyPackage))
+ findIn(site.ownerChain ::: List(rootMirror.EmptyPackage))
}
def getType(str: String, variable: String): Type = {
@@ -507,7 +504,7 @@ trait DocComments { self: Global =>
result
}
- /**
+ /*
* work around the backticks issue suggested by Simon in
* https://groups.google.com/forum/?hl=en&fromgroups#!topic/scala-internals/z7s1CCRCz74
* ideally, we'd have a removeWikiSyntax method in the CommentFactory to completely eliminate the wiki markup
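expandVariables above substitutes doc-comment variables until a fixed point or an expansion limit of 10 is reached, guarding against cyclic definitions. A self-contained sketch of that loop, with a simplified $name syntax and a Map standing in for lookupVariable:

import scala.util.matching.Regex

object ExpandSketch {
  private val Variable = """\$\{?([A-Za-z]+)\}?""".r

  def expandVariables(initialStr: String, defs: Map[String, String], expandLimit: Int = 10): String = {
    var str      = initialStr
    var expanded = 0
    var changed  = true
    while (changed && expanded < expandLimit) {
      // Unknown variables are left in place; replacements are quoted so a literal '$' survives.
      val next = Variable.replaceAllIn(str, m => Regex.quoteReplacement(defs.getOrElse(m.group(1), m.matched)))
      changed = next != str
      str = next
      expanded += 1
    }
    str
  }

  def main(args: Array[String]): Unit =
    println(expandVariables("$greeting, $who", Map("greeting" -> "hello", "who" -> "$greeting world")))
}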
diff --git a/src/compiler/scala/tools/nsc/ast/NodePrinters.scala b/src/compiler/scala/tools/nsc/ast/NodePrinters.scala
index deea4de707..caab299635 100644
--- a/src/compiler/scala/tools/nsc/ast/NodePrinters.scala
+++ b/src/compiler/scala/tools/nsc/ast/NodePrinters.scala
@@ -32,7 +32,7 @@ abstract class NodePrinters {
}
trait DefaultPrintAST extends PrintAST {
- val printPos = settings.Xprintpos.value || settings.Yposdebug.value
+ val printPos = settings.Xprintpos || settings.Yposdebug
def showNameAndPos(tree: NameTree) = showPosition(tree) + showName(tree.name)
def showDefTreeName(tree: DefTree) = showName(tree.name)
@@ -100,9 +100,9 @@ abstract class NodePrinters {
def stringify(tree: Tree): String = {
buf.clear()
- if (settings.XshowtreesStringified.value) buf.append(tree.toString + EOL)
- if (settings.XshowtreesCompact.value) {
- buf.append(showRaw(tree, printIds = settings.uniqid.value, printTypes = settings.printtypes.value))
+ if (settings.XshowtreesStringified) buf.append(tree.toString + EOL)
+ if (settings.XshowtreesCompact) {
+ buf.append(showRaw(tree, printIds = settings.uniqid, printTypes = settings.printtypes))
} else {
level = 0
traverse(tree)
@@ -168,6 +168,13 @@ abstract class NodePrinters {
}
}
+ def typeApplyCommon(tree: Tree, fun: Tree, args: List[Tree]) {
+ printMultiline(tree) {
+ traverse(fun)
+ traverseList("[]", "type argument")(args)
+ }
+ }
+
def treePrefix(tree: Tree) = showPosition(tree) + tree.productPrefix
def printMultiline(tree: Tree)(body: => Unit) {
printMultiline(treePrefix(tree), showAttributes(tree))(body)
@@ -203,9 +210,11 @@ abstract class NodePrinters {
showPosition(tree)
tree match {
- case AppliedTypeTree(tpt, args) => applyCommon(tree, tpt, args)
- case ApplyDynamic(fun, args) => applyCommon(tree, fun, args)
- case Apply(fun, args) => applyCommon(tree, fun, args)
+ case ApplyDynamic(fun, args) => applyCommon(tree, fun, args)
+ case Apply(fun, args) => applyCommon(tree, fun, args)
+
+ case TypeApply(fun, args) => typeApplyCommon(tree, fun, args)
+ case AppliedTypeTree(tpt, args) => typeApplyCommon(tree, tpt, args)
case Throw(Ident(name)) =>
printSingle(tree, name)
@@ -312,11 +321,6 @@ abstract class NodePrinters {
}
case This(qual) =>
printSingle(tree, qual)
- case TypeApply(fun, args) =>
- printMultiline(tree) {
- traverse(fun)
- traverseList("[]", "type argument")(args)
- }
case tt @ TypeTree() =>
println(showTypeTree(tt))
diff --git a/src/compiler/scala/tools/nsc/ast/Positions.scala b/src/compiler/scala/tools/nsc/ast/Positions.scala
index d8fb632f73..beab801edf 100644
--- a/src/compiler/scala/tools/nsc/ast/Positions.scala
+++ b/src/compiler/scala/tools/nsc/ast/Positions.scala
@@ -1,16 +1,9 @@
package scala.tools.nsc
package ast
-import scala.reflect.internal.util.{ SourceFile, Position, OffsetPosition, NoPosition }
-
trait Positions extends scala.reflect.internal.Positions {
self: Global =>
- def rangePos(source: SourceFile, start: Int, point: Int, end: Int) =
- new OffsetPosition(source, point)
-
- def validatePositions(tree: Tree) {}
-
class ValidatingPosAssigner extends PosAssigner {
var pos: Position = _
override def traverse(t: Tree) {
@@ -20,7 +13,7 @@ trait Positions extends scala.reflect.internal.Positions {
// When we prune due to encountering a position, traverse the
// pruned children so we can warn about those lacking positions.
t.children foreach { c =>
- if ((c eq EmptyTree) || (c eq emptyValDef)) ()
+ if (!c.canHaveAttrs) ()
else if (c.pos == NoPosition) {
reporter.warning(t.pos, " Positioned tree has unpositioned child in phase " + globalPhase)
inform("parent: " + treeSymStatus(t))
@@ -32,6 +25,6 @@ trait Positions extends scala.reflect.internal.Positions {
}
override protected[this] lazy val posAssigner: PosAssigner =
- if (settings.Yrangepos.value && settings.debug.value || settings.Yposdebug.value) new ValidatingPosAssigner
+ if (settings.Yrangepos && settings.debug || settings.Yposdebug) new ValidatingPosAssigner
else new DefaultPosAssigner
}
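The ValidatingPosAssigner above warns when a positioned tree has a child that can carry attributes but has no position of its own. An illustrative, self-contained version of that check over a toy tree type:

object PosCheckSketch {
  final case class Tree(name: String, pos: Option[Int], canHaveAttrs: Boolean, children: List[Tree] = Nil)

  def validate(t: Tree, warn: String => Unit): Unit =
    for (c <- t.children) {
      if (!c.canHaveAttrs) ()                     // EmptyTree-like nodes are exempt, as above
      else if (c.pos.isEmpty && t.pos.nonEmpty)
        warn(s"positioned tree ${t.name} has unpositioned child ${c.name}")
      validate(c, warn)
    }

  def main(args: Array[String]): Unit = {
    val tree = Tree("Apply", Some(10), canHaveAttrs = true,
      List(Tree("Ident", None, canHaveAttrs = true)))
    validate(tree, msg => println(msg))
  }
}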
diff --git a/src/compiler/scala/tools/nsc/ast/Printers.scala b/src/compiler/scala/tools/nsc/ast/Printers.scala
index 83222a24b4..89652c5a20 100644
--- a/src/compiler/scala/tools/nsc/ast/Printers.scala
+++ b/src/compiler/scala/tools/nsc/ast/Printers.scala
@@ -7,8 +7,6 @@ package scala.tools.nsc
package ast
import java.io.{ OutputStream, PrintWriter, StringWriter, Writer }
-import symtab.Flags._
-import symtab.SymbolTable
trait Printers extends scala.reflect.internal.Printers { this: Global =>
@@ -44,7 +42,7 @@ trait Printers extends scala.reflect.internal.Printers { this: Global =>
}
}
- // overflow cases missing from TreePrinter in reflect.api
+ // overflow cases missing from TreePrinter in scala.reflect.api
override def xprintTree(treePrinter: super.TreePrinter, tree: Tree) = tree match {
case DocDef(comment, definition) =>
treePrinter.print(comment.raw)
@@ -130,7 +128,7 @@ trait Printers extends scala.reflect.internal.Printers { this: Global =>
case Select(qualifier, name) =>
printTree(qualifier)
print(".")
- print(quotedName(name, true))
+ print(quotedName(name, decode = true))
// target.toString() ==> target.toString
case Apply(fn, Nil) => printTree(fn)
@@ -154,7 +152,7 @@ trait Printers extends scala.reflect.internal.Printers { this: Global =>
// If thenp or elsep has only one statement, it doesn't need more than one line.
case If(cond, thenp, elsep) =>
def ifIndented(x: Tree) = {
- indent ; println() ; printTree(x) ; undent
+ indent() ; println() ; printTree(x) ; undent()
}
val List(thenStmts, elseStmts) = List(thenp, elsep) map allStatements
@@ -168,12 +166,12 @@ trait Printers extends scala.reflect.internal.Printers { this: Global =>
if (elseStmts.nonEmpty) {
print(" else")
- indent ; println()
+ indent() ; println()
elseStmts match {
case List(x) => printTree(x)
case _ => printTree(elsep)
}
- undent ; println()
+ undent() ; println()
}
case _ => s()
}
@@ -202,94 +200,15 @@ trait Printers extends scala.reflect.internal.Printers { this: Global =>
override def printTree(tree: Tree) { print(safe(tree)) }
}
- class TreeMatchTemplate {
- // non-trees defined in Trees
- //
- // case class ImportSelector(name: Name, namePos: Int, rename: Name, renamePos: Int)
- // case class Modifiers(flags: Long, privateWithin: Name, annotations: List[Tree], positions: Map[Long, Position])
- //
- def apply(t: Tree): Unit = t match {
- // eliminated by typer
- case Annotated(annot, arg) =>
- case AssignOrNamedArg(lhs, rhs) =>
- case DocDef(comment, definition) =>
- case Import(expr, selectors) =>
-
- // eliminated by refchecks
- case ModuleDef(mods, name, impl) =>
- case TypeTreeWithDeferredRefCheck() =>
-
- // eliminated by erasure
- case TypeDef(mods, name, tparams, rhs) =>
- case Typed(expr, tpt) =>
-
- // eliminated by cleanup
- case ApplyDynamic(qual, args) =>
-
- // eliminated by explicitouter
- case Alternative(trees) =>
- case Bind(name, body) =>
- case CaseDef(pat, guard, body) =>
- case Star(elem) =>
- case UnApply(fun, args) =>
-
- // eliminated by lambdalift
- case Function(vparams, body) =>
-
- // eliminated by uncurry
- case AppliedTypeTree(tpt, args) =>
- case CompoundTypeTree(templ) =>
- case ExistentialTypeTree(tpt, whereClauses) =>
- case SelectFromTypeTree(qual, selector) =>
- case SingletonTypeTree(ref) =>
- case TypeBoundsTree(lo, hi) =>
-
- // survivors
- case Apply(fun, args) =>
- case ArrayValue(elemtpt, trees) =>
- case Assign(lhs, rhs) =>
- case Block(stats, expr) =>
- case ClassDef(mods, name, tparams, impl) =>
- case DefDef(mods, name, tparams, vparamss, tpt, rhs) =>
- case EmptyTree =>
- case Ident(name) =>
- case If(cond, thenp, elsep) =>
- case LabelDef(name, params, rhs) =>
- case Literal(value) =>
- case Match(selector, cases) =>
- case New(tpt) =>
- case PackageDef(pid, stats) =>
- case Return(expr) =>
- case Select(qualifier, selector) =>
- case Super(qual, mix) =>
- case Template(parents, self, body) =>
- case This(qual) =>
- case Throw(expr) =>
- case Try(block, catches, finalizer) =>
- case TypeApply(fun, args) =>
- case TypeTree() =>
- case ValDef(mods, name, tpt, rhs) =>
-
- // missing from the Trees comment
- case Parens(args) => // only used during parsing
- case SelectFromArray(qual, name, erasure) => // only used during erasure
- }
- }
-
- def asString(t: Tree): String = render(t, newStandardTreePrinter, settings.printtypes.value, settings.uniqid.value, settings.Yshowsymkinds.value)
- def asCompactString(t: Tree): String = render(t, newCompactTreePrinter, settings.printtypes.value, settings.uniqid.value, settings.Yshowsymkinds.value)
+ def asString(t: Tree): String = render(t, newStandardTreePrinter, settings.printtypes, settings.uniqid, settings.Yshowsymkinds)
+ def asCompactString(t: Tree): String = render(t, newCompactTreePrinter, settings.printtypes, settings.uniqid, settings.Yshowsymkinds)
def asCompactDebugString(t: Tree): String = render(t, newCompactTreePrinter, true, true, true)
def newStandardTreePrinter(writer: PrintWriter): TreePrinter = new TreePrinter(writer)
- def newStandardTreePrinter(stream: OutputStream): TreePrinter = newStandardTreePrinter(new PrintWriter(stream))
- def newStandardTreePrinter(): TreePrinter = newStandardTreePrinter(new PrintWriter(ConsoleWriter))
-
def newCompactTreePrinter(writer: PrintWriter): CompactTreePrinter = new CompactTreePrinter(writer)
- def newCompactTreePrinter(stream: OutputStream): CompactTreePrinter = newCompactTreePrinter(new PrintWriter(stream))
- def newCompactTreePrinter(): CompactTreePrinter = newCompactTreePrinter(new PrintWriter(ConsoleWriter))
override def newTreePrinter(writer: PrintWriter): TreePrinter =
- if (settings.Ycompacttrees.value) newCompactTreePrinter(writer)
+ if (settings.Ycompacttrees) newCompactTreePrinter(writer)
else newStandardTreePrinter(writer)
override def newTreePrinter(stream: OutputStream): TreePrinter = newTreePrinter(new PrintWriter(stream))
override def newTreePrinter(): TreePrinter = newTreePrinter(new PrintWriter(ConsoleWriter))
diff --git a/src/compiler/scala/tools/nsc/ast/TreeBrowsers.scala b/src/compiler/scala/tools/nsc/ast/TreeBrowsers.scala
index 5c954096f4..1e70b7091b 100644
--- a/src/compiler/scala/tools/nsc/ast/TreeBrowsers.scala
+++ b/src/compiler/scala/tools/nsc/ast/TreeBrowsers.scala
@@ -3,7 +3,8 @@
* @author Martin Odersky
*/
-package scala.tools.nsc
+package scala
+package tools.nsc
package ast
import java.awt.{List => awtList, _}
@@ -16,8 +17,6 @@ import javax.swing.tree._
import scala.concurrent.Lock
import scala.text._
-import symtab.Flags._
-import symtab.SymbolTable
import scala.language.implicitConversions
/**
@@ -34,7 +33,7 @@ abstract class TreeBrowsers {
val borderSize = 10
- def create(): SwingBrowser = new SwingBrowser();
+ def create(): SwingBrowser = new SwingBrowser()
/** Pseudo tree class, so that all JTree nodes are treated uniformly */
case class ProgramTree(units: List[UnitTree]) extends Tree {
@@ -61,7 +60,7 @@ abstract class TreeBrowsers {
frame.createFrame(lock)
// wait for the frame to be closed
- lock.acquire
+ lock.acquire()
t
}
@@ -83,7 +82,7 @@ abstract class TreeBrowsers {
frame.createFrame(lock)
// wait for the frame to be closed
- lock.acquire
+ lock.acquire()
}
}
@@ -171,8 +170,8 @@ abstract class TreeBrowsers {
_setExpansionState(root, new TreePath(root.getModel.getRoot))
}
- def expandAll(subtree: JTree) = setExpansionState(subtree, true)
- def collapseAll(subtree: JTree) = setExpansionState(subtree, false)
+ def expandAll(subtree: JTree) = setExpansionState(subtree, expand = true)
+ def collapseAll(subtree: JTree) = setExpansionState(subtree, expand = false)
/** Create a frame that displays the AST.
@@ -184,14 +183,14 @@ abstract class TreeBrowsers {
* especially symbols/types would change while the window is visible.
*/
def createFrame(lock: Lock): Unit = {
- lock.acquire // keep the lock until the user closes the window
+ lock.acquire() // keep the lock until the user closes the window
frame.setDefaultCloseOperation(WindowConstants.DISPOSE_ON_CLOSE)
frame.addWindowListener(new WindowAdapter() {
/** Release the lock, so compilation may resume after the window is closed. */
- override def windowClosed(e: WindowEvent): Unit = lock.release
- });
+ override def windowClosed(e: WindowEvent): Unit = lock.release()
+ })
jTree = new JTree(treeModel) {
/** Return the string for a tree node. */
@@ -253,7 +252,7 @@ abstract class TreeBrowsers {
putValue(Action.ACCELERATOR_KEY, KeyStroke.getKeyStroke(KeyEvent.VK_Q, menuKey + shiftKey, false))
override def actionPerformed(e: ActionEvent) {
closeWindow()
- global.currentRun.cancel
+ global.currentRun.cancel()
}
}
)
@@ -509,7 +508,7 @@ abstract class TreeBrowsers {
/** Return a textual representation of this t's symbol */
def symbolText(t: Tree): String = {
val prefix =
- if (t.hasSymbol) "[has] "
+ if (t.hasSymbolField) "[has] "
else if (t.isDef) "[defines] "
else ""
@@ -529,11 +528,10 @@ abstract class TreeBrowsers {
* attributes */
def symbolAttributes(t: Tree): String = {
val s = t.symbol
- var att = ""
if ((s ne null) && (s != NoSymbol)) {
- var str = flagsToString(s.flags)
- if (s.isStaticMember) str = str + " isStatic ";
+ var str = s.flagString
+ if (s.isStaticMember) str = str + " isStatic "
(str + " annotations: " + s.annotations.mkString("", " ", "")
+ (if (s.isTypeSkolem) "\ndeSkolemized annotations: " + s.deSkolemize.annotations.mkString("", " ", "") else ""))
}
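createFrame above holds a lock until the browser window is closed so that compilation resumes only afterwards. A sketch of the same handshake using a CountDownLatch in place of scala.concurrent.Lock (frame contents omitted):

import java.awt.event.{ WindowAdapter, WindowEvent }
import java.util.concurrent.CountDownLatch
import javax.swing.{ JFrame, WindowConstants }

object BrowseAndWait {
  def browse(title: String): Unit = {
    val closed = new CountDownLatch(1)
    val frame  = new JFrame(title)
    frame.setDefaultCloseOperation(WindowConstants.DISPOSE_ON_CLOSE)
    frame.addWindowListener(new WindowAdapter {
      // Release the waiter once the user closes the window.
      override def windowClosed(e: WindowEvent): Unit = closed.countDown()
    })
    frame.setSize(400, 300)
    frame.setVisible(true)
    closed.await() // the caller blocks here, like compilation does above
  }

  def main(args: Array[String]): Unit = browse("AST browser sketch")
}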
diff --git a/src/compiler/scala/tools/nsc/ast/TreeDSL.scala b/src/compiler/scala/tools/nsc/ast/TreeDSL.scala
index 9a5b92e795..b17de9b9d5 100644
--- a/src/compiler/scala/tools/nsc/ast/TreeDSL.scala
+++ b/src/compiler/scala/tools/nsc/ast/TreeDSL.scala
@@ -21,7 +21,6 @@ trait TreeDSL {
import global._
import definitions._
- import gen.{ scalaDot }
object CODE {
// Add a null check to a Tree => Tree function
@@ -31,24 +30,17 @@ trait TreeDSL {
def returning[T](x: T)(f: T => Unit): T = util.returning(x)(f)
object LIT extends (Any => Literal) {
+ def typed(x: Any) = apply(x) setType ConstantType(Constant(x))
def apply(x: Any) = Literal(Constant(x))
def unapply(x: Any) = condOpt(x) { case Literal(Constant(value)) => value }
}
- // You might think these could all be vals, but empirically I have found that
- // at least in the case of UNIT the compiler breaks if you re-use trees.
- // However we need stable identifiers to have attractive pattern matching.
- // So it's inconsistent until I devise a better way.
- val TRUE = LIT(true)
- val FALSE = LIT(false)
- val ZERO = LIT(0)
- def NULL = LIT(null)
- def UNIT = LIT(())
-
- // for those preferring boring, predictable lives, without the thrills of tree-sharing
- // (but with the perk of typed trees)
- def TRUE_typed = LIT(true) setType ConstantType(Constant(true))
- def FALSE_typed = LIT(false) setType ConstantType(Constant(false))
+ // Boring, predictable trees.
+ def TRUE = LIT typed true
+ def FALSE = LIT typed false
+ def ZERO = LIT(0)
+ def NULL = LIT(null)
+ def UNIT = LIT(())
object WILD {
def empty = Ident(nme.WILDCARD)
@@ -82,19 +74,15 @@ trait TreeDSL {
if (opSym == NoSymbol) ANY_==(other)
else fn(target, opSym, other)
}
- def ANY_EQ (other: Tree) = OBJ_EQ(other AS ObjectClass.tpe)
+ def ANY_EQ (other: Tree) = OBJ_EQ(other AS ObjectTpe)
def ANY_== (other: Tree) = fn(target, Any_==, other)
def ANY_!= (other: Tree) = fn(target, Any_!=, other)
- def OBJ_== (other: Tree) = fn(target, Object_==, other)
def OBJ_!= (other: Tree) = fn(target, Object_!=, other)
def OBJ_EQ (other: Tree) = fn(target, Object_eq, other)
def OBJ_NE (other: Tree) = fn(target, Object_ne, other)
- def INT_| (other: Tree) = fn(target, getMember(IntClass, nme.OR), other)
- def INT_& (other: Tree) = fn(target, getMember(IntClass, nme.AND), other)
def INT_>= (other: Tree) = fn(target, getMember(IntClass, nme.GE), other)
def INT_== (other: Tree) = fn(target, getMember(IntClass, nme.EQ), other)
- def INT_!= (other: Tree) = fn(target, getMember(IntClass, nme.NE), other)
// generic operations on ByteClass, IntClass, LongClass
def GEN_| (other: Tree, kind: ClassSymbol) = fn(target, getMember(kind, nme.OR), other)
@@ -102,9 +90,6 @@ trait TreeDSL {
def GEN_== (other: Tree, kind: ClassSymbol) = fn(target, getMember(kind, nme.EQ), other)
def GEN_!= (other: Tree, kind: ClassSymbol) = fn(target, getMember(kind, nme.NE), other)
- def BOOL_&& (other: Tree) = fn(target, Boolean_and, other)
- def BOOL_|| (other: Tree) = fn(target, Boolean_or, other)
-
/** Apply, Select, Match **/
def APPLY(params: Tree*) = Apply(target, params.toList)
def APPLY(params: List[Tree]) = Apply(target, params)
@@ -114,6 +99,10 @@ trait TreeDSL {
def DOT(sym: Symbol) = SelectStart(Select(target, sym))
/** Assignment */
+ // !!! This method is responsible for some tree sharing, but a diligent
+ // reviewer pointed out that we shouldn't blindly duplicate these trees
+ // as there might be DefTrees nested beneath them. It's not entirely
+ // clear how to proceed, so for now it retains the non-duplicating behavior.
def ===(rhs: Tree) = Assign(target, rhs)
/** Methods for sequences **/
@@ -127,11 +116,9 @@ trait TreeDSL {
* See ticket #2168 for one illustration of AS vs. AS_ANY.
*/
def AS(tpe: Type) = gen.mkAsInstanceOf(target, tpe, any = true, wrapInApply = false)
- def IS(tpe: Type) = gen.mkIsInstanceOf(target, tpe, true)
- def IS_OBJ(tpe: Type) = gen.mkIsInstanceOf(target, tpe, false)
+ def IS(tpe: Type) = gen.mkIsInstanceOf(target, tpe, any = true)
+ def IS_OBJ(tpe: Type) = gen.mkIsInstanceOf(target, tpe, any = false)
- // XXX having some difficulty expressing nullSafe in a way that doesn't freak out value types
- // def TOSTRING() = nullSafe(fn(_: Tree, nme.toString_), LIT("null"))(target)
def TOSTRING() = fn(target, nme.toString_)
def GETCLASS() = fn(target, Object_getClass)
}
@@ -159,7 +146,6 @@ trait TreeDSL {
def mkTree(rhs: Tree): ResultTreeType
def ===(rhs: Tree): ResultTreeType
- private var _mods: Modifiers = null
private var _tpt: Tree = null
private var _pos: Position = null
@@ -167,19 +153,12 @@ trait TreeDSL {
_tpt = TypeTree(tp)
this
}
- def withFlags(flags: Long*): this.type = {
- if (_mods == null)
- _mods = defaultMods
-
- _mods = flags.foldLeft(_mods)(_ | _)
- this
- }
def withPos(pos: Position): this.type = {
_pos = pos
this
}
- final def mods = if (_mods == null) defaultMods else _mods
+ final def mods = defaultMods
final def tpt = if (_tpt == null) defaultTpt else _tpt
final def pos = if (_pos == null) defaultPos else _pos
}
@@ -199,7 +178,7 @@ trait TreeDSL {
self: VODDStart =>
type ResultTreeType = ValDef
- def mkTree(rhs: Tree): ValDef = ValDef(mods, name, tpt, rhs)
+ def mkTree(rhs: Tree): ValDef = ValDef(mods, name.toTermName, tpt, rhs)
}
trait DefCreator {
self: VODDStart =>
@@ -244,7 +223,6 @@ trait TreeDSL {
}
class TryStart(body: Tree, catches: List[CaseDef], fin: Tree) {
def CATCH(xs: CaseDef*) = new TryStart(body, xs.toList, fin)
- def FINALLY(x: Tree) = Try(body, catches, x)
def ENDTRY = Try(body, catches, fin)
}
@@ -252,16 +230,9 @@ trait TreeDSL {
def DEFAULT: CaseStart = new CaseStart(WILD.empty, EmptyTree)
class SymbolMethods(target: Symbol) {
- def BIND(body: Tree) = Bind(target, body)
- def IS_NULL() = REF(target) OBJ_EQ NULL
- def NOT_NULL() = REF(target) OBJ_NE NULL
-
- def GET() = fn(REF(target), nme.get)
-
- // name of nth indexed argument to a method (first parameter list), defaults to 1st
- def ARG(idx: Int = 0) = Ident(target.paramss.head(idx))
- def ARGS = target.paramss.head
- def ARGNAMES = ARGS map Ident
+ def IS_NULL() = REF(target) OBJ_EQ NULL
+ def GET() = fn(REF(target), nme.get)
+ def ARGS = target.paramss.head
}
/** Top level accessible. */
@@ -269,37 +240,18 @@ trait TreeDSL {
def THROW(sym: Symbol, msg: Tree): Throw = Throw(sym.tpe, msg.TOSTRING())
def NEW(tpt: Tree, args: Tree*): Tree = New(tpt, List(args.toList))
- def NEW(sym: Symbol, args: Tree*): Tree = New(sym.tpe, args: _*)
-
- def DEF(name: Name, tp: Type): DefTreeStart = DEF(name) withType tp
- def DEF(name: Name): DefTreeStart = new DefTreeStart(name)
def DEF(sym: Symbol): DefSymStart = new DefSymStart(sym)
-
- def VAL(name: Name, tp: Type): ValTreeStart = VAL(name) withType tp
- def VAL(name: Name): ValTreeStart = new ValTreeStart(name)
def VAL(sym: Symbol): ValSymStart = new ValSymStart(sym)
- def VAR(name: Name, tp: Type): ValTreeStart = VAL(name, tp) withFlags Flags.MUTABLE
- def VAR(name: Name): ValTreeStart = VAL(name) withFlags Flags.MUTABLE
- def VAR(sym: Symbol): ValSymStart = VAL(sym) withFlags Flags.MUTABLE
-
- def LAZYVAL(name: Name, tp: Type): ValTreeStart = VAL(name, tp) withFlags Flags.LAZY
- def LAZYVAL(name: Name): ValTreeStart = VAL(name) withFlags Flags.LAZY
- def LAZYVAL(sym: Symbol): ValSymStart = VAL(sym) withFlags Flags.LAZY
-
def AND(guards: Tree*) =
if (guards.isEmpty) EmptyTree
else guards reduceLeft gen.mkAnd
- def OR(guards: Tree*) =
- if (guards.isEmpty) EmptyTree
- else guards reduceLeft gen.mkOr
-
def IF(tree: Tree) = new IfStart(tree, EmptyTree)
def TRY(tree: Tree) = new TryStart(tree, Nil, EmptyTree)
def BLOCK(xs: Tree*) = Block(xs.init.toList, xs.last)
def NOT(tree: Tree) = Select(tree, Boolean_not)
- def SOME(xs: Tree*) = Apply(SomeClass.companionSymbol, makeTupleTerm(xs.toList, true))
+ def SOME(xs: Tree*) = Apply(SomeClass.companionSymbol, makeTupleTerm(xs.toList, flattenUnary = true))
/** Typed trees from symbols. */
def THIS(sym: Symbol) = gen.mkAttributedThis(sym)
@@ -312,11 +264,6 @@ trait TreeDSL {
case List(tree) if flattenUnary => tree
case _ => Apply(TupleClass(trees.length).companionModule, trees: _*)
}
- def makeTupleType(trees: List[Tree], flattenUnary: Boolean): Tree = trees match {
- case Nil => gen.scalaUnitConstr
- case List(tree) if flattenUnary => tree
- case _ => AppliedTypeTree(REF(TupleClass(trees.length)), trees)
- }
/** Implicits - some of these should probably disappear **/
implicit def mkTreeMethods(target: Tree): TreeMethods = new TreeMethods(target)
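TRUE, FALSE and friends above become defs that build a fresh, pre-typed literal on every use, replacing the shared vals whose reuse the removed comment warned about. A toy illustration of why sharing mutable tree nodes is unsafe (Lit is a stand-in, not the compiler's Literal):

object TreeSharingSketch {
  // Tree nodes carry a settable attribute, like a compiler tree's type.
  final class Lit(val value: Any) { var tpe: String = null }

  val SHARED_TRUE    = new Lit(true)                                               // one node reused everywhere
  def freshTrue: Lit = { val l = new Lit(true); l.tpe = "Boolean(true)"; l }        // fresh, pre-typed node per use

  def main(args: Array[String]): Unit = {
    val a = SHARED_TRUE; val b = SHARED_TRUE
    a.tpe = "Boolean(true)"
    println(b.tpe)                  // "Boolean(true)": the mutation leaked through sharing
    println(freshTrue ne freshTrue) // true: each use gets its own node
  }
}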
diff --git a/src/compiler/scala/tools/nsc/ast/TreeGen.scala b/src/compiler/scala/tools/nsc/ast/TreeGen.scala
index 99b82d9746..692afbac66 100644
--- a/src/compiler/scala/tools/nsc/ast/TreeGen.scala
+++ b/src/compiler/scala/tools/nsc/ast/TreeGen.scala
@@ -8,7 +8,6 @@ package ast
import scala.collection.mutable.ListBuffer
import symtab.Flags._
-import symtab.SymbolTable
import scala.language.postfixOps
/** XXX to resolve: TreeGen only assumes global is a SymbolTable, but
@@ -20,18 +19,6 @@ abstract class TreeGen extends scala.reflect.internal.TreeGen with TreeDSL {
import global._
import definitions._
- def mkCheckInit(tree: Tree): Tree = {
- val tpe =
- if (tree.tpe != null || !tree.hasSymbol) tree.tpe
- else tree.symbol.tpe
-
- if (!global.phase.erasedTypes && settings.warnSelectNullable.value &&
- tpe <:< NotNullClass.tpe && !tpe.isNotNull)
- mkRuntimeCall(nme.checkInitialized, List(tree))
- else
- tree
- }
-
/** Builds a fully attributed wildcard import node.
*/
def mkWildcardImport(pkg: Symbol): Import = {
@@ -52,120 +39,30 @@ abstract class TreeGen extends scala.reflect.internal.TreeGen with TreeDSL {
}
// wrap the given expression in a SoftReference so it can be gc-ed
- def mkSoftRef(expr: Tree): Tree = atPos(expr.pos)(New(SoftReferenceClass.tpe, expr))
+ def mkSoftRef(expr: Tree): Tree = atPos(expr.pos) {
+ val constructor = SoftReferenceClass.info.nonPrivateMember(nme.CONSTRUCTOR).suchThat(_.paramss.flatten.size == 1)
+ NewFromConstructor(constructor, expr)
+ }
// annotate the expression with @unchecked
def mkUnchecked(expr: Tree): Tree = atPos(expr.pos) {
// This can't be "Annotated(New(UncheckedClass), expr)" because annotations
// are very picky about things and it crashes the compiler with "unexpected new".
- Annotated(New(scalaDot(UncheckedClass.name), ListOfNil), expr)
- }
- // if it's a Match, mark the selector unchecked; otherwise nothing.
- def mkUncheckedMatch(tree: Tree) = tree match {
- case Match(selector, cases) => atPos(tree.pos)(Match(mkUnchecked(selector), cases))
- case _ => tree
- }
-
- def mkSynthSwitchSelector(expr: Tree): Tree = atPos(expr.pos) {
- // This can't be "Annotated(New(SwitchClass), expr)" because annotations
- // are very picky about things and it crashes the compiler with "unexpected new".
- Annotated(Ident(nme.synthSwitch), expr)
- }
-
- // TODO: would be so much nicer if we would know during match-translation (i.e., type checking)
- // whether we should emit missingCase-style apply (and isDefinedAt), instead of transforming trees post-factum
- class MatchMatcher {
- def caseMatch(orig: Tree, selector: Tree, cases: List[CaseDef], wrap: Tree => Tree): Tree = unknownTree(orig)
- def caseVirtualizedMatch(orig: Tree, _match: Tree, targs: List[Tree], scrut: Tree, matcher: Tree): Tree = unknownTree(orig)
- def caseVirtualizedMatchOpt(orig: Tree, prologue: List[Tree], cases: List[Tree], matchEndDef: Tree, wrap: Tree => Tree): Tree = unknownTree(orig)
-
- def genVirtualizedMatch(prologue: List[Tree], cases: List[Tree], matchEndDef: Tree): Tree = Block(prologue ++ cases, matchEndDef)
-
- def apply(matchExpr: Tree): Tree = matchExpr match {
- // old-style match or virtpatmat switch
- case Match(selector, cases) => // println("simple match: "+ (selector, cases) + "for:\n"+ matchExpr )
- caseMatch(matchExpr, selector, cases, identity)
- // old-style match or virtpatmat switch
- case Block((vd: ValDef) :: Nil, orig@Match(selector, cases)) => // println("block match: "+ (selector, cases, vd) + "for:\n"+ matchExpr )
- caseMatch(matchExpr, selector, cases, m => copyBlock(matchExpr, List(vd), m))
- // virtpatmat
- case Apply(Apply(TypeApply(Select(tgt, nme.runOrElse), targs), List(scrut)), List(matcher)) if opt.virtPatmat => // println("virt match: "+ (tgt, targs, scrut, matcher) + "for:\n"+ matchExpr )
- caseVirtualizedMatch(matchExpr, tgt, targs, scrut, matcher)
- // optimized version of virtpatmat
- case Block(stats, matchEndDef) if opt.virtPatmat && (stats forall treeInfo.hasSynthCaseSymbol) =>
- // the assumption is once we encounter a case, the remainder of the block will consist of cases
- // the prologue may be empty, usually it is the valdef that stores the scrut
- val (prologue, cases) = stats span (s => !s.isInstanceOf[LabelDef])
- caseVirtualizedMatchOpt(matchExpr, prologue, cases, matchEndDef, identity)
- // optimized version of virtpatmat
- case Block(outerStats, orig@Block(stats, matchEndDef)) if opt.virtPatmat && (stats forall treeInfo.hasSynthCaseSymbol) =>
- val (prologue, cases) = stats span (s => !s.isInstanceOf[LabelDef])
- caseVirtualizedMatchOpt(matchExpr, prologue, cases, matchEndDef, m => copyBlock(matchExpr, outerStats, m))
- case other =>
- unknownTree(other)
- }
-
- def unknownTree(t: Tree): Tree = throw new MatchError(t)
- def copyBlock(orig: Tree, stats: List[Tree], expr: Tree): Block = Block(stats, expr)
-
- def dropSyntheticCatchAll(cases: List[CaseDef]): List[CaseDef] =
- if (!opt.virtPatmat) cases
- else cases filter {
- case CaseDef(pat, EmptyTree, Throw(Apply(Select(New(exTpt), nme.CONSTRUCTOR), _))) if (treeInfo.isWildcardArg(pat) && (exTpt.tpe.typeSymbol eq MatchErrorClass)) => false
- case CaseDef(pat, guard, body) => true
- }
- }
-
- def mkCached(cvar: Symbol, expr: Tree): Tree = {
- val cvarRef = mkUnattributedRef(cvar)
- Block(
- List(
- If(Apply(Select(cvarRef, nme.eq), List(Literal(Constant(null)))),
- Assign(cvarRef, expr),
- EmptyTree)),
- cvarRef
- )
+ Annotated(New(scalaDot(UncheckedClass.name), Nil), expr)
}
// Builds a tree of the form "{ lhs = rhs ; lhs }"
def mkAssignAndReturn(lhs: Symbol, rhs: Tree): Tree = {
- val lhsRef = mkUnattributedRef(lhs)
+ def lhsRef = if (lhs.owner.isClass) Select(This(lhs.owner), lhs) else Ident(lhs)
Block(Assign(lhsRef, rhs) :: Nil, lhsRef)
}
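  // Illustrative sketch of the trees produced by mkAssignAndReturn:
  //
  //   mkAssignAndReturn(x, rhs)
  //   // => Block(Assign(Select(This(C), x), rhs) :: Nil, Select(This(C), x))  when x.owner is a class C
  //   // => Block(Assign(Ident(x), rhs) :: Nil, Ident(x))                      otherwise
  //
  // i.e. source of the shape `{ this.x = rhs; this.x }` or `{ x = rhs; x }`.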
- def mkModuleVarDef(accessor: Symbol) = {
- val inClass = accessor.owner.isClass
- val extraFlags = if (inClass) PrivateLocal | SYNTHETIC else 0
-
- val mval = (
- accessor.owner.newVariable(nme.moduleVarName(accessor.name), accessor.pos.focus, MODULEVAR | extraFlags)
- setInfo accessor.tpe.finalResultType
- addAnnotation VolatileAttr
- )
- if (inClass)
- mval.owner.info.decls enter mval
-
- ValDef(mval)
- }
-
- // def m: T = { if (m$ eq null) m$ = new m$class(...) m$ }
- // where (...) are eventual outer accessors
- def mkCachedModuleAccessDef(accessor: Symbol, mvar: Symbol) =
- DefDef(accessor, mkCached(mvar, newModule(accessor, mvar.tpe)))
-
- def mkModuleAccessDef(accessor: Symbol, msym: Symbol) =
- DefDef(accessor, Select(This(msym.owner), msym))
-
def newModule(accessor: Symbol, tpe: Type) = {
val ps = tpe.typeSymbol.primaryConstructor.info.paramTypes
if (ps.isEmpty) New(tpe)
else New(tpe, This(accessor.owner.enclClass))
}
- // def m: T;
- def mkModuleAccessDcl(accessor: Symbol) =
- DefDef(accessor setFlag lateDEFERRED, EmptyTree)
-
def mkRuntimeCall(meth: Name, args: List[Tree]): Tree =
mkRuntimeCall(meth, Nil, args)
@@ -206,7 +103,7 @@ abstract class TreeGen extends scala.reflect.internal.TreeGen with TreeDSL {
else AppliedTypeTree(Ident(clazz), targs map TypeTree)
))
}
- def mkSuperSelect = Select(Super(This(tpnme.EMPTY), tpnme.EMPTY), nme.CONSTRUCTOR)
+ def mkSuperInitCall: Select = Select(Super(This(tpnme.EMPTY), tpnme.EMPTY), nme.CONSTRUCTOR)
def wildcardStar(tree: Tree) =
atPos(tree.pos) { Typed(tree, Ident(tpnme.WILDCARD_STAR)) }
@@ -267,25 +164,6 @@ abstract class TreeGen extends scala.reflect.internal.TreeGen with TreeDSL {
else
mkCast(tree, pt)
- def mkZeroContravariantAfterTyper(tp: Type): Tree = {
- // contravariant -- for replacing an argument in a method call
- // must use subtyping, as otherwise we miss types like `Any with Int`
- val tree =
- if (NullClass.tpe <:< tp) Literal(Constant(null))
- else if (UnitClass.tpe <:< tp) Literal(Constant())
- else if (BooleanClass.tpe <:< tp) Literal(Constant(false))
- else if (FloatClass.tpe <:< tp) Literal(Constant(0.0f))
- else if (DoubleClass.tpe <:< tp) Literal(Constant(0.0d))
- else if (ByteClass.tpe <:< tp) Literal(Constant(0.toByte))
- else if (ShortClass.tpe <:< tp) Literal(Constant(0.toShort))
- else if (IntClass.tpe <:< tp) Literal(Constant(0))
- else if (LongClass.tpe <:< tp) Literal(Constant(0L))
- else if (CharClass.tpe <:< tp) Literal(Constant(0.toChar))
- else mkCast(Literal(Constant(null)), tp)
-
- tree
- }
-
/** Translate names in Select/Ident nodes to type names.
*/
def convertToTypeName(tree: Tree): Option[RefTree] = tree match {
@@ -307,7 +185,7 @@ abstract class TreeGen extends scala.reflect.internal.TreeGen with TreeDSL {
*/
private def mkPackedValDef(expr: Tree, owner: Symbol, name: Name): (ValDef, () => Ident) = {
val packedType = typer.packedType(expr, owner)
- val sym = owner.newValue(name, expr.pos.makeTransparent, SYNTHETIC) setInfo packedType
+ val sym = owner.newValue(name.toTermName, expr.pos.makeTransparent, SYNTHETIC) setInfo packedType
(ValDef(sym, expr), () => Ident(sym) setPos sym.pos.focus setType expr.tpe)
}
diff --git a/src/compiler/scala/tools/nsc/ast/TreeInfo.scala b/src/compiler/scala/tools/nsc/ast/TreeInfo.scala
index cbbb4c8ba8..0731d78a9b 100644
--- a/src/compiler/scala/tools/nsc/ast/TreeInfo.scala
+++ b/src/compiler/scala/tools/nsc/ast/TreeInfo.scala
@@ -6,10 +6,6 @@
package scala.tools.nsc
package ast
-import scala.reflect.internal.HasFlags
-import scala.reflect.internal.Flags._
-import symtab._
-
/** This class ...
*
* @author Martin Odersky
@@ -18,8 +14,65 @@ import symtab._
abstract class TreeInfo extends scala.reflect.internal.TreeInfo {
val global: Global
import global._
+ import definitions._
+
+ // arg1.op(arg2) returns (arg1, op.symbol, arg2)
+ object BinaryOp {
+ def unapply(t: Tree): Option[(Tree, Symbol, Tree)] = t match {
+ case Apply(sel @ Select(arg1, _), arg2 :: Nil) => Some((arg1, sel.symbol, arg2))
+ case _ => None
+ }
+ }
+ // recv.op[T1, ...] returns (recv, op.symbol, type argument types)
+ object TypeApplyOp {
+ def unapply(t: Tree): Option[(Tree, Symbol, List[Type])] = t match {
+ case TypeApply(sel @ Select(recv, _), targs) => Some((recv, sel.symbol, targs map (_.tpe)))
+ case _ => None
+ }
+ }
+
+ // x.asInstanceOf[T] returns (x, typeOf[T])
+ object AsInstanceOf {
+ def unapply(t: Tree): Option[(Tree, Type)] = t match {
+ case Apply(TypeApplyOp(recv, Object_asInstanceOf, tpe :: Nil), Nil) => Some((recv, tpe))
+ case _ => None
+ }
+ }
- import definitions.ThrowableClass
+ // Extractors for value classes.
+ object ValueClass {
+ def isValueClass(tpe: Type) = enteringErasure(tpe.typeSymbol.isDerivedValueClass)
+ def valueUnbox(tpe: Type) = enteringErasure(tpe.typeSymbol.derivedValueClassUnbox)
+
+ // B.unbox. Returns B.
+ object Unbox {
+ def unapply(t: Tree): Option[Tree] = t match {
+ case Apply(sel @ Select(ref, _), Nil) if valueUnbox(ref.tpe) == sel.symbol => Some(ref)
+ case _ => None
+ }
+ }
+ // new B(v). Returns B and v.
+ object Box {
+ def unapply(t: Tree): Option[(Tree, Type)] = t match {
+ case Apply(sel @ Select(New(tpt), nme.CONSTRUCTOR), v :: Nil) => Some((v, tpt.tpe.finalResultType))
+ case _ => None
+ }
+ }
+ // (new B(v)).unbox. returns v.
+ object BoxAndUnbox {
+ def unapply(t: Tree): Option[Tree] = t match {
+ case Unbox(Box(v, tpe)) if isValueClass(tpe) => Some(v)
+ case _ => None
+ }
+ }
+ // new B(v1) op new B(v2) where op is == or !=. Returns v1, op, v2.
+ object BoxAndCompare {
+ def unapply(t: Tree): Option[(Tree, Symbol, Tree)] = t match {
+ case BinaryOp(Box(v1, tpe1), op @ (Object_== | Object_!=), Box(v2, tpe2)) if isValueClass(tpe1) && tpe1 =:= tpe2 => Some((v1, op, v2))
+ case _ => None
+ }
+ }
+ }
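  // Illustrative sketch (hypothetical caller, not part of this patch): the
  // ValueClass extractors are meant to recognize box/unbox shapes, e.g. in a
  // peephole rewrite over erased trees:
  //
  //   tree match {
  //     case ValueClass.BoxAndUnbox(v)            => v    // (new B(v)).unbox  ==>  v
  //     case ValueClass.BoxAndCompare(v1, op, v2) => ...  // compare the underlying values directly
  //     case _                                    => tree
  //   }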
/** Is tree legal as a member definition of an interface?
*/
@@ -34,15 +87,4 @@ abstract class TreeInfo extends scala.reflect.internal.TreeInfo {
case DocDef(_, definition) => isPureDef(definition)
case _ => super.isPureDef(tree)
}
-
- /** Does list of trees start with a definition of
- * a class of module with given name (ignoring imports)
- */
- override def firstDefinesClassOrObject(trees: List[Tree], name: Name): Boolean = trees match {
- case ClassDef(_, `name`, _, _) :: Nil => true
- case _ => super.firstDefinesClassOrObject(trees, name)
- }
-
- def isInterface(mods: HasFlags, body: List[Tree]) =
- mods.isTrait && (body forall isInterfaceMember)
}
diff --git a/src/compiler/scala/tools/nsc/ast/Trees.scala b/src/compiler/scala/tools/nsc/ast/Trees.scala
index 0a12737572..8391ebdafc 100644
--- a/src/compiler/scala/tools/nsc/ast/Trees.scala
+++ b/src/compiler/scala/tools/nsc/ast/Trees.scala
@@ -16,24 +16,6 @@ import scala.reflect.internal.Flags.TRAIT
import scala.compat.Platform.EOL
trait Trees extends scala.reflect.internal.Trees { self: Global =>
-
- def treeLine(t: Tree): String =
- if (t.pos.isDefined && t.pos.isRange) t.pos.lineContent.drop(t.pos.column - 1).take(t.pos.end - t.pos.start + 1)
- else t.summaryString
-
- def treeStatus(t: Tree, enclosingTree: Tree = null) = {
- val parent = if (enclosingTree eq null) " " else " P#%5s".format(enclosingTree.id)
-
- "[L%4s%8s] #%-6s %-15s %-10s // %s".format(t.pos.safeLine, parent, t.id, t.pos.show, t.shortClass, treeLine(t))
- }
- def treeSymStatus(t: Tree) = {
- val line = if (t.pos.isDefined) "line %-4s".format(t.pos.safeLine) else " "
- "#%-5s %s %-10s // %s".format(t.id, line, t.shortClass,
- if (t.symbol ne NoSymbol) "(" + t.symbol.fullLocationString + ")"
- else treeLine(t)
- )
- }
-
// --- additional cases --------------------------------------------------------
/** Only used during parsing */
case class Parens(args: List[Tree]) extends Tree
@@ -65,6 +47,13 @@ trait Trees extends scala.reflect.internal.Trees { self: Global =>
// --- factory methods ----------------------------------------------------------
+ /** Factory method for a primary constructor super call `super.<init>(args_1)...(args_n)`
+ */
+ def PrimarySuperCall(argss: List[List[Tree]]): Tree = argss match {
+ case Nil => Apply(gen.mkSuperInitCall, Nil)
+ case xs :: rest => rest.foldLeft(Apply(gen.mkSuperInitCall, xs): Tree)(Apply.apply)
+ }
+
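  // Illustrative sketch: the fold in PrimarySuperCall nests one Apply per
  // argument list, so
  //
  //   PrimarySuperCall(Nil)                       // => Apply(super.<init>, Nil)
  //   PrimarySuperCall(List(List(a, b), List(c))) // => Apply(Apply(super.<init>, List(a, b)), List(c))
  //                                               //    i.e. super.<init>(a, b)(c)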
/** Generates a template with constructor corresponding to
*
* constrmods (vparams1_) ... (vparams_n) preSuper { presupers }
@@ -82,7 +71,7 @@ trait Trees extends scala.reflect.internal.Trees { self: Global =>
* body
* }
*/
- def Template(parents: List[Tree], self: ValDef, constrMods: Modifiers, vparamss: List[List[ValDef]], argss: List[List[Tree]], body: List[Tree], superPos: Position): Template = {
+ def Template(parents: List[Tree], self: ValDef, constrMods: Modifiers, vparamss: List[List[ValDef]], body: List[Tree], superPos: Position): Template = {
/* Add constructor to template */
// create parameters for <init> as synthetic trees.
@@ -111,16 +100,22 @@ trait Trees extends scala.reflect.internal.Trees { self: Global =>
if (body forall treeInfo.isInterfaceMember) List()
else List(
atPos(wrappingPos(superPos, lvdefs)) (
- DefDef(NoMods, nme.MIXIN_CONSTRUCTOR, List(), ListOfNil, TypeTree(), Block(lvdefs, Literal(Constant())))))
+ DefDef(NoMods, nme.MIXIN_CONSTRUCTOR, List(), ListOfNil, TypeTree(), Block(lvdefs, Literal(Constant(()))))))
} else {
      // convert (implicit ... ) to ()(implicit ... ) if it's the only parameter section
if (vparamss1.isEmpty || !vparamss1.head.isEmpty && vparamss1.head.head.mods.isImplicit)
- vparamss1 = List() :: vparamss1;
- val superRef: Tree = atPos(superPos)(gen.mkSuperSelect)
- val superCall = (superRef /: argss) (Apply.apply)
+ vparamss1 = List() :: vparamss1
+ val superCall = pendingSuperCall // we can't know in advance which of the parents will end up as a superclass
+ // this requires knowing which of the parents is a type macro and which is not
+ // and that's something that cannot be found out before typer
+ // (the type macros aren't in the trunk yet, but there is a plan for them to land there soon)
+ // this means that we don't know what will be the arguments of the super call
+ // therefore here we emit a dummy which gets populated when the template is named and typechecked
List(
- atPos(wrappingPos(superPos, lvdefs ::: argss.flatten)) (
- DefDef(constrMods, nme.CONSTRUCTOR, List(), vparamss1, TypeTree(), Block(lvdefs ::: List(superCall), Literal(Constant())))))
+ // TODO: previously this was `wrappingPos(superPos, lvdefs ::: argss.flatten)`
+ // is it going to be a problem that we can no longer include the `argss`?
+ atPos(wrappingPos(superPos, lvdefs)) (
+ DefDef(constrMods, nme.CONSTRUCTOR, List(), vparamss1, TypeTree(), Block(lvdefs ::: List(superCall), Literal(Constant(()))))))
}
}
constrs foreach (ensureNonOverlapping(_, parents ::: gvdefs, focus=false))
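  // Illustrative sketch: the primary constructor emitted above now ends in the
  // placeholder, roughly
  //
  //   DefDef(constrMods, nme.CONSTRUCTOR, List(), vparamss1, TypeTree(),
  //          Block(lvdefs ::: List(pendingSuperCall), Literal(Constant(()))))
  //
  // and the typer later swaps pendingSuperCall for the real super call once the
  // actual superclass and its arguments are known.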
@@ -137,11 +132,10 @@ trait Trees extends scala.reflect.internal.Trees { self: Global =>
* @param constrMods the modifiers for the class constructor, i.e. as in `class C private (...)`
* @param vparamss the value parameters -- if they have symbols they
* should be owned by `sym`
- * @param argss the supercall arguments
* @param body the template statements without primary constructor
* and value parameter fields.
*/
- def ClassDef(sym: Symbol, constrMods: Modifiers, vparamss: List[List[ValDef]], argss: List[List[Tree]], body: List[Tree], superPos: Position): ClassDef = {
+ def ClassDef(sym: Symbol, constrMods: Modifiers, vparamss: List[List[ValDef]], body: List[Tree], superPos: Position): ClassDef = {
// "if they have symbols they should be owned by `sym`"
assert(
mforall(vparamss)(p => (p.symbol eq NoSymbol) || (p.symbol.owner == sym)),
@@ -151,7 +145,7 @@ trait Trees extends scala.reflect.internal.Trees { self: Global =>
ClassDef(sym,
Template(sym.info.parents map TypeTree,
if (sym.thisSym == sym || phase.erasedTypes) emptyValDef else ValDef(sym.thisSym),
- constrMods, vparamss, argss, body, superPos))
+ constrMods, vparamss, body, superPos))
}
// --- subcomponents --------------------------------------------------
@@ -324,6 +318,8 @@ trait Trees extends scala.reflect.internal.Trees { self: Global =>
else
super.transform {
tree match {
+ case tree if !tree.canHaveAttrs =>
+ tree
case tpt: TypeTree =>
if (tpt.original != null)
transform(tpt.original)
@@ -331,9 +327,7 @@ trait Trees extends scala.reflect.internal.Trees { self: Global =>
val refersToLocalSymbols = tpt.tpe != null && (tpt.tpe exists (tp => locals contains tp.typeSymbol))
val isInferred = tpt.wasEmpty
if (refersToLocalSymbols || isInferred) {
- val dupl = tpt.duplicate
- dupl.tpe = null
- dupl
+ tpt.duplicate.clearType()
} else {
tpt
}
@@ -382,8 +376,7 @@ trait Trees extends scala.reflect.internal.Trees { self: Global =>
val vetoThis = dupl.isInstanceOf[This] && sym.isPackageClass
if (!(vetoScope || vetoLabel || vetoThis)) dupl.symbol = NoSymbol
}
- dupl.tpe = null
- dupl
+ dupl.clearType()
}
}
}
diff --git a/src/compiler/scala/tools/nsc/ast/parser/MarkupParsers.scala b/src/compiler/scala/tools/nsc/ast/parser/MarkupParsers.scala
index 553a2088a6..832a9bf63e 100755
--- a/src/compiler/scala/tools/nsc/ast/parser/MarkupParsers.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/MarkupParsers.scala
@@ -10,10 +10,8 @@ import scala.collection.mutable
import mutable.{ Buffer, ArrayBuffer, ListBuffer }
import scala.util.control.ControlThrowable
import scala.tools.nsc.util.CharArrayReader
-import scala.reflect.internal.util.SourceFile
-import scala.xml.{ Text, TextBuffer }
+import scala.xml.TextBuffer
import scala.xml.parsing.MarkupParserCommon
-import scala.xml.Utility.{ isNameStart, isNameChar, isSpace }
import scala.reflect.internal.Chars.{ SU, LF }
// XXX/Note: many/most of the functions in here are almost direct cut and pastes
@@ -26,12 +24,6 @@ import scala.reflect.internal.Chars.{ SU, LF }
// I rewrote most of these, but not as yet the library versions: so if you are
// tempted to touch any of these, please be aware of that situation and try not
// to let it get any worse. -- paulp
-
-/** This trait ...
- *
- * @author Burak Emir
- * @version 1.0
- */
trait MarkupParsers {
self: Parsers =>
@@ -51,7 +43,7 @@ trait MarkupParsers {
class MarkupParser(parser: SourceFileParser, final val preserveWS: Boolean) extends MarkupParserCommon {
- import Tokens.{ EMPTY, LBRACE, RBRACE }
+ import Tokens.{ LBRACE, RBRACE }
type PositionType = Position
type InputType = CharArrayReader
@@ -89,7 +81,7 @@ trait MarkupParsers {
var xEmbeddedBlock = false
- private var debugLastStartElement = new mutable.Stack[(Int, String)]
+ private val debugLastStartElement = new mutable.Stack[(Int, String)]
private def debugLastPos = debugLastStartElement.top._1
private def debugLastElem = debugLastStartElement.top._2
@@ -107,7 +99,7 @@ trait MarkupParsers {
*/
def xCheckEmbeddedBlock: Boolean = {
// attentions, side-effect, used in xText
- xEmbeddedBlock = (ch == '{') && { nextch; (ch != '{') }
+ xEmbeddedBlock = (ch == '{') && { nextch(); (ch != '{') }
xEmbeddedBlock
}
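    // Illustrative sketch: the side effect above consumes one '{' and peeks at
    // the following character, so '{' followed by anything else starts an
    // embedded Scala block, while "{{" is treated as an escaped literal brace:
    //
    //   "{ expr }"  // ch == '{', next != '{'  => xEmbeddedBlock = true
    //   "{{"        // ch == '{', next == '{'  => xEmbeddedBlock = false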
@@ -123,8 +115,7 @@ trait MarkupParsers {
while (isNameStart(ch)) {
val start = curOffset
val key = xName
- xEQ
- val delim = ch
+ xEQ()
val mid = curOffset
val value: Tree = ch match {
case '"' | '\'' =>
@@ -137,7 +128,7 @@ trait MarkupParsers {
}
case '{' =>
- nextch
+ nextch()
xEmbeddedExpr
case SU =>
throw TruncatedXMLControl
@@ -150,7 +141,7 @@ trait MarkupParsers {
aMap(key) = value
if (ch != '/' && ch != '>')
- xSpace
+ xSpace()
}
aMap
}
@@ -193,10 +184,10 @@ trait MarkupParsers {
* @precond ch == '&'
*/
def content_AMP(ts: ArrayBuffer[Tree]) {
- nextch
+ nextch()
val toAppend = ch match {
case '#' => // CharacterRef
- nextch
+ nextch()
val theChar = handle.text(tmppos, xCharRef)
xToken(';')
theChar
@@ -219,17 +210,14 @@ trait MarkupParsers {
/** Returns true if it encounters an end tag (without consuming it),
* appends trees to ts as side-effect.
- *
- * @param ts ...
- * @return ...
*/
private def content_LT(ts: ArrayBuffer[Tree]): Boolean = {
if (ch == '/')
return true // end tag
val toAppend = ch match {
- case '!' => nextch ; if (ch =='[') xCharData else xComment // CDATA or Comment
- case '?' => nextch ; xProcInstr // PI
+ case '!' => nextch() ; if (ch =='[') xCharData else xComment // CDATA or Comment
+ case '?' => nextch() ; xProcInstr // PI
case _ => element // child node
}
@@ -246,7 +234,7 @@ trait MarkupParsers {
tmppos = o2p(curOffset)
ch match {
// end tag, cdata, comment, pi or child node
- case '<' => nextch ; if (content_LT(ts)) return ts
+ case '<' => nextch() ; if (content_LT(ts)) return ts
// either the character '{' or an embedded scala block }
case '{' => content_BRACE(tmppos, ts) // }
// EntityRef or CharRef
@@ -268,7 +256,7 @@ trait MarkupParsers {
val (qname, attrMap) = xTag(())
if (ch == '/') { // empty element
xToken("/>")
- handle.element(r2p(start, start, curOffset), qname, attrMap, true, new ListBuffer[Tree])
+ handle.element(r2p(start, start, curOffset), qname, attrMap, empty = true, new ListBuffer[Tree])
}
else { // handle content
xToken('>')
@@ -278,11 +266,11 @@ trait MarkupParsers {
debugLastStartElement.push((start, qname))
val ts = content
xEndTag(qname)
- debugLastStartElement.pop
+ debugLastStartElement.pop()
val pos = r2p(start, start, curOffset)
qname match {
case "xml:group" => handle.group(pos, ts)
- case _ => handle.element(pos, qname, attrMap, false, ts)
+ case _ => handle.element(pos, qname, attrMap, empty = false, ts)
}
}
}
@@ -297,12 +285,12 @@ trait MarkupParsers {
while (ch != SU) {
if (ch == '}') {
- if (charComingAfter(nextch) == '}') nextch
+ if (charComingAfter(nextch()) == '}') nextch()
else errorBraces()
}
buf append ch
- nextch
+ nextch()
if (xCheckEmbeddedBlock || ch == '<' || ch == '&')
return done
}
@@ -349,12 +337,12 @@ trait MarkupParsers {
content_LT(ts)
// parse more XML ?
- if (charComingAfter(xSpaceOpt) == '<') {
- xSpaceOpt
+ if (charComingAfter(xSpaceOpt()) == '<') {
+ xSpaceOpt()
while (ch == '<') {
- nextch
+ nextch()
ts append element
- xSpaceOpt
+ xSpaceOpt()
}
handle.makeXMLseq(r2p(start, start, curOffset), ts)
}
@@ -375,7 +363,7 @@ trait MarkupParsers {
saving[Boolean, Tree](handle.isPattern, handle.isPattern = _) {
handle.isPattern = true
val tree = xPattern
- xSpaceOpt
+ xSpaceOpt()
tree
}
},
@@ -410,13 +398,13 @@ trait MarkupParsers {
* | Name [S] '/' '>'
*/
def xPattern: Tree = {
- var start = curOffset
+ val start = curOffset
val qname = xName
debugLastStartElement.push((start, qname))
- xSpaceOpt
+ xSpaceOpt()
val ts = new ArrayBuffer[Tree]
- val isEmptyTag = (ch == '/') && { nextch ; true }
+ val isEmptyTag = (ch == '/') && { nextch() ; true }
xToken('>')
if (!isEmptyTag) {
@@ -426,13 +414,13 @@ trait MarkupParsers {
if (xEmbeddedBlock) ts ++= xScalaPatterns
else ch match {
case '<' => // tag
- nextch
+ nextch()
if (ch != '/') ts append xPattern // child
else return false // terminate
case '{' => // embedded Scala patterns
while (ch == '{') {
- nextch
+ nextch()
ts ++= xScalaPatterns
}
assert(!xEmbeddedBlock, "problem with embedded block")
@@ -450,7 +438,7 @@ trait MarkupParsers {
while (doPattern) { } // call until false
xEndTag(qname)
- debugLastStartElement.pop
+ debugLastStartElement.pop()
}
handle.makeXMLpat(r2p(start, start, curOffset), qname, ts)
diff --git a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
index 50398824e1..fc532f5d44 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
@@ -9,7 +9,8 @@
package scala.tools.nsc
package ast.parser
-import scala.collection.mutable.{ListBuffer, StringBuilder}
+import scala.collection.{ mutable, immutable }
+import mutable.{ ListBuffer, StringBuilder }
import scala.reflect.internal.{ ModifierFlags => Flags }
import scala.reflect.internal.Chars.{ isScalaLetter }
import scala.reflect.internal.util.{ SourceFile, OffsetPosition }
@@ -29,6 +30,9 @@ trait ParsersCommon extends ScannersCommon {
val global : Global
import global._
+ def newLiteral(const: Any) = Literal(Constant(const))
+ def literalUnit = newLiteral(())
+
/** This is now an abstract class, only to work around the optimizer:
* methods in traits are never inlined.
*/
@@ -56,7 +60,7 @@ trait ParsersCommon extends ScannersCommon {
if (in.token == LPAREN) inParens(body)
else { accept(LPAREN) ; alt }
- @inline final def inParensOrUnit[T](body: => Tree): Tree = inParensOrError(body, Literal(Constant()))
+ @inline final def inParensOrUnit[T](body: => Tree): Tree = inParensOrError(body, literalUnit)
@inline final def inParensOrNil[T](body: => List[T]): List[T] = inParensOrError(body, Nil)
@inline final def inBraces[T](body: => T): T = {
@@ -70,7 +74,7 @@ trait ParsersCommon extends ScannersCommon {
else { accept(LBRACE) ; alt }
@inline final def inBracesOrNil[T](body: => List[T]): List[T] = inBracesOrError(body, Nil)
- @inline final def inBracesOrUnit[T](body: => Tree): Tree = inBracesOrError(body, Literal(Constant()))
+ @inline final def inBracesOrUnit[T](body: => Tree): Tree = inBracesOrError(body, literalUnit)
@inline final def dropAnyBraces[T](body: => T): T =
if (in.token == LBRACE) inBraces(body)
else body
@@ -94,7 +98,7 @@ trait ParsersCommon extends ScannersCommon {
* <ol>
* <li>
* Places all pattern variables in Bind nodes. In a pattern, for
- * identifiers <code>x</code>:<pre>
+ * identifiers `x`:<pre>
* x => x @ _
* x:T => x @ (_ : T)</pre>
* </li>
@@ -141,9 +145,9 @@ self =>
if (source.isSelfContained) () => compilationUnit()
else () => scriptBody()
- def newScanner = new SourceFileScanner(source)
+ def newScanner(): Scanner = new SourceFileScanner(source)
- val in = newScanner
+ val in = newScanner()
in.init()
private val globalFresh = new FreshNameCreator.Default
@@ -167,11 +171,10 @@ self =>
object symbXMLBuilder extends SymbolicXMLBuilder(this, preserveWS = true) { // DEBUG choices
val global: self.global.type = self.global
- def freshName(prefix: String): Name = SourceFileParser.this.freshName(prefix)
}
- def xmlLiteral : Tree = xmlp.xLiteral
- def xmlLiteralPattern : Tree = xmlp.xLiteralPattern
+ def xmlLiteral() : Tree = xmlp.xLiteral
+ def xmlLiteralPattern() : Tree = xmlp.xLiteralPattern
}
class OutlineParser(source: SourceFile) extends SourceFileParser(source) {
@@ -196,10 +199,9 @@ self =>
}
class UnitParser(val unit: global.CompilationUnit, patches: List[BracePatch]) extends SourceFileParser(unit.source) {
+ def this(unit: global.CompilationUnit) = this(unit, Nil)
- def this(unit: global.CompilationUnit) = this(unit, List())
-
- override def newScanner = new UnitScanner(unit, patches)
+ override def newScanner() = new UnitScanner(unit, patches)
override def freshTermName(prefix: String): TermName = unit.freshTermName(prefix)
override def freshTypeName(prefix: String): TypeName = unit.freshTypeName(prefix)
@@ -219,6 +221,7 @@ self =>
try body
finally smartParsing = saved
}
+ def withPatches(patches: List[BracePatch]): UnitParser = new UnitParser(unit, patches)
val syntaxErrors = new ListBuffer[(Int, String)]
def showSyntaxErrors() =
@@ -244,7 +247,7 @@ self =>
if (syntaxErrors.isEmpty) firstTry
else in.healBraces() match {
case Nil => showSyntaxErrors() ; firstTry
- case patches => new UnitParser(unit, patches).parse()
+ case patches => (this withPatches patches).parse()
}
}
}
@@ -299,11 +302,7 @@ self =>
inScalaPackage = false
currentPackage = ""
}
- private lazy val primitiveNames: Set[Name] = tpnme.ScalaValueNames.toSet
-
- private def inScalaRootPackage = inScalaPackage && currentPackage == "scala"
- private def isScalaArray(name: Name) = inScalaRootPackage && name == tpnme.Array
- private def isPrimitiveType(name: Name) = inScalaRootPackage && primitiveNames(name)
+ private def inScalaRootPackage = inScalaPackage && currentPackage == "scala"
def parseStartRule: () => Tree
@@ -325,47 +324,47 @@ self =>
accept(EOF)
def mainModuleName = newTermName(settings.script.value)
- /** If there is only a single object template in the file and it has a
- * suitable main method, we will use it rather than building another object
- * around it. Since objects are loaded lazily the whole script would have
- * been a no-op, so we're not taking much liberty.
+ /* If there is only a single object template in the file and it has a
+ * suitable main method, we will use it rather than building another object
+ * around it. Since objects are loaded lazily the whole script would have
+ * been a no-op, so we're not taking much liberty.
*/
def searchForMain(): Option[Tree] = {
- /** Have to be fairly liberal about what constitutes a main method since
- * nothing has been typed yet - for instance we can't assume the parameter
- * type will look exactly like "Array[String]" as it could have been renamed
- * via import, etc.
+ /* Have to be fairly liberal about what constitutes a main method since
+ * nothing has been typed yet - for instance we can't assume the parameter
+ * type will look exactly like "Array[String]" as it could have been renamed
+ * via import, etc.
*/
def isMainMethod(t: Tree) = t match {
case DefDef(_, nme.main, Nil, List(_), _, _) => true
case _ => false
}
- /** For now we require there only be one top level object. */
+ /* For now we require there only be one top level object. */
var seenModule = false
val newStmts = stmts collect {
case t @ Import(_, _) => t
case md @ ModuleDef(mods, name, template) if !seenModule && (md exists isMainMethod) =>
seenModule = true
- /** This slightly hacky situation arises because we have no way to communicate
- * back to the scriptrunner what the name of the program is. Even if we were
- * willing to take the sketchy route of settings.script.value = progName, that
- * does not work when using fsc. And to find out in advance would impose a
- * whole additional parse. So instead, if the actual object's name differs from
- * what the script is expecting, we transform it to match.
+ /* This slightly hacky situation arises because we have no way to communicate
+ * back to the scriptrunner what the name of the program is. Even if we were
+ * willing to take the sketchy route of settings.script.value = progName, that
+ * does not work when using fsc. And to find out in advance would impose a
+ * whole additional parse. So instead, if the actual object's name differs from
+ * what the script is expecting, we transform it to match.
*/
if (name == mainModuleName) md
else treeCopy.ModuleDef(md, mods, mainModuleName, template)
case _ =>
- /** If we see anything but the above, fail. */
+ /* If we see anything but the above, fail. */
return None
}
- Some(makePackaging(0, emptyPkg, newStmts))
+ Some(makeEmptyPackage(0, newStmts))
}
if (mainModuleName == newTermName(ScriptRunner.defaultScriptMain))
searchForMain() foreach { return _ }
- /** Here we are building an AST representing the following source fiction,
+ /* Here we are building an AST representing the following source fiction,
* where `moduleName` is from -Xscript (defaults to "Main") and <stmts> are
* the result of parsing the script file.
*
@@ -380,16 +379,13 @@ self =>
* }
* }}}
*/
- import definitions._
-
- def emptyPkg = atPos(0, 0, 0) { Ident(nme.EMPTY_PACKAGE_NAME) }
def emptyInit = DefDef(
NoMods,
nme.CONSTRUCTOR,
Nil,
ListOfNil,
TypeTree(),
- Block(List(Apply(gen.mkSuperSelect, Nil)), Literal(Constant(())))
+ Block(List(Apply(gen.mkSuperInitCall, Nil)), literalUnit)
)
// def main
@@ -400,11 +396,11 @@ self =>
// object Main
def moduleName = newTermName(ScriptRunner scriptMain settings)
- def moduleBody = Template(List(atPos(o2p(in.offset))(scalaAnyRefConstr)), emptyValDef, List(emptyInit, mainDef))
+ def moduleBody = Template(atInPos(scalaAnyRefConstr) :: Nil, emptyValDef, List(emptyInit, mainDef))
def moduleDef = ModuleDef(NoMods, moduleName, moduleBody)
// package <empty> { ... }
- makePackaging(0, emptyPkg, List(moduleDef))
+ makeEmptyPackage(0, moduleDef :: Nil)
}
/* --------------- PLACEHOLDERS ------------------------------------------- */
@@ -429,13 +425,13 @@ self =>
placeholderParams match {
case vd :: _ =>
- syntaxError(vd.pos, "unbound placeholder parameter", false)
+ syntaxError(vd.pos, "unbound placeholder parameter", skipIt = false)
placeholderParams = List()
case _ =>
}
placeholderTypes match {
case td :: _ =>
- syntaxError(td.pos, "unbound wildcard type", false)
+ syntaxError(td.pos, "unbound wildcard type", skipIt = false)
placeholderTypes = List()
case _ =>
}
@@ -468,7 +464,7 @@ self =>
/* ------------- ERROR HANDLING ------------------------------------------- */
- var assumedClosingParens = scala.collection.mutable.Map(RPAREN -> 0, RBRACKET -> 0, RBRACE -> 0)
+ val assumedClosingParens = mutable.Map(RPAREN -> 0, RBRACKET -> 0, RBRACE -> 0)
private var inFunReturnType = false
@inline private def fromWithinReturnType[T](body: => T): T = {
@@ -534,6 +530,10 @@ self =>
else
syntaxError(in.offset, msg, skipIt)
}
+ def syntaxErrorOrIncompleteAnd[T](msg: String, skipIt: Boolean)(and: T): T = {
+ syntaxErrorOrIncomplete(msg, skipIt)
+ and
+ }
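    // Illustrative usage (as at the call sites later in this patch): the helper
    // reports the error and then yields the supplied fallback value, e.g.
    //
    //   syntaxErrorOrIncompleteAnd("illegal literal", skipIt = true)(null)
    //   syntaxErrorOrIncompleteAnd("expected start of definition", skipIt = true)(EmptyTree)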
def expectedMsg(token: Int): String =
token2string(token) + " expected but " +token2string(in.token) + " found."
@@ -542,7 +542,7 @@ self =>
def accept(token: Int): Int = {
val offset = in.offset
if (in.token != token) {
- syntaxErrorOrIncomplete(expectedMsg(token), false)
+ syntaxErrorOrIncomplete(expectedMsg(token), skipIt = false)
if ((token == RPAREN || token == RBRACE || token == RBRACKET))
if (in.parenBalance(token) + assumedClosingParens(token) < 0)
assumedClosingParens(token) += 1
@@ -568,16 +568,16 @@ self =>
if (!isStatSeqEnd)
acceptStatSep()
- def errorTypeTree = TypeTree() setType ErrorType setPos o2p(in.offset)
- def errorTermTree = Literal(Constant(null)) setPos o2p(in.offset)
- def errorPatternTree = Ident(nme.WILDCARD) setPos o2p(in.offset)
+ def errorTypeTree = setInPos(TypeTree() setType ErrorType)
+ def errorTermTree = setInPos(newLiteral(null))
+ def errorPatternTree = setInPos(Ident(nme.WILDCARD))
/** Check that type parameter is not by name or repeated. */
def checkNotByNameOrVarargs(tpt: Tree) = {
if (treeInfo isByNameParamType tpt)
- syntaxError(tpt.pos, "no by-name parameter type allowed here", false)
+ syntaxError(tpt.pos, "no by-name parameter type allowed here", skipIt = false)
else if (treeInfo isRepeatedParamType tpt)
- syntaxError(tpt.pos, "no * parameter type allowed here", false)
+ syntaxError(tpt.pos, "no * parameter type allowed here", skipIt = false)
}
/** Check that tree is a legal clause of a forSome. */
@@ -586,7 +586,7 @@ self =>
ValDef(_, _, _, EmptyTree) | EmptyTree =>
;
case _ =>
- syntaxError(t.pos, "not a legal existential clause", false)
+ syntaxError(t.pos, "not a legal existential clause", skipIt = false)
}
/* -------------- TOKEN CLASSES ------------------------------------------- */
@@ -612,6 +612,10 @@ self =>
}
def isDefIntro = isTemplateIntro || isDclIntro
+ def isTopLevelIntro = in.token match {
+ case PACKAGE | IMPORT | AT => true
+ case _ => isTemplateIntro || isModifier
+ }
def isNumericLit: Boolean = in.token match {
case INTLIT | LONGLIT | FLOATLIT | DOUBLELIT => true
@@ -645,8 +649,6 @@ self =>
case _ => false
}
- def isTypeIntro: Boolean = isTypeIntroToken(in.token)
-
def isStatSeqEnd = in.token == RBRACE || in.token == EOF
def isStatSep(token: Int): Boolean =
@@ -657,31 +659,10 @@ self =>
/* --------- COMMENT AND ATTRIBUTE COLLECTION ----------------------------- */
- /** Join the comment associated with a definition. */
- def joinComment(trees: => List[Tree]): List[Tree] = {
- val doc = in.flushDoc
- if ((doc ne null) && doc.raw.length > 0) {
- val joined = trees map {
- t =>
- DocDef(doc, t) setPos {
- if (t.pos.isDefined) {
- val pos = doc.pos.withEnd(t.pos.endOrPoint)
- // always make the position transparent
- pos.makeTransparent
- } else {
- t.pos
- }
- }
- }
- joined.find(_.pos.isOpaqueRange) foreach {
- main =>
- val mains = List(main)
- joined foreach { t => if (t ne main) ensureNonOverlapping(t, mains) }
- }
- joined
- }
- else trees
- }
+ /** A hook for joining the comment associated with a definition.
+ * Overridden by scaladoc.
+ */
+ def joinComment(trees: => List[Tree]): List[Tree] = trees
/* ---------- TREE CONSTRUCTION ------------------------------------------- */
@@ -694,6 +675,9 @@ self =>
def atPos[T <: Tree](pos: Position)(t: T): T =
global.atPos(pos)(t)
+ def atInPos[T <: Tree](t: T): T = atPos(o2p(in.offset))(t)
+ def setInPos[T <: Tree](t: T): T = t setPos o2p(in.offset)
+
/** Convert tree to formal parameter list. */
def convertToParams(tree: Tree): List[ValDef] = tree match {
case Parens(ts) => ts map convertToParam
@@ -708,12 +692,12 @@ self =>
tree match {
case Ident(name) =>
removeAsPlaceholder(name)
- makeParam(name, TypeTree() setPos o2p(tree.pos.endOrPoint))
+ makeParam(name.toTermName, TypeTree() setPos o2p(tree.pos.endOrPoint))
case Typed(Ident(name), tpe) if tpe.isType => // get the ident!
removeAsPlaceholder(name)
- makeParam(name, tpe)
+ makeParam(name.toTermName, tpe)
case _ =>
- syntaxError(tree.pos, "not a legal formal parameter", false)
+ syntaxError(tree.pos, "not a legal formal parameter", skipIt = false)
makeParam(nme.ERROR, errorTypeTree setPos o2p(tree.pos.endOrPoint))
}
}
@@ -721,7 +705,7 @@ self =>
/** Convert (qual)ident to type identifier. */
def convertToTypeId(tree: Tree): Tree = atPos(tree.pos) {
convertToTypeName(tree) getOrElse {
- syntaxError(tree.pos, "identifier expected", false)
+ syntaxError(tree.pos, "identifier expected", skipIt = false)
errorTypeTree
}
}
@@ -770,14 +754,10 @@ self =>
}
}
- def checkSize(kind: String, size: Int, max: Int) {
- if (size > max) syntaxError("too many "+kind+", maximum = "+max, false)
- }
-
def checkAssoc(offset: Int, op: Name, leftAssoc: Boolean) =
if (treeInfo.isLeftAssoc(op) != leftAssoc)
syntaxError(
- offset, "left- and right-associative operators with same precedence may not be mixed", false)
+ offset, "left- and right-associative operators with same precedence may not be mixed", skipIt = false)
def reduceStack(isExpr: Boolean, base: List[OpInfo], top0: Tree, prec: Int, leftAssoc: Boolean): Tree = {
var top = top0
@@ -794,7 +774,7 @@ self =>
val rPos = top.pos
val end = if (rPos.isDefined) rPos.endOrPoint else opPos.endOrPoint
top = atPos(start, opinfo.offset, end) {
- makeBinop(isExpr, opinfo.operand, opinfo.operator, top, opPos)
+ makeBinop(isExpr, opinfo.operand, opinfo.operator.toTermName, top, opPos)
}
}
top
@@ -918,12 +898,12 @@ self =>
* }}}
*/
def compoundType(): Tree = compoundTypeRest(
- if (in.token == LBRACE) atPos(o2p(in.offset))(scalaAnyRefConstr)
+ if (in.token == LBRACE) atInPos(scalaAnyRefConstr)
else annotType()
)
def compoundTypeRest(t: Tree): Tree = {
- var ts = new ListBuffer[Tree] += t
+ val ts = new ListBuffer[Tree] += t
while (in.token == WITH) {
in.nextToken()
ts += annotType()
@@ -979,12 +959,11 @@ self =>
}
/** Assumed (provisionally) to be TermNames. */
- def ident(skipIt: Boolean): Name =
+ def ident(skipIt: Boolean): Name = (
if (isIdent) rawIdent().encode
- else {
- syntaxErrorOrIncomplete(expectedMsg(IDENTIFIER), skipIt)
- nme.ERROR
- }
+ else syntaxErrorOrIncompleteAnd(expectedMsg(IDENTIFIER), skipIt)(nme.ERROR)
+ )
+
def ident(): Name = ident(skipIt = true)
def rawIdent(): Name = try in.name finally in.nextToken()
@@ -1108,44 +1087,27 @@ self =>
* | null
* }}}
*/
- def literal(isNegated: Boolean = false, inPattern: Boolean = false, start: Int = in.offset): Tree = {
- atPos(start) {
- def finish(value: Any): Tree = {
- val t = Literal(Constant(value))
- in.nextToken()
- t
- }
- if (in.token == SYMBOLLIT)
- Apply(scalaDot(nme.Symbol), List(finish(in.strVal)))
- else if (in.token == INTERPOLATIONID)
- interpolatedString(inPattern = inPattern)
- else finish(in.token match {
- case CHARLIT => in.charVal
- case INTLIT => in.intVal(isNegated).toInt
- case LONGLIT => in.intVal(isNegated)
- case FLOATLIT => in.floatVal(isNegated).toFloat
- case DOUBLELIT => in.floatVal(isNegated)
- case STRINGLIT | STRINGPART => in.strVal.intern()
- case TRUE => true
- case FALSE => false
- case NULL => null
- case _ =>
- syntaxErrorOrIncomplete("illegal literal", true)
- null
- })
- }
- }
-
- private def stringOp(t: Tree, op: TermName) = {
- val str = in.strVal
- in.nextToken()
- if (str.length == 0) t
- else atPos(t.pos.startOrPoint) {
- Apply(Select(t, op), List(Literal(Constant(str))))
- }
+ def literal(isNegated: Boolean = false, inPattern: Boolean = false, start: Int = in.offset): Tree = atPos(start) {
+ def finish(value: Any): Tree = try newLiteral(value) finally in.nextToken()
+ if (in.token == SYMBOLLIT)
+ Apply(scalaDot(nme.Symbol), List(finish(in.strVal)))
+ else if (in.token == INTERPOLATIONID)
+ interpolatedString(inPattern = inPattern)
+ else finish(in.token match {
+ case CHARLIT => in.charVal
+ case INTLIT => in.intVal(isNegated).toInt
+ case LONGLIT => in.intVal(isNegated)
+ case FLOATLIT => in.floatVal(isNegated).toFloat
+ case DOUBLELIT => in.floatVal(isNegated)
+ case STRINGLIT | STRINGPART => in.strVal.intern()
+ case TRUE => true
+ case FALSE => false
+ case NULL => null
+ case _ => syntaxErrorOrIncompleteAnd("illegal literal", skipIt = true)(null)
+ })
}
- private def interpolatedString(inPattern: Boolean = false): Tree = atPos(in.offset) {
+ private def interpolatedString(inPattern: Boolean): Tree = atPos(in.offset) {
val start = in.offset
val interpolator = in.name
@@ -1154,18 +1116,15 @@ self =>
in.nextToken()
while (in.token == STRINGPART) {
partsBuf += literal()
- exprBuf += {
+ exprBuf += (
if (inPattern) dropAnyBraces(pattern())
- else {
- if (in.token == IDENTIFIER) atPos(in.offset)(Ident(ident()))
- else if(in.token == LBRACE) expr()
- else if(in.token == THIS) { in.nextToken(); atPos(in.offset)(This(tpnme.EMPTY)) }
- else {
- syntaxErrorOrIncomplete("error in interpolated string: identifier or block expected", true)
- EmptyTree
- }
+ else in.token match {
+ case IDENTIFIER => atPos(in.offset)(Ident(ident()))
+ case LBRACE => expr()
+ case THIS => in.nextToken(); atPos(in.offset)(This(tpnme.EMPTY))
+ case _ => syntaxErrorOrIncompleteAnd("error in interpolated string: identifier or block expected", skipIt = true)(EmptyTree)
}
- }
+ )
}
if (in.token == STRINGLIT) partsBuf += literal()
@@ -1229,15 +1188,6 @@ self =>
/* ----------- EXPRESSIONS ------------------------------------------------ */
- /** {{{
- * EqualsExpr ::= `=' Expr
- * }}}
- */
- def equalsExpr(): Tree = {
- accept(EQUALS)
- expr()
- }
-
def condExpr(): Tree = {
if (in.token == LPAREN) {
in.nextToken()
@@ -1246,7 +1196,7 @@ self =>
r
} else {
accept(LPAREN)
- Literal(Constant(true))
+ newLiteral(true)
}
}
@@ -1281,7 +1231,7 @@ self =>
def expr(): Tree = expr(Local)
def expr(location: Int): Tree = {
- var savedPlaceholderParams = placeholderParams
+ val savedPlaceholderParams = placeholderParams
placeholderParams = List()
var res = expr0(location)
if (!placeholderParams.isEmpty && !isWildcard(res)) {
@@ -1300,7 +1250,7 @@ self =>
newLinesOpt()
val thenp = expr()
val elsep = if (in.token == ELSE) { in.nextToken(); expr() }
- else Literal(Constant())
+ else literalUnit
If(cond, thenp, elsep)
}
parseIf
@@ -1342,14 +1292,13 @@ self =>
parseWhile
case DO =>
def parseDo = {
- val start = in.offset
atPos(in.skipToken()) {
val lname: Name = freshTermName(nme.DO_WHILE_PREFIX)
val body = expr()
if (isStatSep) in.nextToken()
accept(WHILE)
val cond = condExpr()
- makeDoWhile(lname, body, cond)
+ makeDoWhile(lname.toTermName, body, cond)
}
}
parseDo
@@ -1375,7 +1324,7 @@ self =>
case RETURN =>
def parseReturn =
atPos(in.skipToken()) {
- Return(if (isExprIntro) expr() else Literal(Constant()))
+ Return(if (isExprIntro) expr() else literalUnit)
}
parseReturn
case THROW =>
@@ -1407,7 +1356,7 @@ self =>
Typed(t, atPos(uscorePos) { Ident(tpnme.WILDCARD_STAR) })
}
} else {
- syntaxErrorOrIncomplete("`*' expected", true)
+ syntaxErrorOrIncomplete("`*' expected", skipIt = true)
}
} else if (in.token == AT) {
t = (t /: annotations(skipNewLines = false))(makeAnnotated)
@@ -1507,7 +1456,7 @@ self =>
def prefixExpr(): Tree = {
if (isUnaryOp) {
atPos(in.offset) {
- val name = nme.toUnaryName(rawIdent())
+ val name = nme.toUnaryName(rawIdent().toTermName)
if (name == nme.UNARY_- && isNumericLit)
simpleExprRest(literal(isNegated = true), canApply = true)
else
@@ -1545,11 +1494,11 @@ self =>
val pname = freshName("x$")
in.nextToken()
val id = atPos(start) (Ident(pname))
- val param = atPos(id.pos.focus){ makeSyntheticParam(pname) }
+ val param = atPos(id.pos.focus){ makeSyntheticParam(pname.toTermName) }
placeholderParams = param :: placeholderParams
id
case LPAREN =>
- atPos(in.offset)(makeParens(commaSeparated(expr)))
+ atPos(in.offset)(makeParens(commaSeparated(expr())))
case LBRACE =>
canApply = false
blockExpr()
@@ -1558,12 +1507,11 @@ self =>
val nstart = in.skipToken()
val npos = r2p(nstart, nstart, in.lastOffset)
val tstart = in.offset
- val (parents, argss, self, stats) = template(isTrait = false)
+ val (parents, self, stats) = template()
val cpos = r2p(tstart, tstart, in.lastOffset max tstart)
- makeNew(parents, self, stats, argss, npos, cpos)
+ makeNew(parents, self, stats, npos, cpos)
case _ =>
- syntaxErrorOrIncomplete("illegal start of simple expression", true)
- errorTermTree
+ syntaxErrorOrIncompleteAnd("illegal start of simple expression", skipIt = true)(errorTermTree)
}
simpleExprRest(t, canApply = canApply)
}
@@ -1614,14 +1562,9 @@ self =>
* }}}
*/
def argumentExprs(): List[Tree] = {
- def args(): List[Tree] = commaSeparated {
- val maybeNamed = isIdent
- expr() match {
- case a @ Assign(id, rhs) if maybeNamed =>
- atPos(a.pos) { AssignOrNamedArg(id, rhs) }
- case e => e
- }
- }
+ def args(): List[Tree] = commaSeparated(
+ if (isIdent) treeInfo.assignmentToMaybeNamedArg(expr()) else expr()
+ )
in.token match {
case LBRACE => List(blockExpr())
case LPAREN => inParens(if (in.token == RPAREN) Nil else args())
@@ -1805,7 +1748,6 @@ self =>
* }}}
*/
def pattern2(): Tree = {
- val nameOffset = in.offset
val p = pattern3()
if (in.token != AT) p
@@ -1889,8 +1831,7 @@ self =>
} else {
badStart
}
- syntaxErrorOrIncomplete(msg, skip)
- errorPatternTree
+ syntaxErrorOrIncompleteAnd(msg, skip)(errorPatternTree)
}
/** {{{
@@ -1906,19 +1847,15 @@ self =>
*
* XXX: Hook for IDE
*/
- def simplePattern(): Tree = {
+ def simplePattern(): Tree = (
// simple diagnostics for this entry point
- def badStart(): Tree = {
- syntaxErrorOrIncomplete("illegal start of simple pattern", true)
- errorPatternTree
- }
- simplePattern(badStart)
- }
+ simplePattern(() => syntaxErrorOrIncompleteAnd("illegal start of simple pattern", skipIt = true)(errorPatternTree))
+ )
def simplePattern(onError: () => Tree): Tree = {
val start = in.offset
in.token match {
case IDENTIFIER | BACKQUOTED_IDENT | THIS =>
- var t = stableId()
+ val t = stableId()
in.token match {
case INTLIT | LONGLIT | FLOATLIT | DOUBLELIT =>
t match {
@@ -1980,7 +1917,6 @@ self =>
/** Default entry points into some pattern contexts. */
def pattern(): Tree = noSeq.pattern()
- def patterns(): List[Tree] = noSeq.patterns()
def seqPatterns(): List[Tree] = seqOK.patterns()
def xmlSeqPatterns(): List[Tree] = xmlSeqOK.patterns() // Called from xml parser
def argumentPatterns(): List[Tree] = inParens {
@@ -1994,16 +1930,16 @@ self =>
/** Drop `private` modifier when followed by a qualifier.
* Contract `abstract` and `override` to ABSOVERRIDE
*/
- private def normalize(mods: Modifiers): Modifiers =
+ private def normalizeModifers(mods: Modifiers): Modifiers =
if (mods.isPrivate && mods.hasAccessBoundary)
- normalize(mods &~ Flags.PRIVATE)
+ normalizeModifers(mods &~ Flags.PRIVATE)
else if (mods hasAllFlags (Flags.ABSTRACT | Flags.OVERRIDE))
- normalize(mods &~ (Flags.ABSTRACT | Flags.OVERRIDE) | Flags.ABSOVERRIDE)
+ normalizeModifers(mods &~ (Flags.ABSTRACT | Flags.OVERRIDE) | Flags.ABSOVERRIDE)
else
mods
private def addMod(mods: Modifiers, mod: Long, pos: Position): Modifiers = {
- if (mods hasFlag mod) syntaxError(in.offset, "repeated modifier", false)
+ if (mods hasFlag mod) syntaxError(in.offset, "repeated modifier", skipIt = false)
in.nextToken()
(mods | mod) withPosition (mod, pos)
}
@@ -2020,7 +1956,7 @@ self =>
if (in.token == LBRACKET) {
in.nextToken()
if (mods.hasAccessBoundary)
- syntaxError("duplicate private/protected qualifier", false)
+ syntaxError("duplicate private/protected qualifier", skipIt = false)
result = if (in.token == THIS) { in.nextToken(); mods | Flags.LOCAL }
else Modifiers(mods.flags, identForType())
accept(RBRACKET)
@@ -2043,7 +1979,7 @@ self =>
* AccessModifier ::= (private | protected) [AccessQualifier]
* }}}
*/
- def accessModifierOpt(): Modifiers = normalize {
+ def accessModifierOpt(): Modifiers = normalizeModifers {
in.token match {
case m @ (PRIVATE | PROTECTED) => in.nextToken() ; accessQualifierOpt(Modifiers(flagTokens(m)))
case _ => NoMods
@@ -2057,7 +1993,7 @@ self =>
* | override
* }}}
*/
- def modifiers(): Modifiers = normalize {
+ def modifiers(): Modifiers = normalizeModifers {
def loop(mods: Modifiers): Modifiers = in.token match {
case PRIVATE | PROTECTED =>
loop(accessQualifierOpt(addMod(mods, flagTokens(in.token), tokenRange(in))))
@@ -2102,7 +2038,7 @@ self =>
def annotationExpr(): Tree = atPos(in.offset) {
val t = exprSimpleType()
if (in.token == LPAREN) New(t, multipleArgumentExprs())
- else New(t, ListOfNil)
+ else New(t, Nil)
}
/* -------- PARAMETERS ------------------------------------------- */
@@ -2127,10 +2063,10 @@ self =>
var mods = Modifiers(Flags.PARAM)
if (owner.isTypeName) {
mods = modifiers() | Flags.PARAMACCESSOR
- if (mods.isLazy) syntaxError("lazy modifier not allowed here. Use call-by-name parameters instead", false)
+ if (mods.isLazy) syntaxError("lazy modifier not allowed here. Use call-by-name parameters instead", skipIt = false)
in.token match {
case v @ (VAL | VAR) =>
- mods = mods withPosition (in.token, tokenRange(in))
+ mods = mods withPosition (in.token.toLong, tokenRange(in))
if (v == VAR) mods |= Flags.MUTABLE
in.nextToken()
case _ =>
@@ -2143,7 +2079,7 @@ self =>
val name = ident()
var bynamemod = 0
val tpt =
- if (settings.YmethodInfer.value && !owner.isTypeName && in.token != COLON) {
+ if (settings.YmethodInfer && !owner.isTypeName && in.token != COLON) {
TypeTree()
} else { // XX-METHOD-INFER
accept(COLON)
@@ -2152,11 +2088,11 @@ self =>
syntaxError(
in.offset,
(if (mods.isMutable) "`var'" else "`val'") +
- " parameters may not be call-by-name", false)
+ " parameters may not be call-by-name", skipIt = false)
else if (implicitmod != 0)
syntaxError(
in.offset,
- "implicit parameters may not be call-by-name", false)
+ "implicit parameters may not be call-by-name", skipIt = false)
else bynamemod = Flags.BYNAMEPARAM
}
paramType()
@@ -2168,7 +2104,7 @@ self =>
expr()
} else EmptyTree
atPos(start, if (name == nme.ERROR) start else nameOffset) {
- ValDef((mods | implicitmod | bynamemod) withAnnotations annots, name, tpt, default)
+ ValDef((mods | implicitmod.toLong | bynamemod) withAnnotations annots, name.toTermName, tpt, default)
}
}
def paramClause(): List[ValDef] = {
@@ -2185,8 +2121,8 @@ self =>
val start = in.offset
newLineOptWhenFollowedBy(LPAREN)
if (ofCaseClass && in.token != LPAREN)
- deprecationWarning(in.lastOffset, "case classes without a parameter list have been deprecated;\n"+
- "use either case objects or case classes with `()' as parameter list.")
+ syntaxError(in.lastOffset, "case classes without a parameter list are not allowed;\n"+
+ "use either case objects or case classes with an explicit `()' as a parameter list.")
while (implicitmod == 0 && in.token == LPAREN) {
in.nextToken()
vds += paramClause()
@@ -2197,9 +2133,9 @@ self =>
val result = vds.toList
if (owner == nme.CONSTRUCTOR && (result.isEmpty || (result.head take 1 exists (_.mods.isImplicit)))) {
in.token match {
- case LBRACKET => syntaxError(in.offset, "no type parameters allowed here", false)
+ case LBRACKET => syntaxError(in.offset, "no type parameters allowed here", skipIt = false)
case EOF => incompleteInputError("auxiliary constructor needs non-implicit parameter list")
- case _ => syntaxError(start, "auxiliary constructor needs non-implicit parameter list", false)
+ case _ => syntaxError(start, "auxiliary constructor needs non-implicit parameter list", skipIt = false)
}
}
addEvidenceParams(owner, result, contextBounds)
@@ -2280,16 +2216,18 @@ self =>
* }}}
*/
def typeBounds(): TypeBoundsTree = {
- val t = TypeBoundsTree(
- bound(SUPERTYPE, tpnme.Nothing),
- bound(SUBTYPE, tpnme.Any)
- )
- t setPos wrappingPos(List(t.hi, t.lo))
+ val lo = bound(SUPERTYPE)
+ val hi = bound(SUBTYPE)
+ val t = TypeBoundsTree(lo, hi)
+ val defined = List(t.hi, t.lo) filter (_.pos.isDefined)
+
+ if (defined.nonEmpty)
+ t setPos wrappingPos(defined)
+ else
+ t setPos o2p(in.offset)
}
- def bound(tok: Int, default: TypeName): Tree =
- if (in.token == tok) { in.nextToken(); typ() }
- else atPos(o2p(in.lastOffset)) { rootScalaDot(default) }
+ def bound(tok: Int): Tree = if (in.token == tok) { in.nextToken(); typ() } else EmptyTree
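    // Illustrative sketch: an absent bound is now EmptyTree rather than a
    // synthesized Nothing / Any reference, so
    //
    //   [T <: Foo]   // => TypeBoundsTree(EmptyTree, Foo)
    //   [T]          // => TypeBoundsTree(EmptyTree, EmptyTree), positioned at the current offset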
/* -------- DEFS ------------------------------------------- */
@@ -2323,8 +2261,8 @@ self =>
accept(DOT)
result
}
- /** Walks down import `foo.bar.baz.{ ... }` until it ends at a
- * an underscore, a left brace, or an undotted identifier.
+    /* Walks down import `foo.bar.baz.{ ... }` until it ends at
+     * an underscore, a left brace, or an undotted identifier.
*/
def loop(expr: Tree): Tree = {
expr setPos expr.pos.makeTransparent
@@ -2410,7 +2348,7 @@ self =>
*/
def defOrDcl(pos: Int, mods: Modifiers): List[Tree] = {
if (mods.isLazy && in.token != VAL)
- syntaxError("lazy not allowed here. Only vals can be lazy", false)
+ syntaxError("lazy not allowed here. Only vals can be lazy", skipIt = false)
in.token match {
case VAL =>
patDefOrDcl(pos, mods withPosition(VAL, tokenRange(in)))
@@ -2468,8 +2406,8 @@ self =>
if (newmods.isDeferred) {
trees match {
case List(ValDef(_, _, _, EmptyTree)) =>
- if (mods.isLazy) syntaxError(p.pos, "lazy values may not be abstract", false)
- case _ => syntaxError(p.pos, "pattern definition may not be abstract", false)
+ if (mods.isLazy) syntaxError(p.pos, "lazy values may not be abstract", skipIt = false)
+ case _ => syntaxError(p.pos, "pattern definition may not be abstract", skipIt = false)
}
}
trees
@@ -2519,7 +2457,7 @@ self =>
* }}}
*/
def funDefOrDcl(start : Int, mods: Modifiers): Tree = {
- in.nextToken
+ in.nextToken()
if (in.token == THIS) {
atPos(start, in.skipToken()) {
val vparamss = paramClauses(nme.CONSTRUCTOR, classContextBounds map (_.duplicate), ofCaseClass = false)
@@ -2582,7 +2520,7 @@ self =>
*/
def constrExpr(vparamss: List[List[ValDef]]): Tree =
if (in.token == LBRACE) constrBlock(vparamss)
- else Block(List(selfInvocation(vparamss)), Literal(Constant()))
+ else Block(selfInvocation(vparamss) :: Nil, literalUnit)
/** {{{
* SelfInvocation ::= this ArgumentExprs {ArgumentExprs}
@@ -2612,7 +2550,7 @@ self =>
else Nil
}
accept(RBRACE)
- Block(stats, Literal(Constant()))
+ Block(stats, literalUnit)
}
/** {{{
@@ -2625,7 +2563,6 @@ self =>
in.nextToken()
newLinesOpt()
atPos(start, in.offset) {
- val nameOffset = in.offset
val name = identForType()
// @M! a type alias as well as an abstract type may declare type parameters
val tparams = typeParamClauseOpt(name, null)
@@ -2636,8 +2573,7 @@ self =>
case SUPERTYPE | SUBTYPE | SEMI | NEWLINE | NEWLINES | COMMA | RBRACE =>
TypeDef(mods | Flags.DEFERRED, name, tparams, typeBounds())
case _ =>
- syntaxErrorOrIncomplete("`=', `>:', or `<:' expected", true)
- EmptyTree
+ syntaxErrorOrIncompleteAnd("`=', `>:', or `<:' expected", skipIt = true)(EmptyTree)
}
}
}
@@ -2657,7 +2593,7 @@ self =>
* }}}
*/
def tmplDef(pos: Int, mods: Modifiers): Tree = {
- if (mods.isLazy) syntaxError("classes cannot be lazy", false)
+ if (mods.isLazy) syntaxError("classes cannot be lazy", skipIt = false)
in.token match {
case TRAIT =>
classDef(pos, (mods | Flags.TRAIT | Flags.ABSTRACT) withPosition (Flags.TRAIT, tokenRange(in)))
@@ -2670,8 +2606,7 @@ self =>
case CASEOBJECT =>
objectDef(pos, (mods | Flags.CASE) withPosition (Flags.CASE, tokenRange(in.prev /*scanner skips on 'case' to 'object', thus take prev*/)))
case _ =>
- syntaxErrorOrIncomplete("expected start of definition", true)
- EmptyTree
+ syntaxErrorOrIncompleteAnd("expected start of definition", skipIt = true)(EmptyTree)
}
}
@@ -2682,7 +2617,7 @@ self =>
* }}}
*/
def classDef(start: Int, mods: Modifiers): ClassDef = {
- in.nextToken
+ in.nextToken()
val nameOffset = in.offset
val name = identForType()
atPos(start, if (name == tpnme.ERROR) start else nameOffset) {
@@ -2692,7 +2627,7 @@ self =>
classContextBounds = contextBoundBuf.toList
val tstart = (in.offset :: classContextBounds.map(_.pos.startOrPoint)).min
if (!classContextBounds.isEmpty && mods.isTrait) {
- syntaxError("traits cannot have type parameters with context bounds `: ...' nor view bounds `<% ...'", false)
+ syntaxError("traits cannot have type parameters with context bounds `: ...' nor view bounds `<% ...'", skipIt = false)
classContextBounds = List()
}
val constrAnnots = constructorAnnotations()
@@ -2703,7 +2638,7 @@ self =>
if (mods.isTrait) {
if (settings.YvirtClasses && in.token == SUBTYPE) mods1 |= Flags.DEFERRED
} else if (in.token == SUBTYPE) {
- syntaxError("classes are not allowed to be virtual", false)
+ syntaxError("classes are not allowed to be virtual", skipIt = false)
}
val template = templateOpt(mods1, name, constrMods withAnnotations constrAnnots, vparamss, tstart)
if (isInterface(mods1, template.body)) mods1 |= Flags.INTERFACE
@@ -2722,36 +2657,67 @@ self =>
* }}}
*/
def objectDef(start: Int, mods: Modifiers): ModuleDef = {
- in.nextToken
+ in.nextToken()
val nameOffset = in.offset
val name = ident()
val tstart = in.offset
atPos(start, if (name == nme.ERROR) start else nameOffset) {
val mods1 = if (in.token == SUBTYPE) mods | Flags.DEFERRED else mods
val template = templateOpt(mods1, name, NoMods, Nil, tstart)
- ModuleDef(mods1, name, template)
+ ModuleDef(mods1, name.toTermName, template)
}
}
+ /** Create a tree representing a package object, converting
+ * {{{
+ * package object foo { ... }
+ * }}}
+ * to
+ * {{{
+ * package foo {
+ * object `package` { ... }
+ * }
+ * }}}
+ */
+ def packageObjectDef(start: Offset): PackageDef = {
+ val defn = objectDef(in.offset, NoMods)
+ val module = copyModuleDef(defn)(name = nme.PACKAGEkw)
+ val pid = atPos(o2p(defn.pos.startOrPoint))(Ident(defn.name))
+
+ makePackaging(start, pid, module :: Nil)
+ }
+ def packageOrPackageObject(start: Offset): Tree = (
+ if (in.token == OBJECT)
+ joinComment(packageObjectDef(start) :: Nil).head
+ else {
+ in.flushDoc
+ makePackaging(start, pkgQualId(), inBracesOrNil(topStatSeq()))
+ }
+ )
+ // TODO - eliminate this and use "def packageObjectDef" (see call site of this
+ // method for small elaboration.)
+ def makePackageObject(start: Int, objDef: ModuleDef): PackageDef = objDef match {
+ case ModuleDef(mods, name, impl) =>
+ makePackaging(
+ start, atPos(o2p(objDef.pos.startOrPoint)){ Ident(name) }, List(ModuleDef(mods, nme.PACKAGEkw, impl)))
+ }
+
/** {{{
* ClassParents ::= AnnotType {`(' [Exprs] `)'} {with AnnotType}
* TraitParents ::= AnnotType {with AnnotType}
* }}}
*/
- def templateParents(isTrait: Boolean): (List[Tree], List[List[Tree]]) = {
- val parents = new ListBuffer[Tree] += startAnnotType()
- val argss = (
- // TODO: the insertion of ListOfNil here is where "new Foo" becomes
- // indistinguishable from "new Foo()".
- if (in.token == LPAREN && !isTrait) multipleArgumentExprs()
- else ListOfNil
- )
-
- while (in.token == WITH) {
- in.nextToken()
- parents += startAnnotType()
+ def templateParents(): List[Tree] = {
+ val parents = new ListBuffer[Tree]
+ def readAppliedParent() = {
+ val start = in.offset
+ val parent = startAnnotType()
+ val argss = if (in.token == LPAREN) multipleArgumentExprs() else Nil
+ parents += atPos(start)((parent /: argss)(Apply.apply))
}
- (parents.toList, argss)
+ readAppliedParent()
+ while (in.token == WITH) { in.nextToken(); readAppliedParent() }
+ parents.toList
}
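The rewritten `templateParents` no longer returns parents and argument lists separately; `readAppliedParent` folds each parent with its argument lists into nested `Apply` nodes via `(parent /: argss)(Apply.apply)`. A standalone sketch of that fold on a toy tree type (not the compiler's `Tree`):

    // Folding argument lists left to right nests the applications, so C(1)(2, 3)
    // ends up as Ap(Ap(Id("C"), List("1")), List("2", "3")).
    sealed trait Toy
    case class Id(name: String) extends Toy
    case class Ap(fun: Toy, args: List[String]) extends Toy

    val parent: Toy = Id("C")
    val argss   = List(List("1"), List("2", "3"))
    val applied = argss.foldLeft(parent)(Ap(_, _))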
/** {{{
@@ -2761,33 +2727,33 @@ self =>
* EarlyDef ::= Annotations Modifiers PatDef
* }}}
*/
- def template(isTrait: Boolean): (List[Tree], List[List[Tree]], ValDef, List[Tree]) = {
+ def template(): (List[Tree], ValDef, List[Tree]) = {
newLineOptWhenFollowedBy(LBRACE)
if (in.token == LBRACE) {
// @S: pre template body cannot stub like post body can!
val (self, body) = templateBody(isPre = true)
- if (in.token == WITH && self.isEmpty) {
+ if (in.token == WITH && (self eq emptyValDef)) {
val earlyDefs: List[Tree] = body flatMap {
case vdef @ ValDef(mods, _, _, _) if !mods.isDeferred =>
List(copyValDef(vdef)(mods = mods | Flags.PRESUPER))
case tdef @ TypeDef(mods, name, tparams, rhs) =>
List(treeCopy.TypeDef(tdef, mods | Flags.PRESUPER, name, tparams, rhs))
case stat if !stat.isEmpty =>
- syntaxError(stat.pos, "only type definitions and concrete field definitions allowed in early object initialization section", false)
+ syntaxError(stat.pos, "only type definitions and concrete field definitions allowed in early object initialization section", skipIt = false)
List()
case _ => List()
}
in.nextToken()
- val (parents, argss) = templateParents(isTrait = isTrait)
- val (self1, body1) = templateBodyOpt(traitParentSeen = isTrait)
- (parents, argss, self1, earlyDefs ::: body1)
+ val parents = templateParents()
+ val (self1, body1) = templateBodyOpt(parenMeansSyntaxError = false)
+ (parents, self1, earlyDefs ::: body1)
} else {
- (List(), ListOfNil, self, body)
+ (List(), self, body)
}
} else {
- val (parents, argss) = templateParents(isTrait = isTrait)
- val (self, body) = templateBodyOpt(traitParentSeen = isTrait)
- (parents, argss, self, body)
+ val parents = templateParents()
+ val (self, body) = templateBodyOpt(parenMeansSyntaxError = false)
+ (parents, self, body)
}
}
@@ -2801,28 +2767,28 @@ self =>
* }}}
*/
def templateOpt(mods: Modifiers, name: Name, constrMods: Modifiers, vparamss: List[List[ValDef]], tstart: Int): Template = {
- val (parents0, argss, self, body) = (
+ val (parents0, self, body) = (
if (in.token == EXTENDS || in.token == SUBTYPE && mods.isTrait) {
in.nextToken()
- template(isTrait = mods.isTrait)
+ template()
}
else {
newLineOptWhenFollowedBy(LBRACE)
- val (self, body) = templateBodyOpt(traitParentSeen = false)
- (List(), ListOfNil, self, body)
+ val (self, body) = templateBodyOpt(parenMeansSyntaxError = mods.isTrait || name.isTermName)
+ (List(), self, body)
}
)
def anyrefParents() = {
val caseParents = if (mods.isCase) List(productConstr, serializableConstr) else Nil
parents0 ::: caseParents match {
- case Nil => List(atPos(o2p(in.offset))(scalaAnyRefConstr))
+ case Nil => atInPos(scalaAnyRefConstr) :: Nil
case ps => ps
}
}
def anyvalConstructor() = (
// Not a well-formed constructor, has to be finished later - see note
// regarding AnyVal constructor in AddInterfaces.
- DefDef(NoMods, nme.CONSTRUCTOR, Nil, ListOfNil, TypeTree(), Block(Nil, Literal(Constant())))
+ DefDef(NoMods, nme.CONSTRUCTOR, Nil, ListOfNil, TypeTree(), Block(Nil, literalUnit))
)
val tstart0 = if (body.isEmpty && in.lastOffset < tstart) in.lastOffset else tstart
@@ -2831,7 +2797,7 @@ self =>
if (inScalaRootPackage && ScalaValueClassNames.contains(name))
Template(parents0, self, anyvalConstructor :: body)
else
- Template(anyrefParents, self, constrMods, vparamss, argss, body, o2p(tstart))
+ Template(anyrefParents(), self, constrMods, vparamss, body, o2p(tstart))
}
}
@@ -2846,14 +2812,15 @@ self =>
case (self, Nil) => (self, EmptyTree.asList)
case result => result
}
- def templateBodyOpt(traitParentSeen: Boolean): (ValDef, List[Tree]) = {
+ def templateBodyOpt(parenMeansSyntaxError: Boolean): (ValDef, List[Tree]) = {
newLineOptWhenFollowedBy(LBRACE)
if (in.token == LBRACE) {
templateBody(isPre = false)
} else {
- if (in.token == LPAREN)
- syntaxError((if (traitParentSeen) "parents of traits" else "traits or objects")+
- " may not have parameters", true)
+ if (in.token == LPAREN) {
+ if (parenMeansSyntaxError) syntaxError(s"traits or objects may not have parameters", skipIt = true)
+ else abort("unexpected opening parenthesis")
+ }
(emptyValDef, List())
}
}
@@ -2870,43 +2837,10 @@ self =>
def makePackaging(start: Int, pkg: Tree, stats: List[Tree]): PackageDef = pkg match {
case x: RefTree => atPos(start, pkg.pos.point)(PackageDef(x, stats))
}
-/*
- pkg match {
- case id @ Ident(_) =>
- PackageDef(id, stats)
- case Select(qual, name) => // drop this to flatten packages
- makePackaging(start, qual, List(PackageDef(Ident(name), stats)))
- }
- }
-*/
- /** Create a tree representing a package object, converting
- * {{{
- * package object foo { ... }
- * }}}
- * to
- * {{{
- * package foo {
- * object `package` { ... }
- * }
- * }}}
- */
- def makePackageObject(start: Int, objDef: ModuleDef): PackageDef = objDef match {
- case ModuleDef(mods, name, impl) =>
- makePackaging(
- start, atPos(o2p(objDef.pos.startOrPoint)){ Ident(name) }, List(ModuleDef(mods, nme.PACKAGEkw, impl)))
- }
-
- /** {{{
- * Packaging ::= package QualId [nl] `{' TopStatSeq `}'
- * }}}
- */
- def packaging(start: Int): Tree = {
- val nameOffset = in.offset
- val pkg = pkgQualId()
- val stats = inBracesOrNil(topStatSeq())
- makePackaging(start, pkg, stats)
- }
+ def makeEmptyPackage(start: Int, stats: List[Tree]): PackageDef = (
+ makePackaging(start, atPos(start, start, start)(Ident(nme.EMPTY_PACKAGE_NAME)), stats)
+ )
/** {{{
* TopStatSeq ::= TopStat {semi TopStat}
@@ -2922,22 +2856,15 @@ self =>
while (!isStatSeqEnd) {
stats ++= (in.token match {
case PACKAGE =>
- val start = in.skipToken()
- if (in.token == OBJECT)
- joinComment(List(makePackageObject(start, objectDef(in.offset, NoMods))))
- else {
- in.flushDoc
- List(packaging(start))
- }
+ packageOrPackageObject(in.skipToken()) :: Nil
case IMPORT =>
in.flushDoc
importClause()
case x if x == AT || isTemplateIntro || isModifier =>
- joinComment(List(topLevelTmplDef))
+ joinComment(topLevelTmplDef :: Nil)
case _ =>
- if (!isStatSep)
- syntaxErrorOrIncomplete("expected class or object definition", true)
- Nil
+ if (isStatSep) Nil
+ else syntaxErrorOrIncompleteAnd("expected class or object definition", skipIt = true)(Nil)
})
acceptStatSepOpt()
}
@@ -2995,7 +2922,7 @@ self =>
} else if (isDefIntro || isModifier || in.token == AT) {
stats ++= joinComment(nonLocalDefOrDcl)
} else if (!isStatSep) {
- syntaxErrorOrIncomplete("illegal start of definition", true)
+ syntaxErrorOrIncomplete("illegal start of definition", skipIt = true)
}
acceptStatSepOpt()
}
@@ -3018,7 +2945,7 @@ self =>
syntaxErrorOrIncomplete(
"illegal start of declaration"+
(if (inFunReturnType) " (possible cause: missing `=' in front of current method body)"
- else ""), true)
+ else ""), skipIt = true)
}
if (in.token != RBRACE) acceptStatSep()
}
@@ -3042,13 +2969,13 @@ self =>
def localDef(implicitMod: Int): List[Tree] = {
val annots = annotations(skipNewLines = true)
val pos = in.offset
- val mods = (localModifiers() | implicitMod) withAnnotations annots
+ val mods = (localModifiers() | implicitMod.toLong) withAnnotations annots
val defs =
if (!(mods hasFlag ~(Flags.IMPLICIT | Flags.LAZY))) defOrDcl(pos, mods)
else List(tmplDef(pos, mods))
in.token match {
- case RBRACE | CASE => defs :+ (Literal(Constant()) setPos o2p(in.offset))
+ case RBRACE | CASE => defs :+ setInPos(literalUnit)
case _ => defs
}
}
@@ -3088,7 +3015,7 @@ self =>
}
else {
val addendum = if (isModifier) " (no modifiers allowed here)" else ""
- syntaxErrorOrIncomplete("illegal start of statement" + addendum, true)
+ syntaxErrorOrIncomplete("illegal start of statement" + addendum, skipIt = true)
}
}
stats.toList
@@ -3098,7 +3025,7 @@ self =>
* CompilationUnit ::= {package QualId semi} TopStatSeq
* }}}
*/
- def compilationUnit(): Tree = checkNoEscapingPlaceholders {
+ def compilationUnit(): PackageDef = checkNoEscapingPlaceholders {
def topstats(): List[Tree] = {
val ts = new ListBuffer[Tree]
while (in.token == SEMI) in.nextToken()
@@ -3106,13 +3033,15 @@ self =>
if (in.token == PACKAGE) {
in.nextToken()
if (in.token == OBJECT) {
+ // TODO - this next line is supposed to be
+ // ts += packageObjectDef(start)
+ // but this broke a scaladoc test (run/diagrams-filtering.scala) somehow.
ts ++= joinComment(List(makePackageObject(start, objectDef(in.offset, NoMods))))
if (in.token != EOF) {
acceptStatSep()
ts ++= topStatSeq()
}
} else {
- val nameOffset = in.offset
in.flushDoc
val pkg = pkgQualId()
@@ -3135,8 +3064,8 @@ self =>
resetPackage()
topstats() match {
- case List(stat @ PackageDef(_, _)) => stat
- case stats =>
+ case (stat @ PackageDef(_, _)) :: Nil => stat
+ case stats =>
val start =
if (stats forall (_ == EmptyTree)) 0
else {
@@ -3145,7 +3074,7 @@ self =>
else 0
}
- makePackaging(start, atPos(start, start, start) { Ident(nme.EMPTY_PACKAGE_NAME) }, stats)
+ makeEmptyPackage(start, stats)
}
}
}
diff --git a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala
index 1aa50be83a..82a3144304 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala
@@ -9,9 +9,11 @@ import scala.tools.nsc.util.CharArrayReader
import scala.reflect.internal.util._
import scala.reflect.internal.Chars._
import Tokens._
-import scala.annotation.switch
-import scala.collection.mutable.{ ListBuffer, ArrayBuffer }
+import scala.annotation.{ switch, tailrec }
+import scala.collection.{ mutable, immutable }
+import mutable.{ ListBuffer, ArrayBuffer }
import scala.xml.Utility.{ isNameStart }
+import scala.language.postfixOps
/** See Parsers.scala / ParsersCommon for some explanation of ScannersCommon.
*/
@@ -26,7 +28,6 @@ trait ScannersCommon {
trait ScannerCommon extends CommonTokenData {
// things to fill in, in addition to buf, decodeUni which come from CharArrayReader
- def warning(off: Int, msg: String): Unit
def error (off: Int, msg: String): Unit
def incompleteInputError(off: Int, msg: String): Unit
def deprecationWarning(off: Int, msg: String): Unit
@@ -50,9 +51,6 @@ trait Scanners extends ScannersCommon {
/** Offset into source character array */
type Offset = Int
- /** An undefined offset */
- val NoOffset: Offset = -1
-
trait TokenData extends CommonTokenData {
/** the next token */
@@ -86,9 +84,70 @@ trait Scanners extends ScannersCommon {
abstract class Scanner extends CharArrayReader with TokenData with ScannerCommon {
private def isDigit(c: Char) = java.lang.Character isDigit c
- def isAtEnd = charOffset >= buf.length
+ private var openComments = 0
+ protected def putCommentChar(): Unit = nextChar()
+
+ @tailrec private def skipLineComment(): Unit = ch match {
+ case SU | CR | LF =>
+ case _ => nextChar() ; skipLineComment()
+ }
+ private def maybeOpen() {
+ putCommentChar()
+ if (ch == '*') {
+ putCommentChar()
+ openComments += 1
+ }
+ }
+ private def maybeClose(): Boolean = {
+ putCommentChar()
+ (ch == '/') && {
+ putCommentChar()
+ openComments -= 1
+ openComments == 0
+ }
+ }
+ @tailrec final def skipNestedComments(): Unit = ch match {
+ case '/' => maybeOpen() ; skipNestedComments()
+ case '*' => if (!maybeClose()) skipNestedComments()
+ case SU => incompleteInputError("unclosed comment")
+ case _ => putCommentChar() ; skipNestedComments()
+ }
+ def skipDocComment(): Unit = skipNestedComments()
+ def skipBlockComment(): Unit = skipNestedComments()
- def flush = { charOffset = offset; nextChar(); this }
+ private def skipToCommentEnd(isLineComment: Boolean) {
+ nextChar()
+ if (isLineComment) skipLineComment()
+ else {
+ openComments = 1
+ val isDocComment = (ch == '*') && { nextChar(); true }
+ if (isDocComment) {
+ // Check for the amazing corner case of /**/
+ if (ch == '/')
+ nextChar()
+ else
+ skipDocComment()
+ }
+ else skipBlockComment()
+ }
+ }
+
+ /** @pre ch == '/'
+ * Returns true if a comment was skipped.
+ */
+ def skipComment(): Boolean = ch match {
+ case '/' | '*' => skipToCommentEnd(isLineComment = ch == '/') ; true
+ case _ => false
+ }
+ def flushDoc(): DocComment = null
+
+ /** To prevent doc comments attached to expressions from leaking out of scope
+ * onto the next documentable entity, they are discarded upon passing a right
+ * brace, bracket, or parenthesis.
+ */
+ def discardDocBuffer(): Unit = ()
+
+ def isAtEnd = charOffset >= buf.length
def resume(lastCode: Int) = {
token = lastCode
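The string-building `skipComment` deleted further down is replaced here by small skippers: a tail-recursive `skipLineComment`, and `skipNestedComments` with an `openComments` counter that reports an unclosed comment at end of input. The same depth-counting idea over a plain string, as a standalone sketch (not the scanner's `CharArrayReader`):

    // Returns the index just past the closing */ of a nested comment, or None for
    // "unclosed comment". `afterOpen` is the index just past the opening /*.
    def skipNested(s: String, afterOpen: Int): Option[Int] = {
      var i = afterOpen
      var depth = 1
      while (depth > 0 && i < s.length - 1) {
        if (s.startsWith("/*", i))      { depth += 1; i += 2 }
        else if (s.startsWith("*/", i)) { depth -= 1; i += 2 }
        else i += 1
      }
      if (depth == 0) Some(i) else None
    }

    // skipNested("a */ tail", 0) == Some(4); skipNested("never closed", 0) == None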
@@ -98,10 +157,6 @@ trait Scanners extends ScannersCommon {
nextToken()
}
- /** the last error offset
- */
- var errOffset: Offset = NoOffset
-
/** A character buffer for literals
*/
val cbuf = new StringBuilder
@@ -139,22 +194,6 @@ trait Scanners extends ScannersCommon {
cbuf.clear()
}
- /** Should doc comments be built? */
- def buildDocs: Boolean = forScaladoc
-
- /** holder for the documentation comment
- */
- var docComment: DocComment = null
-
- def flushDoc: DocComment = {
- val ret = docComment
- docComment = null
- ret
- }
-
- protected def foundComment(value: String, start: Int, end: Int) = ()
- protected def foundDocComment(value: String, start: Int, end: Int) = ()
-
private class TokenData0 extends TokenData
/** we need one token lookahead and one token history
@@ -227,12 +266,15 @@ trait Scanners extends ScannersCommon {
case RBRACE =>
while (!sepRegions.isEmpty && sepRegions.head != RBRACE)
sepRegions = sepRegions.tail
- if (!sepRegions.isEmpty) sepRegions = sepRegions.tail
- docComment = null
+ if (!sepRegions.isEmpty)
+ sepRegions = sepRegions.tail
+
+ discardDocBuffer()
case RBRACKET | RPAREN =>
if (!sepRegions.isEmpty && sepRegions.head == lastToken)
sepRegions = sepRegions.tail
- docComment = null
+
+ discardDocBuffer()
case ARROW =>
if (!sepRegions.isEmpty && sepRegions.head == lastToken)
sepRegions = sepRegions.tail
@@ -262,11 +304,11 @@ trait Scanners extends ScannersCommon {
next.token = EMPTY
}
- /** Insert NEWLINE or NEWLINES if
- * - we are after a newline
- * - we are within a { ... } or on toplevel (wrt sepRegions)
- * - the current token can start a statement and the one before can end it
- * insert NEWLINES if we are past a blank line, NEWLINE otherwise
+ /* Insert NEWLINE or NEWLINES if
+ * - we are after a newline
+ * - we are within a { ... } or on toplevel (wrt sepRegions)
+ * - the current token can start a statement and the one before can end it
+ * insert NEWLINES if we are past a blank line, NEWLINE otherwise
*/
if (!applyBracePatch() && afterLineEnd() && inLastOfStat(lastToken) && inFirstOfStat(token) &&
(sepRegions.isEmpty || sepRegions.head == RBRACE)) {
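The comment above (demoted from Scaladoc to a plain block comment) states the rule for inferred statement separators: after a line end, emit NEWLINE when the previous token can end a statement and the next can begin one, and NEWLINES when a blank line was crossed. A standalone restatement of that decision (the token model here is illustrative only):

    sealed trait Sep
    case object Newline  extends Sep
    case object Newlines extends Sep

    def inferredSep(prevCanEndStat: Boolean, nextCanStartStat: Boolean, blankLineBetween: Boolean): Option[Sep] =
      if (prevCanEndStat && nextCanStartStat) Some(if (blankLineBetween) Newlines else Newline)
      else None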
@@ -375,7 +417,7 @@ trait Scanners extends ScannersCommon {
getOperatorRest()
}
}
- fetchLT
+ fetchLT()
case '~' | '!' | '@' | '#' | '%' |
'^' | '*' | '+' | '-' | /*'<' | */
'>' | '?' | ':' | '=' | '&' |
@@ -399,20 +441,20 @@ trait Scanners extends ScannersCommon {
nextChar()
base = 16
} else {
- /**
+ /*
* What should leading 0 be in the future? It is potentially dangerous
* to let it be base-10 because of history. Should it be an error? Is
* there a realistic situation where one would need it?
*/
if (isDigit(ch)) {
- if (opt.future) syntaxError("Non-zero numbers may not have a leading zero.")
+ if (settings.future) syntaxError("Non-zero numbers may not have a leading zero.")
else deprecationWarning("Treating numbers with a leading zero as octal is deprecated.")
}
base = 8
}
getNumber()
}
- fetchZero
+ fetchZero()
case '1' | '2' | '3' | '4' | '5' | '6' | '7' | '8' | '9' =>
base = 10
getNumber()
@@ -455,7 +497,7 @@ trait Scanners extends ScannersCommon {
}
}
}
- fetchDoubleQuote
+ fetchDoubleQuote()
case '\'' =>
def fetchSingleQuote() = {
nextChar()
@@ -474,7 +516,7 @@ trait Scanners extends ScannersCommon {
}
}
}
- fetchSingleQuote
+ fetchSingleQuote()
case '.' =>
nextChar()
if ('0' <= ch && ch <= '9') {
@@ -519,67 +561,11 @@ trait Scanners extends ScannersCommon {
nextChar()
getOperatorRest()
} else {
- syntaxError("illegal character '" + ("" + '\\' + 'u' + "%04x".format(ch: Int)) + "'")
+ syntaxError("illegal character '" + ("" + '\\' + 'u' + "%04x".format(ch.toInt)) + "'")
nextChar()
}
}
- fetchOther
- }
- }
-
- private def skipComment(): Boolean = {
-
- if (ch == '/' || ch == '*') {
-
- val comment = new StringBuilder("/")
- def appendToComment() = comment.append(ch)
-
- if (ch == '/') {
- do {
- appendToComment()
- nextChar()
- } while ((ch != CR) && (ch != LF) && (ch != SU))
- } else {
- docComment = null
- var openComments = 1
- appendToComment()
- nextChar()
- appendToComment()
- var buildingDocComment = false
- if (ch == '*' && buildDocs) {
- buildingDocComment = true
- }
- while (openComments > 0) {
- do {
- do {
- if (ch == '/') {
- nextChar(); appendToComment()
- if (ch == '*') {
- nextChar(); appendToComment()
- openComments += 1
- }
- }
- if (ch != '*' && ch != SU) {
- nextChar(); appendToComment()
- }
- } while (ch != '*' && ch != SU)
- while (ch == '*') {
- nextChar(); appendToComment()
- }
- } while (ch != '/' && ch != SU)
- if (ch == '/') nextChar()
- else incompleteInputError("unclosed comment")
- openComments -= 1
- }
-
- if (buildingDocComment)
- foundDocComment(comment.toString, offset, charOffset - 2)
- }
-
- foundComment(comment.toString, offset, charOffset - 2)
- true
- } else {
- false
+ fetchOther()
}
}
@@ -712,7 +698,7 @@ trait Scanners extends ScannersCommon {
}
}
- @annotation.tailrec private def getStringPart(multiLine: Boolean): Unit = {
+ @scala.annotation.tailrec private def getStringPart(multiLine: Boolean): Unit = {
def finishStringPart() = {
setStrVal()
token = STRINGPART
@@ -898,7 +884,7 @@ trait Scanners extends ScannersCommon {
*/
def intVal(negated: Boolean): Long = {
if (token == CHARLIT && !negated) {
- charVal
+ charVal.toLong
} else {
var value: Long = 0
val divider = if (base == 10) 1 else 2
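`intVal` converts the buffered digit characters to a `Long` under the scanner's current `base` (8, 10 or 16). A minimal standalone version of that conversion, leaving out the overflow checks the real method performs with `divider`:

    // Character.digit understands 0-9 and a-f/A-F for the radices used here.
    def intValOf(digits: String, base: Int): Long =
      digits.foldLeft(0L)((acc, c) => acc * base + Character.digit(c, base))

    // intValOf("ff", 16) == 255L; intValOf("17", 8) == 15L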
@@ -926,7 +912,7 @@ trait Scanners extends ScannersCommon {
}
}
- def intVal: Long = intVal(false)
+ def intVal: Long = intVal(negated = false)
/** Convert current strVal, base to double value
*/
@@ -958,7 +944,7 @@ trait Scanners extends ScannersCommon {
}
}
- def floatVal: Double = floatVal(false)
+ def floatVal: Double = floatVal(negated = false)
def checkNoLetter() {
if (isIdentifierPart(ch) && ch >= ' ')
@@ -976,7 +962,7 @@ trait Scanners extends ScannersCommon {
}
token = INTLIT
- /** When we know for certain it's a number after using a touch of lookahead */
+ /* When we know for certain it's a number after using a touch of lookahead */
def restOfNumber() = {
putChar(ch)
nextChar()
@@ -1004,10 +990,10 @@ trait Scanners extends ScannersCommon {
val lookahead = lookaheadReader
val c = lookahead.getc()
- /** As of scala 2.11, it isn't a number unless c here is a digit, so
- * opt.future excludes the rest of the logic.
+ /* As of scala 2.11, it isn't a number unless c here is a digit, so
+ * settings.future.value excludes the rest of the logic.
*/
- if (opt.future && !isDigit(c))
+ if (settings.future && !isDigit(c))
return setStrVal()
val isDefinitelyNumber = (c: @switch) match {
@@ -1015,16 +1001,16 @@ trait Scanners extends ScannersCommon {
case '0' | '1' | '2' | '3' | '4' | '5' | '6' | '7' | '8' | '9' =>
true
- /** Backquoted idents like 22.`foo`. */
+ /* Backquoted idents like 22.`foo`. */
case '`' =>
return setStrVal() /** Note the early return */
- /** These letters may be part of a literal, or a method invocation on an Int.
+ /* These letters may be part of a literal, or a method invocation on an Int.
*/
case 'd' | 'D' | 'f' | 'F' =>
!isIdentifierPart(lookahead.getc())
- /** A little more special handling for e.g. 5e7 */
+ /* A little more special handling for e.g. 5e7 */
case 'e' | 'E' =>
val ch = lookahead.getc()
!isIdentifierPart(ch) || (isDigit(ch) || ch == '+' || ch == '-')
@@ -1061,7 +1047,6 @@ trait Scanners extends ScannersCommon {
def syntaxError(off: Offset, msg: String) {
error(off, msg)
token = ERROR
- errOffset = off
}
/** generate an error at the current token offset
@@ -1074,7 +1059,6 @@ trait Scanners extends ScannersCommon {
def incompleteInputError(msg: String) {
incompleteInputError(offset, msg)
token = EOF
- errOffset = offset
}
override def toString() = token match {
@@ -1236,10 +1220,9 @@ trait Scanners extends ScannersCommon {
*/
class SourceFileScanner(val source: SourceFile) extends Scanner {
val buf = source.content
- override val decodeUni: Boolean = !settings.nouescape.value
+ override val decodeUni: Boolean = !settings.nouescape
// suppress warnings, throw exception on errors
- def warning(off: Offset, msg: String): Unit = ()
def deprecationWarning(off: Offset, msg: String): Unit = ()
def error (off: Offset, msg: String): Unit = throw new MalformedInput(off, msg)
def incompleteInputError(off: Offset, msg: String): Unit = throw new MalformedInput(off, msg)
@@ -1247,10 +1230,9 @@ trait Scanners extends ScannersCommon {
/** A scanner over a given compilation unit
*/
- class UnitScanner(unit: CompilationUnit, patches: List[BracePatch]) extends SourceFileScanner(unit.source) {
+ class UnitScanner(val unit: CompilationUnit, patches: List[BracePatch]) extends SourceFileScanner(unit.source) {
def this(unit: CompilationUnit) = this(unit, List())
- override def warning(off: Offset, msg: String) = unit.warning(unit.position(off), msg)
override def deprecationWarning(off: Offset, msg: String) = unit.deprecationWarning(unit.position(off), msg)
override def error (off: Offset, msg: String) = unit.error(unit.position(off), msg)
override def incompleteInputError(off: Offset, msg: String) = unit.incompleteInputError(unit.position(off), msg)
@@ -1296,23 +1278,21 @@ trait Scanners extends ScannersCommon {
}
}
}
-
- override def foundComment(value: String, start: Int, end: Int) {
- val pos = new RangePosition(unit.source, start, start, end)
- unit.comment(pos, value)
- }
-
- override def foundDocComment(value: String, start: Int, end: Int) {
- val docPos = new RangePosition(unit.source, start, start, end)
- docComment = new DocComment(value, docPos)
- unit.comment(docPos, value)
- }
}
class ParensAnalyzer(unit: CompilationUnit, patches: List[BracePatch]) extends UnitScanner(unit, patches) {
- var balance = scala.collection.mutable.Map(RPAREN -> 0, RBRACKET -> 0, RBRACE -> 0)
+ val balance = mutable.Map(RPAREN -> 0, RBRACKET -> 0, RBRACE -> 0)
+
+ /** The source code with braces and line starts annotated with [NN] showing the index */
+ private def markedSource = {
+ val code = unit.source.content
+ val braces = code.indices filter (idx => "{}\n" contains code(idx)) toSet;
+ val mapped = code.indices map (idx => if (braces(idx)) s"${code(idx)}[$idx]" else "" + code(idx))
+ mapped.mkString("")
+ }
init()
+ log(s"ParensAnalyzer for ${unit.source} of length ${unit.source.content.length}\n```\n$markedSource\n```")
/** The offset of the first token on this line, or next following line if blank
*/
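`markedSource` annotates every brace and newline in the compilation unit with its character index so the brace-healing log emitted just above can be read against raw offsets. The same annotation over a plain string, as a standalone sketch:

    // Appends "[index]" after each brace or newline, e.g. "a{b}" becomes "a{[1]b}[3]".
    def marked(code: String): String =
      code.zipWithIndex.map { case (c, i) =>
        if ("{}\n" contains c) s"$c[$i]" else c.toString
      }.mkString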
@@ -1388,17 +1368,24 @@ trait Scanners extends ScannersCommon {
bpbuf += current
}
}
+ def bracePairString(bp: BracePair, indent: Int): String = {
+ val rangeString = {
+ import bp._
+ val lline = line(loff)
+ val rline = line(roff)
+ val tokens = List(lline, lindent, rline, rindent) map (n => if (n < 0) "??" else "" + n)
+ "%s:%s to %s:%s".format(tokens: _*)
+ }
+ val outer = (" " * indent) + rangeString
+ val inners = bp.nested map (bracePairString(_, indent + 2))
- def printBP(bp: BracePair, indent: Int) {
- println(" "*indent+line(bp.loff)+":"+bp.lindent+" to "+line(bp.roff)+":"+bp.rindent)
- if (bp.nested.nonEmpty)
- for (bp1 <- bp.nested) {
- printBP(bp1, indent + 2)
- }
+ if (inners.isEmpty) outer
+ else inners.mkString(outer + "\n", "\n", "")
}
-// println("lineStart = "+lineStart)//DEBUG
-// println("bracepairs = ")
-// for (bp <- bpbuf.toList) printBP(bp, 0)
+ def bpString = bpbuf.toList map ("\n" + bracePairString(_, 0)) mkString ""
+ def startString = lineStart.mkString("line starts: [", ", ", "]")
+
+ log(s"\n$startString\n$bpString")
bpbuf.toList
}
@@ -1432,18 +1419,6 @@ trait Scanners extends ScannersCommon {
else bp :: insertPatch(bps, patch)
}
- def leftColumn(offset: Int) =
- if (offset == -1) -1 else column(lineStart(line(offset)))
-
- def rightColumn(offset: Int, default: Int) =
- if (offset == -1) -1
- else {
- val rlin = line(offset)
- if (lineStart(rlin) == offset) column(offset)
- else if (rlin + 1 < lineStart.length) column(lineStart(rlin + 1))
- else default
- }
-
def insertRBrace(): List[BracePatch] = {
def insert(bps: List[BracePair]): List[BracePatch] = bps match {
case List() => patches
@@ -1458,7 +1433,7 @@ trait Scanners extends ScannersCommon {
while (lin < lineStart.length && column(lineStart(lin)) > lindent)
lin += 1
if (lin < lineStart.length) {
- val patches1 = insertPatch(patches, BracePatch(lineStart(lin), true))
+ val patches1 = insertPatch(patches, BracePatch(lineStart(lin), inserted = true))
//println("patch for "+bp+"/"+imbalanceMeasure+"/"+new ParensAnalyzer(unit, patches1).imbalanceMeasure)
/*if (improves(patches1))*/
patches1
@@ -1479,23 +1454,12 @@ trait Scanners extends ScannersCommon {
else {
val patches1 = delete(nested)
if (patches1 ne patches) patches1
- else insertPatch(patches, BracePatch(roff, false))
+ else insertPatch(patches, BracePatch(roff, inserted = false))
}
}
delete(bracePairs)
}
- def imbalanceMeasure: Int = {
- def measureList(bps: List[BracePair]): Int =
- (bps map measure).sum
- def measure(bp: BracePair): Int =
- (if (bp.lindent != bp.rindent) 1 else 0) + measureList(bp.nested)
- measureList(bracePairs)
- }
-
- def improves(patches1: List[BracePatch]): Boolean =
- imbalanceMeasure > new ParensAnalyzer(unit, patches1).imbalanceMeasure
-
// don't emit deprecation warnings about identifiers like `macro` or `then`
// when skimming through the source file trying to heal braces
override def emitIdentifierDeprecationWarnings = false
diff --git a/src/compiler/scala/tools/nsc/ast/parser/SymbolicXMLBuilder.scala b/src/compiler/scala/tools/nsc/ast/parser/SymbolicXMLBuilder.scala
index e8ef670222..f326212d5b 100755
--- a/src/compiler/scala/tools/nsc/ast/parser/SymbolicXMLBuilder.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/SymbolicXMLBuilder.scala
@@ -11,7 +11,6 @@ import scala.xml.{ EntityRef, Text }
import scala.xml.XML.{ xmlns }
import symtab.Flags.MUTABLE
import scala.reflect.internal.util.StringOps.splitWhere
-import scala.language.implicitConversions
/** This class builds instance of `Tree` that represent XML.
*
@@ -133,7 +132,7 @@ abstract class SymbolicXMLBuilder(p: Parsers#Parser, preserveWS: Boolean) {
case (Some(pre), rest) => (const(pre), const(rest))
case _ => (wild, const(n))
}
- mkXML(pos, true, prepat, labpat, null, null, false, args)
+ mkXML(pos, isPattern = true, prepat, labpat, null, null, empty = false, args)
}
protected def convertToTextPat(t: Tree): Tree = t match {
@@ -169,7 +168,7 @@ abstract class SymbolicXMLBuilder(p: Parsers#Parser, preserveWS: Boolean) {
}
/** Returns (Some(prefix) | None, rest) based on position of ':' */
- def splitPrefix(name: String): (Option[String], String) = splitWhere(name, _ == ':', true) match {
+ def splitPrefix(name: String): (Option[String], String) = splitWhere(name, _ == ':', doDropIndex = true) match {
case Some((pre, rest)) => (Some(pre), rest)
case _ => (None, name)
}
@@ -197,7 +196,7 @@ abstract class SymbolicXMLBuilder(p: Parsers#Parser, preserveWS: Boolean) {
uri1
}
- /** Extract all the namespaces from the attribute map. */
+ /* Extract all the namespaces from the attribute map. */
val namespaces: List[Tree] =
for (z <- attrMap.keys.toList ; if z startsWith xmlns) yield {
val ns = splitPrefix(z) match {
@@ -247,7 +246,7 @@ abstract class SymbolicXMLBuilder(p: Parsers#Parser, preserveWS: Boolean) {
val body = mkXML(
pos.makeTransparent,
- false,
+ isPattern = false,
const(pre),
const(newlabel),
makeSymbolicAttrs,
diff --git a/src/compiler/scala/tools/nsc/ast/parser/SyntaxAnalyzer.scala b/src/compiler/scala/tools/nsc/ast/parser/SyntaxAnalyzer.scala
index 8a9ce8907e..3a695c6f59 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/SyntaxAnalyzer.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/SyntaxAnalyzer.scala
@@ -11,26 +11,98 @@ import javac._
/** An nsc sub-component.
*/
abstract class SyntaxAnalyzer extends SubComponent with Parsers with MarkupParsers with Scanners with JavaParsers with JavaScanners {
+ import global._
val phaseName = "parser"
-
def newPhase(prev: Phase): StdPhase = new ParserPhase(prev)
- class ParserPhase(prev: scala.tools.nsc.Phase) extends StdPhase(prev) {
+ abstract class MemberDefTraverser extends Traverser {
+ def onMember(defn: MemberDef): Unit
+
+ private var depth: Int = 0
+ private def lower[T](body: => T): T = {
+ depth += 1
+ try body finally depth -= 1
+ }
+ def currentDepth = depth
+
+ /** Prune this tree and all trees beneath it. Can be overridden. */
+ def prune(md: MemberDef): Boolean = (
+ md.mods.isSynthetic
+ || md.mods.isParamAccessor
+ || nme.isConstructorName(md.name)
+ || (md.name containsName nme.ANON_CLASS_NAME)
+ )
+
+ override def traverse(t: Tree): Unit = t match {
+ case md: MemberDef if prune(md) =>
+ case md @ PackageDef(_, stats) => traverseTrees(stats)
+ case md: ImplDef => onMember(md) ; lower(traverseTrees(md.impl.body))
+ case md: ValOrDefDef => onMember(md) ; lower(traverse(md.rhs))
+ case _ => super.traverse(t)
+ }
+ }
+
+ class MemberPosReporter(unit: CompilationUnit) extends MemberDefTraverser {
+ private var outputFn: MemberDef => String = outputForScreen
+ val path = unit.source.file.path
+
+ // If a single line, outputs the line; if it spans multiple lines
+ // outputs NN,NN with start and end lines, e.g. 15,25.
+ def outputPos(md: MemberDef): String = {
+ val pos = md.pos
+ val start = pos.focusStart.line
+ val end = pos.focusEnd.line
+
+ if (start == end) "" + start else s"$start,$end"
+ }
+ def outputForSed(md: MemberDef): String = {
+ val pos_s = "%-12s" format outputPos(md) + "p"
+ s"$pos_s $path # ${md.keyword} ${md.name}"
+ }
+ def outputForScreen(md: MemberDef): String = {
+ val pos_s = "%-20s" format " " * currentDepth + outputPos(md)
+ s"$pos_s ${md.keyword} ${md.name}"
+ }
+
+ def onMember(md: MemberDef) = println(outputFn(md))
+ // It recognizes "sed" and "anything else".
+ def show(style: String) {
+ if (style == "sed") {
+ outputFn = outputForSed
+ traverse(unit.body)
+ }
+ else {
+ outputFn = outputForScreen
+ println(path)
+ traverse(unit.body)
+ }
+ println("")
+ }
+ }
+
+ private def initialUnitBody(unit: CompilationUnit): Tree = {
+ if (unit.isJava) new JavaUnitParser(unit).parse()
+ else if (global.reporter.incompleteHandled) newUnitParser(unit).parse()
+ else newUnitParser(unit).smartParse()
+ }
+
+ class ParserPhase(prev: Phase) extends StdPhase(prev) {
override val checkable = false
override val keepsTypeParams = false
- def apply(unit: global.CompilationUnit) {
- import global._
+ def apply(unit: CompilationUnit) {
informProgress("parsing " + unit)
- unit.body =
- if (unit.isJava) new JavaUnitParser(unit).parse()
- else if (reporter.incompleteHandled) new UnitParser(unit).parse()
- else new UnitParser(unit).smartParse()
+ // if the body is already filled in, don't overwrite it
+ // otherwise compileLate is going to overwrite bodies of synthetic source files
+ if (unit.body == EmptyTree)
+ unit.body = initialUnitBody(unit)
- if (settings.Yrangepos.value && !reporter.hasErrors)
+ if (settings.Yrangepos && !reporter.hasErrors)
validatePositions(unit.body)
+
+ if (settings.Ymemberpos.isSetByUser)
+ new MemberPosReporter(unit) show (style = settings.Ymemberpos.value)
}
}
}
-
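The new `MemberPosReporter` prints one line per member definition: a single line number when the member fits on one line, a `start,end` pair otherwise, followed by the member's keyword and name (the `sed` style additionally emits the file path, usable as a sed address script). A standalone sketch of just the position label it describes:

    // posLabel(15, 15) == "15"; posLabel(15, 25) == "15,25"
    def posLabel(startLine: Int, endLine: Int): String =
      if (startLine == endLine) startLine.toString else s"$startLine,$endLine"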
diff --git a/src/compiler/scala/tools/nsc/ast/parser/Tokens.scala b/src/compiler/scala/tools/nsc/ast/parser/Tokens.scala
index c3fd414426..5a7dc4950d 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/Tokens.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/Tokens.scala
@@ -6,15 +6,11 @@
package scala.tools.nsc
package ast.parser
-import scala.annotation.switch
-
/** Common code between JavaTokens and Tokens. Not as much (and not as concrete)
* as one might like because JavaTokens for no clear reason chose new numbers for
* identical token sets.
*/
abstract class Tokens {
- import scala.reflect.internal.Chars._
-
/** special tokens */
final val EMPTY = -3
final val UNDEF = -2
@@ -34,14 +30,6 @@ abstract class Tokens {
def isIdentifier(code: Int): Boolean
def isLiteral(code: Int): Boolean
- def isKeyword(code: Int): Boolean
- def isSymbol(code: Int): Boolean
-
- final def isSpace(at: Char) = at == ' ' || at == '\t'
- final def isNewLine(at: Char) = at == CR || at == LF || at == FF
- final def isBrace(code: Int) = code >= LPAREN && code <= RBRACE
- final def isOpenBrace(code: Int) = isBrace(code) && (code % 2 == 0)
- final def isCloseBrace(code: Int) = isBrace(code) && (code % 2 == 1)
}
object Tokens extends Tokens {
@@ -52,20 +40,10 @@ object Tokens extends Tokens {
def isLiteral(code: Int) =
code >= CHARLIT && code <= INTERPOLATIONID
-
/** identifiers */
final val IDENTIFIER = 10
final val BACKQUOTED_IDENT = 11
- def isIdentifier(code: Int) =
- code >= IDENTIFIER && code <= BACKQUOTED_IDENT
-
- @switch def canBeginExpression(code: Int) = code match {
- case IDENTIFIER|BACKQUOTED_IDENT|USCORE => true
- case LBRACE|LPAREN|LBRACKET|COMMENT => true
- case IF|DO|WHILE|FOR|NEW|TRY|THROW => true
- case NULL|THIS|TRUE|FALSE => true
- case code => isLiteral(code)
- }
+ def isIdentifier(code: Int) = code >= IDENTIFIER && code <= BACKQUOTED_IDENT // used by ide
/** keywords */
final val IF = 20
@@ -113,17 +91,6 @@ object Tokens extends Tokens {
final val MACRO = 62 // not yet used in 2.10
final val THEN = 63 // not yet used in 2.10
- def isKeyword(code: Int) =
- code >= IF && code <= LAZY
-
- @switch def isDefinition(code: Int) = code match {
- case CLASS|TRAIT|OBJECT => true
- case CASECLASS|CASEOBJECT => true
- case DEF|VAL|VAR => true
- case TYPE => true
- case _ => false
- }
-
/** special symbols */
final val COMMA = 70
final val SEMI = 71
@@ -141,9 +108,6 @@ object Tokens extends Tokens {
final val AT = 83
final val VIEWBOUND = 84
- def isSymbol(code: Int) =
- code >= COMMA && code <= VIEWBOUND
-
/** parenthesis */
final val LPAREN = 90
final val RPAREN = 91
diff --git a/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala b/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala
index e92c3e1654..ec4e932aae 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala
@@ -26,15 +26,11 @@ abstract class TreeBuilder {
def o2p(offset: Int): Position
def r2p(start: Int, point: Int, end: Int): Position
- def rootId(name: Name) = gen.rootId(name)
def rootScalaDot(name: Name) = gen.rootScalaDot(name)
def scalaDot(name: Name) = gen.scalaDot(name)
def scalaAnyRefConstr = scalaDot(tpnme.AnyRef)
- def scalaAnyValConstr = scalaDot(tpnme.AnyVal)
- def scalaAnyConstr = scalaDot(tpnme.Any)
def scalaUnitConstr = scalaDot(tpnme.Unit)
def productConstr = scalaDot(tpnme.Product)
- def productConstrN(n: Int) = scalaDot(newTypeName("Product" + n))
def serializableConstr = scalaDot(tpnme.Serializable)
def convertToTypeName(t: Tree) = gen.convertToTypeName(t)
@@ -135,9 +131,9 @@ abstract class TreeBuilder {
}
def makeTupleTerm(trees: List[Tree], flattenUnary: Boolean): Tree = trees match {
- case Nil => Literal(Constant())
+ case Nil => Literal(Constant(()))
case List(tree) if flattenUnary => tree
- case _ => makeTuple(trees, false)
+ case _ => makeTuple(trees, isType = false)
}
def makeTupleType(trees: List[Tree], flattenUnary: Boolean): Tree = trees match {
@@ -147,7 +143,7 @@ abstract class TreeBuilder {
}
def stripParens(t: Tree) = t match {
- case Parens(ts) => atPos(t.pos) { makeTupleTerm(ts, true) }
+ case Parens(ts) => atPos(t.pos) { makeTupleTerm(ts, flattenUnary = true) }
case _ => t
}
@@ -175,15 +171,10 @@ abstract class TreeBuilder {
/** Create tree representing (unencoded) binary operation expression or pattern. */
def makeBinop(isExpr: Boolean, left: Tree, op: TermName, right: Tree, opPos: Position): Tree = {
- def mkNamed(args: List[Tree]) =
- if (isExpr) args map {
- case a @ Assign(id @ Ident(name), rhs) =>
- atPos(a.pos) { AssignOrNamedArg(id, rhs) }
- case e => e
- } else args
+ def mkNamed(args: List[Tree]) = if (isExpr) args map treeInfo.assignmentToMaybeNamedArg else args
val arguments = right match {
case Parens(args) => mkNamed(args)
- case _ => List(right)
+ case _ => List(right)
}
if (isExpr) {
if (treeInfo.isLeftAssoc(op)) {
@@ -191,7 +182,7 @@ abstract class TreeBuilder {
} else {
val x = freshTermName()
Block(
- List(ValDef(Modifiers(SYNTHETIC), x, TypeTree(), stripParens(left))),
+ List(ValDef(Modifiers(SYNTHETIC | ARTIFACT), x, TypeTree(), stripParens(left))),
Apply(atPos(opPos union right.pos) { Select(stripParens(right), op.encode) }, List(Ident(x))))
}
} else {
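This hunk sits in the right-associative branch of `makeBinop`: for `left op: right`, the left operand must still be evaluated first, so it is bound to a fresh synthetic (now also ARTIFACT-flagged) temporary before the method is applied on `right`. A value-level standalone illustration of that evaluation order (not the actual `Tree` construction):

    // Roughly { val tmp = left; right.op(tmp) }: left evaluates before right.
    def rightAssoc[A, B, R](left: => A, right: => B)(op: (B, A) => R): R = {
      val tmp = left
      op(right, tmp)
    }

    // rightAssoc(1, List(2, 3))((xs, x) => x :: xs) == List(1, 2, 3), just as 1 :: List(2, 3) would be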
@@ -205,20 +196,26 @@ abstract class TreeBuilder {
*/
def makeAnonymousNew(stats: List[Tree]): Tree = {
val stats1 = if (stats.isEmpty) List(Literal(Constant(()))) else stats
- makeNew(Nil, emptyValDef, stats1, ListOfNil, NoPosition, NoPosition)
+ makeNew(Nil, emptyValDef, stats1, NoPosition, NoPosition)
}
/** Create positioned tree representing an object creation <new parents { stats }
* @param npos the position of the new
* @param cpos the position of the anonymous class starting with parents
*/
- def makeNew(parents: List[Tree], self: ValDef, stats: List[Tree], argss: List[List[Tree]],
+ def makeNew(parents: List[Tree], self: ValDef, stats: List[Tree],
npos: Position, cpos: Position): Tree =
if (parents.isEmpty)
- makeNew(List(scalaAnyRefConstr), self, stats, argss, npos, cpos)
- else if (parents.tail.isEmpty && stats.isEmpty)
- atPos(npos union cpos) { New(parents.head, argss) }
- else {
+ makeNew(List(scalaAnyRefConstr), self, stats, npos, cpos)
+ else if (parents.tail.isEmpty && stats.isEmpty) {
+ // `Parsers.template` no longer differentiates tpts and their argss
+ // e.g. `C()` will be represented as a single tree Apply(Ident(C), Nil)
+ // instead of parents = Ident(C), argss = Nil as before
+ // this change works great for things that are actually templates
+ // but in this degenerate case we need to perform postprocessing
+ val app = treeInfo.dissectApplied(parents.head)
+ atPos(npos union cpos) { New(app.callee, app.argss) }
+ } else {
val x = tpnme.ANON_CLASS_NAME
atPos(npos union cpos) {
Block(
@@ -226,12 +223,12 @@ abstract class TreeBuilder {
atPos(cpos) {
ClassDef(
Modifiers(FINAL), x, Nil,
- Template(parents, self, NoMods, ListOfNil, argss, stats, cpos.focus))
+ Template(parents, self, NoMods, ListOfNil, stats, cpos.focus))
}),
atPos(npos) {
New(
Ident(x) setPos npos.focus,
- ListOfNil)
+ Nil)
}
)
}
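The comment in `makeNew` explains that parents now arrive as a single applied tree (e.g. `Apply(Ident(C), args)`), so the simple `new C(args)` case has to be dissected back into a callee plus argument lists before building `New`. A standalone sketch of that inverse of the earlier parent fold, on the same toy tree shape (not `treeInfo.dissectApplied`):

    // Peels nested applications off: Ap(Ap(Id("C"), List("1")), List("2"))
    // yields (Id("C"), List(List("1"), List("2"))).
    sealed trait Toy
    case class Id(name: String) extends Toy
    case class Ap(fun: Toy, args: List[String]) extends Toy

    def dissect(t: Toy, acc: List[List[String]] = Nil): (Toy, List[List[String]]) = t match {
      case Ap(fun, args) => dissect(fun, args :: acc)
      case callee        => (callee, acc)
    }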
@@ -264,21 +261,21 @@ abstract class TreeBuilder {
case _ => startPos
}
val continu = atPos(o2p(body.pos pointOrElse default)) { Apply(Ident(lname), Nil) }
- val rhs = If(cond, Block(List(body), continu), Literal(Constant()))
+ val rhs = If(cond, Block(List(body), continu), Literal(Constant(())))
LabelDef(lname, Nil, rhs)
}
/** Create tree representing a do-while loop */
def makeDoWhile(lname: TermName, body: Tree, cond: Tree): Tree = {
val continu = Apply(Ident(lname), Nil)
- val rhs = Block(List(body), If(cond, continu, Literal(Constant())))
+ val rhs = Block(List(body), If(cond, continu, Literal(Constant(()))))
LabelDef(lname, Nil, rhs)
}
/** Create block of statements `stats` */
def makeBlock(stats: List[Tree]): Tree =
- if (stats.isEmpty) Literal(Constant())
- else if (!stats.last.isTerm) Block(stats, Literal(Constant()))
+ if (stats.isEmpty) Literal(Constant(()))
+ else if (!stats.last.isTerm) Block(stats, Literal(Constant(())))
else if (stats.length == 1) stats.head
else Block(stats.init, stats.last)
@@ -287,7 +284,7 @@ abstract class TreeBuilder {
CaseDef(condition, EmptyTree, Literal(Constant(true))),
CaseDef(Ident(nme.WILDCARD), EmptyTree, Literal(Constant(false)))
)
- val matchTree = makeVisitor(cases, false, scrutineeName)
+ val matchTree = makeVisitor(cases, checkExhaustive = false, scrutineeName)
atPos(tree.pos)(Apply(Select(tree, nme.withFilter), matchTree :: Nil))
}
@@ -367,9 +364,9 @@ abstract class TreeBuilder {
*/
private def makeFor(mapName: TermName, flatMapName: TermName, enums: List[Enumerator], body: Tree): Tree = {
- /** make a closure pat => body.
- * The closure is assigned a transparent position with the point at pos.point and
- * the limits given by pat and body.
+ /* make a closure pat => body.
+ * The closure is assigned a transparent position with the point at pos.point and
+ * the limits given by pat and body.
*/
def makeClosure(pos: Position, pat: Tree, body: Tree): Tree = {
def splitpos = wrappingPos(List(pat, body)).withPoint(pos.point).makeTransparent
@@ -380,38 +377,28 @@ abstract class TreeBuilder {
body) setPos splitpos
case None =>
atPos(splitpos) {
- makeVisitor(List(CaseDef(pat, EmptyTree, body)), false)
+ makeVisitor(List(CaseDef(pat, EmptyTree, body)), checkExhaustive = false)
}
}
}
- /** Make an application qual.meth(pat => body) positioned at `pos`.
+ /* Make an application qual.meth(pat => body) positioned at `pos`.
*/
def makeCombination(pos: Position, meth: TermName, qual: Tree, pat: Tree, body: Tree): Tree =
Apply(Select(qual, meth) setPos qual.pos, List(makeClosure(pos, pat, body))) setPos pos
- /** Optionally, if pattern is a `Bind`, the bound name, otherwise None.
- */
- def patternVar(pat: Tree): Option[Name] = pat match {
- case Bind(name, _) => Some(name)
- case _ => None
- }
-
- /** If `pat` is not yet a `Bind` wrap it in one with a fresh name
- */
+ /* If `pat` is not yet a `Bind` wrap it in one with a fresh name */
def makeBind(pat: Tree): Tree = pat match {
case Bind(_, _) => pat
case _ => Bind(freshName(), pat) setPos pat.pos
}
- /** A reference to the name bound in Bind `pat`.
- */
+ /* A reference to the name bound in Bind `pat`. */
def makeValue(pat: Tree): Tree = pat match {
case Bind(name, _) => Ident(name) setPos pat.pos.focus
}
- /** The position of the closure that starts with generator at position `genpos`.
- */
+ /* The position of the closure that starts with generator at position `genpos`. */
def closurePos(genpos: Position) = {
val end = body.pos match {
case NoPosition => genpos.point
@@ -432,7 +419,7 @@ abstract class TreeBuilder {
ValFrom(pos, pat, makeCombination(rhs.pos union test.pos, nme.withFilter, rhs, pat.duplicate, test)) :: rest,
body)
case ValFrom(pos, pat, rhs) :: rest =>
- val valeqs = rest.take(definitions.MaxTupleArity - 1).takeWhile(_.isInstanceOf[ValEq]);
+ val valeqs = rest.take(definitions.MaxTupleArity - 1).takeWhile(_.isInstanceOf[ValEq])
assert(!valeqs.isEmpty)
val rest1 = rest.drop(valeqs.length)
val pats = valeqs map { case ValEq(_, pat, _) => pat }
@@ -443,9 +430,9 @@ abstract class TreeBuilder {
val ids = (defpat1 :: defpats) map makeValue
val rhs1 = makeForYield(
List(ValFrom(pos, defpat1, rhs)),
- Block(pdefs, atPos(wrappingPos(ids)) { makeTupleTerm(ids, true) }) setPos wrappingPos(pdefs))
+ Block(pdefs, atPos(wrappingPos(ids)) { makeTupleTerm(ids, flattenUnary = true) }) setPos wrappingPos(pdefs))
val allpats = (pat :: pats) map (_.duplicate)
- val vfrom1 = ValFrom(r2p(pos.startOrPoint, pos.point, rhs1.pos.endOrPoint), atPos(wrappingPos(allpats)) { makeTuple(allpats, false) } , rhs1)
+ val vfrom1 = ValFrom(r2p(pos.startOrPoint, pos.point, rhs1.pos.endOrPoint), atPos(wrappingPos(allpats)) { makeTuple(allpats, isType = false) } , rhs1)
makeFor(mapName, flatMapName, vfrom1 :: rest1, body)
case _ =>
EmptyTree //may happen for erroneous input
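`makeFor` rewrites enumerators into chained `withFilter`, `map` and `flatMap` calls, packing intermediate `val` definitions into tuples along the way. A standalone illustration of the shape it produces for a simple generator/guard/generator comprehension:

    // for (x <- xs; if p(x); y <- f(x)) yield g(x, y)   becomes, roughly:
    def desugared[A, B, C](xs: List[A], p: A => Boolean, f: A => List[B], g: (A, B) => C): List[C] =
      xs.withFilter(p).flatMap(x => f(x).map(y => g(x, y)))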
@@ -462,18 +449,6 @@ abstract class TreeBuilder {
def makeForYield(enums: List[Enumerator], body: Tree): Tree =
makeFor(nme.map, nme.flatMap, enums, body)
- /** Create tree for a lifted expression XX-LIFTING
- */
- def makeLifted(gs: List[ValFrom], body: Tree): Tree = {
- def combine(gs: List[ValFrom]): ValFrom = (gs: @unchecked) match {
- case g :: Nil => g
- case ValFrom(pos1, pat1, rhs1) :: gs2 =>
- val ValFrom(pos2, pat2, rhs2) = combine(gs2)
- ValFrom(pos1, makeTuple(List(pat1, pat2), false), Apply(Select(rhs1, nme.zip), List(rhs2)))
- }
- makeForYield(List(combine(gs)), body)
- }
-
/** Create tree for a pattern alternative */
def makeAlternative(ts: List[Tree]): Tree = {
def alternatives(t: Tree): List[Tree] = t match {
@@ -508,7 +483,7 @@ abstract class TreeBuilder {
def makeCatchFromExpr(catchExpr: Tree): CaseDef = {
val binder = freshTermName("x")
val pat = Bind(binder, Typed(Ident(nme.WILDCARD), Ident(tpnme.Throwable)))
- val catchDef = ValDef(NoMods, freshTermName("catchExpr"), TypeTree(), catchExpr)
+ val catchDef = ValDef(Modifiers(ARTIFACT), freshTermName("catchExpr"), TypeTree(), catchExpr)
val catchFn = Ident(catchDef.name)
val body = atPos(catchExpr.pos.makeTransparent)(Block(
List(catchDef),
@@ -566,7 +541,7 @@ abstract class TreeBuilder {
rhs1,
List(
atPos(pat1.pos) {
- CaseDef(pat1, EmptyTree, makeTupleTerm(vars map (_._1) map Ident.apply, true))
+ CaseDef(pat1, EmptyTree, makeTupleTerm(vars map (_._1) map Ident.apply, flattenUnary = true))
}
))
}
@@ -579,7 +554,7 @@ abstract class TreeBuilder {
val tmp = freshTermName()
val firstDef =
atPos(matchExpr.pos) {
- ValDef(Modifiers(PrivateLocal | SYNTHETIC | (mods.flags & LAZY)),
+ ValDef(Modifiers(PrivateLocal | SYNTHETIC | ARTIFACT | (mods.flags & LAZY)),
tmp, TypeTree(), matchExpr)
}
var cnt = 0
diff --git a/src/compiler/scala/tools/nsc/backend/JavaPlatform.scala b/src/compiler/scala/tools/nsc/backend/JavaPlatform.scala
index fc5d4372c5..00f2933fab 100644
--- a/src/compiler/scala/tools/nsc/backend/JavaPlatform.scala
+++ b/src/compiler/scala/tools/nsc/backend/JavaPlatform.scala
@@ -7,8 +7,7 @@ package scala.tools.nsc
package backend
import io.AbstractFile
-import util.{ClassPath,JavaClassPath,MergedClassPath,DeltaClassPath}
-import util.ClassPath.{ JavaContext, DefaultJavaContext }
+import util.{ClassPath,MergedClassPath,DeltaClassPath}
import scala.tools.util.PathResolver
trait JavaPlatform extends Platform {
@@ -39,18 +38,10 @@ trait JavaPlatform extends Platform {
// replaces the tighter abstract definition here. If we had DOT typing rules, the two
// types would be conjoined and everything would work out. Yet another reason to push for DOT.
- private def depAnalysisPhase =
- if (settings.make.isDefault) Nil
- else List(dependencyAnalysis)
-
- private def classEmitPhase =
- if (settings.target.value == "jvm-1.5-fjbg") genJVM
- else genASM
-
def platformPhases = List(
flatten, // get rid of inner classes
- classEmitPhase // generate .class files
- ) ++ depAnalysisPhase
+ genASM // generate .class files
+ )
lazy val externalEquals = getDecl(BoxesRunTimeClass, nme.equals_)
lazy val externalEqualsNumNum = getDecl(BoxesRunTimeClass, nme.equalsNumNum)
diff --git a/src/compiler/scala/tools/nsc/backend/MSILPlatform.scala b/src/compiler/scala/tools/nsc/backend/MSILPlatform.scala
deleted file mode 100644
index 4493685b52..0000000000
--- a/src/compiler/scala/tools/nsc/backend/MSILPlatform.scala
+++ /dev/null
@@ -1,69 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools.nsc
-package backend
-
-import ch.epfl.lamp.compiler.{ msil => msillib }
-import util.{ ClassPath, MsilClassPath }
-import msil.GenMSIL
-import io.{ AbstractFile, MsilFile }
-
-trait MSILPlatform extends Platform {
- import global._
- import definitions.{ ComparatorClass, BoxedNumberClass, getMember }
-
- type BinaryRepr = MsilFile
-
- if (settings.verbose.value)
- inform("[AssemRefs = " + settings.assemrefs.value + "]")
-
- // phaseName = "msil"
- object genMSIL extends {
- val global: MSILPlatform.this.global.type = MSILPlatform.this.global
- val runsAfter = List[String]("dce")
- val runsRightAfter = None
- } with GenMSIL
-
- lazy val classPath = MsilClassPath.fromSettings(settings)
- def rootLoader = new loaders.PackageLoader(classPath.asInstanceOf[ClassPath[platform.BinaryRepr]])
- // See discussion in JavaPlatForm for why we need a cast here.
-
- /** Update classpath with a substituted subentry */
- def updateClassPath(subst: Map[ClassPath[BinaryRepr], ClassPath[BinaryRepr]]) =
- throw new UnsupportedOperationException("classpath invalidations not supported on MSIL")
-
- def platformPhases = List(
- genMSIL // generate .msil files
- )
-
- lazy val externalEquals = getMember(ComparatorClass.companionModule, nme.equals_)
- def isMaybeBoxed(sym: Symbol) = sym isNonBottomSubClass BoxedNumberClass
-
- def newClassLoader(bin: MsilFile): loaders.SymbolLoader = new loaders.MsilFileLoader(bin)
-
- /**
- * Tells whether a class should be loaded and entered into the package
- * scope. On .NET, this method returns `false` for all synthetic classes
- * (anonymous classes, implementation classes, module classes), their
- * symtab is encoded in the pickle of another class.
- */
- def doLoad(cls: ClassPath[BinaryRepr]#ClassRep): Boolean = {
- if (cls.binary.isDefined) {
- val typ = cls.binary.get.msilType
- if (typ.IsDefined(loaders.clrTypes.SCALA_SYMTAB_ATTR, false)) {
- val attrs = typ.GetCustomAttributes(loaders.clrTypes.SCALA_SYMTAB_ATTR, false)
- assert(attrs.length == 1, attrs.length)
- val a = attrs(0).asInstanceOf[msillib.Attribute]
- // symtab_constr takes a byte array argument (the pickle), i.e. typ has a pickle.
- // otherwise, symtab_default_constr was used, which marks typ as scala-synthetic.
- a.getConstructor() == loaders.clrTypes.SYMTAB_CONSTR
- } else true // always load non-scala types
- } else true // always load source
- }
-
- def needCompile(bin: MsilFile, src: AbstractFile) =
- false // always use compiled file on .net
-}
diff --git a/src/compiler/scala/tools/nsc/backend/ScalaPrimitives.scala b/src/compiler/scala/tools/nsc/backend/ScalaPrimitives.scala
index 8cbb5bc980..b8ddb65de9 100644
--- a/src/compiler/scala/tools/nsc/backend/ScalaPrimitives.scala
+++ b/src/compiler/scala/tools/nsc/backend/ScalaPrimitives.scala
@@ -3,10 +3,10 @@
* @author Martin Odersky
*/
-package scala.tools.nsc
+package scala
+package tools.nsc
package backend
-import scala.tools.nsc.backend.icode._
import scala.collection.{ mutable, immutable }
/** Scala primitive operations are represented as methods in `Any` and
@@ -442,15 +442,17 @@ abstract class ScalaPrimitives {
}
def addPrimitives(cls: Symbol, method: Name, code: Int) {
- val tpe = cls.info
- val sym = tpe.member(method)
- if (sym == NoSymbol)
- inform("Unknown primitive method " + cls + "." + method)
- for (s <- sym.alternatives)
- addPrimitive(
- s,
- if (code == ADD && s.info.paramTypes.head == definitions.StringClass.tpe) CONCAT
- else code)
+ val alts = (cls.info member method).alternatives
+ if (alts.isEmpty)
+ inform(s"Unknown primitive method $cls.$method")
+ else alts foreach (s =>
+ addPrimitive(s,
+ s.info.paramTypes match {
+ case tp :: _ if code == ADD && tp =:= StringTpe => CONCAT
+ case _ => code
+ }
+ )
+ )
}
def isCoercion(code: Int): Boolean = (code >= B2B) && (code <= D2D)
@@ -495,8 +497,8 @@ abstract class ScalaPrimitives {
def isArraySet(code: Int): Boolean = code match {
case ZARRAY_SET | BARRAY_SET | SARRAY_SET | CARRAY_SET |
IARRAY_SET | LARRAY_SET | FARRAY_SET | DARRAY_SET |
- OARRAY_SET | UPDATE => true;
- case _ => false;
+ OARRAY_SET | UPDATE => true
+ case _ => false
}
/** Check whether the given code is a comparison operator */
@@ -515,7 +517,7 @@ abstract class ScalaPrimitives {
DIV | MOD => true; // binary
case OR | XOR | AND |
LSL | LSR | ASR => true; // bitwise
- case _ => false;
+ case _ => false
}
def isLogicalOp(code: Int): Boolean = code match {
@@ -565,7 +567,7 @@ abstract class ScalaPrimitives {
import definitions._
val code = getPrimitive(fun)
- def elementType = beforeTyper {
+ def elementType = enteringTyper {
val arrayParent = tpe :: tpe.parents collectFirst {
case TypeRef(_, ArrayClass, elem :: Nil) => elem
}
diff --git a/src/compiler/scala/tools/nsc/backend/WorklistAlgorithm.scala b/src/compiler/scala/tools/nsc/backend/WorklistAlgorithm.scala
index 798a80ea37..45ca39fee4 100644
--- a/src/compiler/scala/tools/nsc/backend/WorklistAlgorithm.scala
+++ b/src/compiler/scala/tools/nsc/backend/WorklistAlgorithm.scala
@@ -6,8 +6,7 @@
package scala.tools.nsc
package backend
-import scala.tools.nsc.ast._
-import scala.collection.{ mutable, immutable }
+import scala.collection.mutable
/**
* Simple implementation of a worklist algorithm. A processing
@@ -32,8 +31,6 @@ trait WorklistAlgorithm {
* Run the iterative algorithm until the worklist remains empty.
* The initializer is run once before the loop starts and should
* initialize the worklist.
- *
- * @param initWorklist ...
*/
def run(initWorklist: => Unit) = {
initWorklist
diff --git a/src/compiler/scala/tools/nsc/backend/icode/BasicBlocks.scala b/src/compiler/scala/tools/nsc/backend/icode/BasicBlocks.scala
index d50d4cd125..26d10d2514 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/BasicBlocks.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/BasicBlocks.scala
@@ -8,8 +8,7 @@ package backend
package icode
import scala.collection.{ mutable, immutable }
-import mutable.{ ListBuffer, ArrayBuffer }
-import scala.reflect.internal.util.{ Position, NoPosition }
+import mutable.ListBuffer
import backend.icode.analysis.ProgramPoint
import scala.language.postfixOps
@@ -17,8 +16,7 @@ trait BasicBlocks {
self: ICodes =>
import opcodes._
- import global.{ ifDebug, settings, log, nme }
- import nme.isExceptionResultName
+ import global._
/** Override Array creation for efficiency (to not go through reflection). */
private implicit val instructionTag: scala.reflect.ClassTag[Instruction] = new scala.reflect.ClassTag[Instruction] {
@@ -38,7 +36,7 @@ trait BasicBlocks {
import BBFlags._
- def code = method.code
+ def code = if (method eq null) NoCode else method.code
private final class SuccessorList() {
private var successors: List[BasicBlock] = Nil
@@ -68,10 +66,10 @@ trait BasicBlocks {
addBlock(scratchBlocks.head)
scratchBlocks = scratchBlocks.tail
}
- /** Return a list of successors for 'b' that come from exception handlers
- * covering b's (non-exceptional) successors. These exception handlers
- * might not cover 'b' itself. This situation corresponds to an
- * exception being thrown as the first thing of one of b's successors.
+ /* Return a list of successors for 'b' that come from exception handlers
+ * covering b's (non-exceptional) successors. These exception handlers
+ * might not cover 'b' itself. This situation corresponds to an
+ * exception being thrown as the first thing of one of b's successors.
*/
while (scratchHandlers ne Nil) {
val handler = scratchHandlers.head
@@ -122,7 +120,7 @@ trait BasicBlocks {
def closed: Boolean = hasFlag(CLOSED)
def closed_=(b: Boolean) = if (b) setFlag(CLOSED) else resetFlag(CLOSED)
- /** When set, the <code>emit</code> methods will be ignored. */
+ /** When set, the `emit` methods will be ignored. */
def ignore: Boolean = hasFlag(IGNORING)
def ignore_=(b: Boolean) = if (b) setFlag(IGNORING) else resetFlag(IGNORING)
@@ -260,13 +258,9 @@ trait BasicBlocks {
}
}
- /** Replaces <code>oldInstr</code> with <code>is</code>. It does not update
+ /** Replaces `oldInstr` with `is`. It does not update
* the position field in the newly inserted instructions, so it behaves
* differently than the one-instruction versions of this function.
- *
- * @param iold ..
- * @param is ..
- * @return ..
*/
def replaceInstruction(oldInstr: Instruction, is: List[Instruction]): Boolean = {
assert(closed, "Instructions can be replaced only after the basic block is closed")
@@ -280,17 +274,7 @@ trait BasicBlocks {
}
}
- /** Insert instructions in 'is' immediately after index 'idx'. */
- def insertAfter(idx: Int, is: List[Instruction]) {
- assert(closed, "Instructions can be replaced only after the basic block is closed")
-
- instrs = instrs.patch(idx + 1, is, 0)
- code.touched = true
- }
-
/** Removes instructions found at the given positions.
- *
- * @param positions ...
*/
def removeInstructionsAt(positions: Int*) {
assert(closed, this)
@@ -311,8 +295,6 @@ trait BasicBlocks {
}
/** Replaces all instructions found in the map.
- *
- * @param map ...
*/
def subst(map: Map[Instruction, Instruction]): Unit =
if (!closed)
@@ -344,21 +326,17 @@ trait BasicBlocks {
* is closed, which sets the DIRTYSUCCS flag.
*/
def emit(instr: Instruction, pos: Position) {
-/* if (closed) {
- print()
- Console.println("trying to emit: " + instr)
- } */
assert(!closed || ignore, this)
if (ignore) {
- if (settings.debug.value) {
- /** Trying to pin down what it's likely to see after a block has been
- * put into ignore mode so we hear about it if there's a problem.
+ if (settings.debug) {
+ /* Trying to pin down what it's likely to see after a block has been
+ * put into ignore mode so we hear about it if there's a problem.
*/
instr match {
- case JUMP(_) | RETURN(_) | THROW(_) | SCOPE_EXIT(_) => // ok
- case STORE_LOCAL(local) if isExceptionResultName(local.sym.name) => // ok
- case x => log("Ignoring instruction, possibly at our peril, at " + pos + ": " + x)
+ case JUMP(_) | RETURN(_) | THROW(_) | SCOPE_EXIT(_) => // ok
+ case STORE_LOCAL(local) if nme.isExceptionResultName(local.sym.name) => // ok
+ case x => log("Ignoring instruction, possibly at our peril, at " + pos + ": " + x)
}
}
}
@@ -403,7 +381,6 @@ trait BasicBlocks {
/** Close the block */
def close() {
assert(!closed || ignore, this)
- assert(instructionList.nonEmpty, "Empty block: " + this)
if (ignore && closed) { // redundant `ignore &&` for clarity -- we should never be in state `!ignore && closed`
// not doing anything to this block is important...
// because the else branch reverses innocent blocks, which is wrong when they're in ignore mode (and closed)
@@ -413,9 +390,38 @@ trait BasicBlocks {
setFlag(DIRTYSUCCS)
instructionList = instructionList.reverse
instrs = instructionList.toArray
+ if (instructionList.isEmpty) {
+ debuglog(s"Removing empty block $this")
+ code removeBlock this
+ }
+ }
+ }
+
+ /**
+ * If cond is true, closes this block, enters ignore mode, and removes the block from
+ * its list of blocks. Used to allow a block to be started and then cancelled when it
+ * is discovered to be unreachable.
+ */
+ def killIf(cond: Boolean) {
+ if (!settings.YdisableUnreachablePrevention && cond) {
+ debuglog(s"Killing block $this")
+ assert(instructionList.isEmpty, s"Killing a non empty block $this")
+ // only checked under debug because fetching predecessor list is moderately expensive
+ if (settings.debug)
+ assert(predecessors.isEmpty, s"Killing block $this which is referred to from ${predecessors.mkString}")
+
+ close()
+ enterIgnoreMode()
}
}
+ /**
+ * Same as killIf but with the logic of the condition reversed
+ */
+ def killUnless(cond: Boolean) {
+ this killIf !cond
+ }
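For context, a condensed sketch of the usage pattern the rest of this patch follows (it mirrors the genLoadIf hunk further down; the names come from that hunk): the continuation block is kept alive only if at least one branch can still reach it.

    // sketch of the caller side: kill contCtx.bb when neither branch emits real code
    val contReachable = !(thenCtx.bb.ignore && elseCtx.bb.ignore)
    thenCtx.bb.closeWith(JUMP(contCtx.bb))
    elseCtx.bb.closeWith(JUMP(contCtx.bb))
    contCtx.bb killUnless contReachable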
+
def open() {
assert(closed, this)
closed = false
@@ -441,11 +447,6 @@ trait BasicBlocks {
ignore = true
}
- def exitIgnoreMode() {
- assert(ignore, "Exit ignore mode when not in ignore mode: " + this)
- ignore = false
- }
-
/** Return the last instruction of this basic block. */
def lastInstruction =
if (closed) instrs(instrs.length - 1)
@@ -474,16 +475,17 @@ trait BasicBlocks {
def directSuccessors: List[BasicBlock] =
if (isEmpty) Nil else lastInstruction match {
- case JUMP(whereto) => whereto :: Nil
- case CJUMP(succ, fail, _, _) => fail :: succ :: Nil
- case CZJUMP(succ, fail, _, _) => fail :: succ :: Nil
- case SWITCH(_, labels) => labels
- case RETURN(_) => Nil
- case THROW(_) => Nil
- case _ =>
+ case JUMP(whereto) => whereto :: Nil
+ case CJUMP(succ, fail, _, _) => fail :: succ :: Nil
+ case CZJUMP(succ, fail, _, _) => fail :: succ :: Nil
+ case SWITCH(_, labels) => labels
+ case RETURN(_) => Nil
+ case THROW(_) => Nil
+ case _ =>
if (closed)
- dumpClassesAndAbort("The last instruction is not a control flow instruction: " + lastInstruction)
- else Nil
+ devWarning(s"$lastInstruction/${lastInstruction.getClass.getName} is not a control flow instruction")
+
+ Nil
}
/** Returns the predecessors of this block. */
@@ -502,17 +504,6 @@ trait BasicBlocks {
override def hashCode = label * 41 + code.hashCode
- // Instead of it, rather use a printer
- def print() { print(java.lang.System.out) }
-
- def print(out: java.io.PrintStream) {
- out.println("block #"+label+" :")
- foreach(i => out.println(" " + i))
- out.print("Successors: ")
- successors.foreach((x: BasicBlock) => out.print(" "+x.label.toString()))
- out.println()
- }
-
private def succString = if (successors.isEmpty) "[S: N/A]" else successors.distinct.mkString("[S: ", ", ", "]")
private def predString = if (predecessors.isEmpty) "[P: N/A]" else predecessors.distinct.mkString("[P: ", ", ", "]")
@@ -532,18 +523,6 @@ trait BasicBlocks {
}
object BBFlags {
- val flagMap = Map[Int, String](
- LOOP_HEADER -> "loopheader",
- IGNORING -> "ignore",
- EX_HEADER -> "exheader",
- CLOSED -> "closed",
- DIRTYSUCCS -> "dirtysuccs",
- DIRTYPREDS -> "dirtypreds"
- )
- def flagsToString(flags: Int) = {
- flagMap collect { case (bit, name) if (bit & flags) != 0 => "<" + name + ">" } mkString " "
- }
-
/** This block is a loop header (was translated from a while). */
final val LOOP_HEADER = (1 << 0)
@@ -561,4 +540,16 @@ object BBFlags {
/** Code has been changed, recompute predecessors. */
final val DIRTYPREDS = (1 << 5)
+
+ val flagMap = Map[Int, String](
+ LOOP_HEADER -> "loopheader",
+ IGNORING -> "ignore",
+ EX_HEADER -> "exheader",
+ CLOSED -> "closed",
+ DIRTYSUCCS -> "dirtysuccs",
+ DIRTYPREDS -> "dirtypreds"
+ )
+ def flagsToString(flags: Int) = {
+ flagMap collect { case (bit, name) if (bit & flags) != 0 => "<" + name + ">" } mkString " "
+ }
}
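For reference, a small usage sketch of the relocated helpers (assumed call site; the rendering order of the flag names depends on Map iteration and may vary):

    import BBFlags._
    // a closed block whose successors need recomputing
    flagsToString(CLOSED | DIRTYSUCCS)   // e.g. "<closed> <dirtysuccs>"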
diff --git a/src/compiler/scala/tools/nsc/backend/icode/ExceptionHandlers.scala b/src/compiler/scala/tools/nsc/backend/icode/ExceptionHandlers.scala
index 2cebf7ad99..7243264773 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/ExceptionHandlers.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/ExceptionHandlers.scala
@@ -7,7 +7,7 @@ package scala.tools.nsc
package backend
package icode
-import scala.collection.{ mutable, immutable }
+import scala.collection.immutable
/**
* Exception handlers are pieces of code that `handle` exceptions on
@@ -24,14 +24,11 @@ trait ExceptionHandlers {
class ExceptionHandler(val method: IMethod, val label: TermName, val cls: Symbol, val pos: Position) {
def loadExceptionClass = if (cls == NoSymbol) ThrowableClass else cls
- private var _startBlock: BasicBlock = _;
- var finalizer: Finalizer = _;
-
- /** Needed for the MSIL backend. */
- var resultKind: TypeKind = _;
+ private var _startBlock: BasicBlock = _
+ var finalizer: Finalizer = _
def setStartBlock(b: BasicBlock) = {
- _startBlock = b;
+ _startBlock = b
b.exceptionHandlerStart = true
}
def startBlock = _startBlock
@@ -49,11 +46,11 @@ trait ExceptionHandlers {
/** The body of this exception handler. May contain 'dead' blocks (which will not
* make it into generated code because linearizers may not include them) */
- var blocks: List[BasicBlock] = Nil;
+ var blocks: List[BasicBlock] = Nil
- def addBlock(b: BasicBlock): Unit = blocks = b :: blocks;
+ def addBlock(b: BasicBlock): Unit = blocks = b :: blocks
- override def toString() = "exh_" + label + "(" + cls.simpleName + ")";
+ override def toString() = "exh_" + label + "(" + cls.simpleName + ")"
/** A standard copy constructor */
def this(other: ExceptionHandler) = {
@@ -71,10 +68,4 @@ trait ExceptionHandlers {
override def toString() = "finalizer_" + label
override def dup: Finalizer = new Finalizer(method, label, pos)
}
-
- object NoFinalizer extends Finalizer(null, newTermNameCached("<no finalizer>"), NoPosition) {
- override def startBlock: BasicBlock = sys.error("NoFinalizer cannot have a start block.");
- override def setStartBlock(b: BasicBlock): Unit = sys.error("NoFinalizer cannot have a start block.");
- override def dup = this
- }
}
diff --git a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala
index 44d7a1929b..7263a0d0b9 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala
@@ -4,7 +4,8 @@
*/
-package scala.tools.nsc
+package scala
+package tools.nsc
package backend
package icode
@@ -13,10 +14,8 @@ import scala.collection.mutable.{ ListBuffer, Buffer }
import scala.tools.nsc.symtab._
import scala.annotation.switch
import PartialFunction._
-import scala.language.postfixOps
-/** This class ...
- *
+/**
* @author Iulian Dragos
* @version 1.0
*/
@@ -24,12 +23,7 @@ abstract class GenICode extends SubComponent {
import global._
import icodes._
import icodes.opcodes._
- import definitions.{
- ArrayClass, ObjectClass, ThrowableClass, StringClass, StringModule, AnyRefClass,
- Object_equals, Object_isInstanceOf, Object_asInstanceOf, ScalaRunTimeModule,
- BoxedNumberClass, BoxedCharacterClass,
- getMember
- }
+ import definitions._
import scalaPrimitives.{
isArrayOp, isComparisonOp, isLogicalOp,
isUniversalEqualityOp, isReferenceEqualityOp
@@ -41,7 +35,7 @@ abstract class GenICode extends SubComponent {
override def newPhase(prev: Phase) = new ICodePhase(prev)
@inline private def debugassert(cond: => Boolean, msg: => Any) {
- if (settings.debug.value)
+ if (settings.debug)
assert(cond, msg)
}
@@ -52,14 +46,14 @@ abstract class GenICode extends SubComponent {
var unit: CompilationUnit = NoCompilationUnit
override def run() {
- scalaPrimitives.init
+ scalaPrimitives.init()
classes.clear()
super.run()
}
override def apply(unit: CompilationUnit): Unit = {
this.unit = unit
- unit.icode.clear
+ unit.icode.clear()
informProgress("Generating icode for " + unit)
gen(unit.body)
this.unit = NoCompilationUnit
@@ -93,7 +87,7 @@ abstract class GenICode extends SubComponent {
debuglog("Generating class: " + tree.symbol.fullName)
val outerClass = ctx.clazz
ctx setClass (new IClass(tree.symbol) setCompilationUnit unit)
- addClassFields(ctx, tree.symbol);
+ addClassFields(ctx, tree.symbol)
classes += (tree.symbol -> ctx.clazz)
unit.icode += ctx.clazz
gen(impl, ctx)
@@ -121,7 +115,7 @@ abstract class GenICode extends SubComponent {
m.native = m.symbol.hasAnnotation(definitions.NativeAttr)
if (!m.isAbstractMethod && !m.native) {
- ctx1 = genLoad(rhs, ctx1, m.returnType);
+ ctx1 = genLoad(rhs, ctx1, m.returnType)
// reverse the order of the local variables, to match the source-order
m.locals = m.locals.reverse
@@ -131,7 +125,7 @@ abstract class GenICode extends SubComponent {
case Return(_) => ()
case EmptyTree =>
globalError("Concrete method has no definition: " + tree + (
- if (settings.debug.value) "(found: " + m.symbol.owner.info.decls.toList.mkString(", ") + ")"
+ if (settings.debug) "(found: " + m.symbol.owner.info.decls.toList.mkString(", ") + ")"
else "")
)
case _ => if (ctx1.bb.isEmpty)
@@ -139,7 +133,7 @@ abstract class GenICode extends SubComponent {
else
ctx1.bb.closeWith(RETURN(m.returnType))
}
- if (!ctx1.bb.closed) ctx1.bb.close
+ if (!ctx1.bb.closed) ctx1.bb.close()
prune(ctx1.method)
} else
ctx1.method.setCode(NoCode)
@@ -160,18 +154,13 @@ abstract class GenICode extends SubComponent {
* and not produce any value. Use genLoad for expressions which leave
* a value on top of the stack.
*
- * @param tree ...
- * @param ctx ...
* @return a new context. This is necessary for control flow instructions
* which may change the current basic block.
*/
private def genStat(tree: Tree, ctx: Context): Context = tree match {
case Assign(lhs @ Select(_, _), rhs) =>
val isStatic = lhs.symbol.isStaticMember
- var ctx1 = if (isStatic) ctx
- else if (forMSIL && msil_IsValuetypeInstField(lhs.symbol))
- msil_genLoadQualifierAddress(lhs, ctx)
- else genLoadQualifier(lhs, ctx)
+ var ctx1 = if (isStatic) ctx else genLoadQualifier(lhs, ctx)
ctx1 = genLoad(rhs, ctx1, toTypeKind(lhs.symbol.info))
ctx1.bb.emit(STORE_FIELD(lhs.symbol, isStatic), tree.pos)
@@ -188,12 +177,12 @@ abstract class GenICode extends SubComponent {
}
private def genThrow(expr: Tree, ctx: Context): (Context, TypeKind) = {
- require(expr.tpe <:< ThrowableClass.tpe, expr.tpe)
+ require(expr.tpe <:< ThrowableTpe, expr.tpe)
val thrownKind = toTypeKind(expr.tpe)
val ctx1 = genLoad(expr, ctx, thrownKind)
ctx1.bb.emit(THROW(expr.tpe.typeSymbol), expr.pos)
- ctx1.bb.enterIgnoreMode
+ ctx1.bb.enterIgnoreMode()
(ctx1, NothingReference)
}
@@ -231,10 +220,10 @@ abstract class GenICode extends SubComponent {
// binary operation
case rarg :: Nil =>
- resKind = getMaxType(larg.tpe :: rarg.tpe :: Nil);
+ resKind = getMaxType(larg.tpe :: rarg.tpe :: Nil)
if (scalaPrimitives.isShiftOp(code) || scalaPrimitives.isBitwiseOp(code))
assert(resKind.isIntegralType | resKind == BOOL,
- resKind.toString() + " incompatible with arithmetic modulo operation: " + ctx1);
+ resKind.toString() + " incompatible with arithmetic modulo operation: " + ctx1)
ctx1 = genLoad(larg, ctx1, resKind)
ctx1 = genLoad(rarg,
@@ -264,11 +253,6 @@ abstract class GenICode extends SubComponent {
}
/** Generate primitive array operations.
- *
- * @param tree ...
- * @param ctx ...
- * @param code ...
- * @return ...
*/
private def genArrayOp(tree: Tree, ctx: Context, code: Int, expectedType: TypeKind): (Context, TypeKind) = {
import scalaPrimitives._
@@ -283,14 +267,19 @@ abstract class GenICode extends SubComponent {
if (scalaPrimitives.isArrayGet(code)) {
// load argument on stack
debugassert(args.length == 1,
- "Too many arguments for array get operation: " + tree);
+ "Too many arguments for array get operation: " + tree)
ctx1 = genLoad(args.head, ctx1, INT)
generatedType = elem
ctx1.bb.emit(LOAD_ARRAY_ITEM(elementType), tree.pos)
+ // it's tempting to just drop array loads of type Null instead
+ // of adapting them, but array accesses can cause an
+ // ArrayIndexOutOfBoundsException, so we can't. Besides, Array[Null]
+ // probably isn't common enough to figure out an optimization
+ adaptNullRef(generatedType, expectedType, ctx1, tree.pos)
}
else if (scalaPrimitives.isArraySet(code)) {
debugassert(args.length == 2,
- "Too many arguments for array set operation: " + tree);
+ "Too many arguments for array set operation: " + tree)
ctx1 = genLoad(args.head, ctx1, INT)
ctx1 = genLoad(args.tail.head, ctx1, toTypeKind(args.tail.head.tpe))
// the following line should really be here, but because of bugs in erasure
@@ -308,11 +297,8 @@ abstract class GenICode extends SubComponent {
}
private def genSynchronized(tree: Apply, ctx: Context, expectedType: TypeKind): (Context, TypeKind) = {
val Apply(fun, args) = tree
- val monitor = ctx.makeLocal(tree.pos, ObjectClass.tpe, "monitor")
+ val monitor = ctx.makeLocal(tree.pos, ObjectTpe, "monitor")
var monitorResult: Local = null
-
- // if the synchronized block returns a result, store it in a local variable. just leaving
- // it on the stack is not valid in MSIL (stack is cleaned when leaving try-blocks)
val argTpe = args.head.tpe
val hasResult = expectedType != UNIT
if (hasResult)
@@ -345,7 +331,7 @@ abstract class GenICode extends SubComponent {
MONITOR_EXIT() setPos tree.pos,
THROW(ThrowableClass)
))
- exhCtx.bb.enterIgnoreMode
+ exhCtx.bb.enterIgnoreMode()
exhCtx
})), EmptyTree, tree)
@@ -359,9 +345,9 @@ abstract class GenICode extends SubComponent {
private def genLoadIf(tree: If, ctx: Context, expectedType: TypeKind): (Context, TypeKind) = {
val If(cond, thenp, elsep) = tree
- var thenCtx = ctx.newBlock
- var elseCtx = ctx.newBlock
- val contCtx = ctx.newBlock
+ var thenCtx = ctx.newBlock()
+ var elseCtx = ctx.newBlock()
+ val contCtx = ctx.newBlock()
genCond(cond, ctx, thenCtx, elseCtx)
@@ -386,12 +372,14 @@ abstract class GenICode extends SubComponent {
"I produce UNIT in a context where " + expectedType + " is expected!")
// alternatives may be already closed by a tail-recursive jump
+ val contReachable = !(thenCtx.bb.ignore && elseCtx.bb.ignore)
thenCtx.bb.closeWith(JUMP(contCtx.bb))
elseCtx.bb.closeWith(
if (elsep == EmptyTree) JUMP(contCtx.bb)
else JUMP(contCtx.bb) setPos tree.pos
)
+ contCtx.bb killUnless contReachable
(contCtx, resKind)
}
private def genLoadTry(tree: Try, ctx: Context, setGeneratedType: TypeKind => Unit): Context = {
@@ -414,8 +402,8 @@ abstract class GenICode extends SubComponent {
(pat.symbol.tpe.typeSymbol, kind, {
ctx: Context =>
- ctx.bb.emit(STORE_LOCAL(exception), pat.pos);
- genLoad(body, ctx, kind);
+ ctx.bb.emit(STORE_LOCAL(exception), pat.pos)
+ genLoad(body, ctx, kind)
})
}
}
@@ -432,7 +420,7 @@ abstract class GenICode extends SubComponent {
private def genPrimitiveOp(tree: Apply, ctx: Context, expectedType: TypeKind): (Context, TypeKind) = {
val sym = tree.symbol
- val Apply(fun @ Select(receiver, _), args) = tree
+ val Apply(fun @ Select(receiver, _), _) = tree
val code = scalaPrimitives.getPrimitive(sym, receiver.tpe)
if (scalaPrimitives.isArithmeticOp(code))
@@ -444,7 +432,7 @@ abstract class GenICode extends SubComponent {
else if (isArrayOp(code))
genArrayOp(tree, ctx, code, expectedType)
else if (isLogicalOp(code) || isComparisonOp(code)) {
- val trueCtx, falseCtx, afterCtx = ctx.newBlock
+ val trueCtx, falseCtx, afterCtx = ctx.newBlock()
genCond(tree, ctx, trueCtx, falseCtx)
trueCtx.bb.emitOnly(
@@ -471,132 +459,6 @@ abstract class GenICode extends SubComponent {
}
/**
- * forMSIL
- */
- private def msil_IsValuetypeInstMethod(msym: Symbol) = (
- loaders.clrTypes.methods get msym exists (mMSIL =>
- mMSIL.IsInstance && mMSIL.DeclaringType.IsValueType
- )
- )
- private def msil_IsValuetypeInstField(fsym: Symbol) = (
- loaders.clrTypes.fields get fsym exists (fMSIL =>
- !fMSIL.IsStatic && fMSIL.DeclaringType.IsValueType
- )
- )
-
- /**
- * forMSIL: Adds a local var, the emitted code requires one more slot on the stack as on entry
- */
- private def msil_genLoadZeroOfNonEnumValuetype(ctx: Context, kind: TypeKind, pos: Position, leaveAddressOnStackInstead: Boolean) {
- val REFERENCE(clssym) = kind
- assert(loaders.clrTypes.isNonEnumValuetype(clssym), clssym)
- val local = ctx.makeLocal(pos, clssym.tpe, "tmp")
- ctx.method.addLocal(local)
- ctx.bb.emit(CIL_LOAD_LOCAL_ADDRESS(local), pos)
- ctx.bb.emit(CIL_INITOBJ(kind), pos)
- val instr = if (leaveAddressOnStackInstead)
- CIL_LOAD_LOCAL_ADDRESS(local)
- else
- LOAD_LOCAL(local)
- ctx.bb.emit(instr, pos)
- }
-
- /**
- * forMSIL
- */
- private def msil_genLoadAddressOf(tree: Tree, ctx: Context, expectedType: TypeKind, butRawValueIsAlsoGoodEnough: Boolean): Context = {
- var generatedType = expectedType
- var addressTaken = false
- debuglog("at line: " + (if (tree.pos.isDefined) tree.pos.line else tree.pos))
-
- var resCtx: Context = tree match {
-
- // emits CIL_LOAD_FIELD_ADDRESS
- case Select(qualifier, selector) if (!tree.symbol.isModule) =>
- addressTaken = true
- val sym = tree.symbol
- generatedType = toTypeKind(sym.info)
-
- if (sym.isStaticMember) {
- ctx.bb.emit(CIL_LOAD_FIELD_ADDRESS(sym, true), tree.pos)
- ctx
- } else {
- val ctx1 = genLoadQualifier(tree, ctx)
- ctx1.bb.emit(CIL_LOAD_FIELD_ADDRESS(sym, false), tree.pos)
- ctx1
- }
-
- // emits CIL_LOAD_LOCAL_ADDRESS
- case Ident(name) if (!tree.symbol.isPackage && !tree.symbol.isModule)=>
- addressTaken = true
- val sym = tree.symbol
- try {
- val Some(l) = ctx.method.lookupLocal(sym)
- ctx.bb.emit(CIL_LOAD_LOCAL_ADDRESS(l), tree.pos)
- generatedType = l.kind // actually, should be "V&" but the callsite is aware of this
- } catch {
- case ex: MatchError =>
- abort("symbol " + sym + " does not exist in " + ctx.method)
- }
- ctx
-
- // emits CIL_LOAD_ARRAY_ITEM_ADDRESS
- case Apply(fun, args) =>
- if (isPrimitive(fun.symbol)) {
-
- val sym = tree.symbol
- val Apply(fun @ Select(receiver, _), args) = tree
- val code = scalaPrimitives.getPrimitive(sym, receiver.tpe)
-
- if (isArrayOp(code)) {
- val arrayObj = receiver
- val k = toTypeKind(arrayObj.tpe)
- val ARRAY(elementType) = k
- if (scalaPrimitives.isArrayGet(code)) {
- var ctx1 = genLoad(arrayObj, ctx, k)
- // load argument on stack
- debugassert(args.length == 1, "Too many arguments for array get operation: " + tree)
- ctx1 = genLoad(args.head, ctx1, INT)
- generatedType = elementType // actually "managed pointer to element type" but the callsite is aware of this
- ctx1.bb.emit(CIL_LOAD_ARRAY_ITEM_ADDRESS(elementType), tree.pos)
- addressTaken = true
- ctx1
- } else null
- } else null
- } else null
-
- case This(qual) =>
- /* TODO: this case handler is a placeholder for the time when Level 2 support for valuetypes is in place,
- in particular when invoking other methods on this where this is a valuetype value (boxed or not).
- As receiver, a managed pointer is expected, and a plain ldarg.0 achieves just that. */
- addressTaken = true
- genLoad(tree, ctx, expectedType)
-
- case _ =>
- null /* A method returning ByRef won't pass peverify, so I guess this case handler is dead code.
- Even if it's not, the code below to handler !addressTaken below. */
- }
-
- if (!addressTaken) {
- resCtx = genLoad(tree, ctx, expectedType)
- if (!butRawValueIsAlsoGoodEnough) {
- // raw value on stack (must be an intermediate result, e.g. returned by method call), take address
- addressTaken = true
- val boxType = expectedType // toTypeKind(expectedType /* TODO FIXME */)
- resCtx.bb.emit(BOX(boxType), tree.pos)
- resCtx.bb.emit(CIL_UNBOX(boxType), tree.pos)
- }
- }
-
- // emit conversion
- if (generatedType != expectedType)
- abort("Unexpected tree in msil_genLoadAddressOf: " + tree + " at: " + tree.pos)
-
- resCtx
- }
-
-
- /**
* Generate code for trees that produce values on the stack
*
* @param tree The tree to be translated
@@ -613,7 +475,11 @@ abstract class GenICode extends SubComponent {
val resCtx: Context = tree match {
case LabelDef(name, params, rhs) =>
def genLoadLabelDef = {
- val ctx1 = ctx.newBlock
+ val ctx1 = ctx.newBlock() // note: we cannot kill ctx1 if ctx is in ignore mode because
+ // label defs can be the target of jumps from other locations.
+ // that means label defs can lead to unreachable code without
+ // proper reachability analysis
+
if (nme.isLoopHeaderLabel(name))
ctx1.bb.loopHeader = true
@@ -627,7 +493,7 @@ abstract class GenICode extends SubComponent {
val pair = (tree.symbol -> (new Label(tree.symbol) anchor ctx1.bb setParams (params map (_.symbol))))
debuglog("Adding label " + tree.symbol.fullLocationString + " in genLoad.")
ctx1.labels += pair
- ctx.method.addLocals(params map (p => new Local(p.symbol, toTypeKind(p.symbol.info), false)));
+ ctx.method.addLocals(params map (p => new Local(p.symbol, toTypeKind(p.symbol.info), false)))
}
ctx.bb.closeWith(JUMP(ctx1.bb), tree.pos)
@@ -645,13 +511,13 @@ abstract class GenICode extends SubComponent {
val local = ctx.method.addLocal(new Local(sym, toTypeKind(sym.info), false))
if (rhs == EmptyTree) {
- debuglog("Uninitialized variable " + tree + " at: " + (tree.pos));
+ debuglog("Uninitialized variable " + tree + " at: " + (tree.pos))
ctx.bb.emit(getZeroOf(local.kind))
}
var ctx1 = ctx
if (rhs != EmptyTree)
- ctx1 = genLoad(rhs, ctx, local.kind);
+ ctx1 = genLoad(rhs, ctx, local.kind)
ctx1.bb.emit(STORE_LOCAL(local), tree.pos)
ctx1.scope.add(local)
@@ -695,7 +561,8 @@ abstract class GenICode extends SubComponent {
// we have to run this without the same finalizer in
// the list, otherwise infinite recursion happens for
// finalizers that contain 'return'
- val fctx = finalizerCtx.newBlock
+ val fctx = finalizerCtx.newBlock()
+ fctx.bb killIf ctx1.bb.ignore
ctx1.bb.closeWith(JUMP(fctx.bb))
ctx1 = genLoad(f1, fctx, UNIT)
}
@@ -708,7 +575,7 @@ abstract class GenICode extends SubComponent {
}
adapt(returnedKind, ctx1.method.returnType, ctx1, tree.pos)
ctx1.bb.emit(RETURN(ctx.method.returnType), tree.pos)
- ctx1.bb.enterIgnoreMode
+ ctx1.bb.enterIgnoreMode()
generatedType = expectedType
ctx1
}
@@ -760,7 +627,7 @@ abstract class GenICode extends SubComponent {
} else {
genCast(l, r, ctx1, cast)
}
- generatedType = if (cast) r else BOOL;
+ generatedType = if (cast) r else BOOL
ctx1
}
genLoadApply1
@@ -773,7 +640,7 @@ abstract class GenICode extends SubComponent {
// on the stack (contrary to what the type in the AST says).
case Apply(fun @ Select(Super(_, mix), _), args) =>
def genLoadApply2 = {
- debuglog("Call to super: " + tree);
+ debuglog("Call to super: " + tree)
val invokeStyle = SuperCall(mix)
// if (fun.symbol.isConstructor) Static(true) else SuperCall(mix);
@@ -819,31 +686,15 @@ abstract class GenICode extends SubComponent {
debugassert(ctor.owner == cls,
"Symbol " + ctor.owner.fullName + " is different than " + tpt)
- val ctx2 = if (forMSIL && loaders.clrTypes.isNonEnumValuetype(cls)) {
- /* parameterful constructors are the only possible custom constructors,
- a default constructor can't be defined for valuetypes, CLR dixit */
- val isDefaultConstructor = args.isEmpty
- if (isDefaultConstructor) {
- msil_genLoadZeroOfNonEnumValuetype(ctx, rt, tree.pos, leaveAddressOnStackInstead = false)
- ctx
- } else {
- val ctx1 = genLoadArguments(args, ctor.info.paramTypes, ctx)
- ctx1.bb.emit(CIL_NEWOBJ(ctor), tree.pos)
- ctx1
- }
- } else {
- val nw = NEW(rt)
- ctx.bb.emit(nw, tree.pos)
- ctx.bb.emit(DUP(generatedType))
- val ctx1 = genLoadArguments(args, ctor.info.paramTypes, ctx)
-
- val init = CALL_METHOD(ctor, Static(true))
- nw.init = init
- ctx1.bb.emit(init, tree.pos)
- ctx1
- }
- ctx2
+ val nw = NEW(rt)
+ ctx.bb.emit(nw, tree.pos)
+ ctx.bb.emit(DUP(generatedType))
+ val ctx1 = genLoadArguments(args, ctor.info.paramTypes, ctx)
+ val init = CALL_METHOD(ctor, Static(onInstance = true))
+ nw.init = init
+ ctx1.bb.emit(init, tree.pos)
+ ctx1
case _ =>
abort("Cannot instantiate " + tpt + " of kind: " + generatedType)
}
@@ -852,14 +703,14 @@ abstract class GenICode extends SubComponent {
case Apply(fun @ _, List(expr)) if (definitions.isBox(fun.symbol)) =>
def genLoadApply4 = {
- debuglog("BOX : " + fun.symbol.fullName);
+ debuglog("BOX : " + fun.symbol.fullName)
val ctx1 = genLoad(expr, ctx, toTypeKind(expr.tpe))
val nativeKind = toTypeKind(expr.tpe)
- if (settings.Xdce.value) {
+ if (settings.Xdce) {
// we store this boxed value to a local, even if not really needed.
// boxing optimization might use it, and dead code elimination will
// take care of unnecessary stores
- var loc1 = ctx.makeLocal(tree.pos, expr.tpe, "boxed")
+ val loc1 = ctx.makeLocal(tree.pos, expr.tpe, "boxed")
ctx1.bb.emit(STORE_LOCAL(loc1))
ctx1.bb.emit(LOAD_LOCAL(loc1))
}
@@ -877,12 +728,6 @@ abstract class GenICode extends SubComponent {
ctx1.bb.emit(UNBOX(boxType), expr.pos)
ctx1
- case Apply(fun @ _, List(expr)) if (forMSIL && loaders.clrTypes.isAddressOf(fun.symbol)) =>
- debuglog("ADDRESSOF : " + fun.symbol.fullName);
- val ctx1 = msil_genLoadAddressOf(expr, ctx, toTypeKind(expr.tpe), butRawValueIsAlsoGoodEnough = false)
- generatedType = toTypeKind(fun.symbol.tpe.resultType)
- ctx1
-
case app @ Apply(fun, args) =>
def genLoadApply6 = {
val sym = fun.symbol
@@ -908,35 +753,28 @@ abstract class GenICode extends SubComponent {
// (if it's not in ignore mode, double-closing is an error)
val ctx1 = genLoadLabelArguments(args, label, ctx)
ctx1.bb.emitOnly(if (label.anchored) JUMP(label.block) else PJUMP(label))
- ctx1.bb.enterIgnoreMode
+ ctx1.bb.enterIgnoreMode()
ctx1
} else if (isPrimitive(sym)) { // primitive method call
val (newCtx, resKind) = genPrimitiveOp(app, ctx, expectedType)
generatedType = resKind
newCtx
} else { // normal method call
- debuglog("Gen CALL_METHOD with sym: " + sym + " isStaticSymbol: " + sym.isStaticMember);
+ debuglog("Gen CALL_METHOD with sym: " + sym + " isStaticSymbol: " + sym.isStaticMember)
val invokeStyle =
if (sym.isStaticMember)
- Static(false)
+ Static(onInstance = false)
else if (sym.isPrivate || sym.isClassConstructor)
- Static(true)
+ Static(onInstance = true)
else
Dynamic
- var ctx1 =
- if (invokeStyle.hasInstance) {
- if (forMSIL && !(invokeStyle.isInstanceOf[SuperCall]) && msil_IsValuetypeInstMethod(sym))
- msil_genLoadQualifierAddress(fun, ctx)
- else
- genLoadQualifier(fun, ctx)
- } else ctx
-
+ var ctx1 = if (invokeStyle.hasInstance) genLoadQualifier(fun, ctx) else ctx
ctx1 = genLoadArguments(args, sym.info.paramTypes, ctx1)
val cm = CALL_METHOD(sym, invokeStyle)
- /** In a couple cases, squirrel away a little extra information in the
- * CALL_METHOD for use by GenJVM.
+ /* In a couple cases, squirrel away a little extra information in the
+ * CALL_METHOD for use by GenASM.
*/
fun match {
case Select(qual, _) =>
@@ -960,14 +798,15 @@ abstract class GenICode extends SubComponent {
}
generatedType =
if (sym.isClassConstructor) UNIT
- else toTypeKind(sym.info.resultType);
+ else toTypeKind(sym.info.resultType)
+ // deal with methods that return Null
+ adaptNullRef(generatedType, expectedType, ctx1, tree.pos)
ctx1
}
}
genLoadApply6
case ApplyDynamic(qual, args) =>
- assert(!forMSIL, tree)
// TODO - this is where we'd catch dynamic applies for invokedynamic.
sys.error("No invokedynamic support yet.")
// val ctx1 = genLoad(qual, ctx, ObjectReference)
@@ -1006,16 +845,22 @@ abstract class GenICode extends SubComponent {
generatedType = toTypeKind(sym.info)
val hostClass = findHostClass(qualifier.tpe, sym)
log(s"Host class of $sym with qual $qualifier (${qualifier.tpe}) is $hostClass")
+ val qualSafeToElide = treeInfo isQualifierSafeToElide qualifier
+
+ def genLoadQualUnlessElidable: Context =
+ if (qualSafeToElide) ctx else genLoadQualifier(tree, ctx)
if (sym.isModule) {
- genLoadModule(ctx, tree)
- }
- else if (sym.isStaticMember) {
- ctx.bb.emit(LOAD_FIELD(sym, true) setHostClass hostClass, tree.pos)
- ctx
+ genLoadModule(genLoadQualUnlessElidable, tree)
} else {
- val ctx1 = genLoadQualifier(tree, ctx)
- ctx1.bb.emit(LOAD_FIELD(sym, false) setHostClass hostClass, tree.pos)
+ val isStatic = sym.isStaticMember
+ val ctx1 = if (isStatic) genLoadQualUnlessElidable
+ else genLoadQualifier(tree, ctx)
+ ctx1.bb.emit(LOAD_FIELD(sym, isStatic) setHostClass hostClass, tree.pos)
+ // it's tempting to drop field accesses of type Null instead of adapting them,
+ // but a field access can trigger static class initialization, so we can't. Besides,
+ // fields of type Null probably aren't common enough to figure out an optimization
+ adaptNullRef(generatedType, expectedType, ctx1, tree.pos)
ctx1
}
}
@@ -1047,16 +892,16 @@ abstract class GenICode extends SubComponent {
def genLoadLiteral = {
if (value.tag != UnitTag) (value.tag, expectedType) match {
case (IntTag, LONG) =>
- ctx.bb.emit(CONSTANT(Constant(value.longValue)), tree.pos);
+ ctx.bb.emit(CONSTANT(Constant(value.longValue)), tree.pos)
generatedType = LONG
case (FloatTag, DOUBLE) =>
- ctx.bb.emit(CONSTANT(Constant(value.doubleValue)), tree.pos);
+ ctx.bb.emit(CONSTANT(Constant(value.doubleValue)), tree.pos)
generatedType = DOUBLE
case (NullTag, _) =>
- ctx.bb.emit(CONSTANT(value), tree.pos);
+ ctx.bb.emit(CONSTANT(value), tree.pos)
generatedType = NullReference
case _ =>
- ctx.bb.emit(CONSTANT(value), tree.pos);
+ ctx.bb.emit(CONSTANT(value), tree.pos)
generatedType = toTypeKind(tree.tpe)
}
ctx
@@ -1064,10 +909,10 @@ abstract class GenICode extends SubComponent {
genLoadLiteral
case Block(stats, expr) =>
- ctx.enterScope
+ ctx.enterScope()
var ctx1 = genStat(stats, ctx)
ctx1 = genLoad(expr, ctx1, expectedType)
- ctx1.exitScope
+ ctx1.exitScope()
ctx1
case Typed(Super(_, _), _) =>
@@ -1104,9 +949,11 @@ abstract class GenICode extends SubComponent {
case Match(selector, cases) =>
def genLoadMatch = {
- debuglog("Generating SWITCH statement.");
- var ctx1 = genLoad(selector, ctx, INT) // TODO: Java 7 allows strings in switches (so, don't assume INT and don't convert the literals using intValue)
- val afterCtx = ctx1.newBlock
+ debuglog("Generating SWITCH statement.")
+ val ctx1 = genLoad(selector, ctx, INT) // TODO: Java 7 allows strings in switches (so, don't assume INT and don't convert the literals using intValue)
+ val afterCtx = ctx1.newBlock()
+ afterCtx.bb killIf ctx1.bb.ignore
+ var afterCtxReachable = false
var caseCtx: Context = null
generatedType = toTypeKind(tree.tpe)
@@ -1116,7 +963,8 @@ abstract class GenICode extends SubComponent {
for (caze @ CaseDef(pat, guard, body) <- cases) {
assert(guard == EmptyTree, guard)
- val tmpCtx = ctx1.newBlock
+ val tmpCtx = ctx1.newBlock()
+ tmpCtx.bb killIf ctx1.bb.ignore
pat match {
case Literal(value) =>
tags = value.intValue :: tags
@@ -1138,12 +986,15 @@ abstract class GenICode extends SubComponent {
}
caseCtx = genLoad(body, tmpCtx, generatedType)
+ afterCtxReachable ||= !caseCtx.bb.ignore
// close the block unless it's already been closed by the body, which closes the block if it ends in a jump (which is emitted to have alternatives share their body)
caseCtx.bb.closeWith(JUMP(afterCtx.bb) setPos caze.pos)
}
+ afterCtxReachable ||= (default == afterCtx)
ctx1.bb.emitOnly(
SWITCH(tags.reverse map (x => List(x)), (default :: targets).reverse) setPos tree.pos
)
+ afterCtx.bb killUnless afterCtxReachable
afterCtx
}
genLoadMatch
@@ -1164,28 +1015,60 @@ abstract class GenICode extends SubComponent {
resCtx
}
+ /**
+ * If we have a method call, field load, or array element load of type Null then
+ * we need to convince the JVM that we have a null value because in Scala
+ * land Null is a subtype of all ref types, but in JVM land scala.runtime.Null$
+ * is not. Note we don't have to adapt loads of locals because the JVM type
+ * system for locals does have a null type which it tracks internally. As
+ * long as we adapt these other things, the JVM will know that a Scala local of
+ * type Null is holding a null.
+ */
+ private def adaptNullRef(from: TypeKind, to: TypeKind, ctx: Context, pos: Position) {
+ log(s"GenICode#adaptNullRef($from, $to, $ctx, $pos)")
+
+ // Don't need to adapt null to unit because we'll just drop it anyway. Don't
+ // need to adapt to Object or AnyRef because the JVM is happy with
+ // upcasting Null to them.
+ // We do have to adapt from NullReference to NullReference because we could be storing
+ // this value into a local of type Null and we want the JVM to see that it's
+ // a null value so we don't have to also adapt local loads.
+ if (from == NullReference && to != UNIT && to != ObjectReference && to != AnyRefReference) {
+ assert(to.isRefOrArrayType, s"Attempt to adapt a null to a non reference type $to.")
+ // adapt by dropping what we've got and pushing a null which
+ // will convince the JVM we really do have null
+ ctx.bb.emit(DROP(from), pos)
+ ctx.bb.emit(CONSTANT(Constant(null)), pos)
+ }
+ }
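A minimal source-level illustration of the situation this handles (my own example, not part of the patch): the Scala type checker accepts the assignment below because Null is a subtype of String, but on the JVM the method's return type erases to scala.runtime.Null$, which is not a String, so the emitted code has to drop that value and push a plain null constant.

    def nullRef: Null = null
    val s: String = nullRef   // ICode: DROP the Null$ value, then CONSTANT(null)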
+
private def adapt(from: TypeKind, to: TypeKind, ctx: Context, pos: Position) {
// An awful lot of bugs explode here - let's leave ourselves more clues.
// A typical example is an overloaded type assigned after typer.
log(s"GenICode#adapt($from, $to, $ctx, $pos)")
- val conforms = (from <:< to) || (from == NullReference && to == NothingReference)
def coerce(from: TypeKind, to: TypeKind) = ctx.bb.emit(CALL_PRIMITIVE(Conversion(from, to)), pos)
- def checkAssertions() {
- def msg = s"Can't convert from $from to $to in unit ${unit.source} at $pos"
- debugassert(from != UNIT, msg)
- assert(!from.isReferenceType && !to.isReferenceType, msg)
- }
- if (conforms) from match {
- case NothingReference => ctx.bb.emit(THROW(ThrowableClass)) ; ctx.bb.enterIgnoreMode
- case NullReference => ctx.bb.emit(Seq(DROP(from), CONSTANT(Constant(null))))
- case ThrowableReference if !(ThrowableClass.tpe <:< to.toType) => ctx.bb.emit(CHECK_CAST(to)) // downcast throwables
- case BYTE | SHORT | CHAR | INT if to == LONG => coerce(INT, LONG) // widen subrange types
- case _ => ()
- }
- else to match {
- case UNIT => ctx.bb.emit(DROP(from), pos) // value discarding
- case _ => checkAssertions() ; coerce(from, to) // other primitive coercions
+
+ (from, to) match {
+ // The JVM doesn't have a Nothing equivalent, so it doesn't know that a method of type Nothing can't actually return. So for instance, with
+ // def f: String = ???
+ // we need
+ // 0: getstatic #25; //Field scala/Predef$.MODULE$:Lscala/Predef$;
+ // 3: invokevirtual #29; //Method scala/Predef$.$qmark$qmark$qmark:()Lscala/runtime/Nothing$;
+ // 6: athrow
+ // So this case tacks on the athrow which makes the JVM happy because class Nothing is declared as a subclass of Throwable
+ case (NothingReference, _) =>
+ ctx.bb.emit(THROW(ThrowableClass))
+ ctx.bb.enterIgnoreMode()
+ case _ if from isAssignabledTo to =>
+ ()
+ case (_, UNIT) =>
+ ctx.bb.emit(DROP(from), pos)
+ // otherwise we'd better be doing a primitive -> primitive coercion or there's a problem
+ case _ if !from.isRefOrArrayType && !to.isRefOrArrayType =>
+ coerce(from, to)
+ case _ =>
+ assert(false, s"Can't convert from $from to $to in unit ${unit.source} at $pos")
}
}
@@ -1198,15 +1081,6 @@ abstract class GenICode extends SubComponent {
abort("Unknown qualifier " + tree)
}
- /** forMSIL */
- private def msil_genLoadQualifierAddress(tree: Tree, ctx: Context): Context =
- tree match {
- case Select(qualifier, _) =>
- msil_genLoadAddressOf(qualifier, ctx, toTypeKind(qualifier.tpe), butRawValueIsAlsoGoodEnough = false)
- case _ =>
- abort("Unknown qualifier " + tree)
- }
-
/**
* Generate code that loads args into label parameters.
*/
@@ -1253,7 +1127,9 @@ abstract class GenICode extends SubComponent {
if (!tree.symbol.isPackageClass) tree.symbol
else tree.symbol.info.member(nme.PACKAGE) match {
case NoSymbol => abort("Cannot use package as value: " + tree)
- case s => debugwarn("Bug: found package class where package object expected. Converting.") ; s.moduleClass
+ case s =>
+ devWarning(s"Found ${tree.symbol} where a package object is required. Converting to ${s.moduleClass}")
+ s.moduleClass
}
)
debuglog("LOAD_MODULE from %s: %s".format(tree.shortClass, sym))
@@ -1387,18 +1263,14 @@ abstract class GenICode extends SubComponent {
// }
/** Generate string concatenation.
- *
- * @param tree ...
- * @param ctx ...
- * @return ...
*/
def genStringConcat(tree: Tree, ctx: Context): Context = {
liftStringConcat(tree) match {
// Optimization for expressions of the form "" + x. We can avoid the StringBuilder.
- case List(Literal(Constant("")), arg) if !forMSIL =>
+ case List(Literal(Constant("")), arg) =>
debuglog("Rewriting \"\" + x as String.valueOf(x) for: " + arg)
val ctx1 = genLoad(arg, ctx, ObjectReference)
- ctx1.bb.emit(CALL_METHOD(String_valueOf, Static(false)), arg.pos)
+ ctx1.bb.emit(CALL_METHOD(String_valueOf, Static(onInstance = false)), arg.pos)
ctx1
case concatenations =>
debuglog("Lifted string concatenations for " + tree + "\n to: " + concatenations)
@@ -1423,7 +1295,7 @@ abstract class GenICode extends SubComponent {
}
val ctx1 = genLoad(tree, ctx, ObjectReference)
- ctx1.bb.emit(CALL_METHOD(hashMethod, Static(false)))
+ ctx1.bb.emit(CALL_METHOD(hashMethod, Static(onInstance = false)))
ctx1
}
@@ -1479,9 +1351,17 @@ abstract class GenICode extends SubComponent {
private def genCond(tree: Tree,
ctx: Context,
thenCtx: Context,
- elseCtx: Context): Unit =
+ elseCtx: Context): Boolean =
{
- def genComparisonOp(l: Tree, r: Tree, code: Int) {
+ /**
+ * Generate the de-sugared comparison mechanism that will underlie an '=='
+ *
+ * @param l left-hand side of the '=='
+ * @param r right-hand side of the '=='
+ * @param code the comparison operator to use
+ * @return true if either branch can continue normally to a follow on block, false otherwise
+ */
+ def genComparisonOp(l: Tree, r: Tree, code: Int): Boolean = {
val op: TestOp = code match {
case scalaPrimitives.LT => LT
case scalaPrimitives.LE => LE
@@ -1497,27 +1377,33 @@ abstract class GenICode extends SubComponent {
lazy val nonNullSide = ifOneIsNull(l, r)
if (isReferenceEqualityOp(code) && nonNullSide != null) {
val ctx1 = genLoad(nonNullSide, ctx, ObjectReference)
+ val branchesReachable = !ctx1.bb.ignore
ctx1.bb.emitOnly(
CZJUMP(thenCtx.bb, elseCtx.bb, op, ObjectReference)
)
+ branchesReachable
}
else {
val kind = getMaxType(l.tpe :: r.tpe :: Nil)
var ctx1 = genLoad(l, ctx, kind)
ctx1 = genLoad(r, ctx1, kind)
+ val branchesReachable = !ctx1.bb.ignore
ctx1.bb.emitOnly(
CJUMP(thenCtx.bb, elseCtx.bb, op, kind) setPos r.pos
)
+ branchesReachable
}
}
- debuglog("Entering genCond with tree: " + tree);
+ debuglog("Entering genCond with tree: " + tree)
// the default emission
- def default() = {
+ def default(): Boolean = {
val ctx1 = genLoad(tree, ctx, BOOL)
+ val branchesReachable = !ctx1.bb.ignore
ctx1.bb.closeWith(CZJUMP(thenCtx.bb, elseCtx.bb, NE, BOOL) setPos tree.pos)
+ branchesReachable
}
tree match {
@@ -1529,11 +1415,12 @@ abstract class GenICode extends SubComponent {
lazy val Select(lhs, _) = fun
lazy val rhs = args.head
- def genZandOrZor(and: Boolean) = {
- val ctxInterm = ctx.newBlock
+ def genZandOrZor(and: Boolean): Boolean = {
+ val ctxInterm = ctx.newBlock()
- if (and) genCond(lhs, ctx, ctxInterm, elseCtx)
+ val branchesReachable = if (and) genCond(lhs, ctx, ctxInterm, elseCtx)
else genCond(lhs, ctx, thenCtx, ctxInterm)
+ ctxInterm.bb killUnless branchesReachable
genCond(rhs, ctxInterm, thenCtx, elseCtx)
}
@@ -1556,10 +1443,10 @@ abstract class GenICode extends SubComponent {
else if (isComparisonOp(code))
genComparisonOp(lhs, rhs, code)
else
- default
+ default()
}
- case _ => default
+ case _ => default()
}
}
@@ -1572,17 +1459,18 @@ abstract class GenICode extends SubComponent {
* @param ctx current context
* @param thenCtx target context if the comparison yields true
* @param elseCtx target context if the comparison yields false
+ * @return true if either branch can continue normally to a follow on block, false otherwise
*/
- def genEqEqPrimitive(l: Tree, r: Tree, ctx: Context)(thenCtx: Context, elseCtx: Context): Unit = {
+ def genEqEqPrimitive(l: Tree, r: Tree, ctx: Context)(thenCtx: Context, elseCtx: Context): Boolean = {
def getTempLocal = ctx.method.lookupLocal(nme.EQEQ_LOCAL_VAR) getOrElse {
- ctx.makeLocal(l.pos, AnyRefClass.tpe, nme.EQEQ_LOCAL_VAR)
+ ctx.makeLocal(l.pos, AnyRefTpe, nme.EQEQ_LOCAL_VAR.toString)
}
- /** True if the equality comparison is between values that require the use of the rich equality
- * comparator (scala.runtime.Comparator.equals). This is the case when either side of the
- * comparison might have a run-time type subtype of java.lang.Number or java.lang.Character.
- * When it is statically known that both sides are equal and subtypes of Number of Character,
- * not using the rich equality is possible (their own equals method will do ok.)*/
+ /* True if the equality comparison is between values that require the use of the rich equality
+ * comparator (scala.runtime.Comparator.equals). This is the case when either side of the
+ * comparison might have a run-time type subtype of java.lang.Number or java.lang.Character.
+ * When it is statically known that both sides are equal and subtypes of Number or Character,
+ * not using the rich equality is possible (their own equals method will do ok.)*/
def mustUseAnyComparator: Boolean = {
def areSameFinals = l.tpe.isFinalType && r.tpe.isFinalType && (l.tpe =:= r.tpe)
!areSameFinals && isMaybeBoxed(l.tpe.typeSymbol) && isMaybeBoxed(r.tpe.typeSymbol)
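A hedged illustration of the case this predicate guards (example of mine, not from the patch): when both sides are statically typed Any but may carry boxed numbers at run time, equality has to go through the rich comparator so that numerically equal values in different box types still compare equal.

    val a: Any = 1      // boxed as java.lang.Integer
    val b: Any = 1L     // boxed as java.lang.Long
    a == b              // true, via the rich equality path chosen above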
@@ -1591,7 +1479,7 @@ abstract class GenICode extends SubComponent {
if (mustUseAnyComparator) {
// when -optimise is on we call the @inline-version of equals, found in ScalaRunTime
val equalsMethod =
- if (!settings.optimise.value) {
+ if (!settings.optimise) {
def default = platform.externalEquals
platform match {
case x: JavaPlatform =>
@@ -1613,26 +1501,40 @@ abstract class GenICode extends SubComponent {
val ctx1 = genLoad(l, ctx, ObjectReference)
val ctx2 = genLoad(r, ctx1, ObjectReference)
+ val branchesReachable = !ctx2.bb.ignore
ctx2.bb.emitOnly(
- CALL_METHOD(equalsMethod, if (settings.optimise.value) Dynamic else Static(false)),
+ CALL_METHOD(equalsMethod, if (settings.optimise) Dynamic else Static(onInstance = false)),
CZJUMP(thenCtx.bb, elseCtx.bb, NE, BOOL)
)
+ branchesReachable
}
else {
- if (isNull(l))
+ if (isNull(l)) {
// null == expr -> expr eq null
- genLoad(r, ctx, ObjectReference).bb emitOnly CZJUMP(thenCtx.bb, elseCtx.bb, EQ, ObjectReference)
- else if (isNull(r)) {
+ val ctx1 = genLoad(r, ctx, ObjectReference)
+ val branchesReachable = !ctx1.bb.ignore
+ ctx1.bb emitOnly CZJUMP(thenCtx.bb, elseCtx.bb, EQ, ObjectReference)
+ branchesReachable
+ } else if (isNull(r)) {
// expr == null -> expr eq null
- genLoad(l, ctx, ObjectReference).bb emitOnly CZJUMP(thenCtx.bb, elseCtx.bb, EQ, ObjectReference)
+ val ctx1 = genLoad(l, ctx, ObjectReference)
+ val branchesReachable = !ctx1.bb.ignore
+ ctx1.bb emitOnly CZJUMP(thenCtx.bb, elseCtx.bb, EQ, ObjectReference)
+ branchesReachable
} else {
val eqEqTempLocal = getTempLocal
var ctx1 = genLoad(l, ctx, ObjectReference)
- lazy val nonNullCtx = ctx1.newBlock
+ val branchesReachable = !ctx1.bb.ignore
+ lazy val nonNullCtx = {
+ val block = ctx1.newBlock()
+ block.bb killUnless branchesReachable
+ block
+ }
// l == r -> if (l eq null) r eq null else l.equals(r)
ctx1 = genLoad(r, ctx1, ObjectReference)
- val nullCtx = ctx1.newBlock
+ val nullCtx = ctx1.newBlock()
+ nullCtx.bb killUnless branchesReachable
ctx1.bb.emitOnly(
STORE_LOCAL(eqEqTempLocal) setPos l.pos,
@@ -1649,6 +1551,7 @@ abstract class GenICode extends SubComponent {
CALL_METHOD(Object_equals, Dynamic),
CZJUMP(thenCtx.bb, elseCtx.bb, NE, BOOL)
)
+ branchesReachable
}
}
}
@@ -1661,12 +1564,12 @@ abstract class GenICode extends SubComponent {
debugassert(ctx.clazz.symbol eq cls,
"Classes are not the same: " + ctx.clazz.symbol + ", " + cls)
- /** Non-method term members are fields, except for module members. Module
- * members can only happen on .NET (no flatten) for inner traits. There,
- * a module symbol is generated (transformInfo in mixin) which is used
- * as owner for the members of the implementation class (so that the
- * backend emits them as static).
- * No code is needed for this module symbol.
+ /* Non-method term members are fields, except for module members. Module
+ * members can only happen on .NET (no flatten) for inner traits. There,
+ * a module symbol is generated (transformInfo in mixin) which is used
+ * as owner for the members of the implementation class (so that the
+ * backend emits them as static).
+ * No code is needed for this module symbol.
*/
for (f <- cls.info.decls ; if !f.isMethod && f.isTerm && !f.isModule)
ctx.clazz addField new IField(f)
@@ -1704,8 +1607,6 @@ abstract class GenICode extends SubComponent {
* If the block consists of a single unconditional jump, prune
* it by replacing the instructions in the predecessor to jump
* directly to the JUMP target of the block.
- *
- * @param method ...
*/
def prune(method: IMethod) = {
var changed = false
@@ -1717,14 +1618,14 @@ abstract class GenICode extends SubComponent {
case _ => None
}
if (block.size == 1 && optCont.isDefined) {
- val Some(cont) = optCont;
- val pred = block.predecessors;
- debuglog("Preds: " + pred + " of " + block + " (" + optCont + ")");
+ val Some(cont) = optCont
+ val pred = block.predecessors
+ debuglog("Preds: " + pred + " of " + block + " (" + optCont + ")")
pred foreach { p =>
changed = true
p.lastInstruction match {
case CJUMP(succ, fail, cond, kind) if (succ == block || fail == block) =>
- debuglog("Pruning empty if branch.");
+ debuglog("Pruning empty if branch.")
p.replaceInstruction(p.lastInstruction,
if (block == succ)
if (block == fail)
@@ -1737,7 +1638,7 @@ abstract class GenICode extends SubComponent {
abort("Could not find block in preds: " + method + " " + block + " " + pred + " " + p))
case CZJUMP(succ, fail, cond, kind) if (succ == block || fail == block) =>
- debuglog("Pruning empty ifz branch.");
+ debuglog("Pruning empty ifz branch.")
p.replaceInstruction(p.lastInstruction,
if (block == succ)
if (block == fail)
@@ -1750,12 +1651,12 @@ abstract class GenICode extends SubComponent {
abort("Could not find block in preds"))
case JUMP(b) if (b == block) =>
- debuglog("Pruning empty JMP branch.");
+ debuglog("Pruning empty JMP branch.")
val replaced = p.replaceInstruction(p.lastInstruction, JUMP(cont))
debugassert(replaced, "Didn't find p.lastInstruction")
case SWITCH(tags, labels) if (labels contains block) =>
- debuglog("Pruning empty SWITCH branch.");
+ debuglog("Pruning empty SWITCH branch.")
p.replaceInstruction(p.lastInstruction,
SWITCH(tags, labels map (l => if (l == block) cont else l)))
@@ -1771,7 +1672,7 @@ abstract class GenICode extends SubComponent {
e.covered = e.covered filter (_ != block)
e.blocks = e.blocks filter (_ != block)
if (e.startBlock eq block)
- e setStartBlock cont;
+ e setStartBlock cont
}
}
}
@@ -1783,7 +1684,7 @@ abstract class GenICode extends SubComponent {
method.blocks foreach prune0
} while (changed)
- debuglog("Prune fixpoint reached in " + n + " iterations.");
+ debuglog("Prune fixpoint reached in " + n + " iterations.")
}
def getMaxType(ts: List[Type]): TypeKind =
@@ -1825,9 +1726,7 @@ abstract class GenICode extends SubComponent {
t match {
case t @ Apply(_, args) if sym.isLabel && !boundLabels(sym) =>
val newSym = getLabel(sym.pos, sym.name)
- val tree = Apply(global.gen.mkAttributedRef(newSym), transformTrees(args)) setPos t.pos
- tree.tpe = t.tpe
- tree
+ Apply(global.gen.mkAttributedRef(newSym), transformTrees(args)) setPos t.pos setType t.tpe
case t @ LabelDef(name, params, rhs) =>
val newSym = getLabel(t.pos, name)
@@ -1957,22 +1856,11 @@ abstract class GenICode extends SubComponent {
}
def addFinalizer(f: Tree, ctx: Context): this.type = {
- cleanups = Finalizer(f, ctx) :: cleanups;
- this
- }
-
- def removeFinalizer(f: Tree): this.type = {
- assert(cleanups.head contains f,
- "Illegal nesting of cleanup operations: " + cleanups + " while exiting finalizer " + f);
- cleanups = cleanups.tail
+ cleanups = Finalizer(f, ctx) :: cleanups
this
}
/** Prepare a new context upon entry into a method.
- *
- * @param m ...
- * @param d ...
- * @return ...
*/
def enterMethod(m: IMethod, d: DefDef): Context = {
val ctx1 = new Context(this) setMethod(m)
@@ -1981,13 +1869,13 @@ abstract class GenICode extends SubComponent {
ctx1.bb = ctx1.method.startBlock
ctx1.defdef = d
ctx1.scope = EmptyScope
- ctx1.enterScope
+ ctx1.enterScope()
ctx1
}
/** Return a new context for a new basic block. */
def newBlock(): Context = {
- val block = method.code.newBlock
+ val block = method.code.newBlock()
handlers foreach (_ addCoveredBlock block)
currentExceptionHandlers foreach (_ addBlock block)
block.varsInScope.clear()
@@ -2011,13 +1899,12 @@ abstract class GenICode extends SubComponent {
* 'covered' by this exception handler (in addition to the
* previously active handlers).
*/
- private def newExceptionHandler(cls: Symbol, resultKind: TypeKind, pos: Position): ExceptionHandler = {
+ private def newExceptionHandler(cls: Symbol, pos: Position): ExceptionHandler = {
handlerCount += 1
val exh = new ExceptionHandler(method, newTermNameCached("" + handlerCount), cls, pos)
- exh.resultKind = resultKind
method.addHandler(exh)
handlers = exh :: handlers
- debuglog("added handler: " + exh);
+ debuglog("added handler: " + exh)
exh
}
@@ -2027,7 +1914,7 @@ abstract class GenICode extends SubComponent {
private def addActiveHandler(exh: ExceptionHandler) {
handlerCount += 1
handlers = exh :: handlers
- debuglog("added handler: " + exh);
+ debuglog("added handler: " + exh)
}
/** Return a new context for generating code for the given
@@ -2035,7 +1922,7 @@ abstract class GenICode extends SubComponent {
*/
private def enterExceptionHandler(exh: ExceptionHandler): Context = {
currentExceptionHandlers ::= exh
- val ctx = newBlock
+ val ctx = newBlock()
exh.setStartBlock(ctx.bb)
ctx
}
@@ -2044,16 +1931,6 @@ abstract class GenICode extends SubComponent {
currentExceptionHandlers = currentExceptionHandlers.tail
}
- /** Remove the given handler from the list of active exception handlers. */
- def removeActiveHandler(exh: ExceptionHandler): Unit = {
- assert(handlerCount > 0 && handlers.head == exh,
- "Wrong nesting of exception handlers." + this + " for " + exh)
- handlerCount -= 1
- handlers = handlers.tail
- debuglog("removed handler: " + exh);
-
- }
-
/** Clone the current context */
def dup: Context = new Context(this)
@@ -2072,23 +1949,55 @@ abstract class GenICode extends SubComponent {
* It returns the resulting context, with the same active handlers as
* before the call. Use it like:
*
- * <code> ctx.Try( ctx => {
+ * ` ctx.Try( ctx => {
* ctx.bb.emit(...) // protected block
* }, (ThrowableClass,
* ctx => {
* ctx.bb.emit(...); // exception handler
* }), (AnotherExceptionClass,
* ctx => {...
- * } ))</code>
+ * } ))`
+ *
+ * The resulting structure will look something like
+ *
+ * outer:
+ * // this 'useless' jump will be removed later,
+ * // for now it separates the try body's blocks from previous
+ * // code since the try body needs its own exception handlers
+ * JUMP body
+ *
+ * body:
+ * [ try body ]
+ * JUMP normalExit
+ *
+ * catch[i]:
+ * [ handler[i] body ]
+ * JUMP normalExit
+ *
+ * catchAll:
+ * STORE exception
+ * [ finally body ]
+ * THROW exception
+ *
+ * normalExit:
+ * [ finally body ]
+ *
+ * each catch[i] will cover body. catchAll will cover both body and each catch[i]
+ * Additional finally copies are created on the emission of every RETURN in the try body and exception handlers.
+ *
+ * This could result in unreachable code which has to be cleaned up later, e.g. if the try and all the exception
+ * handlers always end in RETURN then there will be no "normal" flow out of the try/catch/finally.
+ * Later reachability analysis will remove unreachable code.
*/
def Try(body: Context => Context,
handlers: List[(Symbol, TypeKind, Context => Context)],
finalizer: Tree,
- tree: Tree) = if (forMSIL) TryMsil(body, handlers, finalizer, tree) else {
+ tree: Tree) = {
- val outerCtx = this.dup // context for generating exception handlers, covered by finalizer
+ val outerCtx = this.dup // context for generating exception handlers, covered by the catch-all finalizer
val finalizerCtx = this.dup // context for generating finalizer handler
- val afterCtx = outerCtx.newBlock
+ val normalExitCtx = outerCtx.newBlock() // context where flow will go on a "normal" (non-return, non-throw) exit from a try or catch handler
+ var normalExitReachable = false
var tmp: Local = null
val kind = toTypeKind(tree.tpe)
val guardResult = kind != UNIT && mayCleanStack(finalizer)
@@ -2102,7 +2011,8 @@ abstract class GenICode extends SubComponent {
}
def emitFinalizer(ctx: Context): Context = if (!finalizer.isEmpty) {
- val ctx1 = finalizerCtx.dup.newBlock
+ val ctx1 = finalizerCtx.dup.newBlock()
+ ctx1.bb killIf ctx.bb.ignore
ctx.bb.closeWith(JUMP(ctx1.bb))
if (guardResult) {
@@ -2115,107 +2025,53 @@ abstract class GenICode extends SubComponent {
} else ctx
- val finalizerExh = if (finalizer != EmptyTree) Some({
- val exh = outerCtx.newExceptionHandler(NoSymbol, toTypeKind(finalizer.tpe), finalizer.pos) // finalizer covers exception handlers
- this.addActiveHandler(exh) // .. and body aswell
- val ctx = finalizerCtx.enterExceptionHandler(exh)
- val exception = ctx.makeLocal(finalizer.pos, ThrowableClass.tpe, "exc")
- loadException(ctx, exh, finalizer.pos)
- ctx.bb.emit(STORE_LOCAL(exception));
- val ctx1 = genLoad(finalizer, ctx, UNIT);
- ctx1.bb.emit(LOAD_LOCAL(exception));
- ctx1.bb.emit(THROW(ThrowableClass));
- ctx1.bb.enterIgnoreMode;
- ctx1.bb.close
- finalizerCtx.endHandler()
- exh
- }) else None
-
- val exhs = handlers.map { case (sym, kind, handler) => // def genWildcardHandler(sym: Symbol): (Symbol, TypeKind, Context => Context) =
- val exh = this.newExceptionHandler(sym, kind, tree.pos)
- var ctx1 = outerCtx.enterExceptionHandler(exh)
- ctx1.addFinalizer(finalizer, finalizerCtx)
- loadException(ctx1, exh, tree.pos)
- ctx1 = handler(ctx1)
- // emit finalizer
- val ctx2 = emitFinalizer(ctx1)
- ctx2.bb.closeWith(JUMP(afterCtx.bb))
- outerCtx.endHandler()
- exh
+ // Generate the catch-all exception handler that deals with uncaught exceptions coming
+ // from the try or exception handlers. It catches the exception, runs the finally code, then rethrows
+ // the exception.
+ if (settings.YdisableUnreachablePrevention || !outerCtx.bb.ignore) {
+ if (finalizer != EmptyTree) {
+ val exh = outerCtx.newExceptionHandler(NoSymbol, finalizer.pos) // finalizer covers exception handlers
+ this.addActiveHandler(exh) // .. and body as well
+ val exhStartCtx = finalizerCtx.enterExceptionHandler(exh)
+ exhStartCtx.bb killIf outerCtx.bb.ignore
+ val exception = exhStartCtx.makeLocal(finalizer.pos, ThrowableTpe, "exc")
+ loadException(exhStartCtx, exh, finalizer.pos)
+ exhStartCtx.bb.emit(STORE_LOCAL(exception))
+ val exhEndCtx = genLoad(finalizer, exhStartCtx, UNIT)
+ exhEndCtx.bb.emit(LOAD_LOCAL(exception))
+ exhEndCtx.bb.closeWith(THROW(ThrowableClass))
+ exhEndCtx.bb.enterIgnoreMode()
+ finalizerCtx.endHandler()
}
- val bodyCtx = this.newBlock
- if (finalizer != EmptyTree)
- bodyCtx.addFinalizer(finalizer, finalizerCtx)
-
- var finalCtx = body(bodyCtx)
- finalCtx = emitFinalizer(finalCtx)
-
- outerCtx.bb.closeWith(JUMP(bodyCtx.bb))
-
- finalCtx.bb.closeWith(JUMP(afterCtx.bb))
-
- afterCtx
- }
-
-
- /** try-catch-finally blocks are actually simpler to emit in MSIL, because there
- * is support for `finally` in bytecode.
- *
- * A
- * try { .. } catch { .. } finally { .. }
- * block is de-sugared into
- * try { try { ..} catch { .. } } finally { .. }
- *
- * In ICode `finally` block is represented exactly the same as an exception handler,
- * but with `NoSymbol` as the exception class. The covered blocks are all blocks of
- * the `try { .. } catch { .. }`.
- *
- * Also, TryMsil does not enter any Finalizers into the `cleanups`, because the
- * CLI takes care of running the finalizer when seeing a `leave` statement inside
- * a try / catch.
- */
- def TryMsil(body: Context => Context,
- handlers: List[(Symbol, TypeKind, (Context => Context))],
- finalizer: Tree,
- tree: Tree) = {
- val outerCtx = this.dup // context for generating exception handlers, covered by finalizer
- val finalizerCtx = this.dup // context for generating finalizer handler
- val afterCtx = outerCtx.newBlock
-
- if (finalizer != EmptyTree) {
- // finalizer is covers try and all catch blocks, i.e.
- // try { try { .. } catch { ..} } finally { .. }
- val exh = outerCtx.newExceptionHandler(NoSymbol, UNIT, tree.pos)
- this.addActiveHandler(exh)
- val ctx = finalizerCtx.enterExceptionHandler(exh)
- loadException(ctx, exh, tree.pos)
- val ctx1 = genLoad(finalizer, ctx, UNIT)
- // need jump for the ICode to be valid. MSIL backend will emit `Endfinally` instead.
- ctx1.bb.closeWith(JUMP(afterCtx.bb))
- finalizerCtx.endHandler()
- }
-
- for (handler <- handlers) {
- val exh = this.newExceptionHandler(handler._1, handler._2, tree.pos)
- var ctx1 = outerCtx.enterExceptionHandler(exh)
- loadException(ctx1, exh, tree.pos)
- ctx1 = handler._3(ctx1)
- // msil backend will emit `Leave` to jump out of a handler
- ctx1.bb.closeWith(JUMP(afterCtx.bb))
- outerCtx.endHandler()
+ // Generate each exception handler
+ for ((sym, kind, handler) <- handlers) {
+ val exh = this.newExceptionHandler(sym, tree.pos)
+ val exhStartCtx = outerCtx.enterExceptionHandler(exh)
+ exhStartCtx.bb killIf outerCtx.bb.ignore
+ exhStartCtx.addFinalizer(finalizer, finalizerCtx)
+ loadException(exhStartCtx, exh, tree.pos)
+ val exhEndCtx = handler(exhStartCtx)
+ normalExitReachable ||= !exhEndCtx.bb.ignore
+ exhEndCtx.bb.closeWith(JUMP(normalExitCtx.bb))
+ outerCtx.endHandler()
+ }
}
- val bodyCtx = this.newBlock
+ val bodyCtx = this.newBlock()
+ bodyCtx.bb killIf outerCtx.bb.ignore
+ if (finalizer != EmptyTree)
+ bodyCtx.addFinalizer(finalizer, finalizerCtx)
- val finalCtx = body(bodyCtx)
+ val bodyEndCtx = body(bodyCtx)
outerCtx.bb.closeWith(JUMP(bodyCtx.bb))
- // msil backend will emit `Leave` to jump out of a try-block
- finalCtx.bb.closeWith(JUMP(afterCtx.bb))
+ normalExitReachable ||= !bodyEndCtx.bb.ignore
+ normalExitCtx.bb killUnless normalExitReachable
+ bodyEndCtx.bb.closeWith(JUMP(normalExitCtx.bb))
- afterCtx
+ emitFinalizer(normalExitCtx)
}
}
}
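
As a concrete illustration of the structure documented above, here is a minimal, self-contained Scala sketch; the method and names are illustrative only (not compiler code), and the trailing comments map each region of the source onto the blocks named in the doc comment:

    object TryLoweringSketch {
      def parse(s: String): Int =
        try s.toInt                          // body:     [ try body ]        -> JUMP normalExit
        catch {
          case _: NumberFormatException =>   // catch[0]: [ handler[0] body ] -> JUMP normalExit
            -1
        } finally {
          println("done")                    // finally body is copied into catchAll
        }                                    // (STORE exc / body / THROW exc) and into normalExit
    }

If both the try body and the handler ended in a RETURN, normalExit would be unreachable; that is exactly the case the normalExitReachable / killUnless bookkeeping introduced above is meant to handle.
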
@@ -2249,7 +2105,7 @@ abstract class GenICode extends SubComponent {
/** Add an instruction that refers to this label. */
def addCallingInstruction(i: Instruction) =
- toPatch = i :: toPatch;
+ toPatch = i :: toPatch
/**
* Patch the code by replacing pseudo call instructions with
@@ -2304,14 +2160,13 @@ abstract class GenICode extends SubComponent {
* by a real JUMP instruction when all labels are resolved.
*/
abstract class PseudoJUMP(label: Label) extends Instruction {
- override def toString(): String = "PJUMP " + label.symbol
-
+ override def toString = s"PJUMP(${label.symbol})"
override def consumed = 0
override def produced = 0
// register with the given label
if (!label.anchored)
- label.addCallingInstruction(this);
+ label.addCallingInstruction(this)
}
case class PJUMP(whereto: Label) extends PseudoJUMP(whereto)
@@ -2341,7 +2196,6 @@ abstract class GenICode extends SubComponent {
val locals: ListBuffer[Local] = new ListBuffer
def add(l: Local) = locals += l
- def remove(l: Local) = locals -= l
/** Return all locals that are in scope. */
def varsInScope: Buffer[Local] = outer.varsInScope.clone() ++= locals
diff --git a/src/compiler/scala/tools/nsc/backend/icode/ICodeCheckers.scala b/src/compiler/scala/tools/nsc/backend/icode/ICodeCheckers.scala
index f05def3123..0cdf629ce1 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/ICodeCheckers.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/ICodeCheckers.scala
@@ -9,7 +9,6 @@ package icode
import scala.collection.mutable
import scala.collection.mutable.ListBuffer
-import scala.tools.nsc.symtab._
abstract class ICodeCheckers {
val global: Global
@@ -49,7 +48,7 @@ abstract class ICodeCheckers {
* @author Iulian Dragos
* @version 1.0, 06/09/2005
*
- * @todo Better checks for <code>MONITOR_ENTER/EXIT</code>
+ * @todo Better checks for `MONITOR_ENTER/EXIT`
* Better checks for local var initializations
*
* @todo Iulian says: I think there's some outdated logic in the checker.
@@ -95,7 +94,7 @@ abstract class ICodeCheckers {
}
def checkICodes(): Unit = {
- if (settings.verbose.value)
+ if (settings.verbose)
println("[[consistency check at the beginning of phase " + globalPhase.name + "]]")
classes.values foreach check
}
@@ -103,7 +102,6 @@ abstract class ICodeCheckers {
private def posStr(p: Position) =
if (p.isDefined) p.line.toString else "<??>"
- private def indent(s: String, spaces: Int): String = indent(s, " " * spaces)
private def indent(s: String, prefix: String): String = {
val lines = s split "\\n"
lines map (prefix + _) mkString "\n"
@@ -121,11 +119,11 @@ abstract class ICodeCheckers {
clasz = cls
for (f1 <- cls.fields ; f2 <- cls.fields ; if f1 < f2)
- if (isConfict(f1, f2, false))
+ if (isConfict(f1, f2, canOverload = false))
icodeError("Repetitive field name: " + f1.symbol.fullName)
for (m1 <- cls.methods ; m2 <- cls.methods ; if m1 < m2)
- if (isConfict(m1, m2, true))
+ if (isConfict(m1, m2, canOverload = true))
icodeError("Repetitive method: " + m1.symbol.fullName)
clasz.methods foreach check
@@ -170,12 +168,11 @@ abstract class ICodeCheckers {
val preds = bl.predecessors
def hasNothingType(s: TypeStack) = s.nonEmpty && (s.head == NothingReference)
- def hasNullType(s: TypeStack) = s.nonEmpty && (s.head == NullReference)
- /** XXX workaround #1: one stack empty, the other has BoxedUnit.
- * One example where this arises is:
+ /* XXX workaround #1: one stack empty, the other has BoxedUnit.
+ * One example where this arises is:
*
- * def f(b: Boolean): Unit = synchronized { if (b) () }
+ * def f(b: Boolean): Unit = synchronized { if (b) () }
*/
def allUnits(s: TypeStack) = s.types forall (_ == BoxedUnitReference)
@@ -184,10 +181,10 @@ abstract class ICodeCheckers {
case (x1, x2) if f(x2) => x1
}
- /** XXX workaround #2: different stacks heading into an exception
- * handler which will clear them anyway. Examples where it arises:
+ /* XXX workaround #2: different stacks heading into an exception
+ * handler which will clear them anyway. Examples where it arises:
*
- * var bippy: Int = synchronized { if (b) 5 else 10 }
+ * var bippy: Int = synchronized { if (b) 5 else 10 }
*/
def isHandlerBlock() = bl.exceptionHandlerStart
@@ -211,7 +208,7 @@ abstract class ICodeCheckers {
if (s1.length != s2.length) {
if (allUnits(s1) && allUnits(s2))
workaround("Ignoring mismatched boxed units")
- else if (isHandlerBlock)
+ else if (isHandlerBlock())
workaround("Ignoring mismatched stacks entering exception handler")
else
throw new CheckerException(incompatibleString)
@@ -236,8 +233,8 @@ abstract class ICodeCheckers {
}
if (preds.nonEmpty) {
- in(bl) = (preds map out.apply) reduceLeft meet2;
- log("Input changed for block: " + bl +" to: " + in(bl));
+ in(bl) = (preds map out.apply) reduceLeft meet2
+ log("Input changed for block: " + bl +" to: " + in(bl))
}
}
@@ -296,7 +293,7 @@ abstract class ICodeCheckers {
else prefix + " with initial stack " + initial.types.mkString("[", ", ", "]")
})
- var stack = new TypeStack(initial)
+ val stack = new TypeStack(initial)
def checkStack(len: Int) {
if (stack.length < len)
ICodeChecker.this.icodeError("Expected at least " + len + " elements on the stack", stack)
@@ -324,14 +321,14 @@ abstract class ICodeCheckers {
def popStackN(num: Int, instrFn: () => String = defaultInstrPrinter) = {
List.range(0, num) map { _ =>
val res = _popStack
- printStackString(false, res, instrFn())
+ printStackString(isPush = false, res, instrFn())
res
}
}
def pushStackN(xs: Seq[TypeKind], instrFn: () => String) = {
xs foreach { x =>
stack push x
- printStackString(true, x, instrFn())
+ printStackString(isPush = true, x, instrFn())
}
}
@@ -339,7 +336,7 @@ abstract class ICodeCheckers {
def popStack2 = { checkStack(2) ; (popStackN(2): @unchecked) match { case List(x, y) => (x, y) } }
def popStack3 = { checkStack(3) ; (popStackN(3): @unchecked) match { case List(x, y, z) => (x, y, z) } }
- /** Called by faux instruction LOAD_EXCEPTION to wipe out the stack. */
+ /* Called by faux instruction LOAD_EXCEPTION to wipe out the stack. */
def clearStack() = {
if (stack.nonEmpty)
logChecker("Wiping out the " + stack.length + " element stack for exception handler: " + stack)
@@ -354,7 +351,7 @@ abstract class ICodeCheckers {
def typeError(k1: TypeKind, k2: TypeKind) {
icodeError("\n expected: " + k1 + "\n found: " + k2)
}
- def isSubtype(k1: TypeKind, k2: TypeKind) = (k1 <:< k2) || {
+ def isSubtype(k1: TypeKind, k2: TypeKind) = (k1 isAssignabledTo k2) || {
import platform.isMaybeBoxed
(k1, k2) match {
@@ -369,11 +366,6 @@ abstract class ICodeCheckers {
}
}
- /** Return true if k1 is a subtype of any of the following types,
- * according to the somewhat relaxed subtyping standards in effect here.
- */
- def isOneOf(k1: TypeKind, kinds: TypeKind*) = kinds exists (k => isSubtype(k1, k))
-
def subtypeTest(k1: TypeKind, k2: TypeKind): Unit =
if (isSubtype(k1, k2)) ()
else typeError(k2, k1)
@@ -381,20 +373,19 @@ abstract class ICodeCheckers {
for (instr <- b) {
this.instruction = instr
- def checkLocal(local: Local): Unit = {
- method lookupLocal local.sym.name getOrElse {
- icodeError(" " + local + " is not defined in method " + method)
- }
+ def checkLocal(local: Local) {
+ if ((method lookupLocal local.sym.name).isEmpty)
+ icodeError(s" $local is not defined in method $method")
}
def checkField(obj: TypeKind, field: Symbol): Unit = obj match {
case REFERENCE(sym) =>
if (sym.info.member(field.name) == NoSymbol)
- icodeError(" " + field + " is not defined in class " + clasz);
+ icodeError(" " + field + " is not defined in class " + clasz)
case _ =>
- icodeError(" expected reference type, but " + obj + " found");
+ icodeError(" expected reference type, but " + obj + " found")
}
- /** Checks that tpe is a subtype of one of the allowed types */
+ /* Checks that tpe is a subtype of one of the allowed types */
def checkType(tpe: TypeKind, allowed: TypeKind*) = (
if (allowed exists (k => isSubtype(tpe, k))) ()
else icodeError(tpe + " is not one of: " + allowed.mkString("{ ", ", ", " }"))
@@ -402,16 +393,14 @@ abstract class ICodeCheckers {
def checkNumeric(tpe: TypeKind) =
checkType(tpe, BYTE, CHAR, SHORT, INT, LONG, FLOAT, DOUBLE)
- /** Checks that the 2 topmost elements on stack are of the
- * kind TypeKind.
- */
+ /* Checks that the 2 topmost elements on stack are of the kind TypeKind. */
def checkBinop(kind: TypeKind) {
val (a, b) = popStack2
checkType(a, kind)
checkType(b, kind)
}
- /** Check that arguments on the stack match method params. */
+ /* Check that arguments on the stack match method params. */
def checkMethodArgs(method: Symbol) {
val params = method.info.paramTypes
checkStack(params.length)
@@ -421,21 +410,18 @@ abstract class ICodeCheckers {
)
}
- /** Checks that the object passed as receiver has a method
- * <code>method</code> and that it is callable from the current method.
- *
- * @param receiver ...
- * @param method ...
+ /* Checks that the object passed as receiver has a method
+ * `method` and that it is callable from the current method.
*/
def checkMethod(receiver: TypeKind, method: Symbol) =
receiver match {
case REFERENCE(sym) =>
checkBool(sym.info.member(method.name) != NoSymbol,
- "Method " + method + " does not exist in " + sym.fullName);
+ "Method " + method + " does not exist in " + sym.fullName)
if (method.isPrivate)
checkBool(method.owner == clasz.symbol,
"Cannot call private method of " + method.owner.fullName
- + " from " + clasz.symbol.fullName);
+ + " from " + clasz.symbol.fullName)
else if (method.isProtected) {
val isProtectedOK = (
(clasz.symbol isSubClass method.owner) ||
@@ -444,7 +430,7 @@ abstract class ICodeCheckers {
checkBool(isProtectedOK,
"Cannot call protected method of " + method.owner.fullName
- + " from " + clasz.symbol.fullName);
+ + " from " + clasz.symbol.fullName)
}
case ARRAY(_) =>
@@ -458,7 +444,7 @@ abstract class ICodeCheckers {
def checkBool(cond: Boolean, msg: String) =
if (!cond) icodeError(msg)
- if (settings.debug.value) {
+ if (settings.debug) {
log("PC: " + instr)
log("stack: " + stack)
log("================")
@@ -476,8 +462,8 @@ abstract class ICodeCheckers {
subtypeTest(elem, kind)
pushStack(elem)
case (a, b) =>
- icodeError(" expected and INT and a array reference, but " +
- a + ", " + b + " found");
+ icodeError(" expected an INT and an array reference, but " +
+ a + ", " + b + " found")
}
case LOAD_LOCAL(local) =>
@@ -495,10 +481,10 @@ abstract class ICodeCheckers {
case LOAD_MODULE(module) =>
checkBool((module.isModule || module.isModuleClass),
- "Expected module: " + module + " flags: " + Flags.flagsToString(module.flags));
- pushStack(toTypeKind(module.tpe));
+ "Expected module: " + module + " flags: " + module.flagString)
+ pushStack(toTypeKind(module.tpe))
- case STORE_THIS(kind) =>
+ case STORE_THIS(kind) =>
val actualType = popStack
if (actualType.isReferenceType) subtypeTest(actualType, kind)
else icodeError("Expected this reference but found: " + actualType)
@@ -510,7 +496,7 @@ abstract class ICodeCheckers {
subtypeTest(k, elem)
case (a, b, c) =>
icodeError(" expected and array reference, and int and " + kind +
- " but " + a + ", " + b + ", " + c + " found");
+ " but " + a + ", " + b + ", " + c + " found")
}
case STORE_LOCAL(local) =>
@@ -606,7 +592,7 @@ abstract class ICodeCheckers {
case x if style.hasInstance => x + 1
case x => x
}
- if (style == Static(true))
+ if (style == Static(onInstance = true))
checkBool(method.isPrivate || method.isConstructor, "Static call to non-private method.")
checkStack(paramCount)
@@ -665,7 +651,7 @@ abstract class ICodeCheckers {
case RETURN(kind) =>
val top = popStack
if (kind.isValueType) checkType(top, kind)
- else checkBool(!top.isValueType, "" + kind + " is a reference type, but " + top + " is not");
+ else checkBool(!top.isValueType, "" + kind + " is a reference type, but " + top + " is not")
case THROW(clasz) =>
checkType(popStack, toTypeKind(clasz.tpe))
diff --git a/src/compiler/scala/tools/nsc/backend/icode/ICodes.scala b/src/compiler/scala/tools/nsc/backend/icode/ICodes.scala
index 93201089e4..3f2141782a 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/ICodes.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/ICodes.scala
@@ -8,8 +8,6 @@ package backend
package icode
import java.io.PrintWriter
-import scala.collection.mutable
-import scala.tools.nsc.symtab._
import analysis.{ Liveness, ReachingDefinitions }
import scala.tools.nsc.symtab.classfile.ICodeReader
@@ -30,14 +28,14 @@ abstract class ICodes extends AnyRef
with Repository
{
val global: Global
- import global.{ log, definitions, settings, perRunCaches }
+ import global.{ log, definitions, settings, perRunCaches, devWarning }
/** The ICode representation of classes */
val classes = perRunCaches.newMap[global.Symbol, IClass]()
/** Debugging flag */
def shouldCheckIcode = settings.check contains global.genicode.phaseName
- def checkerDebug(msg: String) = if (shouldCheckIcode && global.opt.debug) println(msg)
+ def checkerDebug(msg: String) = if (shouldCheckIcode && global.settings.debug) println(msg)
/** The ICode linearizer. */
val linearizer: Linearizer = settings.Xlinearizer.value match {
@@ -84,7 +82,7 @@ abstract class ICodes extends AnyRef
// Something is leaving open/empty blocks around (see SI-4840) so
// let's not kill the deal unless it's nonempty.
if (b.isEmpty) {
- log("!!! Found open but empty block while inlining " + m + ": removing from block list.")
+ devWarning(s"Found open but empty block while inlining $m: removing from block list.")
m.code removeBlock b
}
else dumpMethodAndAbort(m, b)
diff --git a/src/compiler/scala/tools/nsc/backend/icode/Linearizers.scala b/src/compiler/scala/tools/nsc/backend/icode/Linearizers.scala
index a38eab4515..54be9d18f1 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/Linearizers.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/Linearizers.scala
@@ -4,11 +4,11 @@
*/
-package scala.tools.nsc
+package scala
+package tools.nsc
package backend
package icode
-import scala.tools.nsc.ast._
import scala.collection.{ mutable, immutable }
import mutable.ListBuffer
@@ -36,15 +36,15 @@ trait Linearizers {
var blocks: List[BasicBlock] = Nil
def linearize(m: IMethod): List[BasicBlock] = {
- val b = m.startBlock;
- blocks = Nil;
+ val b = m.startBlock
+ blocks = Nil
run {
- worklist pushAll (m.exh map (_.startBlock));
- worklist.push(b);
+ worklist pushAll (m.exh map (_.startBlock))
+ worklist.push(b)
}
- blocks.reverse;
+ blocks.reverse
}
def linearizeAt(m: IMethod, start: BasicBlock): List[BasicBlock] = {
@@ -56,30 +56,30 @@ trait Linearizers {
/** Linearize another subtree and append it to the existing blocks. */
def linearize(startBlock: BasicBlock): List[BasicBlock] = {
//blocks = startBlock :: Nil;
- run( { worklist.push(startBlock); } );
- blocks.reverse;
+ run( { worklist.push(startBlock); } )
+ blocks.reverse
}
def processElement(b: BasicBlock) =
if (b.nonEmpty) {
- add(b);
+ add(b)
b.lastInstruction match {
case JUMP(whereto) =>
- add(whereto);
+ add(whereto)
case CJUMP(success, failure, _, _) =>
- add(success);
- add(failure);
+ add(success)
+ add(failure)
case CZJUMP(success, failure, _, _) =>
- add(success);
- add(failure);
+ add(success)
+ add(failure)
case SWITCH(_, labels) =>
- add(labels);
- case RETURN(_) => ();
- case THROW(clasz) => ();
+ add(labels)
+ case RETURN(_) => ()
+ case THROW(clasz) => ()
}
}
- def dequeue: Elem = worklist.pop;
+ def dequeue: Elem = worklist.pop()
/**
* Prepend b to the list, if not already scheduled.
@@ -89,25 +89,25 @@ trait Linearizers {
if (blocks.contains(b))
()
else {
- blocks = b :: blocks;
- worklist push b;
+ blocks = b :: blocks
+ worklist push b
}
}
- def add(bs: List[BasicBlock]): Unit = bs foreach add;
+ def add(bs: List[BasicBlock]): Unit = bs foreach add
}
/**
* Linearize code using a depth first traversal.
*/
class DepthFirstLinerizer extends Linearizer {
- var blocks: List[BasicBlock] = Nil;
+ var blocks: List[BasicBlock] = Nil
def linearize(m: IMethod): List[BasicBlock] = {
- blocks = Nil;
+ blocks = Nil
- dfs(m.startBlock);
- m.exh foreach (b => dfs(b.startBlock));
+ dfs(m.startBlock)
+ m.exh foreach (b => dfs(b.startBlock))
blocks.reverse
}
@@ -120,7 +120,7 @@ trait Linearizers {
def dfs(b: BasicBlock): Unit =
if (b.nonEmpty && add(b))
- b.successors foreach dfs;
+ b.successors foreach dfs
/**
* Prepend b to the list, if not already scheduled.
@@ -129,7 +129,7 @@ trait Linearizers {
*/
def add(b: BasicBlock): Boolean =
!(blocks contains b) && {
- blocks = b :: blocks;
+ blocks = b :: blocks
true
}
}
@@ -145,12 +145,12 @@ trait Linearizers {
val added = new mutable.BitSet
def linearize(m: IMethod): List[BasicBlock] = {
- blocks = Nil;
+ blocks = Nil
visited.clear()
- added.clear;
+ added.clear()
- m.exh foreach (b => rpo(b.startBlock));
- rpo(m.startBlock);
+ m.exh foreach (b => rpo(b.startBlock))
+ rpo(m.startBlock)
// if the start block has predecessors, it won't be the first one
// in the linearization, so we need to enforce it here
@@ -171,7 +171,7 @@ trait Linearizers {
def rpo(b: BasicBlock): Unit =
if (b.nonEmpty && !visited(b)) {
- visited += b;
+ visited += b
b.successors foreach rpo
add(b)
}
@@ -185,7 +185,7 @@ trait Linearizers {
if (!added(b.label)) {
added += b.label
- blocks = b :: blocks;
+ blocks = b :: blocks
}
}
}
@@ -198,142 +198,4 @@ trait Linearizers {
def linearize(m: IMethod): List[BasicBlock] = m.blocks
def linearizeAt(m: IMethod, start: BasicBlock): List[BasicBlock] = sys.error("not implemented")
}
-
- /** The MSIL linearizer is used only for methods with at least one exception handler.
- * It makes sure that all the blocks belonging to a `try`, `catch` or `finally` block
- * are emitted in an order that allows the lexical nesting of try-catch-finally, just
- * like in the source code.
- */
- class MSILLinearizer extends Linearizer {
- /** The MSIL linearizer first calls a NormalLInearizer. This is because the ILGenerator checks
- * the stack size before emitting instructions. For instance, to emit a `store`, there needs
- * to be some value on the stack. This can blow up in situations like this:
- * ...
- * jump 3
- * 4: store_local 0
- * jump 5
- * 3: load_value
- * jump 4
- * 5: ...
- * here, 3 must be scheduled first.
- *
- * The NormalLinearizer also removes dead blocks (blocks without predecessor). This is important
- * in the following example:
- * try { throw new Exception }
- * catch { case e => throw e }
- * which adds a dead block containing just a "throw" (which, again, would blow up code generation
- * because of the stack size; there's no value on the stack when emitting that `throw`)
- */
- val normalLinearizer = new NormalLinearizer()
-
- def linearize(m: IMethod): List[BasicBlock] = {
-
- val handlersByCovered = m.exh.groupBy(_.covered)
-
- // number of basic blocks covered by the entire try-catch expression
- def size(covered: scala.collection.immutable.Set[BasicBlock]) = {
- val hs = handlersByCovered(covered)
- covered.size + (hs :\ 0)((h, s) => h.blocks.length + s)
- }
-
- val tryBlocks = handlersByCovered.keys.toList sortBy size
- var result = normalLinearizer.linearize(m)
- val frozen = mutable.HashSet[BasicBlock](result.head)
-
- for (tryBlock <- tryBlocks) {
- result = groupBlocks(m, result, handlersByCovered(tryBlock), frozen)
- }
- result
- }
-
- /** @param handlers a list of handlers covering the same blocks (same try, multiple catches)
- * @param frozen blocks can't be moved (fist block of a method, blocks directly following a try-catch)
- */
- def groupBlocks(method: IMethod, blocks: List[BasicBlock], handlers: List[ExceptionHandler], frozen: mutable.HashSet[BasicBlock]) = {
- assert(blocks.head == method.startBlock, method)
-
- // blocks before the try, and blocks for the try
- val beforeAndTry = new ListBuffer[BasicBlock]()
- // blocks for the handlers
- val catches = handlers map (_ => new ListBuffer[BasicBlock]())
- // blocks to be put at the end
- val after = new ListBuffer[BasicBlock]()
-
- var beforeTry = true
- val head = handlers.head
-
- for (b <- blocks) {
- if (head covers b) {
- beforeTry = false
- beforeAndTry += b
- } else {
- val handlerIndex = handlers.indexWhere(_.blocks.contains(b))
- if (handlerIndex >= 0) {
- catches(handlerIndex) += b
- } else if (beforeTry) {
- beforeAndTry += b
- } else {
- after += b
- }
- }
- }
-
- // reorder the blocks in "catches" so that the "firstBlock" is actually first
- (catches, handlers).zipped foreach { (lb, handler) =>
- lb -= handler.startBlock
- handler.startBlock +=: lb
- }
-
- // The first block emitted after a try-catch must be the one that the try / catch
- // blocks jump to (because in msil, these jumps cannot be emitted manually)
- var firstAfter: Option[BasicBlock] = None
-
- // Find the (hopefully) unique successor, look at the try and all catch blocks
- var blks = head.covered.toList :: handlers.map(_.blocks)
- while (firstAfter.isEmpty && !blks.isEmpty) {
- val b = blks.head
- blks = blks.tail
-
- val leaving = leavingBlocks(b)
- // no leaving blocks when the try or catch ends with THROW or RET
- if (!leaving.isEmpty) {
- assert(leaving.size <= 1, leaving)
- firstAfter = Some(leaving.head)
- }
- }
- if (firstAfter.isDefined) {
- val b = firstAfter.get
- if (frozen(b)) {
- assert(after contains b, b +", "+ method)
- } else {
- frozen += b
- if (beforeAndTry contains b) {
- beforeAndTry -= b
- } else {
- assert(after contains b, after)
- after -= b
- }
- b +=: after
- }
- }
-
- for (lb <- catches) { beforeAndTry ++= lb }
- beforeAndTry ++= after
- beforeAndTry.toList
- }
-
- /** Returns all direct successors of `blocks` wich are not part
- * that list, i.e. successors outside the `blocks` list.
- */
- private def leavingBlocks(blocks: List[BasicBlock]) = {
- val res = new mutable.HashSet[BasicBlock]()
- for (b <- blocks; s <- b.directSuccessors; if (!blocks.contains(s)))
- res += s
- res
- }
-
- def linearizeAt(m: IMethod, start: BasicBlock): List[BasicBlock] = {
- sys.error("not implemented")
- }
- }
}
diff --git a/src/compiler/scala/tools/nsc/backend/icode/Members.scala b/src/compiler/scala/tools/nsc/backend/icode/Members.scala
index b74770f051..4389afb2b7 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/Members.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/Members.scala
@@ -3,14 +3,13 @@
* @author Martin Odersky
*/
-package scala.tools.nsc
+package scala
+package tools.nsc
package backend
package icode
-import java.io.PrintWriter
import scala.collection.{ mutable, immutable }
import scala.reflect.internal.util.{ SourceFile, NoSourceFile }
-import symtab.Flags.{ DEFERRED }
trait ReferenceEquality {
override def hashCode = System.identityHashCode(this)
@@ -48,27 +47,33 @@ trait Members {
def touched = _touched
def touched_=(b: Boolean): Unit = {
- if (b)
- blocks foreach (_.touched = true)
+ @annotation.tailrec def loop(xs: List[BasicBlock]) {
+ xs match {
+ case Nil =>
+ case x :: xs => x.touched = true ; loop(xs)
+ }
+ }
+ if (b) loop(blocks.toList)
_touched = b
}
// Constructor code
- startBlock = newBlock
+ startBlock = newBlock()
def removeBlock(b: BasicBlock) {
- if (settings.debug.value) {
- assert(blocks forall (p => !(p.successors contains b)),
- "Removing block that is still referenced in method code " + b + "preds: " + b.predecessors
- )
- assert(b != startBlock || b.successors.length == 1,
- "Removing start block with more than one successor."
- )
+ if (settings.debug) {
+ // only do this sanity check when debug is turned on because it's moderately expensive
+ val referers = blocks filter (_.successors contains b)
+ assert(referers.isEmpty, s"Trying to remove block $b (with preds ${b.predecessors.mkString}) but it is still referred to from block(s) ${referers.mkString}")
}
- if (b == startBlock)
+ if (b == startBlock) {
+ assert(b.successors.length == 1,
+ s"Removing start block ${b} with ${b.successors.length} successors (${b.successors.mkString})."
+ )
startBlock = b.successors.head
+ }
blocks -= b
assert(!blocks.contains(b))
@@ -77,7 +82,7 @@ trait Members {
}
/** This methods returns a string representation of the ICode */
- override def toString = "ICode '" + name + "'";
+ override def toString = "ICode '" + name + "'"
/* Compute a unique new label */
def nextLabel: Int = {
@@ -89,8 +94,8 @@ trait Members {
*/
def newBlock(): BasicBlock = {
touched = true
- val block = new BasicBlock(nextLabel, method);
- blocks += block;
+ val block = new BasicBlock(nextLabel, method)
+ blocks += block
block
}
}
@@ -112,25 +117,23 @@ trait Members {
var cunit: CompilationUnit = _
def addField(f: IField): this.type = {
- fields = f :: fields;
+ fields = f :: fields
this
}
def addMethod(m: IMethod): this.type = {
- methods = m :: methods;
+ methods = m :: methods
this
}
def setCompilationUnit(unit: CompilationUnit): this.type = {
- this.cunit = unit;
+ this.cunit = unit
this
}
override def toString() = symbol.fullName
- def lookupField(s: Symbol) = fields find (_.symbol == s)
def lookupMethod(s: Symbol) = methods find (_.symbol == s)
- def lookupMethod(s: Name) = methods find (_.symbol.name == s)
/* returns this methods static ctor if it has one. */
def lookupStaticCtor: Option[IMethod] = methods find (_.symbol.isStaticConstructor)
@@ -154,14 +157,13 @@ trait Members {
class IMethod(val symbol: Symbol) extends IMember {
var code: Code = NoCode
- def newBlock() = code.newBlock
+ def newBlock() = code.newBlock()
def startBlock = code.startBlock
def lastBlock = { assert(blocks.nonEmpty, symbol); blocks.last }
def blocks = code.blocksList
def linearizedBlocks(lin: Linearizer = self.linearizer): List[BasicBlock] = lin linearize this
def foreachBlock[U](f: BasicBlock => U): Unit = blocks foreach f
- def foreachInstr[U](f: Instruction => U): Unit = foreachBlock(_.toList foreach f)
var native = false
@@ -181,7 +183,7 @@ trait Members {
def hasCode = code ne NoCode
def setCode(code: Code): IMethod = {
- this.code = code;
+ this.code = code
this
}
@@ -195,7 +197,6 @@ trait Members {
}
def addLocals(ls: List[Local]) = ls foreach addLocal
- def addParams(as: List[Local]) = as foreach addParam
def lookupLocal(n: Name): Option[Local] = locals find (_.sym.name == n)
def lookupLocal(sym: Symbol): Option[Local] = locals find (_.sym == sym)
@@ -210,28 +211,7 @@ trait Members {
override def toString() = symbol.fullName
- def matchesSignature(other: IMethod) = {
- (symbol.name == other.symbol.name) &&
- (params corresponds other.params)(_.kind == _.kind) &&
- (returnType == other.returnType)
- }
-
import opcodes._
- def checkLocals(): Unit = {
- def localsSet = (code.blocks flatMap { bb =>
- bb.iterator collect {
- case LOAD_LOCAL(l) => l
- case STORE_LOCAL(l) => l
- }
- }).toSet
-
- if (hasCode) {
- log("[checking locals of " + this + "]")
- locals filterNot localsSet foreach { l =>
- log("Local " + l + " is not declared in " + this)
- }
- }
- }
/** Merge together blocks that have a single successor which has a
* single predecessor. Exception handlers are taken into account (they
@@ -243,10 +223,10 @@ trait Members {
val nextBlock: mutable.Map[BasicBlock, BasicBlock] = mutable.HashMap.empty
for (b <- code.blocks.toList
if b.successors.length == 1;
- succ = b.successors.head;
- if succ ne b;
- if succ.predecessors.length == 1;
- if succ.predecessors.head eq b;
+ succ = b.successors.head
+ if succ ne b
+ if succ.predecessors.length == 1
+ if succ.predecessors.head eq b
if !(exh.exists { (e: ExceptionHandler) =>
(e.covers(succ) && !e.covers(b)) || (e.covers(b) && !e.covers(succ)) })) {
nextBlock(b) = succ
@@ -255,10 +235,10 @@ trait Members {
var bb = code.startBlock
while (!nextBlock.isEmpty) {
if (nextBlock.isDefinedAt(bb)) {
- bb.open
+ bb.open()
var succ = bb
do {
- succ = nextBlock(succ);
+ succ = nextBlock(succ)
val lastInstr = bb.lastInstruction
/* Ticket SI-5672
* Besides removing the control-flow instruction at the end of `bb` (usually a JUMP), we have to pop any values it pushes.
@@ -269,7 +249,7 @@ trait Members {
val oldTKs = lastInstr.consumedTypes
assert(lastInstr.consumed == oldTKs.size, "Someone forgot to override consumedTypes() in " + lastInstr)
- bb.removeLastInstruction
+ bb.removeLastInstruction()
for(tk <- oldTKs.reverse) { bb.emit(DROP(tk), lastInstr.pos) }
succ.toList foreach { i => bb.emit(i, i.pos) }
code.removeBlock(succ)
@@ -277,9 +257,9 @@ trait Members {
nextBlock -= bb
} while (nextBlock.isDefinedAt(succ))
- bb.close
+ bb.close()
} else
- bb = nextBlock.keysIterator.next
+ bb = nextBlock.keysIterator.next()
}
checkValid(this)
}
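
The merge criterion applied in the loop above (a block is fused with its successor only when that successor is unique and in turn has the block as its unique predecessor) can be shown with a small, self-contained sketch. The graph below is made up for illustration, and the exception-handler constraint checked by the real code is omitted:

    object BlockMergeSketch extends App {
      // successors of each block label; predecessors are derived from this map
      val succs = Map(1 -> List(2), 2 -> List(3), 3 -> List(2, 4), 4 -> Nil)
      val preds = succs.toList
        .flatMap { case (b, ss) => ss.map(_ -> b) }
        .groupBy(_._1)
        .map { case (b, ps) => b -> ps.map(_._2) }
        .withDefaultValue(Nil)
      val mergeable = for {
        (b, List(succ)) <- succs               // b has a single successor ...
        if succ != b && preds(succ) == List(b) // ... whose single predecessor is b
      } yield b -> succ
      println(mergeable)                       // Map(2 -> 3); 1 -> 2 is ruled out by the extra edge 3 -> 2
    }
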
@@ -297,9 +277,6 @@ trait Members {
/** Starting PC for this local's visibility range. */
var start: Int = _
- /** Ending PC for this local's visibility range. */
- var end: Int = _
-
/** PC-based ranges for this local variable's visibility */
var ranges: List[(Int, Int)] = Nil
diff --git a/src/compiler/scala/tools/nsc/backend/icode/Opcodes.scala b/src/compiler/scala/tools/nsc/backend/icode/Opcodes.scala
index a3a0edb35d..57a768d9cb 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/Opcodes.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/Opcodes.scala
@@ -3,13 +3,11 @@
* @author Martin Odersky
*/
-
-
-package scala.tools.nsc
+package scala
+package tools.nsc
package backend
package icode
-import scala.tools.nsc.ast._
import scala.reflect.internal.util.{Position,NoPosition}
/*
@@ -67,7 +65,7 @@ import scala.reflect.internal.util.{Position,NoPosition}
* in the source files.
*/
trait Opcodes { self: ICodes =>
- import global.{Symbol, NoSymbol, Type, Name, Constant};
+ import global.{Symbol, NoSymbol, Name, Constant}
// categories of ICode instructions
final val localsCat = 1
@@ -111,17 +109,11 @@ trait Opcodes { self: ICodes =>
// Vlad: I wonder why we keep producedTypes around -- it looks like an useless thing to have
def producedTypes: List[TypeKind] = Nil
- /** This method returns the difference of size of the stack when the instruction is used */
- def difference = produced-consumed
-
/** The corresponding position in the source file */
private var _pos: Position = NoPosition
def pos: Position = _pos
- /** Used by dead code elimination. */
- var useful: Boolean = false
-
def setPos(p: Position): this.type = {
_pos = p
this
@@ -133,13 +125,6 @@ trait Opcodes { self: ICodes =>
}
object opcodes {
-
- def mayThrow(i: Instruction): Boolean = i match {
- case LOAD_LOCAL(_) | STORE_LOCAL(_) | CONSTANT(_) | THIS(_) | CZJUMP(_, _, _, _)
- | DROP(_) | DUP(_) | RETURN(_) | LOAD_EXCEPTION(_) | JUMP(_) | CJUMP(_, _, _, _) => false
- case _ => true
- }
-
/** Loads "this" on top of the stack.
* Stack: ...
* ->: ...:ref
@@ -211,7 +196,7 @@ trait Opcodes { self: ICodes =>
case class LOAD_FIELD(field: Symbol, isStatic: Boolean) extends Instruction {
/** Returns a string representation of this instruction */
override def toString(): String =
- "LOAD_FIELD " + (if (isStatic) field.fullName else field.toString());
+ "LOAD_FIELD " + (if (isStatic) field.fullName else field.toString())
override def consumed = if (isStatic) 0 else 1
override def produced = 1
@@ -273,16 +258,17 @@ trait Opcodes { self: ICodes =>
case class STORE_FIELD(field: Symbol, isStatic: Boolean) extends Instruction {
/** Returns a string representation of this instruction */
override def toString(): String =
- "STORE_FIELD "+field + (if (isStatic) " (static)" else " (dynamic)");
+ "STORE_FIELD "+field + (if (isStatic) " (static)" else " (dynamic)")
- override def consumed = if(isStatic) 1 else 2;
- override def produced = 0;
+ override def consumed = if(isStatic) 1 else 2
+
+ override def produced = 0
override def consumedTypes =
if (isStatic)
toTypeKind(field.tpe) :: Nil
else
- REFERENCE(field.owner) :: toTypeKind(field.tpe) :: Nil;
+ REFERENCE(field.owner) :: toTypeKind(field.tpe) :: Nil
override def category = fldsCat
}
@@ -409,19 +395,19 @@ trait Opcodes { self: ICodes =>
override def category = mthdsCat
}
-
+
/**
* A place holder entry that allows us to parse class files with invoke dynamic
* instructions. Because the compiler doesn't yet really understand the
* behavior of invokeDynamic, this op acts as a poison pill. Any attempt to analyze
* this instruction will cause a failure. The only optimization that
* should ever look at non-Scala generated icode is the inliner, and it
- * has been modified to not examine any method with invokeDynamic
+ * has been modified to not examine any method with invokeDynamic
* instructions. So if this poison pill ever causes problems then
* there's been a serious misunderstanding
*/
// TODO do the real thing
- case class INVOKE_DYNAMIC(poolEntry: Char) extends Instruction {
+ case class INVOKE_DYNAMIC(poolEntry: Int) extends Instruction {
private def error = sys.error("INVOKE_DYNAMIC is not fully implemented and should not be analyzed")
override def consumed = error
override def produced = error
@@ -455,10 +441,12 @@ trait Opcodes { self: ICodes =>
*/
case class NEW(kind: REFERENCE) extends Instruction {
/** Returns a string representation of this instruction */
- override def toString(): String = "NEW "+ kind;
+ override def toString(): String = "NEW "+ kind
+
+ override def consumed = 0
+
+ override def produced = 1
- override def consumed = 0;
- override def produced = 1;
override def producedTypes = kind :: Nil
/** The corresponding constructor call. */
@@ -474,11 +462,13 @@ trait Opcodes { self: ICodes =>
*/
case class CREATE_ARRAY(elem: TypeKind, dims: Int) extends Instruction {
/** Returns a string representation of this instruction */
- override def toString(): String ="CREATE_ARRAY "+elem + " x " + dims;
+ override def toString(): String ="CREATE_ARRAY "+elem + " x " + dims
+
+ override def consumed = dims
- override def consumed = dims;
override def consumedTypes = List.fill(dims)(INT)
- override def produced = 1;
+ override def produced = 1
+
override def producedTypes = ARRAY(elem) :: Nil
override def category = arraysCat
@@ -567,7 +557,7 @@ trait Opcodes { self: ICodes =>
override def toString(): String = (
"CJUMP (" + kind + ")" +
cond + " ? "+successBlock.label+" : "+failureBlock.label
- );
+ )
override def consumed = 2
override def produced = 0
@@ -590,7 +580,7 @@ trait Opcodes { self: ICodes =>
override def toString(): String = (
"CZJUMP (" + kind + ")" +
cond + " ? "+successBlock.label+" : "+failureBlock.label
- );
+ )
override def consumed = 1
override def produced = 0
@@ -682,10 +672,11 @@ trait Opcodes { self: ICodes =>
*/
case class MONITOR_EXIT() extends Instruction {
/** Returns a string representation of this instruction */
- override def toString(): String ="MONITOR_EXIT";
+ override def toString(): String ="MONITOR_EXIT"
- override def consumed = 1;
- override def produced = 0;
+ override def consumed = 1
+
+ override def produced = 0
override def consumedTypes = ObjectReference :: Nil
@@ -734,8 +725,6 @@ trait Opcodes { self: ICodes =>
/** Is this a static method call? */
def isStatic: Boolean = false
- def isSuper: Boolean = false
-
/** Is this an instance method call? */
def hasInstance: Boolean = true
@@ -769,77 +758,7 @@ trait Opcodes { self: ICodes =>
* On JVM, translated to `invokespecial`.
*/
case class SuperCall(mix: Name) extends InvokeStyle {
- override def isSuper = true
override def toString(): String = { "super(" + mix + ")" }
}
-
-
- // CLR backend
-
- case class CIL_LOAD_LOCAL_ADDRESS(local: Local) extends Instruction {
- /** Returns a string representation of this instruction */
- override def toString(): String = "CIL_LOAD_LOCAL_ADDRESS "+local //+isArgument?" (argument)":"";
-
- override def consumed = 0
- override def produced = 1
-
- override def producedTypes = msil_mgdptr(local.kind) :: Nil
-
- override def category = localsCat
- }
-
- case class CIL_LOAD_FIELD_ADDRESS(field: Symbol, isStatic: Boolean) extends Instruction {
- /** Returns a string representation of this instruction */
- override def toString(): String =
- "CIL_LOAD_FIELD_ADDRESS " + (if (isStatic) field.fullName else field.toString)
-
- override def consumed = if (isStatic) 0 else 1
- override def produced = 1
-
- override def consumedTypes = if (isStatic) Nil else REFERENCE(field.owner) :: Nil;
- override def producedTypes = msil_mgdptr(REFERENCE(field.owner)) :: Nil;
-
- override def category = fldsCat
- }
-
- case class CIL_LOAD_ARRAY_ITEM_ADDRESS(kind: TypeKind) extends Instruction {
- /** Returns a string representation of this instruction */
- override def toString(): String = "CIL_LOAD_ARRAY_ITEM_ADDRESS (" + kind + ")"
-
- override def consumed = 2
- override def produced = 1
-
- override def consumedTypes = ARRAY(kind) :: INT :: Nil
- override def producedTypes = msil_mgdptr(kind) :: Nil
-
- override def category = arraysCat
- }
-
- case class CIL_UNBOX(valueType: TypeKind) extends Instruction {
- override def toString(): String = "CIL_UNBOX " + valueType
- override def consumed = 1
- override def consumedTypes = ObjectReferenceList // actually consumes a 'boxed valueType'
- override def produced = 1
- override def producedTypes = msil_mgdptr(valueType) :: Nil
- override def category = objsCat
- }
-
- case class CIL_INITOBJ(valueType: TypeKind) extends Instruction {
- override def toString(): String = "CIL_INITOBJ " + valueType
- override def consumed = 1
- override def consumedTypes = ObjectReferenceList // actually consumes a managed pointer
- override def produced = 0
- override def category = objsCat
- }
-
- case class CIL_NEWOBJ(method: Symbol) extends Instruction {
- override def toString(): String = "CIL_NEWOBJ " + hostClass.fullName + method.fullName
- var hostClass: Symbol = method.owner;
- override def consumed = method.tpe.paramTypes.length
- override def consumedTypes = method.tpe.paramTypes map toTypeKind
- override def produced = 1
- override def producedTypes = toTypeKind(method.tpe.resultType) :: Nil
- override def category = objsCat
- }
}
}
diff --git a/src/compiler/scala/tools/nsc/backend/icode/Primitives.scala b/src/compiler/scala/tools/nsc/backend/icode/Primitives.scala
index c8579041ba..4fa717309e 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/Primitives.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/Primitives.scala
@@ -6,9 +6,9 @@
package scala.tools.nsc
package backend
-package icode;
+package icode
-import java.io.PrintWriter;
+import java.io.PrintWriter
trait Primitives { self: ICodes =>
@@ -51,12 +51,12 @@ trait Primitives { self: ICodes =>
// type : (src) => dst
// range: src,dst <- { Ix, Ux, Rx }
// jvm : i2{l, f, d}, l2{i, f, d}, f2{i, l, d}, d2{i, l, f}, i2{b, c, s}
- case class Conversion(src: TypeKind, dst: TypeKind) extends Primitive;
+ case class Conversion(src: TypeKind, dst: TypeKind) extends Primitive
// type : (Array[REF]) => I4
// range: type <- { BOOL, Ix, Ux, Rx, REF }
// jvm : arraylength
- case class ArrayLength(kind: TypeKind) extends Primitive;
+ case class ArrayLength(kind: TypeKind) extends Primitive
// type : (buf,el) => buf
// range: lf,rg <- { BOOL, Ix, Ux, Rx, REF, STR }
@@ -76,25 +76,12 @@ trait Primitives { self: ICodes =>
/** Pretty printer for primitives */
class PrimitivePrinter(out: PrintWriter) {
-
def print(s: String): PrimitivePrinter = {
out.print(s)
this
}
def print(o: AnyRef): PrimitivePrinter = print(o.toString())
-
- def printPrimitive(prim: Primitive) = prim match {
- case Negation(kind) =>
- print("!")
-
- case Test(op, kind, zero) =>
- print(op).print(kind)
-
- case Comparison(op, kind) =>
- print(op).print("(").print(kind)
-
- }
}
/** This class represents a comparison operation. */
@@ -243,9 +230,9 @@ trait Primitives { self: ICodes =>
/** Returns a string representation of this operation. */
override def toString(): String = this match {
- case AND => return "AND"
- case OR => return "OR"
- case XOR => return "XOR"
+ case AND => "AND"
+ case OR => "OR"
+ case XOR => "XOR"
case _ => throw new RuntimeException("LogicalOp unknown case")
}
}
diff --git a/src/compiler/scala/tools/nsc/backend/icode/Printers.scala b/src/compiler/scala/tools/nsc/backend/icode/Printers.scala
index 6cac641e3e..1fe33f78e7 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/Printers.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/Printers.scala
@@ -8,13 +8,9 @@ package backend
package icode
import java.io.PrintWriter
-import scala.tools.nsc.symtab.Flags
-import scala.reflect.internal.util.Position
trait Printers { self: ICodes =>
import global._
- import global.icodes.opcodes._
- import global.icodes._
class TextPrinter(writer: PrintWriter, lin: Linearizer) {
private var margin = 0
@@ -31,15 +27,15 @@ trait Printers { self: ICodes =>
def print(o: Any) { print(o.toString()) }
def println(s: String) {
- print(s);
- println
+ print(s)
+ println()
}
def println() {
out.println()
var i = 0
while (i < margin) {
- print(" ");
+ print(" ")
i += 1
}
}
@@ -57,26 +53,26 @@ trait Printers { self: ICodes =>
}
def printClass(cls: IClass) {
- print(cls.symbol.toString()); print(" extends ");
- printList(cls.symbol.info.parents, ", ");
- indent; println(" {");
- println("// fields:");
- cls.fields.foreach(printField); println;
- println("// methods");
- cls.methods.foreach(printMethod);
- undent; println;
+ print(cls.symbol.toString()); print(" extends ")
+ printList(cls.symbol.info.parents, ", ")
+ indent(); println(" {")
+ println("// fields:")
+ cls.fields.foreach(printField); println()
+ println("// methods")
+ cls.methods.foreach(printMethod)
+ undent(); println()
println("}")
}
def printField(f: IField) {
- print(f.symbol.keyString); print(" ");
- print(f.symbol.nameString); print(": ");
- println(f.symbol.info.toString());
+ print(f.symbol.keyString); print(" ")
+ print(f.symbol.nameString); print(": ")
+ println(f.symbol.info.toString())
}
def printMethod(m: IMethod) {
- print("def "); print(m.symbol.name);
- print("("); printList(printParam)(m.params, ", "); print(")");
+ print("def "); print(m.symbol.name)
+ print("("); printList(printParam)(m.params, ", "); print(")")
print(": "); print(m.symbol.info.resultType)
if (!m.isAbstractMethod) {
@@ -84,40 +80,40 @@ trait Printers { self: ICodes =>
println("locals: " + m.locals.mkString("", ", ", ""))
println("startBlock: " + m.startBlock)
println("blocks: " + m.code.blocks.mkString("[", ",", "]"))
- println
+ println()
lin.linearize(m) foreach printBlock
println("}")
- indent; println("Exception handlers: ")
+ indent(); println("Exception handlers: ")
m.exh foreach printExceptionHandler
- undent; println
+ undent(); println()
} else
- println
+ println()
}
def printParam(p: Local) {
- print(p.sym.name); print(": "); print(p.sym.info);
+ print(p.sym.name); print(": "); print(p.sym.info)
print(" ("); print(p.kind); print(")")
}
def printExceptionHandler(e: ExceptionHandler) {
- indent;
- println("catch (" + e.cls.simpleName + ") in " + e.covered.toSeq.sortBy(_.label) + " starting at: " + e.startBlock);
- println("consisting of blocks: " + e.blocks);
- undent;
- println("with finalizer: " + e.finalizer);
-// linearizer.linearize(e.startBlock) foreach printBlock;
+ indent()
+ println("catch (" + e.cls.simpleName + ") in " + e.covered.toSeq.sortBy(_.label) + " starting at: " + e.startBlock)
+ println("consisting of blocks: " + e.blocks)
+ undent()
+ println("with finalizer: " + e.finalizer)
+ // linearizer.linearize(e.startBlock) foreach printBlock;
}
def printBlock(bb: BasicBlock) {
print(bb.label)
if (bb.loopHeader) print("[loop header]")
- print(": ");
- if (settings.debug.value) print("pred: " + bb.predecessors + " succs: " + bb.successors + " flags: " + bb.flagsString)
- indent; println
+ print(": ")
+ if (settings.debug) print("pred: " + bb.predecessors + " succs: " + bb.successors + " flags: " + bb.flagsString)
+ indent(); println()
bb.toList foreach printInstruction
- undent; println
+ undent(); println()
}
def printInstruction(i: Instruction) {
diff --git a/src/compiler/scala/tools/nsc/backend/icode/Repository.scala b/src/compiler/scala/tools/nsc/backend/icode/Repository.scala
index e73015c4da..10d57df4a3 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/Repository.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/Repository.scala
@@ -26,17 +26,6 @@ trait Repository {
/** The icode of the given class, if available */
def icode(sym: Symbol): Option[IClass] = (classes get sym) orElse (loaded get sym)
- /** The icode of the given class. If not available, it loads
- * its bytecode.
- */
- def icode(sym: Symbol, force: Boolean): IClass =
- icode(sym) getOrElse {
- log("loading " + sym)
- load(sym)
- assert(available(sym))
- loaded(sym)
- }
-
/** Load bytecode for given symbol. */
def load(sym: Symbol): Boolean = {
try {
@@ -50,7 +39,7 @@ trait Repository {
} catch {
case e: Throwable => // possible exceptions are MissingRequirementError, IOException and TypeError -> no better common supertype
log("Failed to load %s. [%s]".format(sym.fullName, e.getMessage))
- if (settings.debug.value) { e.printStackTrace }
+ if (settings.debug) { e.printStackTrace }
false
}
diff --git a/src/compiler/scala/tools/nsc/backend/icode/TypeKinds.scala b/src/compiler/scala/tools/nsc/backend/icode/TypeKinds.scala
index 4f8fda8024..2c8fda85f4 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/TypeKinds.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/TypeKinds.scala
@@ -55,7 +55,7 @@ trait TypeKinds { self: ICodes =>
def toType: Type = reversePrimitiveMap get this map (_.tpe) getOrElse {
this match {
- case REFERENCE(cls) => cls.tpe
+ case REFERENCE(cls) => cls.tpe_*
case ARRAY(elem) => arrayType(elem.toType)
case _ => abort("Unknown type kind.")
}
@@ -66,7 +66,6 @@ trait TypeKinds { self: ICodes =>
def isValueType = false
def isBoxedType = false
final def isRefOrArrayType = isReferenceType || isArrayType
- final def isRefArrayOrBoxType = isRefOrArrayType || isBoxedType
final def isNothingType = this == NothingReference
final def isNullType = this == NullReference
final def isInterfaceType = this match {
@@ -89,11 +88,20 @@ trait TypeKinds { self: ICodes =>
final def isNumericType: Boolean = isIntegralType | isRealType
/** Simple subtyping check */
- def <:<(other: TypeKind): Boolean = (this eq other) || (this match {
- case BOOL | BYTE | SHORT | CHAR => other == INT || other == LONG
- case _ => this eq other
- })
+ def <:<(other: TypeKind): Boolean
+ /**
+ * this is directly assignable to other if no coercion or
+ * casting is needed to convert this to other. It's a distinct
+ * relationship from <:< because, on the JVM, BOOL, BYTE, CHAR and
+ * SHORT need no coercion to INT, yet (even though JVM arrays
+ * are covariant) ARRAY[SHORT] is not a subtype of ARRAY[INT].
+ */
+ final def isAssignabledTo(other: TypeKind): Boolean = other match {
+ case INT => this.isIntSizedType
+ case _ => this <:< other
+ }
+
/** Is this type a category 2 type in JVM terms? (ie, is it LONG or DOUBLE?) */
def isWideType: Boolean = false
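
The distinction drawn by isAssignabledTo above can be seen in a small, self-contained model. This is a standalone sketch, not the compiler's TypeKind hierarchy; it only mirrors the two definitions added in this hunk and assumes, as the doc comment states, that SHORT counts as int-sized:

    sealed trait Kind {
      def <:<(other: Kind): Boolean = this eq other    // identity, as for the value kinds above
      def isIntSizedType: Boolean = false
      final def isAssignabledTo(other: Kind): Boolean = other match {
        case INT => this.isIntSizedType
        case _   => this <:< other
      }
    }
    case object INT   extends Kind { override def isIntSizedType = true }
    case object SHORT extends Kind { override def isIntSizedType = true }
    final case class ARRAY(elem: Kind) extends Kind {
      override def <:<(other: Kind) = other match {
        case ARRAY(e2) => elem <:< e2
        case _         => false
      }
    }

    object AssignabilitySketch extends App {
      println(SHORT isAssignabledTo INT)               // true: int-sized, no coercion needed
      println(SHORT <:< INT)                           // false: not a subtype
      println(ARRAY(SHORT) isAssignabledTo ARRAY(INT)) // false: falls back to <:<, element kinds must match
    }

This mirrors the checker change earlier in this diff, where isSubtype now delegates to isAssignabledTo so that storing a SHORT into an INT slot passes the check without claiming SHORT <:< INT.
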
@@ -112,10 +120,9 @@ trait TypeKinds { self: ICodes =>
override def toString = {
this.getClass.getName stripSuffix "$" dropWhile (_ != '$') drop 1
}
+ def <:<(other: TypeKind): Boolean = this eq other
}
- var lubs0 = 0
-
/**
* The least upper bound of two typekinds. They have to be either
* REFERENCE or ARRAY kinds.
@@ -123,24 +130,23 @@ trait TypeKinds { self: ICodes =>
* The lub is based on the lub of scala types.
*/
def lub(a: TypeKind, b: TypeKind): TypeKind = {
- /** The compiler's lub calculation does not order classes before traits.
- * This is apparently not wrong but it is inconvenient, and causes the
- * icode checker to choke when things don't match up. My attempts to
- * alter the calculation at the compiler level were failures, so in the
- * interests of a working icode checker I'm making the adjustment here.
+ /* The compiler's lub calculation does not order classes before traits.
+ * This is apparently not wrong but it is inconvenient, and causes the
+ * icode checker to choke when things don't match up. My attempts to
+ * alter the calculation at the compiler level were failures, so in the
+ * interests of a working icode checker I'm making the adjustment here.
*
- * Example where we'd like a different answer:
+ * Example where we'd like a different answer:
*
- * abstract class Tom
- * case object Bob extends Tom
- * case object Harry extends Tom
- * List(Bob, Harry) // compiler calculates "Product with Tom" rather than "Tom with Product"
+ * abstract class Tom
+ * case object Bob extends Tom
+ * case object Harry extends Tom
+ * List(Bob, Harry) // compiler calculates "Product with Tom" rather than "Tom with Product"
*
- * Here we make the adjustment by rewinding to a pre-erasure state and
- * sifting through the parents for a class type.
+ * Here we make the adjustment by rewinding to a pre-erasure state and
+ * sifting through the parents for a class type.
*/
- def lub0(tk1: TypeKind, tk2: TypeKind): Type = beforeUncurry {
- import definitions._
+ def lub0(tk1: TypeKind, tk2: TypeKind): Type = enteringUncurry {
val tp = global.lub(List(tk1.toType, tk2.toType))
val (front, rest) = tp.parents span (_.typeSymbol.isTrait)
@@ -284,7 +290,7 @@ trait TypeKinds { self: ICodes =>
}
/** Checks subtyping relationship. */
- override def <:<(other: TypeKind) = isNothingType || (other match {
+ def <:<(other: TypeKind) = isNothingType || (other match {
case REFERENCE(cls2) => cls.tpe <:< cls2.tpe
case ARRAY(_) => cls == NullClass
case _ => false
@@ -298,7 +304,7 @@ trait TypeKinds { self: ICodes =>
else ARRAY(ArrayN(elem, dims - 1))
}
- final case class ARRAY(val elem: TypeKind) extends TypeKind {
+ final case class ARRAY(elem: TypeKind) extends TypeKind {
override def toString = "ARRAY[" + elem + "]"
override def isArrayType = true
override def dimensions = 1 + elem.dimensions
@@ -322,7 +328,7 @@ trait TypeKinds { self: ICodes =>
/** Array subtyping is covariant, as in Java. Necessary for checking
* code that interacts with Java. */
- override def <:<(other: TypeKind) = other match {
+ def <:<(other: TypeKind) = other match {
case ARRAY(elem2) => elem <:< elem2
case REFERENCE(AnyRefClass | ObjectClass) => true // TODO: platform dependent!
case _ => false
@@ -340,7 +346,7 @@ trait TypeKinds { self: ICodes =>
}
/** Checks subtyping relationship. */
- override def <:<(other: TypeKind) = other match {
+ def <:<(other: TypeKind) = other match {
case BOXED(`kind`) => true
case REFERENCE(AnyRefClass | ObjectClass) => true // TODO: platform dependent!
case _ => false
@@ -353,6 +359,7 @@ trait TypeKinds { self: ICodes =>
*/
case object ConcatClass extends TypeKind {
override def toString = "ConcatClass"
+ def <:<(other: TypeKind): Boolean = this eq other
/**
* Approximate `lub`. The common type of two references is
@@ -363,19 +370,16 @@ trait TypeKinds { self: ICodes =>
case REFERENCE(_) => AnyRefReference
case _ => uncomparable(other)
}
-
- /** Checks subtyping relationship. */
- override def <:<(other: TypeKind) = this eq other
}
////////////////// Conversions //////////////////////////////
/** Return the TypeKind of the given type
*
- * Call to .normalize fixes #3003 (follow type aliases). Otherwise,
+ * Call to dealiasWiden fixes #3003 (follow type aliases). Otherwise,
* arrayOrClassType below would return ObjectReference.
*/
- def toTypeKind(t: Type): TypeKind = t.normalize match {
+ def toTypeKind(t: Type): TypeKind = t.dealiasWiden match {
case ThisType(ArrayClass) => ObjectReference
case ThisType(sym) => REFERENCE(sym)
case SingleType(_, sym) => primitiveOrRefType(sym)
@@ -431,11 +435,4 @@ trait TypeKinds { self: ICodes =>
primitiveTypeMap.getOrElse(sym, newReference(sym))
private def primitiveOrClassType(sym: Symbol, targs: List[Type]) =
primitiveTypeMap.getOrElse(sym, arrayOrClassType(sym, targs))
-
- def msil_mgdptr(tk: TypeKind): TypeKind = (tk: @unchecked) match {
- case REFERENCE(cls) => REFERENCE(loaders.clrTypes.mdgptrcls4clssym(cls))
- // TODO have ready class-symbols for the by-ref versions of built-in valuetypes
- case _ => abort("cannot obtain a managed pointer for " + tk)
- }
-
}
diff --git a/src/compiler/scala/tools/nsc/backend/icode/TypeStacks.scala b/src/compiler/scala/tools/nsc/backend/icode/TypeStacks.scala
index 23d3d05c64..57d51dad49 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/TypeStacks.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/TypeStacks.scala
@@ -15,15 +15,11 @@ package icode
trait TypeStacks {
self: ICodes =>
- import opcodes._
-
/* This class simulates the type of the operand
* stack of the ICode.
*/
type Rep = List[TypeKind]
- object NoTypeStack extends TypeStack(Nil) { }
-
class TypeStack(var types: Rep) {
if (types.nonEmpty)
checkerDebug("Created " + this)
@@ -71,14 +67,6 @@ trait TypeStacks {
def apply(n: Int): TypeKind = types(n)
- /**
- * A TypeStack agrees with another one if they have the same
- * length and each type kind agrees position-wise. Two
- * types agree if one is a subtype of the other.
- */
- def agreesWith(other: TypeStack): Boolean =
- (types corresponds other.types)((t1, t2) => t1 <:< t2 || t2 <:< t1)
-
/* This method returns a String representation of the stack */
override def toString() =
if (types.isEmpty) "[]"
diff --git a/src/compiler/scala/tools/nsc/backend/icode/analysis/CopyPropagation.scala b/src/compiler/scala/tools/nsc/backend/icode/analysis/CopyPropagation.scala
index 53111d0ade..0c3f92f13f 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/analysis/CopyPropagation.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/analysis/CopyPropagation.scala
@@ -3,7 +3,8 @@
* @author Martin Odersky
*/
-package scala.tools.nsc
+package scala
+package tools.nsc
package backend.icode.analysis
import scala.collection.{ mutable, immutable }
@@ -26,12 +27,8 @@ abstract class CopyPropagation {
case object This extends Location
/** Values that can be on the stack. */
- abstract class Value {
- def isRecord = false
- }
- case class Record(cls: Symbol, bindings: mutable.Map[Symbol, Value]) extends Value {
- override def isRecord = true
- }
+ abstract class Value { }
+ case class Record(cls: Symbol, bindings: mutable.Map[Symbol, Value]) extends Value { }
/** The value of some location in memory. */
case class Deref(l: Location) extends Value
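For readers new to this abstract domain: a binding such as LocalVar(y) -> Deref(LocalVar(x)) records that y currently holds the same value as x, so later uses of y can be resolved back to x. A hedged standalone sketch of that resolution over plain strings (CopyPropDemo, Local, Deref and resolve are illustrative stand-ins, not the compiler's Value/Location classes):

object CopyPropDemo {
  type Local = String
  sealed trait Value
  final case class Deref(of: Local) extends Value
  case object Unknown extends Value

  // after `val y = x`, copy propagation binds y to Deref("x")
  val bindings: Map[Local, Value] = Map("y" -> Deref("x"), "z" -> Unknown)

  // chase copy chains back to their source, similar to the local lookup above
  def resolve(l: Local): Local = bindings.get(l) match {
    case Some(Deref(other)) => resolve(other)
    case _                  => l
  }

  def main(args: Array[String]): Unit = println(resolve("y")) // prints x
}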
@@ -91,16 +88,6 @@ abstract class CopyPropagation {
loop(l) getOrElse Deref(LocalVar(l))
}
- /* Return the binding for the given field of the given record */
- def getBinding(r: Record, f: Symbol): Value = {
- assert(r.bindings contains f, "Record " + r + " does not contain a field " + f)
-
- r.bindings(f) match {
- case Deref(LocalVar(l)) => getBinding(l)
- case target => target
- }
- }
-
/** Return a local which contains the same value as this field, if any.
* If the field holds a reference to a local, the returned value is the
* binding of that local.
@@ -137,7 +124,7 @@ abstract class CopyPropagation {
}
override def toString(): String =
- "\nBindings: " + bindings + "\nStack: " + stack;
+ "\nBindings: " + bindings + "\nStack: " + stack
def dup: State = {
val b: Bindings = mutable.HashMap()
@@ -178,7 +165,7 @@ abstract class CopyPropagation {
val resBindings = mutable.HashMap[Location, Value]()
for ((k, v) <- a.bindings if b.bindings.isDefinedAt(k) && v == b.bindings(k))
- resBindings += (k -> v);
+ resBindings += (k -> v)
new State(resBindings, resStack)
}
}
@@ -203,20 +190,20 @@ abstract class CopyPropagation {
debuglog("CopyAnalysis added point: " + b)
}
m.exh foreach { e =>
- in(e.startBlock) = new copyLattice.State(copyLattice.emptyBinding, copyLattice.exceptionHandlerStack);
+ in(e.startBlock) = new copyLattice.State(copyLattice.emptyBinding, copyLattice.exceptionHandlerStack)
}
// first block is special: it's not bottom, but a precisely defined state with no bindings
- in(m.startBlock) = new lattice.State(lattice.emptyBinding, Nil);
+ in(m.startBlock) = new lattice.State(lattice.emptyBinding, Nil)
}
}
override def run() {
forwardAnalysis(blockTransfer)
- if (settings.debug.value) {
+ if (settings.debug) {
linearizer.linearize(method).foreach(b => if (b != method.startBlock)
assert(in(b) != lattice.bottom,
- "Block " + b + " in " + this.method + " has input equal to bottom -- not visited?"));
+ "Block " + b + " in " + this.method + " has input equal to bottom -- not visited?"))
}
}
@@ -241,7 +228,7 @@ abstract class CopyPropagation {
case CONSTANT(k) =>
if (k.tag != UnitTag)
- out.stack = Const(k) :: out.stack;
+ out.stack = Const(k) :: out.stack
case LOAD_ARRAY_ITEM(_) =>
out.stack = (Unknown :: out.stack.drop(2))
@@ -290,14 +277,14 @@ abstract class CopyPropagation {
v match {
case Deref(LocalVar(other)) =>
if (other != local)
- out.bindings += (LocalVar(local) -> v);
+ out.bindings += (LocalVar(local) -> v)
case _ =>
out.bindings += (LocalVar(local) -> v)
}
case Nil =>
sys.error("Incorrect icode in " + method + ". Expecting something on the stack.")
}
- out.stack = out.stack drop 1;
+ out.stack = out.stack drop 1
case STORE_THIS(_) =>
cleanReferencesTo(out, This)
@@ -305,14 +292,14 @@ abstract class CopyPropagation {
case STORE_FIELD(field, isStatic) =>
if (isStatic)
- out.stack = out.stack.drop(1);
+ out.stack = out.stack.drop(1)
else {
- out.stack = out.stack.drop(2);
- cleanReferencesTo(out, Field(AllRecords, field));
+ out.stack = out.stack.drop(2)
+ cleanReferencesTo(out, Field(AllRecords, field))
in.stack match {
case v :: Record(_, bindings) :: vs =>
bindings += (field -> v)
- case _ => ();
+ case _ => ()
}
}
@@ -322,7 +309,7 @@ abstract class CopyPropagation {
case CALL_METHOD(method, style) => style match {
case Dynamic =>
- out = simulateCall(in, method, false)
+ out = simulateCall(in, method, static = false)
case Static(onInstance) =>
if (onInstance) {
@@ -333,19 +320,19 @@ abstract class CopyPropagation {
case Record(_, bindings) =>
for (v <- out.stack.take(method.info.paramTypes.length + 1)
if v ne obj) {
- bindings ++= getBindingsForPrimaryCtor(in, method);
+ bindings ++= getBindingsForPrimaryCtor(in, method)
}
case _ => ()
}
// put the Record back on the stack and remove the 'returned' value
out.stack = out.stack.drop(1 + method.info.paramTypes.length)
} else
- out = simulateCall(in, method, false)
+ out = simulateCall(in, method, static = false)
} else
- out = simulateCall(in, method, true)
+ out = simulateCall(in, method, static = true)
case SuperCall(_) =>
- out = simulateCall(in, method, false)
+ out = simulateCall(in, method, static = false)
}
case BOX(tpe) =>
@@ -404,7 +391,7 @@ abstract class CopyPropagation {
out.stack = out.stack.head :: out.stack
case MONITOR_ENTER() =>
- out.stack = out.stack.drop(1);
+ out.stack = out.stack.drop(1)
case MONITOR_EXIT() =>
out.stack = out.stack.drop(1)
@@ -452,7 +439,7 @@ abstract class CopyPropagation {
case Deref(loc1) if (loc1 == target) => false
case Boxed(loc1) if (loc1 == target) => false
case rec @ Record(_, _) =>
- cleanRecord(rec);
+ cleanRecord(rec)
true
case _ => true
}) &&
@@ -463,22 +450,17 @@ abstract class CopyPropagation {
}
}
- /** Update the state <code>s</code> after the call to <code>method</code>.
+ /** Update the state `s` after the call to `method`.
* The stack elements are dropped and replaced by the result of the call.
* If the method is impure, all bindings to record fields are cleared.
- *
- * @param state ...
- * @param method ...
- * @param static ...
- * @return ...
*/
final def simulateCall(state: copyLattice.State, method: Symbol, static: Boolean): copyLattice.State = {
- val out = new copyLattice.State(state.bindings, state.stack);
- out.stack = out.stack.drop(method.info.paramTypes.length + (if (static) 0 else 1));
- if (method.info.resultType != definitions.UnitClass.tpe && !method.isConstructor)
- out.stack = Unknown :: out.stack;
+ val out = new copyLattice.State(state.bindings, state.stack)
+ out.stack = out.stack.drop(method.info.paramTypes.length + (if (static) 0 else 1))
+ if (method.info.resultType != definitions.UnitTpe && !method.isConstructor)
+ out.stack = Unknown :: out.stack
if (!isPureMethod(method))
- invalidateRecords(out);
+ invalidateRecords(out)
out
}
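The doc comment above describes simulateCall's effect on the abstract stack: pop the arguments (plus the receiver for instance calls), then push a single unknown result if the callee returns a value. A hedged sketch of just that drop/push arithmetic, simplified to a List[String] stack (SimulateCallDemo and simulateCallStack are invented names):

object SimulateCallDemo {
  def simulateCallStack(stack: List[String], paramCount: Int,
                        static: Boolean, returnsValue: Boolean): List[String] = {
    // arguments are popped, plus the receiver for instance calls
    val popped = stack.drop(paramCount + (if (static) 0 else 1))
    // a single abstract result is pushed when the callee returns a value
    if (returnsValue) "Unknown" :: popped else popped
  }

  def main(args: Array[String]): Unit =
    // instance call with two arguments, e.g. receiver.plus(a, b)
    println(simulateCallStack(List("b", "a", "receiver", "rest"), 2,
                              static = false, returnsValue = true)) // List(Unknown, rest)
}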
@@ -519,8 +501,8 @@ abstract class CopyPropagation {
* they are passed on the stack. It works for primary constructors.
*/
private def getBindingsForPrimaryCtor(in: copyLattice.State, ctor: Symbol): mutable.Map[Symbol, Value] = {
- val paramAccessors = ctor.owner.constrParamAccessors;
- var values = in.stack.take(1 + ctor.info.paramTypes.length).reverse.drop(1);
+ val paramAccessors = ctor.owner.constrParamAccessors
+ var values = in.stack.take(1 + ctor.info.paramTypes.length).reverse.drop(1)
val bindings = mutable.HashMap[Symbol, Value]()
debuglog("getBindings for: " + ctor + " acc: " + paramAccessors)
@@ -546,24 +528,22 @@ abstract class CopyPropagation {
// + " having acc: " + (paramAccessors map (_.tpe))+ " vs. params" + paramTypes
// + "\n\t failed at pos " + i + " with " + p.tpe + " == " + paramTypes(i))
if (p.tpe == paramTypes(i))
- bindings += (p -> values.head);
- values = values.tail;
+ bindings += (p -> values.head)
+ values = values.tail
}
debuglog("\t" + bindings)
bindings
}
- /** Is symbol <code>m</code> a pure method?
- *
- * @param m ...
- * @return ...
+ /** Is symbol `m` a pure method?
*/
final def isPureMethod(m: Symbol): Boolean =
m.isGetter // abstract getters are still pure, as we 'know'
final override def toString() = (
- method.blocks map { b =>
+ if (method eq null) List("<null>")
+ else method.blocks map { b =>
"\nIN(%s):\t Bindings: %s".format(b.label, in(b).bindings) +
"\nIN(%s):\t Stack: %s".format(b.label, in(b).stack)
}
diff --git a/src/compiler/scala/tools/nsc/backend/icode/analysis/DataFlowAnalysis.scala b/src/compiler/scala/tools/nsc/backend/icode/analysis/DataFlowAnalysis.scala
index 04c3eedbad..a378998f8f 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/analysis/DataFlowAnalysis.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/analysis/DataFlowAnalysis.scala
@@ -4,7 +4,8 @@
*/
-package scala.tools.nsc
+package scala
+package tools.nsc
package backend.icode.analysis
import scala.collection.{ mutable, immutable }
@@ -30,16 +31,7 @@ trait DataFlowAnalysis[L <: SemiLattice] {
/* Implement this function to initialize the worklist. */
def init(f: => Unit): Unit = {
iterations = 0
- in.clear; out.clear; worklist.clear; visited.clear;
- f
- }
-
- /** Reinitialize, but keep the old solutions. Should be used when reanalyzing the
- * same method, after some code transformation.
- */
- def reinit(f: => Unit): Unit = {
- iterations = 0
- worklist.clear; visited.clear;
+ in.clear(); out.clear(); worklist.clear(); visited.clear()
f
}
@@ -55,7 +47,7 @@ trait DataFlowAnalysis[L <: SemiLattice] {
while (!worklist.isEmpty) {
if (stat) iterations += 1
//Console.println("worklist in: " + worklist);
- val point = worklist.iterator.next; worklist -= point; visited += point;
+ val point = worklist.iterator.next(); worklist -= point; visited += point
//Console.println("taking out point: " + point + " worklist out: " + worklist);
val output = f(point, in(point))
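The loop above is a classic forward worklist solver: take a point, recompute its output from its input, and requeue successors whose input grows. A hedged, self-contained sketch of the same scheme (solveForward, Node and Elem are stand-ins for the lattice machinery; the real implementation keeps the worklist as a set rather than a queue, and backwardAnalysis below runs the same loop over predecessors):

object WorklistDemo {
  import scala.collection.mutable

  def solveForward[Node, Elem](
      start: List[Node],
      succs: Node => List[Node],
      bottom: Elem,
      lub: (Elem, Elem) => Elem,
      transfer: (Node, Elem) => Elem): mutable.Map[Node, Elem] = {

    val in  = mutable.Map[Node, Elem]().withDefaultValue(bottom)
    val out = mutable.Map[Node, Elem]().withDefaultValue(bottom)
    val worklist = mutable.Queue(start: _*)

    while (worklist.nonEmpty) {
      val p = worklist.dequeue()
      out(p) = transfer(p, in(p))            // recompute this point's output
      for (s <- succs(p)) {
        val merged = lub(in(s), out(p))      // merge into each successor's input
        if (merged != in(s)) { in(s) = merged; worklist += s } // input grew: revisit
      }
    }
    out
  }
}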
@@ -82,17 +74,13 @@ trait DataFlowAnalysis[L <: SemiLattice] {
sys.error("Could not find element " + e.getMessage)
}
- /** ...
- *
- * @param f ...
- */
def backwardAnalysis(f: (P, lattice.Elem) => lattice.Elem): Unit =
while (worklist.nonEmpty) {
if (stat) iterations += 1
val point = worklist.head
worklist -= point
- out(point) = lattice.lub(point.successors map in.apply, false) // TODO check for exception handlers
+ out(point) = lattice.lub(point.successors map in.apply, exceptional = false) // TODO check for exception handlers
val input = f(point, out(point))
if ((lattice.bottom == in(point)) || input != in(point)) {
diff --git a/src/compiler/scala/tools/nsc/backend/icode/analysis/Liveness.scala b/src/compiler/scala/tools/nsc/backend/icode/analysis/Liveness.scala
index abda639dec..60f7857d0c 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/analysis/Liveness.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/analysis/Liveness.scala
@@ -74,10 +74,10 @@ abstract class Liveness {
override def run() {
backwardAnalysis(blockTransfer)
- if (settings.debug.value) {
+ if (settings.debug) {
linearizer.linearize(method).foreach(b => if (b != method.startBlock)
assert(lattice.bottom != in(b),
- "Block " + b + " in " + this.method + " has input equal to bottom -- not visited?"));
+ "Block " + b + " in " + this.method + " has input equal to bottom -- not visited?"))
}
}
diff --git a/src/compiler/scala/tools/nsc/backend/icode/analysis/ReachingDefinitions.scala b/src/compiler/scala/tools/nsc/backend/icode/analysis/ReachingDefinitions.scala
index 2717c432e8..fecd48ed27 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/analysis/ReachingDefinitions.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/analysis/ReachingDefinitions.scala
@@ -51,8 +51,8 @@ abstract class ReachingDefinitions {
// it'd be nice not to call zip with mismatched sequences because
// it makes it harder to spot the real problems.
val result = (a.stack, b.stack).zipped map (_ ++ _)
- if (settings.debug.value && (a.stack.length != b.stack.length))
- debugwarn("Mismatched stacks in ReachingDefinitions#lub2: " + a.stack + ", " + b.stack + ", returning " + result)
+ if (settings.debug && (a.stack.length != b.stack.length))
+ devWarning(s"Mismatched stacks in ReachingDefinitions#lub2: ${a.stack}, ${b.stack}, returning $result")
result
}
)
@@ -141,13 +141,13 @@ abstract class ReachingDefinitions {
override def run() {
forwardAnalysis(blockTransfer)
- if (settings.debug.value) {
+ if (settings.debug) {
linearizer.linearize(method).foreach(b => if (b != method.startBlock)
assert(lattice.bottom != in(b),
"Block " + b + " in " + this.method + " has input equal to bottom -- not visited? " + in(b)
+ ": bot: " + lattice.bottom
+ "\nin(b) == bottom: " + (in(b) == lattice.bottom)
- + "\nbottom == in(b): " + (lattice.bottom == in(b))));
+ + "\nbottom == in(b): " + (lattice.bottom == in(b))))
}
}
@@ -155,7 +155,7 @@ abstract class ReachingDefinitions {
import lattice.IState
def updateReachingDefinition(b: BasicBlock, idx: Int, rd: ListSet[Definition]): ListSet[Definition] = {
val STORE_LOCAL(local) = b(idx)
- var tmp = local
+ val tmp = local
(rd filter { case (l, _, _) => l != tmp }) + ((tmp, b, idx))
}
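updateReachingDefinition is the classic kill/gen step: drop every older definition of the stored local, then add the new one. A small illustrative sketch with definitions modeled as (local, block, index) triples over plain strings (ReachingDefsDemo and updateRD are invented names):

object ReachingDefsDemo {
  type Definition = (String, String, Int) // (local, defining block, instruction index)

  // kill older definitions of `local`, then gen the new one
  def updateRD(rd: Set[Definition], local: String, block: String, idx: Int): Set[Definition] =
    rd.filterNot(_._1 == local) + ((local, block, idx))

  def main(args: Array[String]): Unit =
    println(updateRD(Set(("x", "B1", 0), ("y", "B1", 1)), "x", "B2", 3))
    // Set((y,B1,1), (x,B2,3))
}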
@@ -197,7 +197,7 @@ abstract class ReachingDefinitions {
def findDefs(bb: BasicBlock, idx: Int, m: Int, depth: Int): List[(BasicBlock, Int)] = if (idx > 0) {
assert(bb.closed, bb)
- var instrs = bb.getArray
+ val instrs = bb.getArray
var res: List[(BasicBlock, Int)] = Nil
var i = idx
var n = m
@@ -240,7 +240,8 @@ abstract class ReachingDefinitions {
findDefs(bb, idx, m, 0)
override def toString: String = {
- method.code.blocks map { b =>
+ if (method eq null) "<null>"
+ else method.code.blocks map { b =>
" entry(%s) = %s\n".format(b, in(b)) +
" exit(%s) = %s\n".format(b, out(b))
} mkString ("ReachingDefinitions {\n", "\n", "\n}")
diff --git a/src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala b/src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala
index b2ecb431ee..7a53293384 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala
@@ -3,7 +3,8 @@
* @author Martin Odersky
*/
-package scala.tools.nsc
+package scala
+package tools.nsc
package backend.icode.analysis
import scala.collection.{mutable, immutable}
@@ -68,7 +69,6 @@ abstract class TypeFlowAnalysis {
* names to types and a type stack.
*/
object typeFlowLattice extends SemiLattice {
- import icodes._
type Elem = IState[VarBinding, icodes.TypeStack]
val top = new Elem(new VarBinding, typeStackLattice.top)
@@ -132,15 +132,15 @@ abstract class TypeFlowAnalysis {
init(m)
}
- def run = {
- timer.start
+ def run() = {
+ timer.start()
// icodes.lubs0 = 0
forwardAnalysis(blockTransfer)
- val t = timer.stop
- if (settings.debug.value) {
+ timer.stop
+ if (settings.debug) {
linearizer.linearize(method).foreach(b => if (b != method.startBlock)
assert(visited.contains(b),
- "Block " + b + " in " + this.method + " has input equal to bottom -- not visited? .." + visited));
+ "Block " + b + " in " + this.method + " has input equal to bottom -- not visited? .." + visited))
}
// log("" + method.symbol.fullName + " [" + method.code.blocks.size + " blocks] "
// + "\n\t" + iterations + " iterations: " + t + " ms."
@@ -168,7 +168,7 @@ abstract class TypeFlowAnalysis {
val bindings = out.vars
val stack = out.stack
- if (settings.debug.value) {
+ if (settings.debug) {
// Console.println("[before] Stack: " + stack);
// Console.println(i);
}
@@ -208,7 +208,7 @@ abstract class TypeFlowAnalysis {
case Test(_, kind, zero) =>
stack.pop
if (!zero) { stack.pop }
- stack push BOOL;
+ stack push BOOL
case Comparison(_, _) => stack.pop2; stack push INT
@@ -269,36 +269,6 @@ abstract class TypeFlowAnalysis {
out
} // interpret
-
- class SimulatedStack {
- private var types: List[InferredType] = Nil
- private var depth = 0
-
- /** Remove and return the topmost element on the stack. If the
- * stack is empty, return a reference to a negative index on the
- * stack, meaning it refers to elements pushed by a predecessor block.
- */
- def pop: InferredType = types match {
- case head :: rest =>
- types = rest
- head
- case _ =>
- depth -= 1
- TypeOfStackPos(depth)
- }
-
- def pop2: (InferredType, InferredType) = {
- (pop, pop)
- }
-
- def push(t: InferredType) {
- depth += 1
- types = types ::: List(t)
- }
-
- def push(k: TypeKind) { push(Const(k)) }
- }
-
abstract class InferredType {
/** Return the type kind pointed by this inferred type. */
def getKind(in: lattice.Elem): icodes.TypeKind = this match {
@@ -326,7 +296,6 @@ abstract class TypeFlowAnalysis {
class TransferFunction(consumed: Int, gens: List[Gen]) extends (lattice.Elem => lattice.Elem) {
def apply(in: lattice.Elem): lattice.Elem = {
val out = lattice.IState(new VarBinding(in.vars), new TypeStack(in.stack))
- val bindings = out.vars
val stack = out.stack
out.stack.pop(consumed)
@@ -387,9 +356,9 @@ abstract class TypeFlowAnalysis {
override def run {
- timer.start
+ timer.start()
forwardAnalysis(blockTransfer)
- val t = timer.stop
+ timer.stop
/* Now that `forwardAnalysis(blockTransfer)` has finished, all inlining candidates can be found in `remainingCALLs`,
whose keys are callsites and whose values are pieces of information about the typestack just before the callsite in question.
@@ -399,7 +368,7 @@ abstract class TypeFlowAnalysis {
preCandidates += rc._2.bb
}
- if (settings.debug.value) {
+ if (settings.debug) {
for(b <- callerLin; if (b != method.startBlock) && preCandidates(b)) {
assert(visited.contains(b),
"Block " + b + " in " + this.method + " has input equal to bottom -- not visited? .." + visited)
@@ -428,7 +397,7 @@ abstract class TypeFlowAnalysis {
override def blockTransfer(b: BasicBlock, in: lattice.Elem): lattice.Elem = {
var result = lattice.IState(new VarBinding(in.vars), new TypeStack(in.stack))
- val stopAt = if(isOnPerimeter(b)) lastInstruction(b) else null;
+ val stopAt = if(isOnPerimeter(b)) lastInstruction(b) else null
var isPastLast = false
var instrs = b.toList
@@ -546,9 +515,6 @@ abstract class TypeFlowAnalysis {
relevantBBs ++= blocks
}
- /* the argument is also included in the result */
- private def transitivePreds(b: BasicBlock): Set[BasicBlock] = { transitivePreds(List(b)) }
-
/* those BBs in the argument are also included in the result */
private def transitivePreds(starters: Traversable[BasicBlock]): Set[BasicBlock] = {
val result = mutable.Set.empty[BasicBlock]
@@ -562,19 +528,6 @@ abstract class TypeFlowAnalysis {
result.toSet
}
- /* those BBs in the argument are also included in the result */
- private def transitiveSuccs(starters: Traversable[BasicBlock]): Set[BasicBlock] = {
- val result = mutable.Set.empty[BasicBlock]
- var toVisit: List[BasicBlock] = starters.toList.distinct
- while(toVisit.nonEmpty) {
- val h = toVisit.head
- toVisit = toVisit.tail
- result += h
- for(p <- h.successors; if !result(p) && !toVisit.contains(p)) { toVisit = p :: toVisit }
- }
- result.toSet
- }
-
/* A basic block B is "on the perimeter" of the current control-flow subgraph if none of its successors belongs to that subgraph.
* In that case, for the purposes of inlining, we're interested in the typestack right before the last inline candidate in B, not in those afterwards.
* In particular we can do without computing the outflow at B. */
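The perimeter notion described in the comment above can be stated compactly: a block of the subgraph is on the perimeter when none of its successors stays inside the subgraph. A hedged sketch over an explicit successor map (block names and edges are made up for illustration):

object PerimeterDemo {
  val succs: Map[String, List[String]] = Map(
    "A" -> List("B", "C"),
    "B" -> List("D"),
    "C" -> Nil,
    "D" -> Nil
  )

  // a block is on the perimeter of `subgraph` when no successor stays inside it
  def perimeter(subgraph: Set[String]): Set[String] =
    subgraph.filter(b => succs.getOrElse(b, Nil).forall(s => !subgraph(s)))

  def main(args: Array[String]): Unit =
    println(perimeter(Set("A", "B", "C"))) // Set(B, C): their successors all lie outside
}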
@@ -646,10 +599,10 @@ abstract class TypeFlowAnalysis {
return
} else if(staleOut.isEmpty && inlined.isEmpty && staleIn.isEmpty) {
// this promotes invoking reinit if in doubt, no performance degradation will ensue!
- return;
+ return
}
- worklist.clear // calling reinit(f: => Unit) would also clear visited, thus forgetting about blocks visited before reinit.
+ worklist.clear() // calling reinit(f: => Unit) would also clear visited, thus forgetting about blocks visited before reinit.
// asserts conveying an idea what CFG shapes arrive here:
// staleIn foreach (p => assert( !in.isDefinedAt(p), p))
@@ -685,12 +638,6 @@ abstract class TypeFlowAnalysis {
if(!worklist.contains(b)) { worklist += b }
}
- /* this is not a general purpose method to add to the worklist,
- * because the assert is expected to hold only when called from MTFAGrowable.reinit() */
- private def enqueue(bs: Traversable[BasicBlock]) {
- bs foreach enqueue
- }
-
private def blankOut(blocks: scala.collection.Set[BasicBlock]) {
blocks foreach { b =>
in(b) = typeFlowLattice.bottom
@@ -719,14 +666,14 @@ abstract class TypeFlowAnalysis {
override def forwardAnalysis(f: (P, lattice.Elem) => lattice.Elem): Unit = {
while (!worklist.isEmpty && relevantBBs.nonEmpty) {
if (stat) iterations += 1
- val point = worklist.iterator.next; worklist -= point;
+ val point = worklist.iterator.next(); worklist -= point
if(relevantBBs(point)) {
shrinkedWatchlist = false
val output = f(point, in(point))
- visited += point;
+ visited += point
if(isOnPerimeter(point)) {
if(shrinkedWatchlist && !isWatching(point)) {
- relevantBBs -= point;
+ relevantBBs -= point
populatePerimeter()
}
} else {
@@ -761,10 +708,6 @@ abstract class TypeFlowAnalysis {
private var lastStart = 0L
- def reset() {
- millis = 0L
- }
-
def start() {
lastStart = System.currentTimeMillis
}
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BytecodeWriters.scala b/src/compiler/scala/tools/nsc/backend/jvm/BytecodeWriters.scala
index fb1f45fa40..0df828393d 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/BytecodeWriters.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/BytecodeWriters.scala
@@ -6,14 +6,14 @@
package scala.tools.nsc
package backend.jvm
-import java.io.{ DataOutputStream, FileOutputStream, OutputStream, File => JFile }
+import java.io.{ DataOutputStream, FileOutputStream, IOException, OutputStream, File => JFile }
import scala.tools.nsc.io._
-import scala.tools.nsc.util.ScalaClassLoader
-import scala.tools.util.JavapClass
-import java.util.jar.{ JarEntry, JarOutputStream, Attributes }
-import Attributes.Name
+import java.util.jar.Attributes.Name
import scala.language.postfixOps
+/** Can't output a file due to the state of the file system. */
+class FileConflictException(msg: String, val file: AbstractFile) extends IOException(msg)
+
/** For the last mile: turning generated bytecode in memory into
* something you can use. Has implementations for writing to class
* files, jars, and disassembled/javap output.
@@ -22,22 +22,37 @@ trait BytecodeWriters {
val global: Global
import global._
- private def outputDirectory(sym: Symbol): AbstractFile = (
- settings.outputDirs.outputDirFor(beforeFlatten(sym.sourceFile))
- )
- private def getFile(base: AbstractFile, /*cls.getName()*/ clsName: String, suffix: String): AbstractFile = {
+ private def outputDirectory(sym: Symbol): AbstractFile =
+ settings.outputDirs outputDirFor enteringFlatten(sym.sourceFile)
+
+ /**
+ * @param clsName cls.getName
+ */
+ private def getFile(base: AbstractFile, clsName: String, suffix: String): AbstractFile = {
+ def ensureDirectory(dir: AbstractFile): AbstractFile =
+ if (dir.isDirectory) dir
+ else throw new FileConflictException(s"${base.path}/$clsName$suffix: ${dir.path} is not a directory", dir)
var dir = base
val pathParts = clsName.split("[./]").toList
- for (part <- pathParts.init) {
- dir = dir.subdirectoryNamed(part)
- }
- dir.fileNamed(pathParts.last + suffix)
+ for (part <- pathParts.init) dir = ensureDirectory(dir) subdirectoryNamed part
+ ensureDirectory(dir) fileNamed pathParts.last + suffix
}
- private def getFile(sym: Symbol, clsName: String, suffix: String): AbstractFile =
+ def getFile(sym: Symbol, clsName: String, suffix: String): AbstractFile =
getFile(outputDirectory(sym), clsName, suffix)
+ def factoryNonJarBytecodeWriter(): BytecodeWriter = {
+ val emitAsmp = settings.Ygenasmp.isSetByUser
+ val doDump = settings.Ydumpclasses.isSetByUser
+ (emitAsmp, doDump) match {
+ case (false, false) => new ClassBytecodeWriter { }
+ case (false, true ) => new ClassBytecodeWriter with DumpBytecodeWriter { }
+ case (true, false) => new ClassBytecodeWriter with AsmpBytecodeWriter
+ case (true, true ) => new ClassBytecodeWriter with AsmpBytecodeWriter with DumpBytecodeWriter { }
+ }
+ }
+
trait BytecodeWriter {
- def writeClass(label: String, jclassName: String, jclassBytes: Array[Byte], sym: Symbol): Unit
+ def writeClass(label: String, jclassName: String, jclassBytes: Array[Byte], outfile: AbstractFile): Unit
def close(): Unit = ()
}
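The factory above composes writers via Scala's stackable-trait pattern: each optional writer declares `abstract override def writeClass` and calls `super`, so mixing it in layers extra behaviour over the base writer (see the AsmpBytecodeWriter and DumpBytecodeWriter definitions further down). A hedged standalone sketch of the idiom with invented names:

trait Writer { def write(name: String, bytes: Array[Byte]): Unit }

trait BaseWriter extends Writer {
  def write(name: String, bytes: Array[Byte]): Unit =
    println(s"writing $name (${bytes.length} bytes)")
}

trait DumpingWriter extends Writer {
  abstract override def write(name: String, bytes: Array[Byte]): Unit = {
    super.write(name, bytes)                 // keep the primary output
    println(s"also dumping $name elsewhere") // extra, stacked behaviour
  }
}

object WriterDemo extends App {
  val w = new BaseWriter with DumpingWriter {}
  w.write("Foo.class", Array[Byte](1, 2, 3))
}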
@@ -48,7 +63,9 @@ trait BytecodeWriters {
)
val writer = new Jar(jfile).jarWriter(jarMainAttrs: _*)
- def writeClass(label: String, jclassName: String, jclassBytes: Array[Byte], sym: Symbol) {
+ def writeClass(label: String, jclassName: String, jclassBytes: Array[Byte], outfile: AbstractFile) {
+ assert(outfile == null,
+ "The outfile formal param is there just because ClassBytecodeWriter overrides this method and uses it.")
val path = jclassName + ".class"
val out = writer.newOutputStream(path)
@@ -60,33 +77,47 @@ trait BytecodeWriters {
override def close() = writer.close()
}
- trait JavapBytecodeWriter extends BytecodeWriter {
- val baseDir = Directory(settings.Ygenjavap.value).createDirectory()
-
- def emitJavap(bytes: Array[Byte], javapFile: io.File) {
- val pw = javapFile.printWriter()
- val javap = new JavapClass(ScalaClassLoader.appLoader, pw) {
- override def findBytes(path: String): Array[Byte] = bytes
+ /*
+ * The ASM textual representation for bytecode overcomes disadvantages of javap output in three areas:
+ * (a) pickle dingbats undecipherable to the naked eye;
+ * (b) two constant pools, while having identical contents, are displayed differently due to physical layout.
+ * (c) stack maps (classfile version 50 and up) are displayed in encoded form by javap,
+ * their expansion by ASM is more readable.
+ *
+ * */
+ trait AsmpBytecodeWriter extends BytecodeWriter {
+ import scala.tools.asm
+
+ private val baseDir = Directory(settings.Ygenasmp.value).createDirectory()
+
+ private def emitAsmp(jclassBytes: Array[Byte], asmpFile: io.File) {
+ val pw = asmpFile.printWriter()
+ try {
+ val cnode = new asm.tree.ClassNode()
+ val cr = new asm.ClassReader(jclassBytes)
+ cr.accept(cnode, 0)
+ val trace = new scala.tools.asm.util.TraceClassVisitor(new java.io.PrintWriter(new java.io.StringWriter()))
+ cnode.accept(trace)
+ trace.p.print(pw)
}
-
- try javap(Seq("-verbose", "dummy")) foreach (_.show())
finally pw.close()
}
- abstract override def writeClass(label: String, jclassName: String, jclassBytes: Array[Byte], sym: Symbol) {
- super.writeClass(label, jclassName, jclassBytes, sym)
- val bytes = getFile(sym, jclassName, ".class").toByteArray
- val segments = jclassName.split("[./]")
- val javapFile = segments.foldLeft(baseDir: Path)(_ / _) changeExtension "javap" toFile;
+ abstract override def writeClass(label: String, jclassName: String, jclassBytes: Array[Byte], outfile: AbstractFile) {
+ super.writeClass(label, jclassName, jclassBytes, outfile)
+
+ val segments = jclassName.split("[./]")
+ val asmpFile = segments.foldLeft(baseDir: Path)(_ / _) changeExtension "asmp" toFile;
- javapFile.parent.createDirectory()
- emitJavap(bytes, javapFile)
+ asmpFile.parent.createDirectory()
+ emitAsmp(jclassBytes, asmpFile)
}
}
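A hedged, standalone illustration of the ASM-based disassembly the trait above performs, written against the stock org.objectweb.asm API (the compiler uses its shaded copy under scala.tools.asm, but the calls used here are the same; AsmpDemo and dump are invented names):

import java.io.PrintWriter
import org.objectweb.asm.ClassReader
import org.objectweb.asm.util.TraceClassVisitor

object AsmpDemo {
  def dump(classfileBytes: Array[Byte]): Unit = {
    val reader = new ClassReader(classfileBytes)
    // TraceClassVisitor prints a textual form of everything it visits
    reader.accept(new TraceClassVisitor(new PrintWriter(Console.out)), 0)
  }
}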
trait ClassBytecodeWriter extends BytecodeWriter {
- def writeClass(label: String, jclassName: String, jclassBytes: Array[Byte], sym: Symbol) {
- val outfile = getFile(sym, jclassName, ".class")
+ def writeClass(label: String, jclassName: String, jclassBytes: Array[Byte], outfile: AbstractFile) {
+ assert(outfile != null,
+ "Precisely this override requires its invoker to hand out a non-null AbstractFile.")
val outstream = new DataOutputStream(outfile.bufferedOutput)
try outstream.write(jclassBytes, 0, jclassBytes.length)
@@ -98,11 +129,11 @@ trait BytecodeWriters {
trait DumpBytecodeWriter extends BytecodeWriter {
val baseDir = Directory(settings.Ydumpclasses.value).createDirectory()
- abstract override def writeClass(label: String, jclassName: String, jclassBytes: Array[Byte], sym: Symbol) {
- super.writeClass(label, jclassName, jclassBytes, sym)
+ abstract override def writeClass(label: String, jclassName: String, jclassBytes: Array[Byte], outfile: AbstractFile) {
+ super.writeClass(label, jclassName, jclassBytes, outfile)
val pathName = jclassName
- var dumpFile = pathName.split("[./]").foldLeft(baseDir: Path) (_ / _) changeExtension "class" toFile;
+ val dumpFile = pathName.split("[./]").foldLeft(baseDir: Path) (_ / _) changeExtension "class" toFile;
dumpFile.parent.createDirectory()
val outstream = new DataOutputStream(new FileOutputStream(dumpFile.path))
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala
index 7c46d648fe..7edcc944e1 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala
@@ -3,17 +3,16 @@
* @author Martin Odersky
*/
-package scala.tools.nsc
+package scala
+package tools.nsc
package backend.jvm
-import java.nio.ByteBuffer
import scala.collection.{ mutable, immutable }
import scala.reflect.internal.pickling.{ PickleFormat, PickleBuffer }
import scala.tools.nsc.symtab._
-import scala.tools.nsc.io.AbstractFile
-
import scala.tools.asm
import asm.Label
+import scala.annotation.tailrec
/**
* @author Iulian Dragos (version 1.0, FJBG-based implementation)
@@ -27,11 +26,25 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
import icodes.opcodes._
import definitions._
+ // Strangely I can't find this in the asm code
+ // 255, but reserving 1 for "this"
+ final val MaximumJvmParameters = 254
+
val phaseName = "jvm"
/** Create a new phase */
override def newPhase(p: Phase): Phase = new AsmPhase(p)
+ /** From the reference documentation of the Android SDK:
+ * The `Parcelable` interface identifies classes whose instances can be written to and restored from a `Parcel`.
+ * Classes implementing the `Parcelable` interface must also have a static field called `CREATOR`,
+ * which is an object implementing the `Parcelable.Creator` interface.
+ */
+ private val androidFieldName = newTermName("CREATOR")
+
+ private lazy val AndroidParcelableInterface = rootMirror.getClassIfDefined("android.os.Parcelable")
+ private lazy val AndroidCreatorClass = rootMirror.getClassIfDefined("android.os.Parcelable$Creator")
+
/** JVM code generation phase
*/
class AsmPhase(prev: Phase) extends ICodePhase(prev) {
@@ -39,7 +52,25 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
override def erasedTypes = true
def apply(cls: IClass) = sys.error("no implementation")
- val BeanInfoAttr = rootMirror.getRequiredClass("scala.beans.BeanInfo")
+ // An AsmPhase starts and ends within a Run, thus the caches in question will get populated and cleared within a Run, too (SI-7422).
+ javaNameCache.clear()
+ javaNameCache ++= List(
+ NothingClass -> binarynme.RuntimeNothing,
+ RuntimeNothingClass -> binarynme.RuntimeNothing,
+ NullClass -> binarynme.RuntimeNull,
+ RuntimeNullClass -> binarynme.RuntimeNull
+ )
+
+ // unlike javaNameCache, reverseJavaName contains entries only for class symbols and their internal names.
+ reverseJavaName.clear()
+ reverseJavaName ++= List(
+ binarynme.RuntimeNothing.toString() -> RuntimeNothingClass, // RuntimeNothingClass is the bytecode-level return type of Scala methods with Nothing return-type.
+ binarynme.RuntimeNull.toString() -> RuntimeNullClass
+ )
+
+ // Lazy val; can't have eager vals in Phase constructors which may
+ // cause cycles before Global has finished initialization.
+ lazy val BeanInfoAttr = rootMirror.getRequiredClass("scala.beans.BeanInfo")
private def initBytecodeWriter(entryPoints: List[IClass]): BytecodeWriter = {
settings.outputDirs.getSingleOutput match {
@@ -61,29 +92,16 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
new DirectToJarfileWriter(f.file)
- case _ =>
- if (settings.Ygenjavap.isDefault) {
- if(settings.Ydumpclasses.isDefault)
- new ClassBytecodeWriter { }
- else
- new ClassBytecodeWriter with DumpBytecodeWriter { }
- }
- else new ClassBytecodeWriter with JavapBytecodeWriter { }
-
- // TODO A ScalapBytecodeWriter could take asm.util.Textifier as starting point.
- // Three areas where javap ouput is less than ideal (e.g. when comparing versions of the same classfile) are:
- // (a) unreadable pickle;
- // (b) two constant pools, while having identical contents, are displayed differently due to physical layout.
- // (c) stack maps (classfile version 50 and up) are displayed in encoded form by javap, their expansion makes more sense instead.
+ case _ => factoryNonJarBytecodeWriter()
}
}
override def run() {
- if (settings.debug.value)
+ if (settings.debug)
inform("[running phase " + name + " on icode]")
- if (settings.Xdce.value)
+ if (settings.Xdce)
for ((sym, cls) <- icodes.classes if inliner.isClosureClass(sym) && !deadCode.liveClosures(sym)) {
log(s"Optimizer eliminated ${sym.fullNameString}")
icodes.classes -= sym
@@ -99,41 +117,41 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
"Such classes will overwrite one another on case-insensitive filesystems.")
}
- debuglog("Created new bytecode generator for " + classes.size + " classes.")
+ debuglog(s"Created new bytecode generator for ${classes.size} classes.")
val bytecodeWriter = initBytecodeWriter(sortedClasses filter isJavaEntryPoint)
- val plainCodeGen = new JPlainBuilder(bytecodeWriter)
- val mirrorCodeGen = new JMirrorBuilder(bytecodeWriter)
- val beanInfoCodeGen = new JBeanInfoBuilder(bytecodeWriter)
-
- while(!sortedClasses.isEmpty) {
- val c = sortedClasses.head
+ val needsOutfile = bytecodeWriter.isInstanceOf[ClassBytecodeWriter]
+ val plainCodeGen = new JPlainBuilder( bytecodeWriter, needsOutfile)
+ val mirrorCodeGen = new JMirrorBuilder( bytecodeWriter, needsOutfile)
+ val beanInfoCodeGen = new JBeanInfoBuilder(bytecodeWriter, needsOutfile)
+ def emitFor(c: IClass) {
if (isStaticModule(c.symbol) && isTopLevelModule(c.symbol)) {
- if (c.symbol.companionClass == NoSymbol) {
- mirrorCodeGen.genMirrorClass(c.symbol, c.cunit)
- } else {
- log("No mirror class for module with linked class: " + c.symbol.fullName)
- }
+ if (c.symbol.companionClass == NoSymbol)
+ mirrorCodeGen genMirrorClass (c.symbol, c.cunit)
+ else
+ log(s"No mirror class for module with linked class: ${c.symbol.fullName}")
}
+ plainCodeGen genClass c
+ if (c.symbol hasAnnotation BeanInfoAttr) beanInfoCodeGen genBeanInfoClass c
+ }
- plainCodeGen.genClass(c)
-
- if (c.symbol hasAnnotation BeanInfoAttr) {
- beanInfoCodeGen.genBeanInfoClass(c)
+ while (!sortedClasses.isEmpty) {
+ val c = sortedClasses.head
+ try emitFor(c)
+ catch {
+ case e: FileConflictException =>
+ c.cunit.error(c.symbol.pos, s"error writing ${c.symbol}: ${e.getMessage}")
}
-
sortedClasses = sortedClasses.tail
classes -= c.symbol // GC opportunity
}
bytecodeWriter.close()
- classes.clear()
- reverseJavaName.clear()
/* don't javaNameCache.clear() because that causes the following tests to fail:
* test/files/run/macro-repl-dontexpand.scala
* test/files/jvm/interpreter.scala
- * TODO but why? what use could javaNameCache possibly see once GenJVM is over?
+ * TODO but why? what use could javaNameCache possibly see once GenASM is over?
*/
/* TODO After emitting all class files (e.g., in a separate compiler phase) ASM can perform bytecode verification:
@@ -152,19 +170,10 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
var pickledBytes = 0 // statistics
- // Don't put this in per run caches. Contains entries for classes as well as members.
- val javaNameCache = new mutable.WeakHashMap[Symbol, Name]() ++= List(
- NothingClass -> binarynme.RuntimeNothing,
- RuntimeNothingClass -> binarynme.RuntimeNothing,
- NullClass -> binarynme.RuntimeNull,
- RuntimeNullClass -> binarynme.RuntimeNull
- )
+ val javaNameCache = perRunCaches.newMap[Symbol, Name]()
// unlike javaNameCache, reverseJavaName contains entries only for class symbols and their internal names.
- val reverseJavaName = mutable.Map.empty[String, Symbol] ++= List(
- binarynme.RuntimeNothing.toString() -> RuntimeNothingClass, // RuntimeNothingClass is the bytecode-level return type of Scala methods with Nothing return-type.
- binarynme.RuntimeNull.toString() -> RuntimeNullClass
- )
+ val reverseJavaName = perRunCaches.newMap[String, Symbol]()
private def mkFlags(args: Int*) = args.foldLeft(0)(_ | _)
private def hasPublicBitSet(flags: Int) = (flags & asm.Opcodes.ACC_PUBLIC) != 0
@@ -248,7 +257,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
}
def isTopLevelModule(sym: Symbol): Boolean =
- afterPickler { sym.isModuleClass && !sym.isImplClass && !sym.isNestedClass }
+ exitingPickler { sym.isModuleClass && !sym.isImplClass && !sym.isNestedClass }
def isStaticModule(sym: Symbol): Boolean = {
sym.isModuleClass && !sym.isImplClass && !sym.isLifted
@@ -283,7 +292,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
def inameToSymbol(iname: String): Symbol = {
val name = global.newTypeName(iname)
val res0 =
- if (nme.isModuleName(name)) rootMirror.getModule(nme.stripModuleSuffix(name))
+ if (nme.isModuleName(name)) rootMirror.getModule(name.dropModule)
else rootMirror.getClassByName(name.replace('/', '.')) // TODO fails for inner classes (but this hasn't been tested).
assert(res0 != NoSymbol)
val res = jsymbol(res0)
@@ -368,7 +377,6 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
private val classfileVersion: Int = settings.target.value match {
case "jvm-1.5" => asm.Opcodes.V1_5
- case "jvm-1.5-asm" => asm.Opcodes.V1_5
case "jvm-1.6" => asm.Opcodes.V1_6
case "jvm-1.7" => asm.Opcodes.V1_7
}
@@ -396,9 +404,8 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
}
/** basic functionality for class file building */
- abstract class JBuilder(bytecodeWriter: BytecodeWriter) {
+ abstract class JBuilder(bytecodeWriter: BytecodeWriter, needsOutfile: Boolean) {
- val EMPTY_JTYPE_ARRAY = Array.empty[asm.Type]
val EMPTY_STRING_ARRAY = Array.empty[String]
val mdesc_arglessvoid = "()V"
@@ -443,19 +450,22 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
}
def createJAttribute(name: String, b: Array[Byte], offset: Int, len: Int): asm.Attribute = {
- val dest = new Array[Byte](len);
- System.arraycopy(b, offset, dest, 0, len);
+ val dest = new Array[Byte](len)
+ System.arraycopy(b, offset, dest, 0, len)
new asm.CustomAttr(name, dest)
}
// -----------------------------------------------------------------------------------------
- // utitilies useful when emitting plain, mirror, and beaninfo classes.
+ // utilities useful when emitting plain, mirror, and beaninfo classes.
// -----------------------------------------------------------------------------------------
def writeIfNotTooBig(label: String, jclassName: String, jclass: asm.ClassWriter, sym: Symbol) {
try {
val arr = jclass.toByteArray()
- bytecodeWriter.writeClass(label, jclassName, arr, sym)
+ val outF: scala.tools.nsc.io.AbstractFile = {
+ if(needsOutfile) getFile(sym, jclassName, ".class") else null
+ }
+ bytecodeWriter.writeClass(label, jclassName, arr, outF)
} catch {
case e: java.lang.RuntimeException if(e.getMessage() == "Class file too large!") =>
// TODO check where ASM throws the equivalent of CodeSizeTooBigException
@@ -466,7 +476,6 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
/** Specialized array conversion to prevent calling
* java.lang.reflect.Array.newInstance via TraversableOnce.toArray
*/
- def mkArray(xs: Traversable[asm.Type]): Array[asm.Type] = { val a = new Array[asm.Type](xs.size); xs.copyToArray(a); a }
def mkArray(xs: Traversable[String]): Array[String] = { val a = new Array[String](xs.size); xs.copyToArray(a); a }
// -----------------------------------------------------------------------------------------
@@ -509,14 +518,14 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
*/
def javaName(sym: Symbol): String = {
- /**
+ /*
* Checks if given symbol corresponds to inner class/object and add it to innerClassBuffer
*
* Note: This method is called recursively thus making sure that we add complete chain
* of inner class all until root class.
*/
def collectInnerClass(s: Symbol): Unit = {
- // TODO: some beforeFlatten { ... } which accounts for
+ // TODO: some enteringFlatten { ... } which accounts for
// being nested in parameterized classes (if we're going to selectively flatten.)
val x = innerClassSymbolFor(s)
if(x ne NoSymbol) {
@@ -531,7 +540,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
collectInnerClass(sym)
- var hasInternalName = (sym.isClass || (sym.isModule && !sym.isMethod))
+ val hasInternalName = sym.isClass || sym.isModuleNotMethod
val cachedJN = javaNameCache.getOrElseUpdate(sym, {
if (hasInternalName) { sym.javaBinaryName }
else { sym.javaSimpleName }
@@ -541,12 +550,18 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
val internalName = cachedJN.toString()
val trackedSym = jsymbol(sym)
reverseJavaName.get(internalName) match {
- case None =>
+ case Some(oldsym) if oldsym.exists && trackedSym.exists =>
+ assert(
+ // In contrast, neither NothingClass nor NullClass show up bytecode-level.
+ (oldsym == trackedSym) || (oldsym == RuntimeNothingClass) || (oldsym == RuntimeNullClass) || (oldsym.isModuleClass && (oldsym.sourceModule == trackedSym.sourceModule)),
+ s"""|Different class symbols have the same bytecode-level internal name:
+ | name: $internalName
+ | oldsym: ${oldsym.fullNameString}
+ | tracked: ${trackedSym.fullNameString}
+ """.stripMargin
+ )
+ case _ =>
reverseJavaName.put(internalName, trackedSym)
- case Some(oldsym) =>
- assert((oldsym == trackedSym) || (oldsym == RuntimeNothingClass) || (oldsym == RuntimeNullClass) ||
- (oldsym.isModuleClass && (oldsym.sourceModule == trackedSym.sourceModule)), // In contrast, neither NothingClass nor NullClass show up bytecode-level.
- "how can getCommonSuperclass() do its job if different class symbols get the same bytecode-level internal name: " + internalName)
}
}
@@ -588,7 +603,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
def javaType(s: Symbol): asm.Type = {
if (s.isMethod) {
- val resT: asm.Type = if (s.isClassConstructor) asm.Type.VOID_TYPE else javaType(s.tpe.resultType);
+ val resT: asm.Type = if (s.isClassConstructor) asm.Type.VOID_TYPE else javaType(s.tpe.resultType)
asm.Type.getMethodType( resT, (s.tpe.paramTypes map javaType): _*)
} else { javaType(s.tpe) }
}
@@ -598,9 +613,9 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
def isDeprecated(sym: Symbol): Boolean = { sym.annotations exists (_ matches definitions.DeprecatedAttr) }
def addInnerClasses(csym: Symbol, jclass: asm.ClassVisitor) {
- /** The outer name for this inner class. Note that it returns null
- * when the inner class should not get an index in the constant pool.
- * That means non-member classes (anonymous). See Section 4.7.5 in the JVMS.
+ /* The outer name for this inner class. Note that it returns null
+ * when the inner class should not get an index in the constant pool.
+ * That means non-member classes (anonymous). See Section 4.7.5 in the JVMS.
*/
def outerName(innerSym: Symbol): String = {
if (innerSym.originalEnclosingMethod != NoSymbol)
@@ -619,7 +634,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
innerSym.rawname + innerSym.moduleSuffix
// add inner classes which might not have been referenced yet
- afterErasure {
+ exitingErasure {
for (sym <- List(csym, csym.linkedClassOfClass); m <- sym.info.decls.map(innerClassSymbolFor) if m.isClass)
innerClassBuffer += m
}
@@ -681,7 +696,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
/** functionality for building plain and mirror classes */
- abstract class JCommonBuilder(bytecodeWriter: BytecodeWriter) extends JBuilder(bytecodeWriter) {
+ abstract class JCommonBuilder(bytecodeWriter: BytecodeWriter, needsOutfile: Boolean) extends JBuilder(bytecodeWriter, needsOutfile) {
def debugLevel = settings.debuginfo.indexOfChoice
@@ -793,7 +808,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
// without it. This is particularly bad because the availability of
// generic information could disappear as a consequence of a seemingly
// unrelated change.
- settings.Ynogenericsig.value
+ settings.Ynogenericsig
|| sym.isArtifact
|| sym.isLiftedMethod
|| sym.isBridge
@@ -810,7 +825,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
if (!needsGenericSignature(sym)) { return null }
- val memberTpe = beforeErasure(owner.thisType.memberInfo(sym))
+ val memberTpe = enteringErasure(owner.thisType.memberInfo(sym))
val jsOpt: Option[String] = erasure.javaSig(sym, memberTpe)
if (jsOpt.isEmpty) { return null }
@@ -823,14 +838,14 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
catch { case _: Throwable => false }
}
- if (settings.Xverify.value) {
+ if (settings.Xverify) {
// Run the signature parser to catch bogus signatures.
val isValidSignature = wrap {
// Alternative: scala.tools.reflect.SigParser (frontend to sun.reflect.generics.parser.SignatureParser)
- import scala.tools.asm.util.SignatureChecker
- if (sym.isMethod) { SignatureChecker checkMethodSignature sig } // requires asm-util.jar
- else if (sym.isTerm) { SignatureChecker checkFieldSignature sig }
- else { SignatureChecker checkClassSignature sig }
+ import scala.tools.asm.util.CheckClassAdapter
+ if (sym.isMethod) { CheckClassAdapter checkMethodSignature sig } // requires asm-util.jar
+ else if (sym.isTerm) { CheckClassAdapter checkFieldSignature sig }
+ else { CheckClassAdapter checkClassSignature sig }
}
if(!isValidSignature) {
@@ -844,7 +859,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
}
if ((settings.check containsName phaseName)) {
- val normalizedTpe = beforeErasure(erasure.prepareSigMap(memberTpe))
+ val normalizedTpe = enteringErasure(erasure.prepareSigMap(memberTpe))
val bytecodeTpe = owner.thisType.memberInfo(sym)
if (!sym.isType && !sym.isConstructor && !(erasure.erasure(sym)(normalizedTpe) =:= bytecodeTpe)) {
getCurrentCUnit().warning(sym.pos,
@@ -863,9 +878,9 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
}
def ubytesToCharArray(bytes: Array[Byte]): Array[Char] = {
- val ca = new Array[Char](bytes.size)
+ val ca = new Array[Char](bytes.length)
var idx = 0
- while(idx < bytes.size) {
+ while(idx < bytes.length) {
val b: Byte = bytes(idx)
assert((b & ~0x7f) == 0)
ca(idx) = b.asInstanceOf[Char]
@@ -882,7 +897,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
var prevOffset = 0
var offset = 0
var encLength = 0
- while(offset < bSeven.size) {
+ while(offset < bSeven.length) {
val deltaEncLength = (if(bSeven(offset) == 0) 2 else 1)
val newEncLength = encLength.toLong + deltaEncLength
if(newEncLength >= 65535) {
@@ -1034,9 +1049,9 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
val paramJavaTypes: List[asm.Type] = methodInfo.paramTypes map javaType
// val paramNames = 0 until paramJavaTypes.length map ("x_" + _)
- /** Forwarders must not be marked final,
- * as the JVM will not allow redefinition of a final static method,
- * and we don't know what classes might be subclassing the companion class. See SI-4827.
+ /* Forwarders must not be marked final,
+ * as the JVM will not allow redefinition of a final static method,
+ * and we don't know what classes might be subclassing the companion class. See SI-4827.
*/
// TODO: evaluate the other flags we might be dropping on the floor here.
// TODO: ACC_SYNTHETIC ?
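The comment above concerns static forwarders. As a hedged illustration of what those are: for a companion pair like the one below, the backend also emits a non-final static method Util.twice(int) on the class, delegating to the module instance, so Java callers can write Util.twice(3). Forwarder emission is skipped for interfaces and when settings.noForwarders is set, as the earlier hunk in this file shows.

class Util
object Util {
  def twice(x: Int): Int = x * 2
}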
@@ -1099,7 +1114,6 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
debuglog("Dumping mirror class for object: " + moduleClass)
val linkedClass = moduleClass.companionClass
- val linkedModule = linkedClass.companionSymbol
lazy val conflictingNames: Set[Name] = {
(linkedClass.info.members collect { case sym if sym.name.isTermName => sym.name }).toSet
}
@@ -1125,16 +1139,6 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
trait JAndroidBuilder {
self: JPlainBuilder =>
- /** From the reference documentation of the Android SDK:
- * The `Parcelable` interface identifies classes whose instances can be written to and restored from a `Parcel`.
- * Classes implementing the `Parcelable` interface must also have a static field called `CREATOR`,
- * which is an object implementing the `Parcelable.Creator` interface.
- */
- private val androidFieldName = newTermName("CREATOR")
-
- private lazy val AndroidParcelableInterface = rootMirror.getClassIfDefined("android.os.Parcelable")
- private lazy val AndroidCreatorClass = rootMirror.getClassIfDefined("android.os.Parcelable$Creator")
-
def isAndroidParcelableClass(sym: Symbol) =
(AndroidParcelableInterface != NoSymbol) &&
(sym.parentSymbols contains AndroidParcelableInterface)
@@ -1142,13 +1146,13 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
/* Typestate: should be called before emitting fields (because it adds an IField to the current IClass). */
def addCreatorCode(block: BasicBlock) {
val fieldSymbol = (
- clasz.symbol.newValue(newTermName(androidFieldName), NoPosition, Flags.STATIC | Flags.FINAL)
+ clasz.symbol.newValue(androidFieldName, NoPosition, Flags.STATIC | Flags.FINAL)
setInfo AndroidCreatorClass.tpe
)
val methodSymbol = definitions.getMember(clasz.symbol.companionModule, androidFieldName)
clasz addField new IField(fieldSymbol)
- block emit CALL_METHOD(methodSymbol, Static(false))
- block emit STORE_FIELD(fieldSymbol, true)
+ block emit CALL_METHOD(methodSymbol, Static(onInstance = false))
+ block emit STORE_FIELD(fieldSymbol, isStatic = true)
}
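A hedged sketch of the user-level pattern addCreatorCode supports on Android: the companion provides a CREATOR value, and the backend mirrors it into the static CREATOR field the framework reflects on. The types below are local stubs rather than android.os.*, and Point/createFromParcel bodies are placeholders:

trait Parcel
trait Parcelable
trait Creator[T] { def createFromParcel(p: Parcel): T; def newArray(size: Int): Array[T] }

class Point(val x: Int, val y: Int) extends Parcelable
object Point {
  // looked up by name ("CREATOR") on the companion, as in addCreatorCode above
  def CREATOR: Creator[Point] = new Creator[Point] {
    def createFromParcel(p: Parcel): Point = new Point(0, 0) // real code reads fields from p
    def newArray(size: Int): Array[Point] = new Array[Point](size)
  }
}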
def legacyAddCreatorCode(clinit: asm.MethodVisitor) {
@@ -1157,7 +1161,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
jclass.visitField(
PublicStaticFinal,
- androidFieldName,
+ androidFieldName.toString,
tdesc_creator,
null, // no java-generic-signature
null // no initial value
@@ -1177,7 +1181,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
clinit.visitMethodInsn(
asm.Opcodes.INVOKEVIRTUAL,
moduleName,
- androidFieldName,
+ androidFieldName.toString,
asm.Type.getMethodDescriptor(creatorType, Array.empty[asm.Type]: _*)
)
@@ -1185,7 +1189,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
clinit.visitFieldInsn(
asm.Opcodes.PUTSTATIC,
thisName,
- androidFieldName,
+ androidFieldName.toString,
tdesc_creator
)
}
@@ -1242,8 +1246,8 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
case class BlockInteval(start: BasicBlock, end: BasicBlock)
/** builder of plain classes */
- class JPlainBuilder(bytecodeWriter: BytecodeWriter)
- extends JCommonBuilder(bytecodeWriter)
+ class JPlainBuilder(bytecodeWriter: BytecodeWriter, needsOutfile: Boolean)
+ extends JCommonBuilder(bytecodeWriter, needsOutfile)
with JAndroidBuilder {
val MIN_SWITCH_DENSITY = 0.7
@@ -1267,14 +1271,12 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
// Additional interface parents based on annotations and other cues
def newParentForAttr(attr: Symbol): Option[Symbol] = attr match {
- case SerializableAttr => Some(SerializableClass)
- case CloneableAttr => Some(CloneableClass)
case RemoteAttr => Some(RemoteInterfaceClass)
case _ => None
}
- /** Drop redundant interfaces (ones which are implemented by some other parent) from the immediate parents.
- * This is important on Android because there is otherwise an interface explosion.
+ /* Drop redundant interfaces (ones which are implemented by some other parent) from the immediate parents.
+ * This is important on Android because there is otherwise an interface explosion.
*/
def minimizeInterfaces(lstIfaces: List[Symbol]): List[Symbol] = {
var rest = lstIfaces
@@ -1292,7 +1294,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
}
val ps = c.symbol.info.parents
- val superInterfaces0: List[Symbol] = if(ps.isEmpty) Nil else c.symbol.mixinClasses;
+ val superInterfaces0: List[Symbol] = if(ps.isEmpty) Nil else c.symbol.mixinClasses
val superInterfaces = (superInterfaces0 ++ c.symbol.annotations.flatMap(ann => newParentForAttr(ann.symbol))).distinct
if(superInterfaces.isEmpty) EMPTY_STRING_ARRAY
@@ -1317,7 +1319,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
thisName = javaName(c.symbol) // the internal name of the class being emitted
val ps = c.symbol.info.parents
- val superClass: String = if(ps.isEmpty) JAVA_LANG_OBJECT.getInternalName else javaName(ps.head.typeSymbol);
+ val superClass: String = if(ps.isEmpty) JAVA_LANG_OBJECT.getInternalName else javaName(ps.head.typeSymbol)
val ifaces = getSuperInterfaces(c)
@@ -1364,14 +1366,14 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
for (constructor <- c.lookupStaticCtor) {
addStaticInit(Some(constructor))
}
- val skipStaticForwarders = (c.symbol.isInterface || settings.noForwarders.value)
+ val skipStaticForwarders = (c.symbol.isInterface || settings.noForwarders)
if (!skipStaticForwarders) {
val lmoc = c.symbol.companionModule
// add static forwarders if there are no name conflicts; see bugs #363 and #1735
if (lmoc != NoSymbol) {
// it must be a top level class (name contains no $s)
val isCandidateForForwarders = {
- afterPickler { !(lmoc.name.toString contains '$') && lmoc.hasModuleFlag && !lmoc.isImplClass && !lmoc.isNestedClass }
+ exitingPickler { !(lmoc.name.toString contains '$') && lmoc.hasModuleFlag && !lmoc.isImplClass && !lmoc.isNestedClass }
}
if (isCandidateForForwarders) {
log("Adding static forwarders from '%s' to implementations in '%s'".format(c.symbol, lmoc))
@@ -1400,7 +1402,6 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
addInnerClasses(clasz.symbol, jclass)
jclass.visitEnd()
writeIfNotTooBig("" + c.symbol.name, thisName, jclass, c.symbol)
-
}
/**
@@ -1432,7 +1433,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
assert(enclClass.isClass, enclClass)
val sym = enclClass.primaryConstructor
if (sym == NoSymbol) {
- log("Ran out of room looking for an enclosing method for %s: no constructor here.".format(enclClass, clazz))
+ log("Ran out of room looking for an enclosing method for %s: no constructor here.".format(enclClass))
} else {
debuglog("enclosing method for %s is %s (in %s)".format(clazz, sym, enclClass))
res = EnclMethodEntry(javaName(enclClass), javaName(sym), javaType(sym))
@@ -1483,6 +1484,11 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
if (m.symbol.isStaticConstructor || definitions.isGetClass(m.symbol)) return
+ if (m.params.size > MaximumJvmParameters) {
+ getCurrentCUnit().error(m.symbol.pos, s"Platform restriction: a parameter list's length cannot exceed $MaximumJvmParameters.")
+ return
+ }
+
debuglog("Generating method " + m.symbol.fullName)
method = m
computeLocalVarsIndex(m)
@@ -1607,19 +1613,20 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
if (isStaticModule(clasz.symbol)) {
// call object's private ctor from static ctor
lastBlock emit NEW(REFERENCE(m.symbol.enclClass))
- lastBlock emit CALL_METHOD(m.symbol.enclClass.primaryConstructor, Static(true))
+ lastBlock emit CALL_METHOD(m.symbol.enclClass.primaryConstructor, Static(onInstance = true))
}
if (isParcelableClass) { addCreatorCode(lastBlock) }
lastBlock emit RETURN(UNIT)
- lastBlock.close
+ lastBlock.close()
- method = m
+ method = m
jmethod = clinitMethod
jMethodName = CLASS_CONSTRUCTOR_NAME
jmethod.visitCode()
- genCode(m, false, true)
+ computeLocalVarsIndex(m)
+ genCode(m, emitVars = false, isStatic = true)
jmethod.visitMaxs(0, 0) // just to follow protocol, dummy arguments
jmethod.visitEnd()
@@ -1654,8 +1661,8 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
case BooleanTag => jcode.boolconst(const.booleanValue)
- case ByteTag => jcode.iconst(const.byteValue)
- case ShortTag => jcode.iconst(const.shortValue)
+ case ByteTag => jcode.iconst(const.byteValue.toInt)
+ case ShortTag => jcode.iconst(const.shortValue.toInt)
case CharTag => jcode.iconst(const.charValue)
case IntTag => jcode.iconst(const.intValue)
@@ -1675,7 +1682,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
val kind = toTypeKind(const.typeValue)
val toPush: asm.Type =
if (kind.isValueType) classLiteral(kind)
- else javaType(kind);
+ else javaType(kind)
mv.visitLdcInsn(toPush)
case EnumTag =>
@@ -1698,15 +1705,11 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
*/
object jcode {
- import asm.Opcodes;
-
- def aconst(cst: AnyRef) {
- if (cst == null) { jmethod.visitInsn(Opcodes.ACONST_NULL) }
- else { jmethod.visitLdcInsn(cst) }
- }
+ import asm.Opcodes
final def boolconst(b: Boolean) { iconst(if(b) 1 else 0) }
+ def iconst(cst: Char) { iconst(cst.toInt) }
def iconst(cst: Int) {
if (cst >= -1 && cst <= 5) {
jmethod.visitInsn(Opcodes.ICONST_0 + cst)
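The branch shown handles only the ICONST_* range; the usual strategy for pushing an Int constant continues with BIPUSH, SIPUSH and finally the constant pool. A simplified model with strings standing in for the real opcodes (illustrative, not the jcode helper itself):

  object PushIntSketch {
    // -1..5 have dedicated ICONST_* opcodes (the -1 form is spelled ICONST_M1),
    // byte-sized values fit BIPUSH, short-sized values fit SIPUSH,
    // and anything larger is loaded from the constant pool with LDC.
    def pushInstruction(cst: Int): String =
      if (cst >= -1 && cst <= 5) s"ICONST_$cst"
      else if (cst >= Byte.MinValue && cst <= Byte.MaxValue) s"BIPUSH $cst"
      else if (cst >= Short.MinValue && cst <= Short.MaxValue) s"SIPUSH $cst"
      else s"LDC $cst"

    def main(args: Array[String]): Unit =
      List(3, 100, 30000, 1000000) foreach (n => println(pushInstruction(n)))
      // prints: ICONST_3, BIPUSH 100, SIPUSH 30000, LDC 1000000
  }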
@@ -1798,8 +1801,8 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
}
def goTo(label: asm.Label) { jmethod.visitJumpInsn(Opcodes.GOTO, label) }
- def emitIF(cond: TestOp, label: asm.Label) { jmethod.visitJumpInsn(cond.opcodeIF, label) }
- def emitIF_ICMP(cond: TestOp, label: asm.Label) { jmethod.visitJumpInsn(cond.opcodeIFICMP, label) }
+ def emitIF(cond: TestOp, label: asm.Label) { jmethod.visitJumpInsn(cond.opcodeIF(), label) }
+ def emitIF_ICMP(cond: TestOp, label: asm.Label) { jmethod.visitJumpInsn(cond.opcodeIFICMP(), label) }
def emitIF_ACMP(cond: TestOp, label: asm.Label) {
assert((cond == EQ) || (cond == NE), cond)
val opc = (if(cond == EQ) Opcodes.IF_ACMPEQ else Opcodes.IF_ACMPNE)
@@ -1855,9 +1858,9 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
val keyMax = keys(keys.length - 1)
val isDenseEnough: Boolean = {
- /** Calculate in long to guard against overflow. TODO what overflow??? */
+ /* Calculate in long to guard against overflow. TODO what overflow??? */
val keyRangeD: Double = (keyMax.asInstanceOf[Long] - keyMin + 1).asInstanceOf[Double]
- val klenD: Double = keys.length
+ val klenD: Double = keys.length.toDouble
val kdensity: Double = (klenD / keyRangeD)
kdensity >= minDensity
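A compact restatement of this density test, with the overflow-guarding Long arithmetic made explicit (the 0.7 threshold matches MIN_SWITCH_DENSITY used elsewhere in this backend; names illustrative):

  object SwitchDensitySketch {
    /** keys must be sorted ascending and distinct, as in the emitter above. */
    def useTableSwitch(keys: Array[Int], minDensity: Double = 0.7): Boolean = {
      val keyMin   = keys.head
      val keyMax   = keys.last
      // compute the range in Long so that e.g. Int.MinValue..Int.MaxValue doesn't overflow
      val keyRange = (keyMax.toLong - keyMin.toLong + 1).toDouble
      (keys.length / keyRange) >= minDensity
    }

    def main(args: Array[String]): Unit = {
      println(useTableSwitch(Array(1, 2, 3, 4, 10)))  // 5 keys over a range of 10 => 0.5 => false
      println(useTableSwitch(Array(1, 2, 3, 4, 5)))   // 5 keys over a range of 5  => 1.0 => true
    }
  }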
@@ -1867,10 +1870,10 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
// use a table in which holes are filled with defaultBranch.
val keyRange = (keyMax - keyMin + 1)
val newBranches = new Array[asm.Label](keyRange)
- var oldPos = 0;
+ var oldPos = 0
var i = 0
while(i < keyRange) {
- val key = keyMin + i;
+ val key = keyMin + i
if (keys(oldPos) == key) {
newBranches(i) = branches(oldPos)
oldPos += 1
@@ -1990,7 +1993,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
// Part 2 of genCode(): demarcating exception handler boundaries (visitTryCatchBlock() must be invoked before visitLabel() in genBlock())
// ------------------------------------------------------------------------------------------------------------
- /**Generate exception handlers for the current method.
+ /* Generate exception handlers for the current method.
*
* Quoting from the JVMS 4.7.3 The Code Attribute
* The items of the Code_attribute structure are as follows:
@@ -2013,16 +2016,16 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
*/
def genExceptionHandlers() {
- /** Return a list of pairs of intervals where the handler is active.
- * Each interval is closed on both ends, ie. inclusive both in the left and right endpoints: [start, end].
- * Preconditions:
- * - e.covered non-empty
- * Postconditions for the result:
- * - always non-empty
- * - intervals are sorted as per `linearization`
- * - the argument's `covered` blocks have been grouped into maximally contiguous intervals,
- * ie. between any two intervals in the result there is a non-empty gap.
- * - each of the `covered` blocks in the argument is contained in some interval in the result
+ /* Return a list of pairs of intervals where the handler is active.
+ * Each interval is closed on both ends, ie. inclusive both in the left and right endpoints: [start, end].
+ * Preconditions:
+ * - e.covered non-empty
+ * Postconditions for the result:
+ * - always non-empty
+ * - intervals are sorted as per `linearization`
+ * - the argument's `covered` blocks have been grouped into maximally contiguous intervals,
+ * ie. between any two intervals in the result there is a non-empty gap.
+ * - each of the `covered` blocks in the argument is contained in some interval in the result
*/
def intervals(e: ExceptionHandler): List[BlockInteval] = {
assert(e.covered.nonEmpty, e)
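The grouping contract documented above amounts to folding block positions, already sorted by linearization order, into maximal contiguous runs. A toy version over Ints (the real code works on linearized BasicBlocks):

  object IntervalSketch {
    // Fold sorted positions into inclusive [start, end] runs; a gap starts a new run.
    def group(sortedPositions: List[Int]): List[(Int, Int)] =
      sortedPositions.foldLeft(List.empty[(Int, Int)]) {
        case ((start, end) :: rest, p) if p == end + 1 => (start, p) :: rest
        case (acc, p)                                  => (p, p) :: acc
      }.reverse

    def main(args: Array[String]): Unit =
      println(group(List(1, 2, 3, 7, 8, 12)))  // List((1,3), (7,8), (12,12))
  }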
@@ -2069,7 +2072,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
// TODO in that case, ExceptionHandler.cls doesn't go through javaName(). What if cls is an inner class?
for (e <- this.method.exh ; if e.covered.nonEmpty ; p <- intervals(e)) {
debuglog("Adding exception handler " + e + "at block: " + e.startBlock + " for " + method +
- " from: " + p.start + " to: " + p.end + " catching: " + e.cls);
+ " from: " + p.start + " to: " + p.end + " catching: " + e.cls)
val cls: String = if (e.cls == NoSymbol || e.cls == ThrowableClass) null
else javaName(e.cls)
jmethod.visitTryCatchBlock(labels(p.start), linNext(p.end), labels(e.startBlock), cls)
@@ -2093,8 +2096,8 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
def overlaps(that: Interval): Boolean = { !(this.precedes(that) || that.precedes(this)) }
def mergeWith(that: Interval): Interval = {
- val newStart = if(this.start <= that.start) this.lstart else that.lstart;
- val newEnd = if(this.end <= that.end) that.lend else this.lend;
+ val newStart = if(this.start <= that.start) this.lstart else that.lstart
+ val newEnd = if(this.end <= that.end) that.lend else this.lend
Interval(newStart, newEnd)
}
@@ -2150,7 +2153,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
def getMerged(): scala.collection.Map[Local, List[Interval]] = {
// TODO should but isn't: unbalanced start(s) of scope(s)
- val shouldBeEmpty = pending filter { p => val Pair(k, st) = p; st.nonEmpty };
+ val shouldBeEmpty = pending filter { p => val Pair(_, st) = p; st.nonEmpty }
val merged = mutable.Map[Local, List[Interval]]()
def addToMerged(lv: Local, start: Label, end: Label) {
val intv = Interval(start, end)
@@ -2168,10 +2171,10 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
if(merged.isDefinedAt(k)) {
val balancedStart = merged(k).head.lstart
if(balancedStart.getOffset < start.getOffset) {
- start = balancedStart;
+ start = balancedStart
}
}
- val endOpt: Option[Label] = for(ranges <- merged.get(k)) yield ranges.last.lend;
+ val endOpt: Option[Label] = for(ranges <- merged.get(k)) yield ranges.last.lend
val end = endOpt.getOrElse(onePastLast)
addToMerged(k, start, end)
}
@@ -2204,7 +2207,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
for(Pair(local, ranges) <- scoping.getMerged()) {
var name = javaName(local.sym)
if (name == null) {
- anonCounter += 1;
+ anonCounter += 1
name = "<anon" + anonCounter + ">"
}
for(intrvl <- ranges) {
@@ -2213,7 +2216,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
}
// quest for deterministic output that Map.toList doesn't provide (so that ant test.stability doesn't complain).
val srtd = fltnd.sortBy { kr =>
- val Triple(name: String, local: Local, intrvl: Interval) = kr
+ val Triple(name: String, _, intrvl: Interval) = kr
Triple(intrvl.start, intrvl.end - intrvl.start, name) // ie sort by (start, length, name)
}
@@ -2244,13 +2247,6 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
case x :: y :: ys => nextBlock = y; genBlock(x); genBlocks(y :: ys)
}
- def isAccessibleFrom(target: Symbol, site: Symbol): Boolean = {
- target.isPublic || target.isProtected && {
- (site.enclClass isSubClass target.enclClass) ||
- (site.enclosingPackage == target.privateWithin)
- }
- } // end of genCode()'s isAccessibleFrom()
-
def genCallMethod(call: CALL_METHOD) {
val CALL_METHOD(method, style) = call
val siteSymbol = clasz.symbol
@@ -2333,6 +2329,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
import asm.Opcodes
(instr.category: @scala.annotation.switch) match {
+
case icodes.localsCat =>
def genLocalInstr() = (instr: @unchecked) match {
case THIS(_) => jmethod.visitVarInsn(Opcodes.ALOAD, 0)
@@ -2364,14 +2361,14 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
scoping.popScope(lv, end, instr.pos)
}
}
- genLocalInstr
+ genLocalInstr()
case icodes.stackCat =>
def genStackInstr() = (instr: @unchecked) match {
case LOAD_MODULE(module) =>
// assert(module.isModule, "Expected module: " + module)
- debuglog("generating LOAD_MODULE for: " + module + " flags: " + Flags.flagsToString(module.flags));
+ debuglog("generating LOAD_MODULE for: " + module + " flags: " + module.flagString)
if (clasz.symbol == module.moduleClass && jMethodName != nme.readResolve.toString) {
jmethod.visitVarInsn(Opcodes.ALOAD, 0)
} else {
@@ -2388,7 +2385,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
case LOAD_EXCEPTION(_) => ()
}
- genStackInstr
+ genStackInstr()
case icodes.constCat => genConstant(jmethod, instr.asInstanceOf[CONSTANT].constant)
@@ -2422,11 +2419,10 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
}
}
- genCastInstr
+ genCastInstr()
case icodes.objsCat =>
def genObjsInstr() = (instr: @unchecked) match {
-
case BOX(kind) =>
val MethodNameAndType(mname, mdesc) = jBoxTo(kind)
jcode.invokestatic(BoxesRunTime, mname, mdesc)
@@ -2442,14 +2438,14 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
case MONITOR_ENTER() => emit(Opcodes.MONITORENTER)
case MONITOR_EXIT() => emit(Opcodes.MONITOREXIT)
}
- genObjsInstr
+ genObjsInstr()
case icodes.fldsCat =>
def genFldsInstr() = (instr: @unchecked) match {
case lf @ LOAD_FIELD(field, isStatic) =>
- var owner = javaName(lf.hostClass)
- debuglog("LOAD_FIELD with owner: " + owner + " flags: " + Flags.flagsToString(field.owner.flags))
+ val owner = javaName(lf.hostClass)
+ debuglog("LOAD_FIELD with owner: " + owner + " flags: " + field.owner.flagString)
val fieldJName = javaName(field)
val fieldDescr = descriptor(field)
val opc = if (isStatic) Opcodes.GETSTATIC else Opcodes.GETFIELD
@@ -2463,12 +2459,12 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
jmethod.visitFieldInsn(opc, owner, fieldJName, fieldDescr)
}
- genFldsInstr
+ genFldsInstr()
case icodes.mthdsCat =>
def genMethodsInstr() = (instr: @unchecked) match {
- /** Special handling to access native Array.clone() */
+ /* Special handling to access native Array.clone() */
case call @ CALL_METHOD(definitions.Array_clone, Dynamic) =>
val target: String = javaType(call.targetTypeKind).getInternalName
jcode.invokevirtual(target, "clone", mdesc_arrayClone)
@@ -2476,7 +2472,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
case call @ CALL_METHOD(method, style) => genCallMethod(call)
}
- genMethodsInstr
+ genMethodsInstr()
case icodes.arraysCat =>
def genArraysInstr() = (instr: @unchecked) match {
@@ -2485,7 +2481,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
case CREATE_ARRAY(elem, 1) => jcode newarray elem
case CREATE_ARRAY(elem, dims) => jmethod.visitMultiANewArrayInsn(descriptor(ArrayN(elem, dims)), dims)
}
- genArraysInstr
+ genArraysInstr()
case icodes.jumpsCat =>
def genJumpInstr() = (instr: @unchecked) match {
@@ -2502,7 +2498,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
while (restTagss.nonEmpty) {
val currLabel = labels(restBranches.head)
for (cTag <- restTagss.head) {
- flatKeys(k) = cTag;
+ flatKeys(k) = cTag
flatBranches(k) = currLabel
k += 1
}
@@ -2515,27 +2511,19 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
jcode.emitSWITCH(flatKeys, flatBranches, defaultLabel, MIN_SWITCH_DENSITY)
case JUMP(whereto) =>
- if (nextBlock != whereto) {
+ if (nextBlock != whereto)
jcode goTo labels(whereto)
- } else if (m.exh.exists(eh => eh.covers(b))) {
// SI-6102: Determine whether eliding this JUMP results in an empty range being covered by some EH.
// If so, emit a NOP in place of the elided JUMP, to avoid "java.lang.ClassFormatError: Illegal exception table range"
- val isSthgLeft = b.toList.exists {
- case _: LOAD_EXCEPTION => false
- case _: SCOPE_ENTER => false
- case _: SCOPE_EXIT => false
- case _: JUMP => false
- case _ => true
- }
- if (!isSthgLeft) {
- emit(asm.Opcodes.NOP)
- }
+ else if (newNormal.isJumpOnly(b) && m.exh.exists(eh => eh.covers(b))) {
+ debugwarn("Had a jump only block that wasn't collapsed")
+ emit(asm.Opcodes.NOP)
}
case CJUMP(success, failure, cond, kind) =>
if (kind.isIntSizedType) { // BOOL, BYTE, CHAR, SHORT, or INT
if (nextBlock == success) {
- jcode.emitIF_ICMP(cond.negate, labels(failure))
+ jcode.emitIF_ICMP(cond.negate(), labels(failure))
// .. and fall through to success label
} else {
jcode.emitIF_ICMP(cond, labels(success))
@@ -2543,7 +2531,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
}
} else if (kind.isRefOrArrayType) { // REFERENCE(_) | ARRAY(_)
if (nextBlock == success) {
- jcode.emitIF_ACMP(cond.negate, labels(failure))
+ jcode.emitIF_ACMP(cond.negate(), labels(failure))
// .. and fall through to success label
} else {
jcode.emitIF_ACMP(cond, labels(success))
@@ -2560,7 +2548,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
else emit(Opcodes.DCMPL)
}
if (nextBlock == success) {
- jcode.emitIF(cond.negate, labels(failure))
+ jcode.emitIF(cond.negate(), labels(failure))
// .. and fall through to success label
} else {
jcode.emitIF(cond, labels(success))
@@ -2571,7 +2559,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
case CZJUMP(success, failure, cond, kind) =>
if (kind.isIntSizedType) { // BOOL, BYTE, CHAR, SHORT, or INT
if (nextBlock == success) {
- jcode.emitIF(cond.negate, labels(failure))
+ jcode.emitIF(cond.negate(), labels(failure))
} else {
jcode.emitIF(cond, labels(success))
if (nextBlock != failure) { jcode goTo labels(failure) }
@@ -2607,7 +2595,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
else emit(Opcodes.DCMPL)
}
if (nextBlock == success) {
- jcode.emitIF(cond.negate, labels(failure))
+ jcode.emitIF(cond.negate(), labels(failure))
} else {
jcode.emitIF(cond, labels(success))
if (nextBlock != failure) { jcode goTo labels(failure) }
@@ -2615,26 +2603,25 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
}
}
- genJumpInstr
+ genJumpInstr()
case icodes.retCat =>
def genRetInstr() = (instr: @unchecked) match {
case RETURN(kind) => jcode emitRETURN kind
case THROW(_) => emit(Opcodes.ATHROW)
}
- genRetInstr
+ genRetInstr()
}
}
- /**
+ /*
* Emits one or more conversion instructions based on the types given as arguments.
*
* @param from The type of the value to be converted into another type.
* @param to The type the value will be converted into.
*/
def emitT2T(from: TypeKind, to: TypeKind) {
- assert(isNonUnitValueTK(from), from)
- assert(isNonUnitValueTK(to), to)
+ assert(isNonUnitValueTK(from) && isNonUnitValueTK(to), s"Cannot emit primitive conversion from $from to $to")
def pickOne(opcs: Array[Int]) {
val chosen = (to: @unchecked) match {
@@ -2650,10 +2637,8 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
}
if(from == to) { return }
- if((from == BOOL) || (to == BOOL)) {
- // the only conversion involving BOOL that is allowed is (BOOL -> BOOL)
- throw new Error("inconvertible types : " + from.toString() + " -> " + to.toString())
- }
+ // the only conversion involving BOOL that is allowed is (BOOL -> BOOL)
+ assert(from != BOOL && to != BOOL, s"inconvertible types : $from -> $to")
if(from.isIntSizedType) { // BYTE, CHAR, SHORT, and INT. (we're done with BOOL already)
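For reference, the single-instruction widenings an emitter like this relies on, modelled with plain strings rather than real opcodes (narrowing an INT back to BYTE/CHAR/SHORT goes through I2B/I2C/I2S instead; illustrative only):

  object ConvertSketch {
    sealed trait Kind
    case object INT extends Kind; case object LONG extends Kind
    case object FLOAT extends Kind; case object DOUBLE extends Kind

    // One JVM instruction widens a value already on the stack.
    def widen(from: Kind, to: Kind): Option[String] = (from, to) match {
      case (INT, LONG)     => Some("I2L")
      case (INT, FLOAT)    => Some("I2F")
      case (INT, DOUBLE)   => Some("I2D")
      case (LONG, FLOAT)   => Some("L2F")
      case (LONG, DOUBLE)  => Some("L2D")
      case (FLOAT, DOUBLE) => Some("F2D")
      case _               => None  // same kind, or needs a narrowing/other path
    }
  }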
@@ -2701,7 +2686,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
def genPrimitive(primitive: Primitive, pos: Position) {
- import asm.Opcodes;
+ import asm.Opcodes
primitive match {
@@ -2732,7 +2717,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
abort("Unknown arithmetic primitive " + primitive)
}
}
- genArith
+ genArith()
// TODO Logical's 2nd elem should be declared ValueTypeKind, to better approximate its allowed values (isIntSized, its comments appears to convey)
// TODO GenICode uses `toTypeKind` to define that elem, `toValueTypeKind` would be needed instead.
@@ -2764,7 +2749,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
if (kind != BOOL) { emitT2T(INT, kind) }
}
}
- genLogical
+ genLogical()
case Shift(op, kind) =>
def genShift() = op match {
@@ -2793,7 +2778,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
emitT2T(INT, kind)
}
}
- genShift
+ genShift()
case Comparison(op, kind) =>
def genCompare() = op match {
@@ -2813,12 +2798,11 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
}
}
- genCompare
+ genCompare()
case Conversion(src, dst) =>
debuglog("Converting from: " + src + " to: " + dst)
- if (dst == BOOL) { println("Illegal conversion at: " + clasz + " at: " + pos.source + ":" + pos.line) }
- else { emitT2T(src, dst) }
+ emitT2T(src, dst)
case ArrayLength(_) => emit(Opcodes.ARRAYLENGTH)
@@ -2867,15 +2851,8 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
////////////////////// local vars ///////////////////////
- // def sizeOf(sym: Symbol): Int = sizeOf(toTypeKind(sym.tpe))
-
def sizeOf(k: TypeKind): Int = if(k.isWideType) 2 else 1
- // def indexOf(m: IMethod, sym: Symbol): Int = {
- // val Some(local) = m lookupLocal sym
- // indexOf(local)
- // }
-
final def indexOf(local: Local): Int = {
assert(local.index >= 0, "Invalid index for: " + local + "{" + local.## + "}: ")
local.index
@@ -2886,7 +2863,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
* *Does not assume the parameters come first!*
*/
def computeLocalVarsIndex(m: IMethod) {
- var idx = if (m.symbol.isStaticMember) 0 else 1;
+ var idx = if (m.symbol.isStaticMember) 0 else 1
for (l <- m.params) {
debuglog("Index value for " + l + "{" + l.## + "}: " + idx)
@@ -2905,10 +2882,10 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
/** builder of mirror classes */
- class JMirrorBuilder(bytecodeWriter: BytecodeWriter) extends JCommonBuilder(bytecodeWriter) {
+ class JMirrorBuilder(bytecodeWriter: BytecodeWriter, needsOutfile: Boolean) extends JCommonBuilder(bytecodeWriter, needsOutfile) {
private var cunit: CompilationUnit = _
- def getCurrentCUnit(): CompilationUnit = cunit;
+ def getCurrentCUnit(): CompilationUnit = cunit
/** Generate a mirror class for a top-level module. A mirror class is a class
* containing only static methods that forward to the corresponding method
@@ -2930,7 +2907,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
JAVA_LANG_OBJECT.getInternalName,
EMPTY_STRING_ARRAY)
- log("Dumping mirror class for '%s'".format(mirrorName))
+ log(s"Dumping mirror class for '$mirrorName'")
// typestate: entering mode with valid call sequences:
// [ visitSource ] [ visitOuterClass ] ( visitAnnotation | visitAttribute )*
@@ -2953,13 +2930,11 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
mirrorClass.visitEnd()
writeIfNotTooBig("" + modsym.name, mirrorName, mirrorClass, modsym)
}
-
-
} // end of class JMirrorBuilder
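To make "mirror class" concrete: for a top-level module with no companion class, the backend emits, besides the module class, a plain class of the same name that holds only static forwarders. A hypothetical example:

  object Greeter {
    def greet(name: String): String = "hello, " + name
  }
  // compiles to Greeter$.class (the module class) plus a mirror class
  // Greeter.class whose methods are static forwarders, roughly
  //   public static String greet(String name) { return Greeter$.MODULE$.greet(name); }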
/** builder of bean info classes */
- class JBeanInfoBuilder(bytecodeWriter: BytecodeWriter) extends JBuilder(bytecodeWriter) {
+ class JBeanInfoBuilder(bytecodeWriter: BytecodeWriter, needsOutfile: Boolean) extends JBuilder(bytecodeWriter, needsOutfile) {
/**
* Generate a bean info class that describes the given class.
@@ -3001,8 +2976,8 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
for (f <- clasz.fields if f.symbol.hasGetter;
g = f.symbol.getter(clasz.symbol);
- s = f.symbol.setter(clasz.symbol);
- if g.isPublic && !(f.symbol.name startsWith "$")
+ s = f.symbol.setter(clasz.symbol)
+ if g.isPublic && !(f.symbol.name startsWith "$")
) {
// inserting $outer breaks the bean
fieldList = javaName(f.symbol) :: javaName(g) :: (if (s != NoSymbol) javaName(s) else null) :: fieldList
@@ -3091,111 +3066,50 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
* TODO Eventually, these utilities should be moved to IMethod and reused from normalize() (there's nothing JVM-specific about them).
*/
object newNormal {
-
- def startsWithJump(b: BasicBlock): Boolean = { assert(b.nonEmpty, "empty block"); b.firstInstruction.isInstanceOf[JUMP] }
-
- /** Prune from an exception handler those covered blocks which are jump-only. */
- private def coverWhatCountsOnly(m: IMethod): Boolean = {
- assert(m.hasCode, "code-less method")
-
- var wasReduced = false
- for(h <- m.exh) {
- val shouldntCover = (h.covered filter startsWithJump)
- if(shouldntCover.nonEmpty) {
- wasReduced = true
- h.covered --= shouldntCover // not removing any block on purpose.
- }
- }
-
- wasReduced
+ /**
+ * True if a block is "jump only", i.e. it consists of
+ * zero or more instructions that won't make it to the JVM,
+ * followed by a JUMP.
+ */
+ def isJumpOnly(b: BasicBlock): Boolean = {
+ val nonICode = firstNonIcodeOnlyInstructions(b)
+ // by definition a block has to have a jump, conditional jump, return, or throw
+ assert(nonICode.hasNext, "empty block")
+ nonICode.next.isInstanceOf[JUMP]
}
- /** An exception handler is pruned provided any of the following holds:
- * (1) it covers nothing (for example, this may result after removing unreachable blocks)
- * (2) each block it covers is of the form: JUMP(_)
- * Return true iff one or more ExceptionHandlers were removed.
- *
- * A caveat: removing an exception handler, for whatever reason, means that its handler code (even if unreachable)
- * won't be able to cause a class-loading-exception. As a result, behavior can be different.
+ /**
+ * Returns an iterator over the instructions in a block, skipping the leading
+ * ICode-only instructions, i.e. those that won't make it to the JVM
*/
- private def elimNonCoveringExh(m: IMethod): Boolean = {
- assert(m.hasCode, "code-less method")
-
- def isRedundant(eh: ExceptionHandler): Boolean = {
- (eh.cls != NoSymbol) && ( // TODO `eh.isFinallyBlock` more readable than `eh.cls != NoSymbol`
- eh.covered.isEmpty
- || (eh.covered forall startsWithJump)
- )
- }
-
- var wasReduced = false
- val toPrune = (m.exh.toSet filter isRedundant)
- if(toPrune.nonEmpty) {
- wasReduced = true
- for(h <- toPrune; r <- h.blocks) { m.code.removeBlock(r) } // TODO m.code.removeExh(h)
- m.exh = (m.exh filterNot toPrune)
- }
-
- wasReduced
+ private def firstNonIcodeOnlyInstructions(b: BasicBlock): Iterator[Instruction] = {
+ def isICodeOnlyInstruction(i: Instruction) = i match {
+ case LOAD_EXCEPTION(_) | SCOPE_ENTER(_) | SCOPE_EXIT(_) => true
+ case _ => false
+ }
+ b.iterator dropWhile isICodeOnlyInstruction
}
- private def isJumpOnly(b: BasicBlock): Option[BasicBlock] = {
- b.toList match {
- case JUMP(whereto) :: rest =>
- assert(rest.isEmpty, "A block contains instructions after JUMP (looks like enterIgnoreMode() was itself ignored.)")
+ /**
+ * Returns the target of a "jump only" block, i.e. a block that consists of
+ * zero or more instructions that won't make it to the JVM, followed by
+ * a JUMP.
+ *
+ * @param b The basic block to examine
+ * @return Some(target) if b is a "jump only" block or None if it's not
+ */
+ private def getJumpOnlyTarget(b: BasicBlock): Option[BasicBlock] = {
+ val nonICode = firstNonIcodeOnlyInstructions(b)
+ // by definition a block has to have a jump, conditional jump, return, or throw
+ assert(nonICode.nonEmpty, "empty block")
+ nonICode.next match {
+ case JUMP(whereto) =>
+ assert(!nonICode.hasNext, "A block contains instructions after JUMP (looks like enterIgnoreMode() was itself ignored.)")
Some(whereto)
case _ => None
}
}
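Both helpers above boil down to: skip the leading instructions that never reach the JVM, then look for a JUMP. A self-contained sketch over a toy instruction ADT (not ICode):

  object JumpOnlySketch {
    sealed trait Instr
    case object ScopeEnter    extends Instr
    case object ScopeExit     extends Instr
    case object LoadException extends Instr
    final case class Jump(target: Int)   extends Instr
    final case class Other(name: String) extends Instr

    private def isIcodeOnly(i: Instr) = i match {
      case ScopeEnter | ScopeExit | LoadException => true
      case _                                      => false
    }

    /** Some(target) if the block is jump-only, None otherwise. */
    def jumpOnlyTarget(block: List[Instr]): Option[Int] =
      block.dropWhile(isIcodeOnly) match {
        case Jump(target) :: _ => Some(target)
        case _                 => None
      }

    def main(args: Array[String]): Unit = {
      println(jumpOnlyTarget(List(ScopeEnter, Jump(7))))           // Some(7)
      println(jumpOnlyTarget(List(Other("LOAD_LOCAL"), Jump(7))))  // None
    }
  }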
- private def directSuccStar(b: BasicBlock): List[BasicBlock] = { directSuccStar(List(b)) }
-
- /** Transitive closure of successors potentially reachable due to normal (non-exceptional) control flow.
- Those BBs in the argument are also included in the result */
- private def directSuccStar(starters: Traversable[BasicBlock]): List[BasicBlock] = {
- val result = new mutable.ListBuffer[BasicBlock]
- var toVisit: List[BasicBlock] = starters.toList.distinct
- while(toVisit.nonEmpty) {
- val h = toVisit.head
- toVisit = toVisit.tail
- result += h
- for(p <- h.directSuccessors; if !result.contains(p) && !toVisit.contains(p)) { toVisit = p :: toVisit }
- }
- result.toList
- }
-
- /** Returns:
- * for single-block self-loops, the pair (start, Nil)
- * for other cycles, the pair (backedge-target, basic-blocks-in-the-cycle-except-backedge-target)
- * otherwise a pair consisting of:
- * (a) the endpoint of a (single or multi-hop) chain of JUMPs
- * (such endpoint does not start with a JUMP and therefore is not part of the chain); and
- * (b) the chain (ie blocks to be removed when collapsing the chain of jumps).
- * Precondition: the BasicBlock given as argument starts with an unconditional JUMP.
- */
- private def finalDestination(start: BasicBlock): (BasicBlock, List[BasicBlock]) = {
- assert(startsWithJump(start), "not the start of a (single or multi-hop) chain of JUMPs.")
- var hops: List[BasicBlock] = Nil
- var prev = start
- var done = false
- do {
- done = isJumpOnly(prev) match {
- case Some(dest) =>
- if (dest == start) { return (start, hops) } // leave infinite-loops in place
- hops ::= prev
- if (hops.contains(dest)) {
- // leave infinite-loops in place
- return (dest, hops filterNot (dest eq _))
- }
- prev = dest;
- false
- case None => true
- }
- } while(!done)
-
- (prev, hops)
- }
-
/**
* Collapse a chain of "jump-only" blocks such as:
*
@@ -3211,7 +3125,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
* In more detail:
* Starting at each of the entry points (m.startBlock, the start block of each exception handler)
* rephrase those control-flow instructions targeting a jump-only block (which jumps to a final destination D) to target D.
- * The blocks thus skipped are also removed from IMethod.blocks.
+ * The blocks thus skipped become eligible to be removed by the reachability analyzer.
*
* Rationale for this normalization:
* test/files/run/private-inline.scala after -optimize is chock full of
@@ -3222,106 +3136,164 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
* and thus ranges with identical (start, end) (i.e, identical after GenJVM omitted the JUMPs in question)
* could be weeded out to avoid "java.lang.ClassFormatError: Illegal exception table range"
* Now that visitTryCatchBlock() must be called before Labels are resolved,
- * this method gets rid of the BasicBlocks described above (to recap, consisting of just a JUMP).
+ * this method renders the BasicBlocks described above (to recap, consisting of just a JUMP) unreachable.
*/
- private def collapseJumpOnlyBlocks(m: IMethod): Boolean = {
+ private def collapseJumpOnlyBlocks(m: IMethod) {
assert(m.hasCode, "code-less method")
- /* "start" is relative in a cycle, but we call this helper with the "first" entry-point we found. */
- def realTarget(jumpStart: BasicBlock): Map[BasicBlock, BasicBlock] = {
- assert(startsWithJump(jumpStart), "not part of a jump-chain")
- val Pair(dest, redundants) = finalDestination(jumpStart)
- (for(skipOver <- redundants) yield Pair(skipOver, dest)).toMap
- }
+ def rephraseGotos(detour: mutable.Map[BasicBlock, BasicBlock]) {
+ def lookup(b: BasicBlock) = detour.getOrElse(b, b)
- def rephraseGotos(detour: Map[BasicBlock, BasicBlock]) {
- for(Pair(oldTarget, newTarget) <- detour.iterator) {
- if(m.startBlock == oldTarget) {
- m.code.startBlock = newTarget
- }
- for(eh <- m.exh; if eh.startBlock == oldTarget) {
- eh.setStartBlock(newTarget)
- }
- for(b <- m.blocks; if !detour.isDefinedAt(b)) {
- val idxLast = (b.size - 1)
- b.lastInstruction match {
- case JUMP(whereto) =>
- if (whereto == oldTarget) {
- b.replaceInstruction(idxLast, JUMP(newTarget))
- }
- case CJUMP(succ, fail, cond, kind) =>
- if ((succ == oldTarget) || (fail == oldTarget)) {
- b.replaceInstruction(idxLast, CJUMP(detour.getOrElse(succ, succ),
- detour.getOrElse(fail, fail),
- cond, kind))
- }
- case CZJUMP(succ, fail, cond, kind) =>
- if ((succ == oldTarget) || (fail == oldTarget)) {
- b.replaceInstruction(idxLast, CZJUMP(detour.getOrElse(succ, succ),
- detour.getOrElse(fail, fail),
- cond, kind))
- }
- case SWITCH(tags, labels) =>
- if(labels exists (detour.isDefinedAt(_))) {
- val newLabels = (labels map { lab => detour.getOrElse(lab, lab) })
- b.replaceInstruction(idxLast, SWITCH(tags, newLabels))
- }
- case _ => ()
- }
- }
+ m.code.startBlock = lookup(m.code.startBlock)
+
+ for(eh <- m.exh)
+ eh.setStartBlock(lookup(eh.startBlock))
+
+ for (b <- m.blocks) {
+ def replaceLastInstruction(i: Instruction) = {
+ if (b.lastInstruction != i) {
+ val idxLast = b.size - 1
+ debuglog(s"In block $b, replacing last instruction ${b.lastInstruction} with ${i}")
+ b.replaceInstruction(idxLast, i)
}
}
- /* remove from all containers that may contain a reference to */
- def elide(redu: BasicBlock) {
- assert(m.startBlock != redu, "startBlock should have been re-wired by now")
- m.code.removeBlock(redu);
+ b.lastInstruction match {
+ case JUMP(whereto) =>
+ replaceLastInstruction(JUMP(lookup(whereto)))
+ case CJUMP(succ, fail, cond, kind) =>
+ replaceLastInstruction(CJUMP(lookup(succ), lookup(fail), cond, kind))
+ case CZJUMP(succ, fail, cond, kind) =>
+ replaceLastInstruction(CZJUMP(lookup(succ), lookup(fail), cond, kind))
+ case SWITCH(tags, labels) =>
+ val newLabels = (labels map lookup)
+ replaceLastInstruction(SWITCH(tags, newLabels))
+ case _ => ()
}
+ }
+ }
- var wasReduced = false
- val entryPoints: List[BasicBlock] = m.startBlock :: (m.exh map (_.startBlock));
-
- var elided = mutable.Set.empty[BasicBlock] // debug
- var newTargets = mutable.Set.empty[BasicBlock] // debug
-
- for (ep <- entryPoints) {
- var reachable = directSuccStar(ep) // this list may contain blocks belonging to jump-chains that we'll skip over
- while(reachable.nonEmpty) {
- val h = reachable.head
- reachable = reachable.tail
- if(startsWithJump(h)) {
- val detour = realTarget(h)
- if(detour.nonEmpty) {
- wasReduced = true
- reachable = (reachable filterNot (detour.keySet.contains(_)))
- rephraseGotos(detour)
- detour.keySet foreach elide
- elided ++= detour.keySet
- newTargets ++= detour.values
- }
+ /*
+ * Computes a mapping from each jump-only block to its
+ * final destination, which is either a non-jump-only
+ * block or, if the block is part of a jump-only cycle,
+ * the block itself.
+ */
+ def computeDetour: mutable.Map[BasicBlock, BasicBlock] = {
+ // fetch the jump only blocks and their immediate destinations
+ val pairs = for {
+ block <- m.blocks.toIterator
+ target <- getJumpOnlyTarget(block)
+ } yield(block, target)
+
+ // mapping from a jump-only block to our current knowledge of its
+ // final destination. Initially it's just jump block to immediate jump
+ // target
+ val detour = mutable.Map[BasicBlock, BasicBlock](pairs.toSeq:_*)
+
+ // for each jump-only block find its final destination
+ // taking advantage of the destinations we found for previous
+ // blocks
+ for (key <- detour.keySet) {
+ // we use Robert Floyd's classic Tortoise and Hare algorithm
+ @tailrec
+ def findDestination(tortoise: BasicBlock, hare: BasicBlock): BasicBlock = {
+ if (tortoise == hare)
+ // cycle detected, map key to key
+ key
+ else if (detour contains hare) {
+ // advance hare once
+ val hare1 = detour(hare)
+ // make sure we can advance hare a second time
+ if (detour contains hare1)
+ // advance tortoise once and hare a second time
+ findDestination(detour(tortoise), detour(hare1))
+ else
+ // hare1 is not in the map so it's not a jump-only block, it's the destination
+ hare1
+ } else
+ // hare is not in the map so it's not a jump-only block, it's the destination
+ hare
}
+ // update the mapping for key based on its final destination
+ detour(key) = findDestination(key, detour(key))
}
+ detour
}
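The cycle handling above is Floyd's tortoise-and-hare applied to the detour map. The same idea as a standalone sketch over an arbitrary Map (toy names; unlike the code above it answers None, rather than the key itself, when the start lies on a cycle):

  import scala.annotation.tailrec

  object DetourSketch {
    // Follow single-step detours until we either leave the map (a real
    // destination) or the tortoise meets the hare (a cycle).
    def finalDestination[A](step: Map[A, A], start: A): Option[A] = {
      @tailrec
      def go(tortoise: A, hare: A): Option[A] =
        if (tortoise == hare) None                // cycle: no destination outside it
        else step.get(hare) match {
          case None        => Some(hare)          // hare left the map: that's the destination
          case Some(hare1) =>
            step.get(hare1) match {
              case None        => Some(hare1)
              case Some(hare2) => go(step(tortoise), hare2)  // tortoise advances once, hare twice
            }
        }
      step.get(start) match {
        case None       => Some(start)            // start is not a jump-only node
        case Some(next) => go(start, next)
      }
    }

    def main(args: Array[String]): Unit = {
      println(finalDestination(Map(1 -> 2, 2 -> 3, 3 -> 7), 1))  // Some(7)
      println(finalDestination(Map(1 -> 2, 2 -> 1), 1))          // None (1 <-> 2 cycle)
    }
  }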
- assert(newTargets.intersect(elided).isEmpty, "contradiction: we just elided the final destionation of a jump-chain")
- wasReduced
+ val detour = computeDetour
+ rephraseGotos(detour)
+
+ if (settings.debug) {
+ val (remappings, cycles) = detour partition {case (source, target) => source != target}
+ for ((source, target) <- remappings) {
+ debuglog(s"Will elide jump only block $source because it can be jumped around to get to $target.")
+ if (m.startBlock == source) debugwarn("startBlock should have been re-wired by now")
+ }
+ val sources = remappings.keySet
+ val targets = remappings.values.toSet
+ val intersection = sources intersect targets
+
+ if (intersection.nonEmpty) debugwarn(s"contradiction: we seem to have some source and target overlap in blocks ${intersection.mkString}. Map was ${detour.mkString}")
+
+ for ((source, _) <- cycles) {
+ debuglog(s"Block $source is in a do-nothing infinite loop. Did the user write 'while(true){}'?")
+ }
+ }
+ }
+
+ /**
+ * Removes all blocks that are unreachable in a method using a standard reachability analysis.
+ */
+ def elimUnreachableBlocks(m: IMethod) {
+ assert(m.hasCode, "code-less method")
+
+ // assume nothing is reachable until we prove it can be reached
+ val reachable = mutable.Set[BasicBlock]()
+
+ // the set of blocks that we know are reachable but have
+ // yet to be marked reachable, initially only the start block
+ val worklist = mutable.Set(m.startBlock)
+
+ while (worklist.nonEmpty) {
+ val block = worklist.head
+ worklist remove block
+ // we know that one is reachable
+ reachable add block
+ // so are its successors, so go back around and add the ones we still
+ // think are unreachable
+ worklist ++= (block.successors filterNot reachable)
+ }
+
+ // exception handlers need to be told not to cover unreachable blocks
+ // and exception handlers that no longer cover any blocks need to be
+ // removed entirely
+ val unusedExceptionHandlers = mutable.Set[ExceptionHandler]()
+ for (exh <- m.exh) {
+ exh.covered = exh.covered filter reachable
+ if (exh.covered.isEmpty) {
+ unusedExceptionHandlers += exh
+ }
+ }
+
+ // remove the unused exception handler references
+ if (settings.debug)
+ for (exh <- unusedExceptionHandlers) debuglog(s"eliding exception handler $exh because it does not cover any reachable blocks")
+ m.exh = m.exh filterNot unusedExceptionHandlers
+
+ // everything not in the reachable set is unreachable, unused, and unloved. buh bye
+ for (b <- m.blocks filterNot reachable) {
+ debuglog(s"eliding block $b because it is unreachable")
+ m.code removeBlock b
+ }
}
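The pass above is a standard worklist reachability computation; for reference, the same idea over a plain successor function (toy names):

  import scala.collection.mutable

  object ReachabilitySketch {
    def reachable[A](start: A, successors: A => List[A]): Set[A] = {
      val seen     = mutable.Set[A]()
      val worklist = mutable.Queue(start)
      while (worklist.nonEmpty) {
        val b = worklist.dequeue()
        if (seen.add(b))                              // add returns true when b is new
          worklist ++= successors(b).filterNot(seen)  // queue successors not yet proven reachable
      }
      seen.toSet
    }

    def main(args: Array[String]): Unit = {
      val cfg = Map(1 -> List(2, 3), 2 -> List(4), 3 -> List(4), 4 -> Nil, 5 -> List(1))
      println(reachable(1, cfg))  // contains 1, 2, 3, 4; block 5 is unreachable from 1
    }
  }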
def normalize(m: IMethod) {
if(!m.hasCode) { return }
collapseJumpOnlyBlocks(m)
- var wasReduced = false;
- do {
- wasReduced = false
- // Prune from an exception handler those covered blocks which are jump-only.
- wasReduced |= coverWhatCountsOnly(m); icodes.checkValid(m) // TODO should be unnecessary now that collapseJumpOnlyBlocks(m) is in place
- // Prune exception handlers covering nothing.
- wasReduced |= elimNonCoveringExh(m); icodes.checkValid(m)
-
- // TODO see note in genExceptionHandlers about an ExceptionHandler.covered containing dead blocks (newNormal should remove them, but, where do those blocks come from?)
- } while (wasReduced)
-
- // TODO this would be a good time to remove synthetic local vars seeing no use, don't forget to call computeLocalVarsIndex() afterwards.
+ if (settings.optimise)
+ elimUnreachableBlocks(m)
+ icodes checkValid m
}
}
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenAndroid.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenAndroid.scala
deleted file mode 100644
index 72b7e35408..0000000000
--- a/src/compiler/scala/tools/nsc/backend/jvm/GenAndroid.scala
+++ /dev/null
@@ -1,62 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Stephane Micheloud
- */
-
-
-package scala.tools.nsc
-package backend.jvm
-
-import ch.epfl.lamp.fjbg._
-import symtab.Flags
-
-trait GenAndroid {
- self: GenJVM =>
-
- import global._
- import icodes._
- import opcodes._
-
- /** From the reference documentation of the Android SDK:
- * The `Parcelable` interface identifies classes whose instances can be
- * written to and restored from a `Parcel`. Classes implementing the
- * `Parcelable` interface must also have a static field called `CREATOR`,
- * which is an object implementing the `Parcelable.Creator` interface.
- */
- private val fieldName = newTermName("CREATOR")
-
- private lazy val AndroidParcelableInterface = rootMirror.getClassIfDefined("android.os.Parcelable")
- private lazy val AndroidCreatorClass = rootMirror.getClassIfDefined("android.os.Parcelable$Creator")
-
- def isAndroidParcelableClass(sym: Symbol) =
- (AndroidParcelableInterface != NoSymbol) &&
- (sym.parentSymbols contains AndroidParcelableInterface)
-
- def addCreatorCode(codegen: BytecodeGenerator, block: BasicBlock) {
- import codegen._
- val fieldSymbol = (
- clasz.symbol.newValue(newTermName(fieldName), NoPosition, Flags.STATIC | Flags.FINAL)
- setInfo AndroidCreatorClass.tpe
- )
- val methodSymbol = definitions.getMember(clasz.symbol.companionModule, fieldName)
- clasz addField new IField(fieldSymbol)
- block emit CALL_METHOD(methodSymbol, Static(false))
- block emit STORE_FIELD(fieldSymbol, true)
- }
-
- def legacyAddCreatorCode(codegen: BytecodeGenerator, clinit: JExtendedCode) {
- import codegen._
- val creatorType = javaType(AndroidCreatorClass)
- jclass.addNewField(PublicStaticFinal,
- fieldName,
- creatorType)
- val moduleName = javaName(clasz.symbol)+"$"
- clinit.emitGETSTATIC(moduleName,
- nme.MODULE_INSTANCE_FIELD.toString,
- new JObjectType(moduleName))
- clinit.emitINVOKEVIRTUAL(moduleName, fieldName,
- new JMethodType(creatorType, Array()))
- clinit.emitPUTSTATIC(jclass.getName(), fieldName, creatorType)
- }
-
-}
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala
deleted file mode 100644
index 36b294b289..0000000000
--- a/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala
+++ /dev/null
@@ -1,1921 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Iulian Dragos
- */
-
-package scala.tools.nsc
-package backend.jvm
-
-import java.io.{ByteArrayOutputStream, DataOutputStream, OutputStream }
-import java.nio.ByteBuffer
-import scala.collection.{ mutable, immutable }
-import scala.reflect.internal.pickling.{ PickleFormat, PickleBuffer }
-import scala.tools.nsc.symtab._
-import scala.reflect.internal.util.{ SourceFile, NoSourceFile }
-import scala.reflect.internal.ClassfileConstants._
-import ch.epfl.lamp.fjbg._
-import JAccessFlags._
-import JObjectType.{ JAVA_LANG_STRING, JAVA_LANG_OBJECT }
-import java.util.jar.{ JarEntry, JarOutputStream }
-import scala.tools.nsc.io.AbstractFile
-import scala.language.postfixOps
-
-/** This class ...
- *
- * @author Iulian Dragos
- * @version 1.0
- *
- */
-abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with BytecodeWriters with GenJVMASM {
- import global._
- import icodes._
- import icodes.opcodes._
- import definitions._
-
- val phaseName = "jvm"
-
- /** Create a new phase */
- override def newPhase(p: Phase): Phase = new JvmPhase(p)
-
- /** JVM code generation phase
- */
- class JvmPhase(prev: Phase) extends ICodePhase(prev) {
- def name = phaseName
- override def erasedTypes = true
- def apply(cls: IClass) = sys.error("no implementation")
-
- override def run() {
- // we reinstantiate the bytecode generator at each run, to allow the GC
- // to collect everything
- if (settings.debug.value)
- inform("[running phase " + name + " on icode]")
-
- if (settings.Xdce.value)
- for ((sym, cls) <- icodes.classes if inliner.isClosureClass(sym) && !deadCode.liveClosures(sym)) {
- log(s"Optimizer eliminated ${sym.fullNameString}")
- icodes.classes -= sym
- }
-
- // For predictably ordered error messages.
- val sortedClasses = classes.values.toList sortBy ("" + _.symbol.fullName)
- val entryPoints = sortedClasses filter isJavaEntryPoint
-
- val bytecodeWriter = settings.outputDirs.getSingleOutput match {
- case Some(f) if f hasExtension "jar" =>
- // If no main class was specified, see if there's only one
- // entry point among the classes going into the jar.
- if (settings.mainClass.isDefault) {
- entryPoints map (_.symbol fullName '.') match {
- case Nil =>
- log("No Main-Class designated or discovered.")
- case name :: Nil =>
- log("Unique entry point: setting Main-Class to " + name)
- settings.mainClass.value = name
- case names =>
- log("No Main-Class due to multiple entry points:\n " + names.mkString("\n "))
- }
- }
- else log("Main-Class was specified: " + settings.mainClass.value)
-
- new DirectToJarfileWriter(f.file)
-
- case _ =>
- if (settings.Ygenjavap.isDefault) {
- if(settings.Ydumpclasses.isDefault)
- new ClassBytecodeWriter { }
- else
- new ClassBytecodeWriter with DumpBytecodeWriter { }
- }
- else new ClassBytecodeWriter with JavapBytecodeWriter { }
- }
-
- val codeGenerator = new BytecodeGenerator(bytecodeWriter)
- debuglog("Created new bytecode generator for " + classes.size + " classes.")
-
- sortedClasses foreach { c =>
- try codeGenerator.genClass(c)
- catch {
- case e: JCode.CodeSizeTooBigException =>
- log("Skipped class %s because it has methods that are too long.".format(c))
- }
- }
-
- bytecodeWriter.close()
- classes.clear()
- }
- }
-
- var pickledBytes = 0 // statistics
-
- /**
- * Java bytecode generator.
- *
- */
- class BytecodeGenerator(bytecodeWriter: BytecodeWriter) extends BytecodeUtil {
- def this() = this(new ClassBytecodeWriter { })
- def debugLevel = settings.debuginfo.indexOfChoice
- import bytecodeWriter.writeClass
-
- val MIN_SWITCH_DENSITY = 0.7
- val INNER_CLASSES_FLAGS =
- (ACC_PUBLIC | ACC_PRIVATE | ACC_PROTECTED | ACC_STATIC | ACC_INTERFACE | ACC_ABSTRACT)
-
- val PublicStatic = ACC_PUBLIC | ACC_STATIC
- val PublicStaticFinal = ACC_PUBLIC | ACC_STATIC | ACC_FINAL
-
- val StringBuilderClassName = javaName(definitions.StringBuilderClass)
- val BoxesRunTime = "scala.runtime.BoxesRunTime"
-
- val StringBuilderType = new JObjectType(StringBuilderClassName) // TODO use ASMType.getObjectType
- val toStringType = new JMethodType(JAVA_LANG_STRING, JType.EMPTY_ARRAY) // TODO use ASMType.getMethodType
- val arrayCloneType = new JMethodType(JAVA_LANG_OBJECT, JType.EMPTY_ARRAY)
- val MethodTypeType = new JObjectType("java.dyn.MethodType")
- val JavaLangClassType = new JObjectType("java.lang.Class")
- val MethodHandleType = new JObjectType("java.dyn.MethodHandle")
-
- // Scala attributes
- val BeanInfoAttr = rootMirror.getRequiredClass("scala.beans.BeanInfo")
- val BeanInfoSkipAttr = rootMirror.getRequiredClass("scala.beans.BeanInfoSkip")
- val BeanDisplayNameAttr = rootMirror.getRequiredClass("scala.beans.BeanDisplayName")
- val BeanDescriptionAttr = rootMirror.getRequiredClass("scala.beans.BeanDescription")
-
- // Additional interface parents based on annotations and other cues
- def newParentForAttr(attr: Symbol): Option[Symbol] = attr match {
- case SerializableAttr => Some(SerializableClass)
- case CloneableAttr => Some(JavaCloneableClass)
- case RemoteAttr => Some(RemoteInterfaceClass)
- case _ => None
- }
-
- val versionPickle = {
- val vp = new PickleBuffer(new Array[Byte](16), -1, 0)
- assert(vp.writeIndex == 0, vp)
- vp writeNat PickleFormat.MajorVersion
- vp writeNat PickleFormat.MinorVersion
- vp writeNat 0
- vp
- }
-
- private def helperBoxTo(kind: ValueTypeKind): Tuple2[String, JMethodType] = {
- val boxedType = definitions.boxedClass(kind.toType.typeSymbol)
- val mtype = new JMethodType(javaType(boxedType), Array(javaType(kind)))
-
- Pair("boxTo" + boxedType.decodedName, mtype)
- }
-
- private val jBoxTo: Map[TypeKind, Tuple2[String, JMethodType]] = Map(
- BOOL -> helperBoxTo(BOOL) ,
- BYTE -> helperBoxTo(BYTE) ,
- CHAR -> helperBoxTo(CHAR) ,
- SHORT -> helperBoxTo(SHORT) ,
- INT -> helperBoxTo(INT) ,
- LONG -> helperBoxTo(LONG) ,
- FLOAT -> helperBoxTo(FLOAT) ,
- DOUBLE -> helperBoxTo(DOUBLE)
- )
-
- private def helperUnboxTo(kind: ValueTypeKind): Tuple2[String, JMethodType] = {
- val mtype = new JMethodType(javaType(kind), Array(JAVA_LANG_OBJECT))
- val mname = "unboxTo" + kind.toType.typeSymbol.decodedName
-
- Pair(mname, mtype)
- }
-
- private val jUnboxTo: Map[TypeKind, Tuple2[String, JMethodType]] = Map(
- BOOL -> helperUnboxTo(BOOL) ,
- BYTE -> helperUnboxTo(BYTE) ,
- CHAR -> helperUnboxTo(CHAR) ,
- SHORT -> helperUnboxTo(SHORT) ,
- INT -> helperUnboxTo(INT) ,
- LONG -> helperUnboxTo(LONG) ,
- FLOAT -> helperUnboxTo(FLOAT) ,
- DOUBLE -> helperUnboxTo(DOUBLE)
- )
-
- var clasz: IClass = _
- var method: IMethod = _
- var jclass: JClass = _
- var jmethod: JMethod = _
- // var jcode: JExtendedCode = _
-
- def isParcelableClass = isAndroidParcelableClass(clasz.symbol)
- def isRemoteClass = clasz.symbol hasAnnotation RemoteAttr
- def serialVUID = clasz.symbol getAnnotation SerialVersionUIDAttr collect {
- case AnnotationInfo(_, Literal(const) :: _, _) => const.longValue
- }
-
- val fjbgContext = new FJBGContext(49, 0)
-
- val emitSource = debugLevel >= 1
- val emitLines = debugLevel >= 2
- val emitVars = debugLevel >= 3
-
- // bug had phase with wrong name; leaving enabled for brief pseudo deprecation
- private val checkSignatures = (
- (settings.check containsName phaseName)
- || (settings.check.value contains "genjvm") && {
- global.warning("This option will be removed: please use -Ycheck:%s, not -Ycheck:genjvm." format phaseName)
- true
- }
- )
-
- /** For given symbol return a symbol corresponding to a class that should be declared as inner class.
- *
- * For example:
- * class A {
- * class B
- * object C
- * }
- *
- * then method will return NoSymbol for A, the same symbol for A.B (corresponding to A$B class) and A$C$ symbol
- * for A.C.
- */
- private def innerClassSymbolFor(s: Symbol): Symbol =
- if (s.isClass) s else if (s.isModule) s.moduleClass else NoSymbol
-
- override def javaName(sym: Symbol): String = { // TODO Miguel says: check whether a single pass over `icodes.classes` can populate `innerClassBuffer` faster.
- /**
- * Checks if given symbol corresponds to inner class/object and add it to innerClassBuffer
- *
- * Note: This method is called recursively thus making sure that we add complete chain
- * of inner class all until root class.
- */
- def collectInnerClass(s: Symbol): Unit = {
- // TODO: some beforeFlatten { ... } which accounts for
- // being nested in parameterized classes (if we're going to selectively flatten.)
- val x = innerClassSymbolFor(s)
- if(x ne NoSymbol) {
- assert(x.isClass, "not an inner-class symbol")
- val isInner = !x.rawowner.isPackageClass
- if (isInner) {
- innerClassBuffer += x
- collectInnerClass(x.rawowner)
- }
- }
- }
- collectInnerClass(sym)
-
- super.javaName(sym)
- }
-
- /** Write a class to disk, adding the Scala signature (pickled type
- * information) and inner classes.
- *
- * @param jclass The FJBG class, where code was emitted
- * @param sym The corresponding symbol, used for looking up pickled information
- */
- def emitClass(jclass: JClass, sym: Symbol) {
- addInnerClasses(jclass)
- writeClass("" + sym.name, jclass.getName(), toByteArray(jclass), sym)
- }
-
- /** Returns the ScalaSignature annotation if it must be added to this class,
- * none otherwise; furthermore, it adds to `jclass` the ScalaSig marker
- * attribute (marking that a scala signature annotation is present) or the
- * Scala marker attribute (marking that the signature for this class is in
- * another file). The annotation that is returned by this method must be
- * added to the class' annotations list when generating them.
- *
- * @param jclass The class file that is being readied.
- * @param sym The symbol for which the signature has been entered in
- * the symData map. This is different than the symbol
- * that is being generated in the case of a mirror class.
- * @return An option that is:
- * - defined and contains an annotation info of the
- * ScalaSignature type, instantiated with the pickle
- * signature for sym (a ScalaSig marker attribute has
- * been written);
- * - undefined if the jclass/sym couple must not contain a
- * signature (a Scala marker attribute has been written).
- */
- def scalaSignatureAddingMarker(jclass: JClass, sym: Symbol): Option[AnnotationInfo] =
- currentRun.symData get sym match {
- case Some(pickle) if !nme.isModuleName(newTermName(jclass.getName)) =>
- val scalaAttr =
- fjbgContext.JOtherAttribute(jclass, jclass, tpnme.ScalaSignatureATTR.toString,
- versionPickle.bytes, versionPickle.writeIndex)
- jclass addAttribute scalaAttr
- val scalaAnnot = {
- val sigBytes = ScalaSigBytes(pickle.bytes.take(pickle.writeIndex))
- AnnotationInfo(sigBytes.sigAnnot, Nil, List((nme.bytes, sigBytes)))
- }
- pickledBytes += pickle.writeIndex
- currentRun.symData -= sym
- currentRun.symData -= sym.companionSymbol
- Some(scalaAnnot)
- case _ =>
- val markerAttr =
- fjbgContext.JOtherAttribute(jclass, jclass, tpnme.ScalaATTR.toString, new Array[Byte](0), 0)
- jclass addAttribute markerAttr
- None
- }
-
- private var innerClassBuffer = mutable.LinkedHashSet[Symbol]()
-
- /** Drop redundant interfaces (ones which are implemented by some other parent) from the immediate parents.
- * This is important on Android because there is otherwise an interface explosion.
- */
- private def minimizeInterfaces(interfaces: List[Symbol]): List[Symbol] = {
- var rest = interfaces
- var leaves = List.empty[Symbol]
- while(!rest.isEmpty) {
- val candidate = rest.head
- val nonLeaf = leaves exists { lsym => lsym isSubClass candidate }
- if(!nonLeaf) {
- leaves = candidate :: (leaves filterNot { lsym => candidate isSubClass lsym })
- }
- rest = rest.tail
- }
-
- leaves
- }
-
- def genClass(c: IClass) {
- clasz = c
- innerClassBuffer.clear()
-
- val name = javaName(c.symbol)
-
- val ps = c.symbol.info.parents
-
- val superClass: Symbol = if(ps.isEmpty) ObjectClass else ps.head.typeSymbol;
-
- val superInterfaces0: List[Symbol] = if(ps.isEmpty) Nil else c.symbol.mixinClasses;
- val superInterfaces = superInterfaces0 ++ c.symbol.annotations.flatMap(ann => newParentForAttr(ann.symbol)) distinct
-
- val ifaces =
- if(superInterfaces.isEmpty) JClass.NO_INTERFACES
- else mkArray(minimizeInterfaces(superInterfaces) map javaName)
-
- jclass = fjbgContext.JClass(javaFlags(c.symbol),
- name,
- javaName(superClass),
- ifaces,
- c.cunit.source.toString)
-
- if (isStaticModule(c.symbol) || serialVUID != None || isParcelableClass) {
- if (isStaticModule(c.symbol))
- addModuleInstanceField
- addStaticInit(jclass, c.lookupStaticCtor)
-
- if (isTopLevelModule(c.symbol)) {
- if (c.symbol.companionClass == NoSymbol)
- generateMirrorClass(c.symbol, c.cunit.source)
- else
- log("No mirror class for module with linked class: " +
- c.symbol.fullName)
- }
- }
- else {
- c.lookupStaticCtor foreach (constructor => addStaticInit(jclass, Some(constructor)))
-
- // it must be a top level class (name contains no $s)
- def isCandidateForForwarders(sym: Symbol): Boolean =
- afterPickler {
- !(sym.name.toString contains '$') && sym.hasModuleFlag && !sym.isImplClass && !sym.isNestedClass
- }
-
- // At some point this started throwing lots of exceptions as a compile was finishing.
- // error: java.lang.AssertionError:
- // assertion failed: List(object package$CompositeThrowable, object package$CompositeThrowable)
- // ...is the one I've seen repeatedly. Suppressing.
- val lmoc = (
- try c.symbol.companionModule
- catch { case x: AssertionError =>
- Console.println("Suppressing failed assert: " + x)
- NoSymbol
- }
- )
- // add static forwarders if there are no name conflicts; see bugs #363 and #1735
- if (lmoc != NoSymbol && !c.symbol.isInterface) {
- if (isCandidateForForwarders(lmoc) && !settings.noForwarders.value) {
- log("Adding static forwarders from '%s' to implementations in '%s'".format(c.symbol, lmoc))
- addForwarders(jclass, lmoc.moduleClass)
- }
- }
- }
-
- clasz.fields foreach genField
- clasz.methods foreach genMethod
-
- val ssa = scalaSignatureAddingMarker(jclass, c.symbol)
- addGenericSignature(jclass, c.symbol, c.symbol.owner)
- addAnnotations(jclass, c.symbol.annotations ++ ssa)
- addEnclosingMethodAttribute(jclass, c.symbol)
- emitClass(jclass, c.symbol)
-
- if (c.symbol hasAnnotation BeanInfoAttr)
- genBeanInfoClass(c)
- }
-
- private def addEnclosingMethodAttribute(jclass: JClass, clazz: Symbol) {
- val sym = clazz.originalEnclosingMethod
- if (sym.isMethod) {
- debuglog("enclosing method for %s is %s (in %s)".format(clazz, sym, sym.enclClass))
- jclass addAttribute fjbgContext.JEnclosingMethodAttribute(
- jclass,
- javaName(sym.enclClass),
- javaName(sym),
- javaType(sym)
- )
- } else if (clazz.isAnonymousClass) {
- val enclClass = clazz.rawowner
- assert(enclClass.isClass, enclClass)
- val sym = enclClass.primaryConstructor
- if (sym == NoSymbol)
- log("Ran out of room looking for an enclosing method for %s: no constructor here.".format(
- enclClass, clazz)
- )
- else {
- debuglog("enclosing method for %s is %s (in %s)".format(clazz, sym, enclClass))
- jclass addAttribute fjbgContext.JEnclosingMethodAttribute(
- jclass,
- javaName(enclClass),
- javaName(sym),
- javaType(sym).asInstanceOf[JMethodType]
- )
- }
- }
- }
-
- private def toByteArray(jc: JClass): Array[Byte] = {
- val bos = new java.io.ByteArrayOutputStream()
- val dos = new java.io.DataOutputStream(bos)
- jc.writeTo(dos)
- dos.close()
- bos.toByteArray
- }
-
- /**
- * Generate a bean info class that describes the given class.
- *
- * @author Ross Judson (ross.judson@soletta.com)
- */
- def genBeanInfoClass(c: IClass) {
- val description = c.symbol getAnnotation BeanDescriptionAttr
- // informProgress(description.toString)
-
- val beanInfoClass = fjbgContext.JClass(javaFlags(c.symbol),
- javaName(c.symbol) + "BeanInfo",
- "scala/beans/ScalaBeanInfo",
- JClass.NO_INTERFACES,
- c.cunit.source.toString)
-
- var fieldList = List[String]()
- for (f <- clasz.fields if f.symbol.hasGetter;
- g = f.symbol.getter(c.symbol);
- s = f.symbol.setter(c.symbol);
- if g.isPublic && !(f.symbol.name startsWith "$")) // inserting $outer breaks the bean
- fieldList = javaName(f.symbol) :: javaName(g) :: (if (s != NoSymbol) javaName(s) else null) :: fieldList
- val methodList =
- for (m <- clasz.methods
- if !m.symbol.isConstructor &&
- m.symbol.isPublic &&
- !(m.symbol.name startsWith "$") &&
- !m.symbol.isGetter &&
- !m.symbol.isSetter) yield javaName(m.symbol)
-
- val constructor = beanInfoClass.addNewMethod(ACC_PUBLIC, "<init>", JType.VOID, new Array[JType](0), new Array[String](0))
- val jcode = constructor.getCode().asInstanceOf[JExtendedCode]
- val strKind = new JObjectType(javaName(StringClass))
- val stringArrayKind = new JArrayType(strKind)
- val conType = new JMethodType(JType.VOID, Array(javaType(ClassClass), stringArrayKind, stringArrayKind))
-
- def push(lst:Seq[String]) {
- var fi = 0
- for (f <- lst) {
- jcode.emitDUP()
- jcode emitPUSH fi
- if (f != null)
- jcode emitPUSH f
- else
- jcode.emitACONST_NULL()
- jcode emitASTORE strKind
- fi += 1
- }
- }
-
- jcode.emitALOAD_0()
- // push the class
- jcode emitPUSH javaType(c.symbol).asInstanceOf[JReferenceType]
-
- // push the string array of field information
- jcode emitPUSH fieldList.length
- jcode emitANEWARRAY strKind
- push(fieldList)
-
- // push the string array of method information
- jcode emitPUSH methodList.length
- jcode emitANEWARRAY strKind
- push(methodList)
-
- // invoke the superclass constructor, which will do the
- // necessary java reflection and create Method objects.
- jcode.emitINVOKESPECIAL("scala/beans/ScalaBeanInfo", "<init>", conType)
- jcode.emitRETURN()
-
- // write the bean information class file.
- writeClass("BeanInfo ", beanInfoClass.getName(), toByteArray(beanInfoClass), c.symbol)
- }
-
- /** Add the given 'throws' attributes to jmethod */
- def addExceptionsAttribute(jmethod: JMethod, excs: List[AnnotationInfo]) {
- if (excs.isEmpty) return
-
- val cpool = jmethod.getConstantPool
- val buf: ByteBuffer = ByteBuffer.allocate(512)
- var nattr = 0
-
- // put some random value; the actual number is determined at the end
- buf putShort 0xbaba.toShort
-
- for (ThrownException(exc) <- excs.distinct) {
- buf.putShort(
- cpool.addClass(
- javaName(exc)).shortValue)
- nattr += 1
- }
-
- assert(nattr > 0, nattr)
- buf.putShort(0, nattr.toShort)
- addAttribute(jmethod, tpnme.ExceptionsATTR, buf)
- }
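
addExceptionsAttribute reserves a two-byte placeholder for the entry count before the number of distinct thrown exceptions is known, then patches it in place with buf.putShort(0, nattr.toShort). A minimal standalone sketch of this placeholder-then-patch pattern, with a hypothetical name and plain Int entries standing in for constant-pool indices:

    import java.nio.ByteBuffer

    // Sketch: reserve the count slot, emit entries, patch the count at offset 0.
    object CountPatchSketch {
      def writeEntries(entries: List[Int]): Array[Byte] = {
        val buf = ByteBuffer.allocate(2 + 2 * entries.length)
        buf.putShort(0xbaba.toShort)            // placeholder count
        entries.foreach(e => buf.putShort(e.toShort))
        buf.putShort(0, entries.length.toShort) // overwrite the placeholder
        buf.array().slice(0, buf.position())
      }

      def main(args: Array[String]): Unit =
        println(writeEntries(List(7, 8, 9)).length) // 8 bytes: count + 3 shorts
    }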
-
- /** Whether an annotation should be emitted as a Java annotation
- * .initialize: if 'annot' is read from pickle, atp might be un-initialized
- */
- private def shouldEmitAnnotation(annot: AnnotationInfo) =
- annot.symbol.initialize.isJavaDefined &&
- annot.matches(ClassfileAnnotationClass) &&
- annot.args.isEmpty
-
- private def emitJavaAnnotations(cpool: JConstantPool, buf: ByteBuffer, annotations: List[AnnotationInfo]): Int = {
- def emitArgument(arg: ClassfileAnnotArg): Unit = arg match {
- case LiteralAnnotArg(const) =>
- const.tag match {
- case BooleanTag =>
- buf put 'Z'.toByte
- buf putShort cpool.addInteger(if(const.booleanValue) 1 else 0).toShort
- case ByteTag =>
- buf put 'B'.toByte
- buf putShort cpool.addInteger(const.byteValue).toShort
- case ShortTag =>
- buf put 'S'.toByte
- buf putShort cpool.addInteger(const.shortValue).toShort
- case CharTag =>
- buf put 'C'.toByte
- buf putShort cpool.addInteger(const.charValue).toShort
- case IntTag =>
- buf put 'I'.toByte
- buf putShort cpool.addInteger(const.intValue).toShort
- case LongTag =>
- buf put 'J'.toByte
- buf putShort cpool.addLong(const.longValue).toShort
- case FloatTag =>
- buf put 'F'.toByte
- buf putShort cpool.addFloat(const.floatValue).toShort
- case DoubleTag =>
- buf put 'D'.toByte
- buf putShort cpool.addDouble(const.doubleValue).toShort
- case StringTag =>
- buf put 's'.toByte
- buf putShort cpool.addUtf8(const.stringValue).toShort
- case ClazzTag =>
- buf put 'c'.toByte
- buf putShort cpool.addUtf8(javaType(const.typeValue).getSignature()).toShort
- case EnumTag =>
- buf put 'e'.toByte
- buf putShort cpool.addUtf8(javaType(const.tpe).getSignature()).toShort
- buf putShort cpool.addUtf8(const.symbolValue.name.toString).toShort
- }
-
- case sb@ScalaSigBytes(bytes) if !sb.isLong =>
- buf put 's'.toByte
- buf putShort cpool.addUtf8(sb.encodedBytes).toShort
-
- case sb@ScalaSigBytes(bytes) if sb.isLong =>
- buf put '['.toByte
- val stringCount = (sb.encodedBytes.length / 65534) + 1
- buf putShort stringCount.toShort
- for (i <- 0 until stringCount) {
- buf put 's'.toByte
- val j = i * 65535
- val string = sb.encodedBytes.slice(j, j + 65535)
- buf putShort cpool.addUtf8(string).toShort
- }
-
- case ArrayAnnotArg(args) =>
- buf put '['.toByte
- buf putShort args.length.toShort
- args foreach emitArgument
-
- case NestedAnnotArg(annInfo) =>
- buf put '@'.toByte
- emitAnnotation(annInfo)
- }
-
- def emitAnnotation(annotInfo: AnnotationInfo) {
- val AnnotationInfo(typ, args, assocs) = annotInfo
- val jtype = javaType(typ)
- buf putShort cpool.addUtf8(jtype.getSignature()).toShort
- assert(args.isEmpty, args)
- buf putShort assocs.length.toShort
- for ((name, value) <- assocs) {
- buf putShort cpool.addUtf8(name.toString).toShort
- emitArgument(value)
- }
- }
-
- var nannots = 0
- val pos = buf.position()
-
- // put some random value; the actual number of annotations is determined at the end
- buf putShort 0xbaba.toShort
-
- for (annot <- annotations if shouldEmitAnnotation(annot)) {
- nannots += 1
- emitAnnotation(annot)
- }
-
- // save the number of annotations
- buf.putShort(pos, nannots.toShort)
- nannots
- }
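
The long ScalaSigBytes case above splits an over-long pickled signature across several strings because a single CONSTANT_Utf8 entry is capped at 65535 bytes. A hedged sketch of the chunking idea in isolation (ChunkSketch and the use of grouped are illustrative, not compiler code; note the real code computes the chunk count with a stride of 65534 but slices with 65535):

    object ChunkSketch {
      def chunk(encoded: String, max: Int = 65535): List[String] =
        encoded.grouped(max).toList

      def main(args: Array[String]): Unit = {
        val pieces = chunk("a" * 100000)
        println(pieces.map(_.length)) // List(65535, 34465)
      }
    }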
-
- // @M don't generate java generics sigs for (members of) implementation
- // classes, as they are monomorphic (TODO: ok?)
- private def needsGenericSignature(sym: Symbol) = !(
- // PP: This condition used to include sym.hasExpandedName, but this leads
- // to the total loss of generic information if a private member is
- // accessed from a closure: both the field and the accessor were generated
- // without it. This is particularly bad because the availability of
- // generic information could disappear as a consequence of a seemingly
- // unrelated change.
- settings.Ynogenericsig.value
- || sym.isArtifact
- || sym.isLiftedMethod
- || sym.isBridge
- || (sym.ownerChain exists (_.isImplClass))
- )
- def addGenericSignature(jmember: JMember, sym: Symbol, owner: Symbol) {
- if (needsGenericSignature(sym)) {
- val memberTpe = beforeErasure(owner.thisType.memberInfo(sym))
-
- erasure.javaSig(sym, memberTpe) foreach { sig =>
- // This seems useful enough in the general case.
- log(sig)
- if (checkSignatures) {
- val normalizedTpe = beforeErasure(erasure.prepareSigMap(memberTpe))
- val bytecodeTpe = owner.thisType.memberInfo(sym)
- if (!sym.isType && !sym.isConstructor && !(erasure.erasure(sym)(normalizedTpe) =:= bytecodeTpe)) {
- clasz.cunit.warning(sym.pos,
- """|compiler bug: created generic signature for %s in %s that does not conform to its erasure
- |signature: %s
- |original type: %s
- |normalized type: %s
- |erasure type: %s
- |if this is reproducible, please report bug at https://issues.scala-lang.org/
- """.trim.stripMargin.format(sym, sym.owner.skipPackageObject.fullName, sig, memberTpe, normalizedTpe, bytecodeTpe))
- return
- }
- }
- val index = jmember.getConstantPool.addUtf8(sig).toShort
- if (opt.verboseDebug)
- beforeErasure(println("add generic sig "+sym+":"+sym.info+" ==> "+sig+" @ "+index))
-
- val buf = ByteBuffer.allocate(2)
- buf putShort index
- addAttribute(jmember, tpnme.SignatureATTR, buf)
- }
- }
- }
-
- def addAnnotations(jmember: JMember, annotations: List[AnnotationInfo]) {
- if (annotations exists (_ matches definitions.DeprecatedAttr)) {
- val attr = jmember.getContext().JOtherAttribute(
- jmember.getJClass(), jmember, tpnme.DeprecatedATTR.toString,
- new Array[Byte](0), 0)
- jmember addAttribute attr
- }
-
- val toEmit = annotations filter shouldEmitAnnotation
- if (toEmit.isEmpty) return
-
- val buf: ByteBuffer = ByteBuffer.allocate(2048)
- emitJavaAnnotations(jmember.getConstantPool, buf, toEmit)
- addAttribute(jmember, tpnme.RuntimeAnnotationATTR, buf)
- }
-
- def addParamAnnotations(jmethod: JMethod, pannotss: List[List[AnnotationInfo]]) {
- val annotations = pannotss map (_ filter shouldEmitAnnotation)
- if (annotations forall (_.isEmpty)) return
-
- val buf: ByteBuffer = ByteBuffer.allocate(2048)
-
- // number of parameters
- buf.put(annotations.length.toByte)
- for (annots <- annotations)
- emitJavaAnnotations(jmethod.getConstantPool, buf, annots)
-
- addAttribute(jmethod, tpnme.RuntimeParamAnnotationATTR, buf)
- }
-
- def addAttribute(jmember: JMember, name: Name, buf: ByteBuffer) {
- if (buf.position() < 2)
- return
-
- val length = buf.position()
- val arr = buf.array().slice(0, length)
-
- val attr = jmember.getContext().JOtherAttribute(jmember.getJClass(),
- jmember,
- name.toString,
- arr,
- length)
- jmember addAttribute attr
- }
-
- def addInnerClasses(jclass: JClass) {
- /** The outer name for this inner class. Note that it returns null
- * when the inner class should not get an index in the constant pool.
- * That means non-member classes (anonymous). See Section 4.7.5 in the JVMS.
- */
- def outerName(innerSym: Symbol): String = {
- if (innerSym.originalEnclosingMethod != NoSymbol)
- null
- else {
- val outerName = javaName(innerSym.rawowner)
- if (isTopLevelModule(innerSym.rawowner)) "" + nme.stripModuleSuffix(newTermName(outerName))
- else outerName
- }
- }
-
- def innerName(innerSym: Symbol): String =
- if (innerSym.isAnonymousClass || innerSym.isAnonymousFunction)
- null
- else
- innerSym.rawname + innerSym.moduleSuffix
-
- // add inner classes which might not have been referenced yet
- afterErasure {
- for (sym <- List(clasz.symbol, clasz.symbol.linkedClassOfClass); m <- sym.info.decls.map(innerClassSymbolFor) if m.isClass)
- innerClassBuffer += m
- }
-
- val allInners = innerClassBuffer.toList
- if (allInners.nonEmpty) {
- debuglog(clasz.symbol.fullName('.') + " contains " + allInners.size + " inner classes.")
- val innerClassesAttr = jclass.getInnerClasses()
- // sort them so inner classes succeed their enclosing class
- // to satisfy the Eclipse Java compiler
- for (innerSym <- allInners sortBy (_.name.length)) {
- val flags = {
- val staticFlag = if (innerSym.rawowner.hasModuleFlag) ACC_STATIC else 0
- (javaFlags(innerSym) | staticFlag) & INNER_CLASSES_FLAGS
- }
- val jname = javaName(innerSym)
- val oname = outerName(innerSym)
- val iname = innerName(innerSym)
-
- // Mimicking javap inner class output
- debuglog(
- if (oname == null || iname == null) "//class " + jname
- else "//%s=class %s of class %s".format(iname, jname, oname)
- )
-
- innerClassesAttr.addEntry(jname, oname, iname, flags)
- }
- }
- }
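
Sorting the collected symbols by name length is a cheap way to guarantee that an enclosing class is listed before the classes nested in it, since every nested binary name strictly extends its owner's. A tiny illustration with invented class names:

    // Illustrative: the length sort puts Outer$Inner before Outer$Inner$Deep.
    object InnerSortSketch {
      def main(args: Array[String]): Unit =
        println(List("Outer$Inner$Deep", "Outer$Inner").sortBy(_.length))
        // List(Outer$Inner, Outer$Inner$Deep)
    }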
-
- def genField(f: IField) {
- debuglog("Adding field: " + f.symbol.fullName)
-
- val jfield = jclass.addNewField(
- javaFieldFlags(f.symbol),
- javaName(f.symbol),
- javaType(f.symbol.tpe)
- )
-
- addGenericSignature(jfield, f.symbol, clasz.symbol)
- addAnnotations(jfield, f.symbol.annotations)
- }
-
- def genMethod(m: IMethod) {
- if (m.symbol.isStaticConstructor || definitions.isGetClass(m.symbol)) return
-
- debuglog("Generating method " + m.symbol.fullName)
- method = m
- endPC.clear
- computeLocalVarsIndex(m)
-
- var resTpe = javaType(m.symbol.tpe.resultType)
- if (m.symbol.isClassConstructor)
- resTpe = JType.VOID
-
- var flags = javaFlags(m.symbol)
- if (jclass.isInterface)
- flags |= ACC_ABSTRACT
-
- if (m.symbol.isStrictFP)
- flags |= ACC_STRICT
-
- // native methods of objects are generated in mirror classes
- if (method.native)
- flags |= ACC_NATIVE
-
- jmethod = jclass.addNewMethod(flags,
- javaName(m.symbol),
- resTpe,
- mkArray(m.params map (p => javaType(p.kind))),
- mkArray(m.params map (p => javaName(p.sym))))
-
- addRemoteException(jmethod, m.symbol)
-
- if (!jmethod.isAbstract() && !method.native) {
- val jcode = jmethod.getCode().asInstanceOf[JExtendedCode]
-
- // add a fake local for debugging purposes
- if (emitVars && isClosureApply(method.symbol)) {
- val outerField = clasz.symbol.info.decl(nme.OUTER_LOCAL)
- if (outerField != NoSymbol) {
- log("Adding fake local to represent outer 'this' for closure " + clasz)
- val _this = new Local(
- method.symbol.newVariable(nme.FAKE_LOCAL_THIS), toTypeKind(outerField.tpe), false)
- m.locals = m.locals ::: List(_this)
- computeLocalVarsIndex(m) // since we added a new local, we need to recompute indexes
-
- jcode.emitALOAD_0()
- jcode.emitGETFIELD(javaName(clasz.symbol),
- javaName(outerField),
- javaType(outerField))
- jcode.emitSTORE(indexOf(_this), javaType(_this.kind))
- }
- }
-
- for (local <- m.locals if ! m.params.contains(local)) {
- debuglog("add local var: " + local)
- jmethod.addNewLocalVariable(javaType(local.kind), javaName(local.sym))
- }
-
- genCode(m)
- if (emitVars)
- genLocalVariableTable(m, jcode)
- }
-
- addGenericSignature(jmethod, m.symbol, clasz.symbol)
- val (excs, others) = m.symbol.annotations partition (_.symbol == ThrowsClass)
- addExceptionsAttribute(jmethod, excs)
- addAnnotations(jmethod, others)
- addParamAnnotations(jmethod, m.params.map(_.sym.annotations))
-
- // check for code size
- try jmethod.freeze()
- catch {
- case e: JCode.CodeSizeTooBigException =>
- clasz.cunit.error(m.symbol.pos, "Code size exceeds JVM limits: %d".format(e.codeSize))
- throw e
- }
- }
-
- /** Adds a @remote annotation; its actual use is unknown.
- */
- private def addRemoteException(jmethod: JMethod, meth: Symbol) {
- val needsAnnotation = (
- (isRemoteClass || (meth hasAnnotation RemoteAttr) && jmethod.isPublic)
- && !(meth.throwsAnnotations contains RemoteExceptionClass)
- )
- if (needsAnnotation) {
- val c = Constant(RemoteExceptionClass.tpe)
- val arg = Literal(c) setType c.tpe
- meth.addAnnotation(appliedType(ThrowsClass, c.tpe), arg)
- }
- }
-
- private def isClosureApply(sym: Symbol): Boolean = {
- (sym.name == nme.apply) &&
- sym.owner.isSynthetic &&
- sym.owner.tpe.parents.exists { t =>
- val TypeRef(_, sym, _) = t
- FunctionClass contains sym
- }
- }
-
- def addModuleInstanceField() {
- jclass.addNewField(PublicStaticFinal,
- nme.MODULE_INSTANCE_FIELD.toString,
- jclass.getType())
- }
-
- def addStaticInit(cls: JClass, mopt: Option[IMethod]) {
- val clinitMethod = cls.addNewMethod(PublicStatic,
- "<clinit>",
- JType.VOID,
- JType.EMPTY_ARRAY,
- new Array[String](0))
- val clinit = clinitMethod.getCode().asInstanceOf[JExtendedCode]
-
- mopt match {
- case Some(m) =>
- val oldLastBlock = m.lastBlock
- val lastBlock = m.newBlock()
- oldLastBlock.replaceInstruction(oldLastBlock.length - 1, JUMP(lastBlock))
-
- if (isStaticModule(clasz.symbol)) {
- // call object's private ctor from static ctor
- lastBlock emit NEW(REFERENCE(m.symbol.enclClass))
- lastBlock emit CALL_METHOD(m.symbol.enclClass.primaryConstructor, Static(true))
- }
-
- // add serialVUID code
- serialVUID foreach { value =>
- import Flags._, definitions._
- val fieldName = "serialVersionUID"
- val fieldSymbol = clasz.symbol.newValue(newTermName(fieldName), NoPosition, STATIC | FINAL) setInfo LongClass.tpe
- clasz addField new IField(fieldSymbol)
- lastBlock emit CONSTANT(Constant(value))
- lastBlock emit STORE_FIELD(fieldSymbol, true)
- }
-
- if (isParcelableClass)
- addCreatorCode(BytecodeGenerator.this, lastBlock)
-
- lastBlock emit RETURN(UNIT)
- lastBlock.close
-
- method = m
- jmethod = clinitMethod
- genCode(m)
- case None =>
- legacyStaticInitializer(cls, clinit)
- }
- }
-
- private def legacyStaticInitializer(cls: JClass, clinit: JExtendedCode) {
- if (isStaticModule(clasz.symbol)) {
- clinit emitNEW cls.getName()
- clinit.emitINVOKESPECIAL(cls.getName(),
- JMethod.INSTANCE_CONSTRUCTOR_NAME,
- JMethodType.ARGLESS_VOID_FUNCTION)
- }
-
- serialVUID foreach { value =>
- val fieldName = "serialVersionUID"
- jclass.addNewField(PublicStaticFinal, fieldName, JType.LONG)
- clinit emitPUSH value
- clinit.emitPUTSTATIC(jclass.getName(), fieldName, JType.LONG)
- }
-
- if (isParcelableClass)
- legacyAddCreatorCode(BytecodeGenerator.this, clinit)
-
- clinit.emitRETURN()
- }
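
Both code paths above synthesize the static final serialVersionUID long that the @SerialVersionUID annotation requests at the source level. A minimal sketch of the triggering source form (Payload and the constant 42L are arbitrary); in this legacy backend the value is assigned from the static initializer, roughly as emitted here:

    @SerialVersionUID(42L)
    class Payload extends Serializable
    // javap -c -p Payload (approximately, for this backend):
    //   public static final long serialVersionUID;
    //   static {};  // ldc2_w 42L; putstatic Payload.serialVersionUID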
-
- /** Add a forwarder for method m */
- def addForwarder(jclass: JClass, module: Symbol, m: Symbol) {
- val moduleName = javaName(module)
- val methodInfo = module.thisType.memberInfo(m)
- val paramJavaTypes = methodInfo.paramTypes map javaType
- val paramNames = 0 until paramJavaTypes.length map ("x_" + _)
- // TODO: evaluate the other flags we might be dropping on the floor here.
- val flags = PublicStatic | (
- if (m.isVarargsMethod) ACC_VARARGS else 0
- )
-
- /** Forwarders must not be marked final, as the JVM will not allow
- * redefinition of a final static method, and we don't know what classes
- * might be subclassing the companion class. See SI-4827.
- */
- val mirrorMethod = jclass.addNewMethod(
- flags,
- javaName(m),
- javaType(methodInfo.resultType),
- mkArray(paramJavaTypes),
- mkArray(paramNames))
- val mirrorCode = mirrorMethod.getCode().asInstanceOf[JExtendedCode]
- mirrorCode.emitGETSTATIC(moduleName,
- nme.MODULE_INSTANCE_FIELD.toString,
- new JObjectType(moduleName))
-
- var i = 0
- var index = 0
- var argTypes = mirrorMethod.getArgumentTypes()
- while (i < argTypes.length) {
- mirrorCode.emitLOAD(index, argTypes(i))
- index += argTypes(i).getSize()
- i += 1
- }
-
- mirrorCode.emitINVOKEVIRTUAL(moduleName, mirrorMethod.getName, javaType(m).asInstanceOf[JMethodType])
- mirrorCode emitRETURN mirrorMethod.getReturnType()
-
- addRemoteException(mirrorMethod, m)
- // only add generic signature if the method is concrete; bug #1745
- if (!m.isDeferred)
- addGenericSignature(mirrorMethod, m, module)
-
- val (throws, others) = m.annotations partition (_.symbol == ThrowsClass)
- addExceptionsAttribute(mirrorMethod, throws)
- addAnnotations(mirrorMethod, others)
- addParamAnnotations(mirrorMethod, m.info.params.map(_.annotations))
- }
-
- /** Add forwarders for all methods defined in `module` that don't conflict
- * with methods in the companion class of `module`. A conflict arises when
- * a method with the same name is defined both in a class and its companion
- * object: method signature is not taken into account.
- */
- def addForwarders(jclass: JClass, moduleClass: Symbol) {
- assert(moduleClass.isModuleClass, moduleClass)
- debuglog("Dumping mirror class for object: " + moduleClass)
-
- val className = jclass.getName
- val linkedClass = moduleClass.companionClass
- val linkedModule = linkedClass.companionSymbol
- lazy val conflictingNames: Set[Name] = {
- linkedClass.info.members collect { case sym if sym.name.isTermName => sym.name } toSet
- }
- debuglog("Potentially conflicting names for forwarders: " + conflictingNames)
-
- for (m <- moduleClass.info.membersBasedOnFlags(ExcludedForwarderFlags, Flags.METHOD)) {
- if (m.isType || m.isDeferred || (m.owner eq ObjectClass) || m.isConstructor)
- debuglog("No forwarder for '%s' from %s to '%s'".format(m, className, moduleClass))
- else if (conflictingNames(m.name))
- log("No forwarder for " + m + " due to conflict with " + linkedClass.info.member(m.name))
- else {
- log("Adding static forwarder for '%s' from %s to '%s'".format(m, className, moduleClass))
- addForwarder(jclass, moduleClass, m)
- }
- }
- }
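
These forwarders are what let Java code call members of a Scala object as ordinary static methods on the companion (mirror) class. A hedged illustration of the correspondence (Echo is an invented example; the javap output sketched in the comments is approximate):

    // Compiling this object yields a class `Echo` with a static forwarder
    // that delegates to the singleton instance Echo$.MODULE$.
    object Echo {
      def greet(s: String): String = "hello, " + s
    }
    // javap -p Echo (approximately):
    //   public static java.lang.String greet(java.lang.String);
    //     // body: getstatic Echo$.MODULE$; invokevirtual Echo$.greet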
-
- /** Generate a mirror class for a top-level module. A mirror class is a class
- * containing only static methods that forward to the corresponding method
- * on the MODULE instance of the given Scala object. It will only be
- * generated if there is no companion class: if there is, an attempt will
- * instead be made to add the forwarder methods to the companion class.
- */
- def generateMirrorClass(clasz: Symbol, sourceFile: SourceFile) {
- import JAccessFlags._
- /* We need to save inner classes buffer and create a new one to make sure
- * that we do not confuse inner classes referenced from the mirror class
- * with those referenced from the class being mirrored. These two sets can be different
- * as seen in this case:
- *
- * class A {
- * class B
- * def b: B = new B
- * }
- * object C extends A
- *
- * Here mirror class of C has a static forwarder for (inherited) method `b`
- * therefore it refers to class `B` and needs InnerClasses entry. However,
- * the real class for `C` (named `C$`) is empty and does not refer to `B`
- * thus does not need an InnerClasses entry for it.
- *
- * NOTE: This logic has been refactored in GenASM and everything is
- * implemented in a much cleaner way by having two separate buffers.
- */
- val savedInnerClasses = innerClassBuffer
- innerClassBuffer = mutable.LinkedHashSet[Symbol]()
- val moduleName = javaName(clasz) // + "$"
- val mirrorName = moduleName.substring(0, moduleName.length() - 1)
- val mirrorClass = fjbgContext.JClass(ACC_SUPER | ACC_PUBLIC | ACC_FINAL,
- mirrorName,
- JAVA_LANG_OBJECT.getName,
- JClass.NO_INTERFACES,
- "" + sourceFile)
-
- log("Dumping mirror class for '%s'".format(mirrorClass.getName))
- addForwarders(mirrorClass, clasz)
- val ssa = scalaSignatureAddingMarker(mirrorClass, clasz.companionSymbol)
- addAnnotations(mirrorClass, clasz.annotations ++ ssa)
- emitClass(mirrorClass, clasz)
- innerClassBuffer = savedInnerClasses
- }
-
- var linearization: List[BasicBlock] = Nil
- var isModuleInitialized = false
-
- /**
- * @param m ...
- */
- def genCode(m: IMethod) {
- val jcode = jmethod.getCode.asInstanceOf[JExtendedCode]
-
- def makeLabels(bs: List[BasicBlock]) = {
- debuglog("Making labels for: " + method)
-
- mutable.HashMap(bs map (_ -> jcode.newLabel) : _*)
- }
-
- isModuleInitialized = false
-
- linearization = linearizer.linearize(m)
- val labels = makeLabels(linearization)
-
- var nextBlock: BasicBlock = linearization.head
-
- def genBlocks(l: List[BasicBlock]): Unit = l match {
- case Nil => ()
- case x :: Nil => nextBlock = null; genBlock(x)
- case x :: y :: ys => nextBlock = y; genBlock(x); genBlocks(y :: ys)
- }
-
- /** Generate exception handlers for the current method. */
- def genExceptionHandlers() {
-
- /** Return a list of pairs of intervals where the handler is active.
- * The intervals in the list have to be inclusive in the beginning and
- * exclusive in the end: [start, end).
- */
- def ranges(e: ExceptionHandler): List[(Int, Int)] = {
- var covered = e.covered
- var ranges: List[(Int, Int)] = Nil
- var start = -1
- var end = -1
-
- linearization foreach { b =>
- if (! (covered contains b) ) {
- if (start >= 0) { // we're inside a handler range
- end = labels(b).getAnchor()
- ranges ::= ((start, end))
- start = -1
- }
- } else {
- if (start < 0) // we're not inside a handler range
- start = labels(b).getAnchor()
-
- end = endPC(b)
- covered -= b
- }
- }
-
- /* Add the last interval. Note that since the intervals are
- * open-ended to the right, we have to give a number past the actual
- * code!
- */
- if (start >= 0) {
- ranges ::= ((start, jcode.getPC()))
- }
-
- if (!covered.isEmpty)
- debuglog("Some covered blocks were not found in method: " + method +
- " covered: " + covered + " not in " + linearization)
- ranges
- }
-
- for (e <- this.method.exh ; p <- ranges(e).sortBy(_._1)) {
- if (p._1 < p._2) {
- debuglog("Adding exception handler " + e + "at block: " + e.startBlock + " for " + method +
- " from: " + p._1 + " to: " + p._2 + " catching: " + e.cls);
- val cls = if (e.cls == NoSymbol || e.cls == ThrowableClass) null
- else javaName(e.cls)
- jcode.addExceptionHandler(p._1, p._2,
- labels(e.startBlock).getAnchor(),
- cls)
- } else
- log("Empty exception range: " + p)
- }
- }
-
- def isAccessibleFrom(target: Symbol, site: Symbol): Boolean = {
- target.isPublic || target.isProtected && {
- (site.enclClass isSubClass target.enclClass) ||
- (site.enclosingPackage == target.privateWithin)
- }
- }
-
- def genCallMethod(call: CALL_METHOD) {
- val CALL_METHOD(method, style) = call
- val siteSymbol = clasz.symbol
- val hostSymbol = call.hostClass
- val methodOwner = method.owner
- // info calls so that types are up to date; erasure may add lateINTERFACE to traits
- hostSymbol.info ; methodOwner.info
-
- def needsInterfaceCall(sym: Symbol) = (
- sym.isInterface
- || sym.isJavaDefined && sym.isNonBottomSubClass(ClassfileAnnotationClass)
- )
- // whether to reference the type of the receiver or
- // the type of the method owner
- val useMethodOwner = (
- style != Dynamic
- || hostSymbol.isBottomClass
- || methodOwner == ObjectClass
- )
- val receiver = if (useMethodOwner) methodOwner else hostSymbol
- val jowner = javaName(receiver)
- val jname = javaName(method)
- val jtype = javaType(method).asInstanceOf[JMethodType]
-
- def dbg(invoke: String) {
- debuglog("%s %s %s.%s:%s".format(invoke, receiver.accessString, jowner, jname, jtype))
- }
-
- def initModule() {
- // we initialize the MODULE$ field immediately after the super ctor
- if (isStaticModule(siteSymbol) && !isModuleInitialized &&
- jmethod.getName() == JMethod.INSTANCE_CONSTRUCTOR_NAME &&
- jname == JMethod.INSTANCE_CONSTRUCTOR_NAME) {
- isModuleInitialized = true
- jcode.emitALOAD_0()
- jcode.emitPUTSTATIC(jclass.getName(),
- nme.MODULE_INSTANCE_FIELD.toString,
- jclass.getType())
- }
- }
-
- style match {
- case Static(true) => dbg("invokespecial"); jcode.emitINVOKESPECIAL(jowner, jname, jtype)
- case Static(false) => dbg("invokestatic"); jcode.emitINVOKESTATIC(jowner, jname, jtype)
- case Dynamic if needsInterfaceCall(receiver) => dbg("invokeinterface"); jcode.emitINVOKEINTERFACE(jowner, jname, jtype)
- case Dynamic => dbg("invokevirtual"); jcode.emitINVOKEVIRTUAL(jowner, jname, jtype)
- case SuperCall(_) =>
- dbg("invokespecial")
- jcode.emitINVOKESPECIAL(jowner, jname, jtype)
- initModule()
- }
- }
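
The style match above is the whole dispatch story: Static(true) calls (constructors, private methods) and super calls become invokespecial, Static(false) becomes invokestatic, and Dynamic becomes invokeinterface or invokevirtual depending on the receiver. A self-contained sketch of that mapping with simplified, invented types:

    object InvokeKindSketch {
      sealed trait Style
      case class Static(onInstance: Boolean) extends Style
      case object Dynamic                    extends Style
      case object Super                      extends Style

      def opcode(style: Style, ownerIsInterface: Boolean): String = style match {
        case Static(true)                => "invokespecial"  // ctor / private
        case Static(false)               => "invokestatic"
        case Dynamic if ownerIsInterface => "invokeinterface"
        case Dynamic                     => "invokevirtual"
        case Super                       => "invokespecial"
      }

      def main(args: Array[String]): Unit =
        println(opcode(Dynamic, ownerIsInterface = true)) // invokeinterface
    }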
-
- def genBlock(b: BasicBlock) {
- labels(b).anchorToNext()
-
- debuglog("Generating code for block: " + b + " at pc: " + labels(b).getAnchor())
- var lastMappedPC = 0
- var lastLineNr = 0
- var crtPC = 0
-
- /** local variables whose scope appears in this block. */
- val varsInBlock: mutable.Set[Local] = new mutable.HashSet
- val lastInstr = b.lastInstruction
-
- for (instr <- b) {
- instr match {
- case THIS(clasz) => jcode.emitALOAD_0()
-
- case CONSTANT(const) => genConstant(jcode, const)
-
- case LOAD_ARRAY_ITEM(kind) =>
- if(kind.isRefOrArrayType) { jcode.emitAALOAD() }
- else {
- (kind: @unchecked) match {
- case UNIT => throw new IllegalArgumentException("invalid type for aload " + kind)
- case BOOL | BYTE => jcode.emitBALOAD()
- case SHORT => jcode.emitSALOAD()
- case CHAR => jcode.emitCALOAD()
- case INT => jcode.emitIALOAD()
- case LONG => jcode.emitLALOAD()
- case FLOAT => jcode.emitFALOAD()
- case DOUBLE => jcode.emitDALOAD()
- }
- }
-
- case LOAD_LOCAL(local) => jcode.emitLOAD(indexOf(local), javaType(local.kind))
-
- case lf @ LOAD_FIELD(field, isStatic) =>
- var owner = javaName(lf.hostClass)
- debuglog("LOAD_FIELD with owner: " + owner +
- " flags: " + Flags.flagsToString(field.owner.flags))
- val fieldJName = javaName(field)
- val fieldJType = javaType(field)
- if (isStatic) jcode.emitGETSTATIC(owner, fieldJName, fieldJType)
- else jcode.emitGETFIELD( owner, fieldJName, fieldJType)
-
- case LOAD_MODULE(module) =>
- // assert(module.isModule, "Expected module: " + module)
- debuglog("generating LOAD_MODULE for: " + module + " flags: " + Flags.flagsToString(module.flags));
- if (clasz.symbol == module.moduleClass && jmethod.getName() != nme.readResolve.toString)
- jcode.emitALOAD_0()
- else
- jcode.emitGETSTATIC(javaName(module) /* + "$" */ ,
- nme.MODULE_INSTANCE_FIELD.toString,
- javaType(module))
-
- case STORE_ARRAY_ITEM(kind) =>
- if(kind.isRefOrArrayType) { jcode.emitAASTORE() }
- else {
- (kind: @unchecked) match {
- case UNIT => throw new IllegalArgumentException("invalid type for astore " + kind)
- case BOOL | BYTE => jcode.emitBASTORE()
- case SHORT => jcode.emitSASTORE()
- case CHAR => jcode.emitCASTORE()
- case INT => jcode.emitIASTORE()
- case LONG => jcode.emitLASTORE()
- case FLOAT => jcode.emitFASTORE()
- case DOUBLE => jcode.emitDASTORE()
- }
- }
-
- case STORE_LOCAL(local) =>
- jcode.emitSTORE(indexOf(local), javaType(local.kind))
-
- case STORE_THIS(_) =>
- // this only works for impl classes because the self parameter comes first
- // in the method signature. If that changes, this code has to be revisited.
- jcode.emitASTORE_0()
-
- case STORE_FIELD(field, isStatic) =>
- val owner = javaName(field.owner)
- val fieldJName = javaName(field)
- val fieldJType = javaType(field)
- if (isStatic) jcode.emitPUTSTATIC(owner, fieldJName, fieldJType)
- else jcode.emitPUTFIELD( owner, fieldJName, fieldJType)
-
- case CALL_PRIMITIVE(primitive) => genPrimitive(primitive, instr.pos)
-
- /** Special handling to access native Array.clone() */
- case call @ CALL_METHOD(definitions.Array_clone, Dynamic) =>
- val target: String = javaType(call.targetTypeKind).getSignature()
- jcode.emitINVOKEVIRTUAL(target, "clone", arrayCloneType)
-
- case call @ CALL_METHOD(method, style) => genCallMethod(call)
-
- case BOX(kind) =>
- val Pair(mname, mtype) = jBoxTo(kind)
- jcode.emitINVOKESTATIC(BoxesRunTime, mname, mtype)
-
- case UNBOX(kind) =>
- val Pair(mname, mtype) = jUnboxTo(kind)
- jcode.emitINVOKESTATIC(BoxesRunTime, mname, mtype)
-
- case NEW(REFERENCE(cls)) =>
- val className = javaName(cls)
- jcode emitNEW className
-
- case CREATE_ARRAY(elem, 1) =>
- if(elem.isRefOrArrayType) { jcode emitANEWARRAY javaType(elem).asInstanceOf[JReferenceType] }
- else { jcode emitNEWARRAY javaType(elem) }
-
- case CREATE_ARRAY(elem, dims) =>
- jcode.emitMULTIANEWARRAY(javaType(ArrayN(elem, dims)).asInstanceOf[JReferenceType], dims)
-
- case IS_INSTANCE(tpe) =>
- tpe match {
- case REFERENCE(cls) => jcode emitINSTANCEOF new JObjectType(javaName(cls))
- case ARRAY(elem) => jcode emitINSTANCEOF new JArrayType(javaType(elem))
- case _ => abort("Unknown reference type in IS_INSTANCE: " + tpe)
- }
-
- case CHECK_CAST(tpe) =>
- tpe match {
- case REFERENCE(cls) => if (cls != ObjectClass) { jcode emitCHECKCAST new JObjectType(javaName(cls)) } // No need to checkcast for Objects
- case ARRAY(elem) => jcode emitCHECKCAST new JArrayType(javaType(elem))
- case _ => abort("Unknown reference type in IS_INSTANCE: " + tpe)
- }
-
- case SWITCH(tags, branches) =>
- val tagArray = new Array[Array[Int]](tags.length)
- var caze = tags
- var i = 0
-
- while (i < tagArray.length) {
- tagArray(i) = new Array[Int](caze.head.length)
- caze.head.copyToArray(tagArray(i), 0)
- i += 1
- caze = caze.tail
- }
- val branchArray = jcode.newLabels(tagArray.length)
- i = 0
- while (i < branchArray.length) {
- branchArray(i) = labels(branches(i))
- i += 1
- }
- debuglog("Emitting SWITCH:\ntags: " + tags + "\nbranches: " + branches)
- jcode.emitSWITCH(tagArray,
- branchArray,
- labels(branches.last),
- MIN_SWITCH_DENSITY)
- ()
-
- case JUMP(whereto) =>
- if (nextBlock != whereto)
- jcode.emitGOTO_maybe_W(labels(whereto), false) // default to short jumps
-
- case CJUMP(success, failure, cond, kind) =>
- if(kind.isIntSizedType) { // BOOL, BYTE, CHAR, SHORT, or INT
- if (nextBlock == success) {
- jcode.emitIF_ICMP(conds(cond.negate()), labels(failure))
- // .. and fall through to success label
- } else {
- jcode.emitIF_ICMP(conds(cond), labels(success))
- if (nextBlock != failure)
- jcode.emitGOTO_maybe_W(labels(failure), false)
- }
- } else if(kind.isRefOrArrayType) { // REFERENCE(_) | ARRAY(_)
- if (nextBlock == success) {
- jcode.emitIF_ACMP(conds(cond.negate()), labels(failure))
- // .. and fall through to success label
- } else {
- jcode.emitIF_ACMP(conds(cond), labels(success))
- if (nextBlock != failure)
- jcode.emitGOTO_maybe_W(labels(failure), false)
- }
- } else {
- (kind: @unchecked) match {
- case LONG => jcode.emitLCMP()
- case FLOAT =>
- if (cond == LT || cond == LE) jcode.emitFCMPG()
- else jcode.emitFCMPL()
- case DOUBLE =>
- if (cond == LT || cond == LE) jcode.emitDCMPG()
- else jcode.emitDCMPL()
- }
- if (nextBlock == success) {
- jcode.emitIF(conds(cond.negate()), labels(failure))
- // .. and fall through to success label
- } else {
- jcode.emitIF(conds(cond), labels(success));
- if (nextBlock != failure)
- jcode.emitGOTO_maybe_W(labels(failure), false)
- }
- }
-
- case CZJUMP(success, failure, cond, kind) =>
- if(kind.isIntSizedType) { // BOOL, BYTE, CHAR, SHORT, or INT
- if (nextBlock == success) {
- jcode.emitIF(conds(cond.negate()), labels(failure))
- } else {
- jcode.emitIF(conds(cond), labels(success))
- if (nextBlock != failure)
- jcode.emitGOTO_maybe_W(labels(failure), false)
- }
- } else if(kind.isRefOrArrayType) { // REFERENCE(_) | ARRAY(_)
- val Success = success
- val Failure = failure
- (cond, nextBlock) match {
- case (EQ, Success) => jcode emitIFNONNULL labels(failure)
- case (NE, Failure) => jcode emitIFNONNULL labels(success)
- case (EQ, Failure) => jcode emitIFNULL labels(success)
- case (NE, Success) => jcode emitIFNULL labels(failure)
- case (EQ, _) =>
- jcode emitIFNULL labels(success)
- jcode.emitGOTO_maybe_W(labels(failure), false)
- case (NE, _) =>
- jcode emitIFNONNULL labels(success)
- jcode.emitGOTO_maybe_W(labels(failure), false)
- case _ =>
- }
- } else {
- (kind: @unchecked) match {
- case LONG =>
- jcode.emitLCONST_0()
- jcode.emitLCMP()
- case FLOAT =>
- jcode.emitFCONST_0()
- if (cond == LT || cond == LE) jcode.emitFCMPG()
- else jcode.emitFCMPL()
- case DOUBLE =>
- jcode.emitDCONST_0()
- if (cond == LT || cond == LE) jcode.emitDCMPG()
- else jcode.emitDCMPL()
- }
- if (nextBlock == success) {
- jcode.emitIF(conds(cond.negate()), labels(failure))
- } else {
- jcode.emitIF(conds(cond), labels(success))
- if (nextBlock != failure)
- jcode.emitGOTO_maybe_W(labels(failure), false)
- }
- }
-
- case RETURN(kind) => jcode emitRETURN javaType(kind)
-
- case THROW(_) => jcode.emitATHROW()
-
- case DROP(kind) =>
- if(kind.isWideType) jcode.emitPOP2()
- else jcode.emitPOP()
-
- case DUP(kind) =>
- if(kind.isWideType) jcode.emitDUP2()
- else jcode.emitDUP()
-
- case MONITOR_ENTER() => jcode.emitMONITORENTER()
-
- case MONITOR_EXIT() => jcode.emitMONITOREXIT()
-
- case SCOPE_ENTER(lv) =>
- varsInBlock += lv
- lv.start = jcode.getPC()
-
- case SCOPE_EXIT(lv) =>
- if (varsInBlock(lv)) {
- lv.ranges = (lv.start, jcode.getPC()) :: lv.ranges
- varsInBlock -= lv
- }
- else if (b.varsInScope(lv)) {
- lv.ranges = (labels(b).getAnchor(), jcode.getPC()) :: lv.ranges
- b.varsInScope -= lv
- }
- else dumpMethodAndAbort(method, "Illegal local var nesting")
-
- case LOAD_EXCEPTION(_) =>
- ()
- }
-
- crtPC = jcode.getPC()
-
- // assert(instr.pos.source.isEmpty || instr.pos.source.get == (clasz.cunit.source), "sources don't match")
- // val crtLine = instr.pos.line.get(lastLineNr);
-
- val crtLine = try {
- if (instr.pos == NoPosition) lastLineNr else (instr.pos).line // check NoPosition to avoid costly exception
- } catch {
- case _: UnsupportedOperationException =>
- log("Warning: wrong position in: " + method)
- lastLineNr
- }
-
- if (instr eq lastInstr) { endPC(b) = jcode.getPC() }
-
- //System.err.println("CRTLINE: " + instr.pos + " " +
- // /* (if (instr.pos < clasz.cunit.source.content.length) clasz.cunit.source.content(instr.pos) else '*') + */ " " + crtLine);
-
- if (crtPC > lastMappedPC) {
- jcode.completeLineNumber(lastMappedPC, crtPC, crtLine)
- lastMappedPC = crtPC
- lastLineNr = crtLine
- }
- }
-
- // local vars that survived this basic block
- for (lv <- varsInBlock) {
- lv.ranges = (lv.start, jcode.getPC()) :: lv.ranges
- }
- for (lv <- b.varsInScope) {
- lv.ranges = (labels(b).getAnchor(), jcode.getPC()) :: lv.ranges
- }
- }
-
-
- /**
- * @param primitive ...
- * @param pos ...
- */
- def genPrimitive(primitive: Primitive, pos: Position) {
- primitive match {
- case Negation(kind) =>
- if(kind.isIntSizedType) { jcode.emitINEG() }
- else {
- kind match {
- case LONG => jcode.emitLNEG()
- case FLOAT => jcode.emitFNEG()
- case DOUBLE => jcode.emitDNEG()
- case _ => abort("Impossible to negate a " + kind)
- }
- }
-
- case Arithmetic(op, kind) =>
- op match {
- case ADD =>
- if(kind.isIntSizedType) { jcode.emitIADD() }
- else {
- (kind: @unchecked) match {
- case LONG => jcode.emitLADD()
- case FLOAT => jcode.emitFADD()
- case DOUBLE => jcode.emitDADD()
- }
- }
-
- case SUB =>
- if(kind.isIntSizedType) { jcode.emitISUB() }
- else {
- (kind: @unchecked) match {
- case LONG => jcode.emitLSUB()
- case FLOAT => jcode.emitFSUB()
- case DOUBLE => jcode.emitDSUB()
- }
- }
-
- case MUL =>
- if(kind.isIntSizedType) { jcode.emitIMUL() }
- else {
- (kind: @unchecked) match {
- case LONG => jcode.emitLMUL()
- case FLOAT => jcode.emitFMUL()
- case DOUBLE => jcode.emitDMUL()
- }
- }
-
- case DIV =>
- if(kind.isIntSizedType) { jcode.emitIDIV() }
- else {
- (kind: @unchecked) match {
- case LONG => jcode.emitLDIV()
- case FLOAT => jcode.emitFDIV()
- case DOUBLE => jcode.emitDDIV()
- }
- }
-
- case REM =>
- if(kind.isIntSizedType) { jcode.emitIREM() }
- else {
- (kind: @unchecked) match {
- case LONG => jcode.emitLREM()
- case FLOAT => jcode.emitFREM()
- case DOUBLE => jcode.emitDREM()
- }
- }
-
- case NOT =>
- if(kind.isIntSizedType) {
- jcode.emitPUSH(-1)
- jcode.emitIXOR()
- } else if(kind == LONG) {
- jcode.emitPUSH(-1l)
- jcode.emitLXOR()
- } else {
- abort("Impossible to negate an " + kind)
- }
-
- case _ =>
- abort("Unknown arithmetic primitive " + primitive)
- }
-
- case Logical(op, kind) => ((op, kind): @unchecked) match {
- case (AND, LONG) => jcode.emitLAND()
- case (AND, INT) => jcode.emitIAND()
- case (AND, _) =>
- jcode.emitIAND()
- if (kind != BOOL)
- jcode.emitT2T(javaType(INT), javaType(kind));
-
- case (OR, LONG) => jcode.emitLOR()
- case (OR, INT) => jcode.emitIOR()
- case (OR, _) =>
- jcode.emitIOR()
- if (kind != BOOL)
- jcode.emitT2T(javaType(INT), javaType(kind));
-
- case (XOR, LONG) => jcode.emitLXOR()
- case (XOR, INT) => jcode.emitIXOR()
- case (XOR, _) =>
- jcode.emitIXOR()
- if (kind != BOOL)
- jcode.emitT2T(javaType(INT), javaType(kind));
- }
-
- case Shift(op, kind) => ((op, kind): @unchecked) match {
- case (LSL, LONG) => jcode.emitLSHL()
- case (LSL, INT) => jcode.emitISHL()
- case (LSL, _) =>
- jcode.emitISHL()
- jcode.emitT2T(javaType(INT), javaType(kind))
-
- case (ASR, LONG) => jcode.emitLSHR()
- case (ASR, INT) => jcode.emitISHR()
- case (ASR, _) =>
- jcode.emitISHR()
- jcode.emitT2T(javaType(INT), javaType(kind))
-
- case (LSR, LONG) => jcode.emitLUSHR()
- case (LSR, INT) => jcode.emitIUSHR()
- case (LSR, _) =>
- jcode.emitIUSHR()
- jcode.emitT2T(javaType(INT), javaType(kind))
- }
-
- case Comparison(op, kind) => ((op, kind): @unchecked) match {
- case (CMP, LONG) => jcode.emitLCMP()
- case (CMPL, FLOAT) => jcode.emitFCMPL()
- case (CMPG, FLOAT) => jcode.emitFCMPG()
- case (CMPL, DOUBLE) => jcode.emitDCMPL()
- case (CMPG, DOUBLE) => jcode.emitDCMPG()
- }
-
- case Conversion(src, dst) =>
- debuglog("Converting from: " + src + " to: " + dst)
- if (dst == BOOL) {
- println("Illegal conversion at: " + clasz + " at: " + pos.source + ":" + pos.line)
- } else
- jcode.emitT2T(javaType(src), javaType(dst))
-
- case ArrayLength(_) =>
- jcode.emitARRAYLENGTH()
-
- case StartConcat =>
- jcode emitNEW StringBuilderClassName
- jcode.emitDUP()
- jcode.emitINVOKESPECIAL(StringBuilderClassName,
- JMethod.INSTANCE_CONSTRUCTOR_NAME,
- JMethodType.ARGLESS_VOID_FUNCTION)
-
- case StringConcat(el) =>
- val jtype = el match {
- case REFERENCE(_) | ARRAY(_) => JAVA_LANG_OBJECT
- case _ => javaType(el)
- }
- jcode.emitINVOKEVIRTUAL(StringBuilderClassName,
- "append",
- new JMethodType(StringBuilderType,
- Array(jtype)))
- case EndConcat =>
- jcode.emitINVOKEVIRTUAL(StringBuilderClassName,
- "toString",
- toStringType)
-
- case _ =>
- abort("Unimplemented primitive " + primitive)
- }
- }
-
- // genCode starts here
- genBlocks(linearization)
-
- if (this.method.exh != Nil)
- genExceptionHandlers;
- }
-
-
- /** Emit a Local variable table for debugging purposes.
- * Synthetic locals are skipped. All variables are method-scoped.
- */
- private def genLocalVariableTable(m: IMethod, jcode: JCode) {
- val vars = m.locals filterNot (_.sym.isSynthetic)
- if (vars.isEmpty) return
-
- val pool = jclass.getConstantPool
- val pc = jcode.getPC()
- var anonCounter = 0
- var entries = 0
- vars.foreach { lv =>
- lv.ranges = mergeEntries(lv.ranges.reverse);
- entries += lv.ranges.length
- }
- if (!jmethod.isStatic()) entries += 1
-
- val lvTab = ByteBuffer.allocate(2 + 10 * entries)
- def emitEntry(name: String, signature: String, idx: Short, start: Short, end: Short) {
- lvTab putShort start
- lvTab putShort end
- lvTab putShort pool.addUtf8(name).toShort
- lvTab putShort pool.addUtf8(signature).toShort
- lvTab putShort idx
- }
-
- lvTab.putShort(entries.toShort)
-
- if (!jmethod.isStatic()) {
- emitEntry("this", jclass.getType().getSignature(), 0, 0.toShort, pc.toShort)
- }
-
- for (lv <- vars) {
- val name = if (javaName(lv.sym) eq null) {
- anonCounter += 1
- "<anon" + anonCounter + ">"
- } else javaName(lv.sym)
-
- val index = indexOf(lv).toShort
- val tpe = javaType(lv.kind).getSignature()
- for ((start, end) <- lv.ranges) {
- emitEntry(name, tpe, index, start.toShort, (end - start).toShort)
- }
- }
- val attr =
- fjbgContext.JOtherAttribute(jclass,
- jcode,
- tpnme.LocalVariableTableATTR.toString,
- lvTab.array())
- jcode addAttribute attr
- }
-
-
- /** For each basic block, the first PC address following it. */
- val endPC = new mutable.HashMap[BasicBlock, Int]
-
- ////////////////////// local vars ///////////////////////
-
- def sizeOf(sym: Symbol): Int = sizeOf(toTypeKind(sym.tpe))
-
- def sizeOf(k: TypeKind): Int = if(k.isWideType) 2 else 1
-
- def indexOf(m: IMethod, sym: Symbol): Int = {
- val Some(local) = m lookupLocal sym
- indexOf(local)
- }
-
- def indexOf(local: Local): Int = {
- assert(local.index >= 0, "Invalid index for: " + local + "{" + local.## + "}: ")
- local.index
- }
-
- /**
- * Compute the indexes of each local variable of the given
- * method. *Does not assume the parameters come first!*
- */
- def computeLocalVarsIndex(m: IMethod) {
- var idx = if (m.symbol.isStaticMember) 0 else 1;
-
- for (l <- m.params) {
- debuglog("Index value for " + l + "{" + l.## + "}: " + idx)
- l.index = idx
- idx += sizeOf(l.kind)
- }
-
- for (l <- m.locals if !(m.params contains l)) {
- debuglog("Index value for " + l + "{" + l.## + "}: " + idx)
- l.index = idx
- idx += sizeOf(l.kind)
- }
- }
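
Slot assignment here follows the JVM convention: slot 0 holds `this` in instance methods, parameters are laid out in order, and long/double values occupy two slots. A compact sketch of the same arithmetic with simplified kinds (Narrow/Wide and SlotSketch are invented names):

    object SlotSketch {
      sealed trait Kind { def size: Int }
      case object Narrow extends Kind { def size = 1 } // int, float, reference, ...
      case object Wide   extends Kind { def size = 2 } // long, double

      def assign(params: List[Kind], isStatic: Boolean): List[Int] = {
        var idx = if (isStatic) 0 else 1 // slot 0 is `this` for instance methods
        params.map { k => val i = idx; idx += k.size; i }
      }

      def main(args: Array[String]): Unit =
        println(assign(List(Narrow, Wide, Narrow), isStatic = false)) // List(1, 2, 4)
    }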
-
- ////////////////////// Utilities ////////////////////////
-
- /** Merge adjacent ranges. */
- private def mergeEntries(ranges: List[(Int, Int)]): List[(Int, Int)] =
- (ranges.foldLeft(Nil: List[(Int, Int)]) { (collapsed: List[(Int, Int)], p: (Int, Int)) => (collapsed, p) match {
- case (Nil, _) => List(p)
- case ((s1, e1) :: rest, (s2, e2)) if (e1 == s2) => (s1, e2) :: rest
- case _ => p :: collapsed
- }}).reverse
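
mergeEntries collapses back-to-back [start, end) ranges so the LocalVariableTable stays small. The same fold, extracted into a runnable form with an example input (object and method names are illustrative):

    object MergeRangesSketch {
      def merge(ranges: List[(Int, Int)]): List[(Int, Int)] =
        ranges.foldLeft(List.empty[(Int, Int)]) {
          case ((s1, e1) :: rest, (s2, e2)) if e1 == s2 => (s1, e2) :: rest
          case (acc, r)                                 => r :: acc
        }.reverse

      def main(args: Array[String]): Unit =
        println(merge(List((0, 4), (4, 9), (12, 15)))) // List((0,9), (12,15))
    }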
- }
-
- private def mkFlags(args: Int*) = args.foldLeft(0)(_ | _)
-
- /**
- * Return the Java modifiers for the given symbol.
- * Java modifiers for classes:
- * - public, abstract, final, strictfp (not used)
- * for interfaces:
- * - the same as for classes, without 'final'
- * for fields:
- * - public, private (*)
- * - static, final
- * for methods:
- * - the same as for fields, plus:
- * - abstract, synchronized (not used), strictfp (not used), native (not used)
- *
- * (*) protected cannot be used, since inner classes 'see' protected members,
- * and they would fail verification after being lifted.
- */
- def javaFlags(sym: Symbol): Int = {
- // constructors of module classes should be private
- // PP: why are they only being marked private at this stage and not earlier?
- val privateFlag =
- sym.isPrivate || (sym.isPrimaryConstructor && isTopLevelModule(sym.owner))
-
- // Final: the only fields which can receive ACC_FINAL are eager vals.
- // Neither vars nor lazy vals can, because:
- //
- // Source: http://docs.oracle.com/javase/specs/jls/se7/html/jls-17.html#jls-17.5.3
- // "Another problem is that the specification allows aggressive
- // optimization of final fields. Within a thread, it is permissible to
- // reorder reads of a final field with those modifications of a final
- // field that do not take place in the constructor."
- //
- // A var or lazy val which is marked final still has meaning to the
- // scala compiler. The word final is heavily overloaded unfortunately;
- // for us it means "not overridable". At present you can't override
- // vars regardless; this may change.
- //
- // The logic does not check .isFinal (which checks flags for the FINAL flag,
- // and includes symbols marked lateFINAL) instead inspecting rawflags so
- // we can exclude lateFINAL. Such symbols are eligible for inlining, but to
- // avoid breaking proxy software which depends on subclassing, we do not
- // emit ACC_FINAL.
- // Nested objects won't receive ACC_FINAL in order to allow for their overriding.
-
- val finalFlag = (
- (((sym.rawflags & Flags.FINAL) != 0) || isTopLevelModule(sym))
- && !sym.enclClass.isInterface
- && !sym.isClassConstructor
- && !sym.isMutable // lazy vals and vars both
- )
-
- // Primitives are "abstract final" to prohibit instantiation
- // without having to provide any implementations, but that is an
- // illegal combination of modifiers at the bytecode level so
- // suppress final if abstract if present.
- mkFlags(
- if (privateFlag) ACC_PRIVATE else ACC_PUBLIC,
- if (sym.isDeferred || sym.hasAbstractFlag) ACC_ABSTRACT else 0,
- if (sym.isInterface) ACC_INTERFACE else 0,
- if (finalFlag && !sym.hasAbstractFlag) ACC_FINAL else 0,
- if (sym.isStaticMember) ACC_STATIC else 0,
- if (sym.isBridge) ACC_BRIDGE | ACC_SYNTHETIC else 0,
- if (sym.isArtifact) ACC_SYNTHETIC else 0,
- if (sym.isClass && !sym.isInterface) ACC_SUPER else 0,
- if (sym.isVarargsMethod) ACC_VARARGS else 0,
- if (sym.hasFlag(Flags.SYNCHRONIZED)) JAVA_ACC_SYNCHRONIZED else 0
- )
- }
- def javaFieldFlags(sym: Symbol) = (
- javaFlags(sym) | mkFlags(
- if (sym hasAnnotation TransientAttr) ACC_TRANSIENT else 0,
- if (sym hasAnnotation VolatileAttr) ACC_VOLATILE else 0,
- if (sym.isMutable) 0 else ACC_FINAL
- )
- )
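
javaFlags relies on mkFlags to OR together conditionally selected access bits, with 0 as the neutral element for flags that do not apply. A minimal standalone sketch of the idiom with a few flag constants spelled out (the values follow the class-file spec; FlagSketch itself is invented):

    object FlagSketch {
      val ACC_PUBLIC = 0x0001
      val ACC_STATIC = 0x0008
      val ACC_FINAL  = 0x0010

      def mkFlags(bits: Int*): Int = bits.foldLeft(0)(_ | _)

      def main(args: Array[String]): Unit = {
        val isStatic = true
        val isFinal  = false
        val flags = mkFlags(
          ACC_PUBLIC,
          if (isStatic) ACC_STATIC else 0,
          if (isFinal) ACC_FINAL else 0
        )
        println(flags.toHexString) // 9
      }
    }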
-
- def isTopLevelModule(sym: Symbol): Boolean =
- afterPickler { sym.isModuleClass && !sym.isImplClass && !sym.isNestedClass }
-
- def isStaticModule(sym: Symbol): Boolean = {
- sym.isModuleClass && !sym.isImplClass && !sym.isLifted
- }
-
-}
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenJVMASM.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenJVMASM.scala
index 540935fd57..2c3bf26958 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/GenJVMASM.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/GenJVMASM.scala
@@ -5,34 +5,19 @@
package scala.tools.nsc
package backend.jvm
-import scala.tools.nsc.io.AbstractFile
import scala.tools.nsc.symtab._
-/** Code shared between the legagy backend [[scala.tools.nsc.backend.jvm.GenJVM]]
- * and the new backend [[scala.tools.nsc.backend.jvm.GenASM]]. There should be
- * more here, but for now I'm starting with the refactorings that are either
- * straightforward to review or necessary for maintenance.
- */
+/** Code shared between the erstwhile legacy backend (aka GenJVM)
+ * and the new backend [[scala.tools.nsc.backend.jvm.GenASM]]. There should be
+ * more here, but for now I'm starting with the refactorings that are either
+ * straightforward to review or necessary for maintenance.
+ */
trait GenJVMASM {
val global: Global
import global._
import icodes._
import definitions._
- protected def outputDirectory(sym: Symbol): AbstractFile =
- settings.outputDirs outputDirFor beforeFlatten(sym.sourceFile)
-
- protected def getFile(base: AbstractFile, clsName: String, suffix: String): AbstractFile = {
- var dir = base
- val pathParts = clsName.split("[./]").toList
- for (part <- pathParts.init) {
- dir = dir.subdirectoryNamed(part)
- }
- dir.fileNamed(pathParts.last + suffix)
- }
- protected def getFile(sym: Symbol, clsName: String, suffix: String): AbstractFile =
- getFile(outputDirectory(sym), clsName, suffix)
-
protected val ExcludedForwarderFlags = {
import Flags._
// Should include DEFERRED but this breaks findMember.
@@ -65,9 +50,8 @@ trait GenJVMASM {
// At this point it's a module with a main-looking method, so either succeed or warn that it isn't.
hasApproximate && {
// Before erasure so we can identify generic mains.
- beforeErasure {
+ enteringErasure {
val companion = sym.linkedClassOfClass
- val companionMain = companion.tpe.member(nme.main)
if (hasJavaMainMethod(companion))
failNoForwarder("companion contains its own main method")
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenJVMUtil.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenJVMUtil.scala
deleted file mode 100644
index e002a614bd..0000000000
--- a/src/compiler/scala/tools/nsc/backend/jvm/GenJVMUtil.scala
+++ /dev/null
@@ -1,142 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Iulian Dragos
- */
-
-package scala.tools.nsc
-package backend.jvm
-
-import scala.collection.{ mutable, immutable }
-import ch.epfl.lamp.fjbg._
-
-trait GenJVMUtil {
- self: GenJVM =>
-
- import global._
- import icodes._
- import icodes.opcodes._
- import definitions._
-
- /** Map from type kinds to the Java reference types. It is used for
- * loading class constants. @see Predef.classOf.
- */
- val classLiteral = immutable.Map[TypeKind, JObjectType](
- UNIT -> new JObjectType("java.lang.Void"),
- BOOL -> new JObjectType("java.lang.Boolean"),
- BYTE -> new JObjectType("java.lang.Byte"),
- SHORT -> new JObjectType("java.lang.Short"),
- CHAR -> new JObjectType("java.lang.Character"),
- INT -> new JObjectType("java.lang.Integer"),
- LONG -> new JObjectType("java.lang.Long"),
- FLOAT -> new JObjectType("java.lang.Float"),
- DOUBLE -> new JObjectType("java.lang.Double")
- )
-
- // Don't put this in per run caches.
- private val javaNameCache = new mutable.WeakHashMap[Symbol, Name]() ++= List(
- NothingClass -> binarynme.RuntimeNothing,
- RuntimeNothingClass -> binarynme.RuntimeNothing,
- NullClass -> binarynme.RuntimeNull,
- RuntimeNullClass -> binarynme.RuntimeNull
- )
-
- /** This trait may be used by tools that need access to
- * utility methods like javaName and javaType. (for instance,
- * the Eclipse plugin uses it).
- */
- trait BytecodeUtil {
-
- val conds = immutable.Map[TestOp, Int](
- EQ -> JExtendedCode.COND_EQ,
- NE -> JExtendedCode.COND_NE,
- LT -> JExtendedCode.COND_LT,
- GT -> JExtendedCode.COND_GT,
- LE -> JExtendedCode.COND_LE,
- GE -> JExtendedCode.COND_GE
- )
-
- /** Specialized array conversion to prevent calling
- * java.lang.reflect.Array.newInstance via TraversableOnce.toArray
- */
-
- def mkArray(xs: Traversable[JType]): Array[JType] = { val a = new Array[JType](xs.size); xs.copyToArray(a); a }
- def mkArray(xs: Traversable[String]): Array[String] = { val a = new Array[String](xs.size); xs.copyToArray(a); a }
-
- /** Return a name of this symbol that can be used on the Java
- * platform. It removes spaces from names.
- *
- * Special handling:
- * scala.Nothing erases to scala.runtime.Nothing$
- * scala.Null erases to scala.runtime.Null$
- *
- * This is needed because they are not real classes, and they mean
- * 'abrupt termination upon evaluation of that expression' or null respectively.
- * This handling is done already in GenICode, but here we need to remove
- * references from method signatures to these types, because such classes can
- * not exist in the classpath: the type checker will be very confused.
- */
- def javaName(sym: Symbol): String =
- javaNameCache.getOrElseUpdate(sym, {
- if (sym.isClass || (sym.isModule && !sym.isMethod))
- sym.javaBinaryName
- else
- sym.javaSimpleName
- }).toString
-
- def javaType(t: TypeKind): JType = (t: @unchecked) match {
- case UNIT => JType.VOID
- case BOOL => JType.BOOLEAN
- case BYTE => JType.BYTE
- case SHORT => JType.SHORT
- case CHAR => JType.CHAR
- case INT => JType.INT
- case LONG => JType.LONG
- case FLOAT => JType.FLOAT
- case DOUBLE => JType.DOUBLE
- case REFERENCE(cls) => new JObjectType(javaName(cls))
- case ARRAY(elem) => new JArrayType(javaType(elem))
- }
-
- def javaType(t: Type): JType = javaType(toTypeKind(t))
-
- def javaType(s: Symbol): JType =
- if (s.isMethod)
- new JMethodType(
- if (s.isClassConstructor) JType.VOID else javaType(s.tpe.resultType),
- mkArray(s.tpe.paramTypes map javaType)
- )
- else
- javaType(s.tpe)
-
- protected def genConstant(jcode: JExtendedCode, const: Constant) {
- const.tag match {
- case UnitTag => ()
- case BooleanTag => jcode emitPUSH const.booleanValue
- case ByteTag => jcode emitPUSH const.byteValue
- case ShortTag => jcode emitPUSH const.shortValue
- case CharTag => jcode emitPUSH const.charValue
- case IntTag => jcode emitPUSH const.intValue
- case LongTag => jcode emitPUSH const.longValue
- case FloatTag => jcode emitPUSH const.floatValue
- case DoubleTag => jcode emitPUSH const.doubleValue
- case StringTag => jcode emitPUSH const.stringValue
- case NullTag => jcode.emitACONST_NULL()
- case ClazzTag =>
- val kind = toTypeKind(const.typeValue)
- val toPush =
- if (kind.isValueType) classLiteral(kind)
- else javaType(kind).asInstanceOf[JReferenceType]
-
- jcode emitPUSH toPush
-
- case EnumTag =>
- val sym = const.symbolValue
- jcode.emitGETSTATIC(javaName(sym.owner),
- javaName(sym),
- javaType(sym.tpe.underlying))
- case _ =>
- abort("Unknown constant value: " + const)
- }
- }
- }
-}
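
In classfile terms, the javaType mapping in the deleted trait above is the usual type-descriptor encoding: one letter per primitive and L<binary name>; for references. A small, string-based sketch of that encoding (illustrative only, not the compiler's representation; arrays are omitted):

    object DescriptorSketch {
      def descriptor(kind: String): String = kind match {
        case "UNIT"   => "V"
        case "BOOL"   => "Z"
        case "BYTE"   => "B"
        case "SHORT"  => "S"
        case "CHAR"   => "C"
        case "INT"    => "I"
        case "LONG"   => "J"
        case "FLOAT"  => "F"
        case "DOUBLE" => "D"
        case ref      => "L" + ref.replace('.', '/') + ";"
      }

      def main(args: Array[String]): Unit =
        println(descriptor("java.lang.String")) // Ljava/lang/String;
    }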
diff --git a/src/compiler/scala/tools/nsc/backend/msil/GenMSIL.scala b/src/compiler/scala/tools/nsc/backend/msil/GenMSIL.scala
deleted file mode 100644
index aaffaa84d8..0000000000
--- a/src/compiler/scala/tools/nsc/backend/msil/GenMSIL.scala
+++ /dev/null
@@ -1,2358 +0,0 @@
-/* NSC -- new scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Nikolay Mihaylov
- */
-
-
-package scala.tools.nsc
-package backend.msil
-
-import java.io.{File, IOException}
-import java.nio.{ByteBuffer, ByteOrder}
-import scala.collection.{ mutable, immutable }
-import scala.tools.nsc.symtab._
-
-import ch.epfl.lamp.compiler.msil.{Type => MsilType, _}
-import ch.epfl.lamp.compiler.msil.emit._
-import ch.epfl.lamp.compiler.msil.util.PECustomMod
-import scala.language.postfixOps
-
-abstract class GenMSIL extends SubComponent {
- import global._
- import loaders.clrTypes
- import clrTypes.{types, constructors, methods, fields}
- import icodes._
- import icodes.opcodes._
-
- val x = loaders
-
- /** Create a new phase */
- override def newPhase(p: Phase) = new MsilPhase(p)
-
- val phaseName = "msil"
- /** MSIL code generation phase
- */
- class MsilPhase(prev: Phase) extends GlobalPhase(prev) {
- def name = phaseName
- override def newFlags = phaseNewFlags
-
- override def erasedTypes = true
-
- override def run() {
- if (settings.debug.value) inform("[running phase " + name + " on icode]")
-
- val codeGenerator = new BytecodeGenerator
-
- //classes is ICodes.classes, a HashMap[Symbol, IClass]
- classes.values foreach codeGenerator.findEntryPoint
- if( opt.showClass.isDefined && (codeGenerator.entryPoint == null) ) { // TODO introduce dedicated setting instead
- val entryclass = opt.showClass.get.toString
- warning("Couldn't find entry class " + entryclass)
- }
-
- codeGenerator.initAssembly
-
- val classesSorted = classes.values.toList.sortBy(c => c.symbol.id) // simplifies comparing cross-compiler vs. .exe output
- classesSorted foreach codeGenerator.createTypeBuilder
- classesSorted foreach codeGenerator.createClassMembers
-
- try {
- classesSorted foreach codeGenerator.genClass
- } finally {
- codeGenerator.writeAssembly
- }
- }
-
- override def apply(unit: CompilationUnit) {
- abort("MSIL works on icode classes, not on compilation units!")
- }
- }
-
- /**
- * MSIL bytecode generator.
- */
- class BytecodeGenerator {
-
- val MODULE_INSTANCE_NAME = "MODULE$"
-
- import clrTypes.{VOID => MVOID, BOOLEAN => MBOOL, BYTE => MBYTE, SHORT => MSHORT,
- CHAR => MCHAR, INT => MINT, LONG => MLONG, FLOAT => MFLOAT,
- DOUBLE => MDOUBLE, OBJECT => MOBJECT, STRING => MSTRING,
- STRING_ARRAY => MSTRING_ARRAY,
- SYMTAB_CONSTR => SYMTAB_ATTRIBUTE_CONSTRUCTOR,
- SYMTAB_DEFAULT_CONSTR => SYMTAB_ATTRIBUTE_EMPTY_CONSTRUCTOR}
-
- val EXCEPTION = clrTypes.getType("System.Exception")
- val MBYTE_ARRAY = clrTypes.mkArrayType(MBYTE)
-
- val ICLONEABLE = clrTypes.getType("System.ICloneable")
- val MEMBERWISE_CLONE = MOBJECT.GetMethod("MemberwiseClone", MsilType.EmptyTypes)
-
- val MMONITOR = clrTypes.getType("System.Threading.Monitor")
- val MMONITOR_ENTER = MMONITOR.GetMethod("Enter", Array(MOBJECT))
- val MMONITOR_EXIT = MMONITOR.GetMethod("Exit", Array(MOBJECT))
-
- val MSTRING_BUILDER = clrTypes.getType("System.Text.StringBuilder")
- val MSTRING_BUILDER_CONSTR = MSTRING_BUILDER.GetConstructor(MsilType.EmptyTypes)
- val MSTRING_BUILDER_TOSTRING = MSTRING_BUILDER.GetMethod("ToString",
- MsilType.EmptyTypes)
-
- val TYPE_FROM_HANDLE =
- clrTypes.getType("System.Type").GetMethod("GetTypeFromHandle", Array(clrTypes.getType("System.RuntimeTypeHandle")))
-
- val INT_PTR = clrTypes.getType("System.IntPtr")
-
- val JOBJECT = definitions.ObjectClass
- val JSTRING = definitions.StringClass
-
- val SystemConvert = clrTypes.getType("System.Convert")
-
- val objParam = Array(MOBJECT)
-
- val toBool: MethodInfo = SystemConvert.GetMethod("ToBoolean", objParam) // see comment in emitUnbox
- val toSByte: MethodInfo = SystemConvert.GetMethod("ToSByte", objParam)
- val toShort: MethodInfo = SystemConvert.GetMethod("ToInt16", objParam)
- val toChar: MethodInfo = SystemConvert.GetMethod("ToChar", objParam)
- val toInt: MethodInfo = SystemConvert.GetMethod("ToInt32", objParam)
- val toLong: MethodInfo = SystemConvert.GetMethod("ToInt64", objParam)
- val toFloat: MethodInfo = SystemConvert.GetMethod("ToSingle", objParam)
- val toDouble: MethodInfo = SystemConvert.GetMethod("ToDouble", objParam)
-
- //val boxedUnit: FieldInfo = msilType(definitions.BoxedUnitModule.info).GetField("UNIT")
- val boxedUnit: FieldInfo = fields(definitions.BoxedUnit_UNIT)
-
- // Scala attributes
- // symtab.Definitions -> object (singleton..)
- val SerializableAttr = definitions.SerializableAttr.tpe
- val CloneableAttr = definitions.CloneableAttr.tpe
- val TransientAtt = definitions.TransientAttr.tpe
- // remoting: the architectures are too different, no mapping (no portable code
- // possible)
-
- // java instance methods that are mapped to static methods in .net
- // these will need to be called with OpCodes.Call (not Callvirt)
- val dynToStatMapped = mutable.HashSet[Symbol]()
-
- initMappings()
-
- /** Create the mappings between java and .net classes and methods */
- private def initMappings() {
- mapType(definitions.AnyClass, MOBJECT)
- mapType(definitions.AnyRefClass, MOBJECT)
- //mapType(definitions.NullClass, clrTypes.getType("scala.AllRef$"))
- //mapType(definitions.NothingClass, clrTypes.getType("scala.All$"))
- // FIXME: for some reason the two mappings above resolve to null
- mapType(definitions.NullClass, EXCEPTION)
- mapType(definitions.NothingClass, EXCEPTION)
-
- mapType(definitions.BooleanClass, MBOOL)
- mapType(definitions.ByteClass, MBYTE)
- mapType(definitions.ShortClass, MSHORT)
- mapType(definitions.CharClass, MCHAR)
- mapType(definitions.IntClass, MINT)
- mapType(definitions.LongClass, MLONG)
- mapType(definitions.FloatClass, MFLOAT)
- mapType(definitions.DoubleClass, MDOUBLE)
- }
-
- var clasz: IClass = _
- var method: IMethod = _
-
- var massembly: AssemblyBuilder = _
- var mmodule: ModuleBuilder = _
- var mcode: ILGenerator = _
-
- var assemName: String = _
- var firstSourceName = ""
- var outDir: File = _
- var srcPath: File = _
- var moduleName: String = _
-
- def initAssembly() {
-
- assemName = settings.assemname.value
-
- if (assemName == "") {
- if (entryPoint != null) {
- assemName = msilName(entryPoint.enclClass)
- // remove the $ at the end (from module-name)
- assemName = assemName.substring(0, assemName.length() - 1)
- } else {
- // assuming filename of first source file
- assert(firstSourceName.endsWith(".scala"), firstSourceName)
- assemName = firstSourceName.substring(0, firstSourceName.length() - 6)
- }
- } else {
- if (assemName.endsWith(".msil"))
- assemName = assemName.substring(0, assemName.length()-5)
- if (assemName.endsWith(".il"))
- assemName = assemName.substring(0, assemName.length()-3)
- val f: File = new File(assemName)
- assemName = f.getName()
- }
-
- outDir = new File(settings.outdir.value)
-
- srcPath = new File(settings.sourcedir.value)
-
- val assemblyName = new AssemblyName()
- assemblyName.Name = assemName
- massembly = AssemblyBuilderFactory.DefineDynamicAssembly(assemblyName)
-
- moduleName = assemName // + (if (entryPoint == null) ".dll" else ".exe")
- // filename here: .dll or .exe (in both parameters); the second argument is the absolute path
- mmodule = massembly.DefineDynamicModule(moduleName,
- new File(outDir, moduleName).getAbsolutePath())
- assert (mmodule != null)
- }
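-
- // Illustrative note on the derivation above (hypothetical inputs): an entry point defined
- // in `object HelloWorld` lives in the module class `HelloWorld$`, so the trailing '$' is
- // dropped and assemName becomes "HelloWorld"; an explicit setting of "out/MyApp.msil"
- // loses its extension and directory, yielding "MyApp".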
-
-
- /**
- * Form of the custom Attribute parameter (Ecma-335.pdf)
- * - p. 163 for CustomAttrib Form,
- * - p. 164 for FixedArg Form (Array and Element) (whether it is an array is known from the constructor)
- * !! least significant byte first if values longer than one byte !!
- *
- * 1: Prolog (unsigned int16, value 0x0001) -> symtab[0] = 0x01, symtab[1] = 0x00
- * 2: FixedArgs (directly the data, get number and types from related constructor)
- * 2.1: length of the array (unsigned int32, 4 bytes, least significant first)
- * 2.2: the byte array data
- * 3: NumNamed (unsigned int16, number of named fields and properties, 0x0000)
- */
- def addSymtabAttribute(sym: Symbol, tBuilder: TypeBuilder) {
- def addMarker() {
- val markerSymtab = new Array[Byte](4)
- markerSymtab(0) = 1.toByte
- tBuilder.SetCustomAttribute(SYMTAB_ATTRIBUTE_EMPTY_CONSTRUCTOR, markerSymtab)
- }
-
- // both conditions are needed (why exactly..?)
- if (tBuilder.Name.endsWith("$") || sym.isModuleClass) {
- addMarker()
- } else {
- currentRun.symData.get(sym) match {
- case Some(pickle) =>
- var size = pickle.writeIndex
- val symtab = new Array[Byte](size + 8)
- symtab(0) = 1.toByte
- for (i <- 2 until 6) {
- symtab(i) = (size & 0xff).toByte
- size = size >> 8
- }
- java.lang.System.arraycopy(pickle.bytes, 0, symtab, 6, pickle.writeIndex)
-
- tBuilder.SetCustomAttribute(SYMTAB_ATTRIBUTE_CONSTRUCTOR, symtab)
-
- currentRun.symData -= sym
- currentRun.symData -= sym.companionSymbol
-
- case _ =>
- addMarker()
- }
- }
- }
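-
- /* Minimal sketch of the attribute blob produced above (illustration only;
-  * `symtabBlobHeader` is a hypothetical helper, not one of the methods here). For a pickle
-  * of `size` bytes the blob is: prolog 0x0001 (least significant byte first), the 4-byte
-  * length, the pickle data, and two trailing NumNamed bytes left at 0x0000.
-  *
-  *   def symtabBlobHeader(size: Int): Array[Byte] = {
-  *     val blob = new Array[Byte](6)
-  *     blob(0) = 1.toByte                  // prolog 0x0001, low byte first
-  *     var s = size
-  *     for (i <- 2 until 6) { blob(i) = (s & 0xff).toByte; s >>= 8 }
-  *     blob
-  *   }
-  *   // symtabBlobHeader(0x0203) yields the bytes 01 00 03 02 00 00
-  */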
-
- /**
- * Mutates `member` adding CLR attributes (if any) based on sym.annotations.
- * Please notice that CLR custom modifiers are a different beast (see customModifiers below)
- * and thus shouldn't be added by this method.
- */
- def addAttributes(member: ICustomAttributeSetter, annotations: List[AnnotationInfo]) {
- val attributes = annotations.map(_.atp.typeSymbol).collect {
- case definitions.TransientAttr => null // TODO this is just an example
- }
- return // TODO: implement at some point
- }
-
- /**
- * What's a CLR custom modifier? Intro available as source comments in compiler.msil.CustomModifier.
- * It's basically a marker associated with a location (think of FieldInfo, ParameterInfo, and PropertyInfo)
- * and thus that marker (be it optional or required) becomes part of the signature of that location.
- * Some annotations will become CLR attributes (see addAttributes above), others custom modifiers (this method).
- */
- def customModifiers(annotations: List[AnnotationInfo]): Array[CustomModifier] = {
- annotations.map(_.atp.typeSymbol).collect {
- case definitions.VolatileAttr => new CustomModifier(true, CustomModifier.VolatileMarker)
- } toArray
- }
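-
- // For illustration (hypothetical input): a field carrying scala's @volatile annotation
- // maps to Array(new CustomModifier(true, CustomModifier.VolatileMarker)), which the PE
- // writer presumably renders as a required modifier (modreq) on that field's signature;
- // annotations with no custom-modifier mapping yield an empty array here.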
-
-
-
- /*
- debuglog("creating annotations: " + annotations + " for member : " + member)
- for (annot@ AnnotationInfo(typ, annArgs, nvPairs) <- annotations ;
- if annot.isConstant)
- //!typ.typeSymbol.isJavaDefined
- {
-// assert(consts.length <= 1,
-// "too many constant arguments for annotations; "+consts.toString())
-
- // Problem / TODO having the symbol of the annotations type would be nicer
- // (I hope that type.typeSymbol is the same as the one in types2create)
- // AND: this will crash if the annotations Type is already compiled (-> not a typeBuilder)
- // when this is solved, types2create will be the same as icodes.classes, thus superfluous
- val annType: TypeBuilder = getType(typ.typeSymbol).asInstanceOf[TypeBuilder]
-// val annType: MsilType = getType(typ.typeSymbol)
-
- // Problem / TODO: I have no idea which constructor is used. This
- // information should be available in AnnotationInfo.
- annType.CreateType() // else, GetConstructors can't be used
- val constr: ConstructorInfo = annType.GetConstructors()(0)
- // prevent a second call of CreateType, only needed because there's no
- // other way than GetConstructors()(0) to get the constructor, if there's
- // no constructor symbol available.
-
- val args: Array[Byte] =
- getAttributeArgs(
- annArgs map (_.constant.get),
- (for((n,v) <- nvPairs) yield (n, v.constant.get)))
- member.SetCustomAttribute(constr, args)
- }
- } */
-
-/* def getAttributeArgs(consts: List[Constant], nvPairs: List[(Name, Constant)]): Array[Byte] = {
- val buf = ByteBuffer.allocate(2048) // FIXME: this may be not enough!
- buf.order(ByteOrder.LITTLE_ENDIAN)
- buf.putShort(1.toShort) // signature
-
- def emitSerString(str: String) = {
- // this is wrong, it has to be the length of the UTF-8 byte array, which
- // may be longer (see clr-book on page 302)
-// val length: Int = str.length
- val strBytes: Array[Byte] = try {
- str.getBytes("UTF-8")
- } catch {
- case _: Error => abort("could not get byte-array for string: " + str)
- }
- val length: Int = strBytes.length //this length is stored big-endian
- if (length < 128)
- buf.put(length.toByte)
- else if (length < (1<<14)) {
- buf.put(((length >> 8) | 0x80).toByte) // the bits 14 and 15 of length are '0'
- buf.put((length & 0xff).toByte)
- } else if (length < (1 << 29)) {
- buf.put(((length >> 24) | 0xc0).toByte)
- buf.put(((length >> 16) & 0xff).toByte)
- buf.put(((length >> 8) & 0xff).toByte)
- buf.put(((length ) & 0xff).toByte)
- } else
- abort("string too long for attribute parameter: " + length)
- buf.put(strBytes)
- }
-
- def emitConst(const: Constant): Unit = const.tag match {
- case BooleanTag => buf.put((if (const.booleanValue) 1 else 0).toByte)
- case ByteTag => buf.put(const.byteValue)
- case ShortTag => buf.putShort(const.shortValue)
- case CharTag => buf.putChar(const.charValue)
- case IntTag => buf.putInt(const.intValue)
- case LongTag => buf.putLong(const.longValue)
- case FloatTag => buf.putFloat(const.floatValue)
- case DoubleTag => buf.putDouble(const.doubleValue)
- case StringTag =>
- val str: String = const.stringValue
- if (str == null) {
- buf.put(0xff.toByte)
- } else {
- emitSerString(str)
- }
- case ArrayTag =>
- val arr: Array[Constant] = const.arrayValue
- if (arr == null) {
- buf.putInt(0xffffffff)
- } else {
- buf.putInt(arr.length)
- arr.foreach(emitConst)
- }
-
- // TODO: other Tags: NoTag, UnitTag, ClazzTag, EnumTag, ArrayTag ???
-
- case _ => abort("could not handle attribute argument: " + const)
- }
-
- consts foreach emitConst
- buf.putShort(nvPairs.length.toShort)
- def emitNamedArg(nvPair: (Name, Constant)) {
- // the named argument is a property of the attribute (it can't be a field, since
- // all fields in scala are private)
- buf.put(0x54.toByte)
-
- def emitType(c: Constant) = c.tag match { // type of the constant, Ecma-335.pdf, page 151
- case BooleanTag => buf.put(0x02.toByte)
- case ByteTag => buf.put(0x05.toByte)
- case ShortTag => buf.put(0x06.toByte)
- case CharTag => buf.put(0x07.toByte)
- case IntTag => buf.put(0x08.toByte)
- case LongTag => buf.put(0x0a.toByte)
- case FloatTag => buf.put(0x0c.toByte)
- case DoubleTag => buf.put(0x0d.toByte)
- case StringTag => buf.put(0x0e.toByte)
-
- // TODO: other Tags: NoTag, UnitTag, ClazzTag, EnumTag ???
-
- // ArrayTag falls in here
- case _ => abort("could not handle attribute argument: " + c)
- }
-
- val cnst: Constant = nvPair._2
- if (cnst.tag == ArrayTag) {
- buf.put(0x1d.toByte)
- emitType(cnst.arrayValue(0)) // FIXME: will crash if array length = 0
- } else if (cnst.tag == EnumTag) {
- buf.put(0x55.toByte)
- // TODO: put a SerString (don't know what exactly, names of the enums somehow..)
- } else {
- buf.put(0x51.toByte)
- emitType(cnst)
- }
-
- emitSerString(nvPair._1.toString)
- emitConst(nvPair._2)
- }
-
- val length = buf.position()
- buf.array().slice(0, length)
- } */
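-
- /* Minimal sketch of the compressed SerString length prefix used by the commented-out
-  * emitSerString above (illustration of the ECMA-335 rule it cites; `serStringLength` is a
-  * hypothetical helper, not one of the methods here):
-  *
-  *   def serStringLength(length: Int): Array[Byte] =
-  *     if (length < 0x80) Array(length.toByte)
-  *     else if (length < (1 << 14))
-  *       Array(((length >> 8) | 0x80).toByte, (length & 0xff).toByte)
-  *     else if (length < (1 << 29))
-  *       Array(((length >> 24) | 0xc0).toByte, ((length >> 16) & 0xff).toByte,
-  *             ((length >> 8) & 0xff).toByte, (length & 0xff).toByte)
-  *     else sys.error("string too long for attribute parameter: " + length)
-  *
-  *   // serStringLength(0x7f) yields 7f; serStringLength(0x300) yields 83 00
-  */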
-
- def writeAssembly() {
- if (entryPoint != null) {
- assert(entryPoint.enclClass.isModuleClass, entryPoint.enclClass)
- val mainMethod = methods(entryPoint)
- val stringArrayTypes: Array[MsilType] = Array(MSTRING_ARRAY)
- val globalMain = mmodule.DefineGlobalMethod(
- "Main", MethodAttributes.Public | MethodAttributes.Static,
- MVOID, stringArrayTypes)
- globalMain.DefineParameter(0, ParameterAttributes.None, "args")
- massembly.SetEntryPoint(globalMain)
- val code = globalMain.GetILGenerator()
- val moduleField = getModuleInstanceField(entryPoint.enclClass)
- code.Emit(OpCodes.Ldsfld, moduleField)
- code.Emit(OpCodes.Ldarg_0)
- code.Emit(OpCodes.Callvirt, mainMethod)
- code.Emit(OpCodes.Ret)
- }
- createTypes()
- var outDirName: String = null
- try {
- if (settings.Ygenjavap.isDefault) { // we reuse the JVM-sounding setting because it's conceptually similar
- outDirName = outDir.getPath()
- massembly.Save(outDirName + "\\" + assemName + ".msil") /* use SingleFileILPrinterVisitor */
- } else {
- outDirName = srcPath.getPath()
- massembly.Save(settings.Ygenjavap.value, outDirName) /* use MultipleFilesILPrinterVisitor */
- }
- } catch {
- case e:IOException => abort("Could not write to " + outDirName + ": " + e.getMessage())
- }
- }
-
- private def createTypes() {
- for (sym <- classes.keys) {
- val iclass = classes(sym)
- val tBuilder = types(sym).asInstanceOf[TypeBuilder]
-
- debuglog("Calling CreatType for " + sym + ", " + tBuilder.toString)
-
- tBuilder.CreateType()
- tBuilder.setSourceFilepath(iclass.cunit.source.file.path)
- }
- }
-
- private[GenMSIL] def ilasmFileName(iclass: IClass) : String = {
- // method.sourceFile contains just the filename
- iclass.cunit.source.file.toString.replace("\\", "\\\\")
- }
-
- private[GenMSIL] def genClass(iclass: IClass) {
- val sym = iclass.symbol
- debuglog("Generating class " + sym + " flags: " + Flags.flagsToString(sym.flags))
- clasz = iclass
-
- val tBuilder = getType(sym).asInstanceOf[TypeBuilder]
- if (isCloneable(sym)) {
- // FIXME: why there's no nme.clone_ ?
- // "Clone": if the code is non-portable, "Clone" is defined, not "clone"
- // TODO: improve condition (should override AnyRef.clone)
- if (iclass.methods.forall(m => {
- !((m.symbol.name.toString == "clone" || m.symbol.name.toString == "Clone") &&
- m.symbol.tpe.paramTypes.length == 0)
- })) {
- debuglog("auto-generating cloneable method for " + sym)
- val attrs: Short = (MethodAttributes.Public | MethodAttributes.Virtual |
- MethodAttributes.HideBySig).toShort
- val cloneMethod = tBuilder.DefineMethod("Clone", attrs, MOBJECT,
- MsilType.EmptyTypes)
- val clCode = cloneMethod.GetILGenerator()
- clCode.Emit(OpCodes.Ldarg_0)
- clCode.Emit(OpCodes.Call, MEMBERWISE_CLONE)
- clCode.Emit(OpCodes.Ret)
- }
- }
-
- val line = sym.pos.line
- tBuilder.setPosition(line, ilasmFileName(iclass))
-
- if (isTopLevelModule(sym)) {
- if (sym.companionClass == NoSymbol)
- generateMirrorClass(sym)
- else
- log("No mirror class for module with linked class: " +
- sym.fullName)
- }
-
- addSymtabAttribute(sym, tBuilder)
- addAttributes(tBuilder, sym.annotations)
-
- if (iclass.symbol != definitions.ArrayClass)
- iclass.methods foreach genMethod
-
- } //genClass
-
-
- private def genMethod(m: IMethod) {
- debuglog("Generating method " + m.symbol + " flags: " + Flags.flagsToString(m.symbol.flags) +
- " owner: " + m.symbol.owner)
- method = m
- localBuilders.clear
- computeLocalVarsIndex(m)
-
- if (m.symbol.isClassConstructor) {
- mcode = constructors(m.symbol).asInstanceOf[ConstructorBuilder].GetILGenerator()
- } else {
- val mBuilder = methods(m.symbol).asInstanceOf[MethodBuilder]
- if (!mBuilder.IsAbstract())
- try {
- mcode = mBuilder.GetILGenerator()
- } catch {
- case e: Exception =>
- java.lang.System.out.println("m.symbol = " + Flags.flagsToString(m.symbol.flags) + " " + m.symbol)
- java.lang.System.out.println("m.symbol.owner = " + Flags.flagsToString(m.symbol.owner.flags) + " " + m.symbol.owner)
- java.lang.System.out.println("mBuilder = " + mBuilder)
- java.lang.System.out.println("mBuilder.DeclaringType = " +
- TypeAttributes.toString(mBuilder.DeclaringType.Attributes) +
- "::" + mBuilder.DeclaringType)
- throw e
- }
- else
- mcode = null
- }
-
- if (mcode != null) {
- for (local <- m.locals ; if !(m.params contains local)) {
- debuglog("add local var: " + local + ", of kind " + local.kind)
- val t: MsilType = msilType(local.kind)
- val localBuilder = mcode.DeclareLocal(t)
- localBuilder.SetLocalSymInfo(msilName(local.sym))
- localBuilders(local) = localBuilder
- }
- genCode(m)
- }
-
- }
-
- /** Special linearizer for methods with at least one exception handler. This
- * linearizer brings all basic blocks in the right order so that nested
- * try-catch and try-finally blocks can be emitted.
- */
- val msilLinearizer = new MSILLinearizer()
-
- val labels = mutable.HashMap[BasicBlock, Label]()
-
- /* when emitting .line, it's enough to include the full filename just once per method, thus reducing filesize.
- * this scheme relies on the fact that the entry block is emitted first. */
- var dbFilenameSeen = false
-
- def genCode(m: IMethod) {
-
- def makeLabels(blocks: List[BasicBlock]) = {
- debuglog("Making labels for: " + method)
- for (bb <- blocks) labels(bb) = mcode.DefineLabel()
- }
-
- labels.clear
-
- var linearization = if(!m.exh.isEmpty) msilLinearizer.linearize(m)
- else linearizer.linearize(m)
-
- if (!m.exh.isEmpty)
- linearization = computeExceptionMaps(linearization, m)
-
- makeLabels(linearization)
-
- // debug val blocksInM = m.code.blocks.toList.sortBy(bb => bb.label)
- // debug val blocksInL = linearization.sortBy(bb => bb.label)
- // debug val MButNotL = (blocksInM.toSet) diff (blocksInL.toSet) // if non-empty, a jump to B fails to find a label for B (case CJUMP, case CZJUMP)
- // debug if(!MButNotL.isEmpty) { }
-
- dbFilenameSeen = false
- genBlocks(linearization)
-
- // RETURN inside exception blocks are replaced by Leave. The target of the
- // leave is a `Ret` outside any exception block (generated here).
- if (handlerReturnMethod == m) {
- mcode.MarkLabel(handlerReturnLabel)
- if (handlerReturnKind != UNIT)
- mcode.Emit(OpCodes.Ldloc, handlerReturnLocal)
- mcode.Emit(OpCodes.Ret)
- }
-
- beginExBlock.clear()
- beginCatchBlock.clear()
- endExBlock.clear()
- endFinallyLabels.clear()
- }
-
- def genBlocks(blocks: List[BasicBlock], previous: BasicBlock = null) {
- blocks match {
- case Nil => ()
- case x :: Nil => genBlock(x, prev = previous, next = null)
- case x :: y :: ys => genBlock(x, prev = previous, next = y); genBlocks(y :: ys, previous = x)
- }
- }
-
- // the try blocks starting at a certain BasicBlock
- val beginExBlock = mutable.HashMap[BasicBlock, List[ExceptionHandler]]()
-
- // the catch blocks starting / ending at a certain BasicBlock
- val beginCatchBlock = mutable.HashMap[BasicBlock, ExceptionHandler]()
- val endExBlock = mutable.HashMap[BasicBlock, List[ExceptionHandler]]()
-
- /** When emitting the code (genBlock), the stack of currently active try / catch
- * handlers (innermost on top). When seeing a `RETURN` inside a try / catch, we need to
- * - store the result in a local (if it's not UNIT)
- * - emit `Leave handlerReturnLabel` instead of the Return
- * - emit code at the end: load the local and return its value
- */
- var currentHandlers = new mutable.Stack[ExceptionHandler]
- // The IMethod the Local/Label/Kind below belong to
- var handlerReturnMethod: IMethod = _
- // Stores the result when returning inside an exception block
- var handlerReturnLocal: LocalBuilder = _
- // Label for a return instruction outside any exception block
- var handlerReturnLabel: Label = _
- // The result kind.
- var handlerReturnKind: TypeKind = _
- def returnFromHandler(kind: TypeKind): (LocalBuilder, Label) = {
- if (handlerReturnMethod != method) {
- handlerReturnMethod = method
- if (kind != UNIT) {
- handlerReturnLocal = mcode.DeclareLocal(msilType(kind))
- handlerReturnLocal.SetLocalSymInfo("$handlerReturn")
- }
- handlerReturnLabel = mcode.DefineLabel()
- handlerReturnKind = kind
- }
- (handlerReturnLocal, handlerReturnLabel)
- }
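-
- // Illustrative example of the rewriting described above (hypothetical source): in
- //   def f: Int = try { return 1 } finally { println("done") }
- // the RETURN inside the handler is emitted as `stloc $handlerReturn; leave handlerReturnLabel`,
- // and genCode appends a single `handlerReturnLabel: ldloc $handlerReturn; ret` after all blocks.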
-
- /** For try/catch nested inside a finally, we can't use `Leave OutsideFinally`, the
- * Leave target has to be inside the finally (and it has to be the `endfinally` instruction).
- * So for every finalizer, we have a label which marks the place of the `endfinally`,
- * nested try/catch blocks will leave there.
- */
- val endFinallyLabels = mutable.HashMap[ExceptionHandler, Label]()
-
- /** Computes which blocks are the beginning / end of a try or catch block */
- private def computeExceptionMaps(blocks: List[BasicBlock], m: IMethod): List[BasicBlock] = {
- val visitedBlocks = new mutable.HashSet[BasicBlock]()
-
- // handlers which have not been introduced so far
- var openHandlers = m.exh
-
-
- /** Example
- * try {
- * try {
- * // *1*
- * } catch {
- * case h1 =>
- * }
- * } catch {
- * case h2 =>
- * case h3 =>
- * try {
- *
- * } catch {
- * case h4 => // *2*
- * case h5 =>
- * }
- * }
- */
-
- // Stack of nested try blocks. Each block has a List of ExceptionHandlers (multiple
- // catch statements). Example *1*: Stack(List(h2, h3), List(h1))
- val currentTryHandlers = new mutable.Stack[List[ExceptionHandler]]()
-
- // Stack of nested catch blocks. The head of the list is the current catch block. The
- // tail is all following catch blocks. Example *2*: Stack(List(h3), List(h4, h5))
- val currentCatchHandlers = new mutable.Stack[List[ExceptionHandler]]()
-
- for (b <- blocks) {
-
- // are we past the current catch blocks?
- def endHandlers(): List[ExceptionHandler] = {
- var res: List[ExceptionHandler] = Nil
- if (!currentCatchHandlers.isEmpty) {
- val handler = currentCatchHandlers.top.head
- if (!handler.blocks.contains(b)) {
- // all blocks of the handler are either visited, or not part of the linearization (i.e. dead)
- assert(handler.blocks.forall(b => visitedBlocks.contains(b) || !blocks.contains(b)),
- "Bad linearization of basic blocks inside catch. Found block not part of the handler\n"+
- b.fullString +"\nwhile in catch-part of\n"+ handler)
-
- val rest = currentCatchHandlers.pop.tail
- if (rest.isEmpty) {
- // all catch blocks of that exception handler are covered
- res = handler :: endHandlers()
- } else {
- // there are more catch blocks for that try (handlers covering the same blocks)
- currentCatchHandlers.push(rest)
- beginCatchBlock(b) = rest.head
- }
- }
- }
- res
- }
- val end = endHandlers()
- if (!end.isEmpty) endExBlock(b) = end
-
- // are we past the current try block?
- if (!currentTryHandlers.isEmpty) {
- val handler = currentTryHandlers.top.head
- if (!handler.covers(b)) {
- // all of the covered blocks are visited, or not part of the linearization
- assert(handler.covered.forall(b => visitedBlocks.contains(b) || !blocks.contains(b)),
- "Bad linearization of basic blocks inside try. Found non-covered block\n"+
- b.fullString +"\nwhile in try-part of\n"+ handler)
-
- assert(handler.startBlock == b,
- "Bad linearization of basic blocks. The entry block of a catch does not directly follow the try\n"+
- b.fullString +"\n"+ handler)
-
- val handlers = currentTryHandlers.pop
- currentCatchHandlers.push(handlers)
- beginCatchBlock(b) = handler
- }
- }
-
- // are there try blocks starting at b?
- val (newHandlers, stillOpen) = openHandlers.partition(_.covers(b))
- openHandlers = stillOpen
-
- val newHandlersBySize = newHandlers.groupBy(_.covered.size)
- // big handlers first, smaller ones are nested inside the try of the big one
- // (checked by the assertions below)
- val sizes = newHandlersBySize.keys.toList.sortWith(_ > _)
-
- val beginHandlers = new mutable.ListBuffer[ExceptionHandler]
- for (s <- sizes) {
- val sHandlers = newHandlersBySize(s)
- for (h <- sHandlers) {
- assert(h.covered == sHandlers.head.covered,
- "bad nesting of exception handlers. same size, but not covering same blocks\n"+
- h +"\n"+ sHandlers.head)
- assert(h.resultKind == sHandlers.head.resultKind,
- "bad nesting of exception handlers. same size, but the same resultKind\n"+
- h +"\n"+ sHandlers.head)
- }
- for (bigger <- beginHandlers; h <- sHandlers) {
- assert(h.covered.subsetOf(bigger.covered),
- "bad nesting of exception handlers. try blocks of smaller handler are not nested in bigger one.\n"+
- h +"\n"+ bigger)
- assert(h.blocks.toSet.subsetOf(bigger.covered),
- "bad nesting of exception handlers. catch blocks of smaller handler are not nested in bigger one.\n"+
- h +"\n"+ bigger)
- }
- beginHandlers += sHandlers.head
- currentTryHandlers.push(sHandlers)
- }
- beginExBlock(b) = beginHandlers.toList
- visitedBlocks += b
- }
-
- // if there are handlers left (i.e. handlers covering nothing, or a
- // non-existent (dead) block), remove their catch-blocks.
- val liveBlocks = if (openHandlers.isEmpty) blocks else {
- blocks.filter(b => openHandlers.forall(h => !h.blocks.contains(b)))
- }
-
- /** There might be open handlers, but no more blocks. This happens when a try/catch ends
- * with `throw` or `return`, e.g.
- * def foo() { try { .. throw } catch { case _ => .. throw } }
- *
- * In this case we need some code after the catch block for the auto-generated
- * `leave` instruction. So we're adding a (dead) `throw new Exception`.
- */
- val rest = currentCatchHandlers.map(handlers => {
- assert(handlers.length == 1, handlers)
- handlers.head
- }).toList
-
- if (rest.isEmpty) {
- liveBlocks
- } else {
- val b = m.code.newBlock
- b.emit(Seq(
- NEW(REFERENCE(definitions.ThrowableClass)),
- DUP(REFERENCE(definitions.ObjectClass)),
- CALL_METHOD(definitions.ThrowableClass.primaryConstructor, Static(true)),
- THROW(definitions.ThrowableClass)
- ))
- b.close
- endExBlock(b) = rest
- liveBlocks ::: List(b)
- }
- }
-
- /**
- * @param block the BasicBlock to emit code for
- * @param next the following BasicBlock, `null` if `block` is the last one
- */
- def genBlock(block: BasicBlock, prev: BasicBlock, next: BasicBlock) {
-
- def loadLocalOrAddress(local: Local, msg : String , loadAddr : Boolean) {
- debuglog(msg + " for " + local)
- val isArg = local.arg
- val i = local.index
- if (isArg)
- loadArg(mcode, loadAddr)(i)
- else
- loadLocal(i, local, mcode, loadAddr)
- }
-
- def loadFieldOrAddress(field: Symbol, isStatic: Boolean, msg: String, loadAddr : Boolean) {
- debuglog(msg + " with owner: " + field.owner +
- " flags: " + Flags.flagsToString(field.owner.flags))
- var fieldInfo = fields.get(field) match {
- case Some(fInfo) => fInfo
- case None =>
- val fInfo = getType(field.owner).GetField(msilName(field))
- fields(field) = fInfo
- fInfo
- }
- if (fieldInfo.IsVolatile) {
- mcode.Emit(OpCodes.Volatile)
- }
- if (!fieldInfo.IsLiteral) {
- if (loadAddr) {
- mcode.Emit(if (isStatic) OpCodes.Ldsflda else OpCodes.Ldflda, fieldInfo)
- } else {
- mcode.Emit(if (isStatic) OpCodes.Ldsfld else OpCodes.Ldfld, fieldInfo)
- }
- } else {
- assert(!loadAddr, "can't take AddressOf a literal field (not even with readonly. prefix) because no memory was allocated to such field ...")
- // TODO the above can be overcome by loading the value, boxing, and finally unboxing. An address to a copy of the raw value will be on the stack.
- /* We perform `field inlining' as required by CLR.
- * Emit as for a CONSTANT ICode stmt, with the twist that the constant value is available
- * as a java.lang.Object and its .NET type allows constant initialization in CLR, i.e. that type
- * is one of I1, I2, I4, I8, R4, R8, CHAR, BOOLEAN, STRING, or CLASS (in this last case,
- * only accepting nullref as value). See Table 9-1 in Lidin's book on ILAsm. */
- val value = fieldInfo.getValue()
- if (value == null) {
- mcode.Emit(OpCodes.Ldnull)
- } else {
- val typ = if (fieldInfo.FieldType.IsEnum) fieldInfo.FieldType.getUnderlyingType
- else fieldInfo.FieldType
- if (typ == clrTypes.STRING) {
- mcode.Emit(OpCodes.Ldstr, value.asInstanceOf[String])
- } else if (typ == clrTypes.BOOLEAN) {
- mcode.Emit(if (value.asInstanceOf[Boolean]) OpCodes.Ldc_I4_1
- else OpCodes.Ldc_I4_0)
- } else if (typ == clrTypes.BYTE || typ == clrTypes.UBYTE) {
- loadI4(value.asInstanceOf[Byte], mcode)
- } else if (typ == clrTypes.SHORT || typ == clrTypes.USHORT) {
- loadI4(value.asInstanceOf[Int], mcode)
- } else if (typ == clrTypes.CHAR) {
- loadI4(value.asInstanceOf[Char], mcode)
- } else if (typ == clrTypes.INT || typ == clrTypes.UINT) {
- loadI4(value.asInstanceOf[Int], mcode)
- } else if (typ == clrTypes.LONG || typ == clrTypes.ULONG) {
- mcode.Emit(OpCodes.Ldc_I8, value.asInstanceOf[Long])
- } else if (typ == clrTypes.FLOAT) {
- mcode.Emit(OpCodes.Ldc_R4, value.asInstanceOf[Float])
- } else if (typ == clrTypes.DOUBLE) {
- mcode.Emit(OpCodes.Ldc_R8, value.asInstanceOf[Double])
- } else {
- /* TODO one more case is described in Partition II, 16.2: bytearray(...) */
- abort("Unknown type for static literal field: " + fieldInfo)
- }
- }
- }
- }
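-
- // Illustrative example of the literal-field inlining above: reading a field the CLR marks
- // as a literal, e.g. a `const` of a referenced .NET assembly or an enum member, loads no
- // field at all; the constant is re-emitted in place, for instance as `ldc.i4.s 42` via
- // loadI4 for an int-valued constant 42.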
-
- /** Creating objects works differently on .NET. On the JVM
- * - NEW(type) => reference on Stack
- * - DUP, load arguments, CALL_METHOD(constructor)
- *
- * On .NET, the NEW and DUP are ignored, but we emit a special method call
- * - load arguments
- * - NewObj(constructor) => reference on stack
- *
- * This variable tells whether the previous instruction was a NEW,
- * we expect a DUP which is not emitted. */
- var previousWasNEW = false
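-
- // Illustrative example (hypothetical source `new C(x)`): the ICode sequence
- //   NEW C; DUP; LOAD_LOCAL x; CALL_METHOD C.<init>
- // is emitted here as `ldloc x; newobj instance void C::.ctor(...)`, i.e. NEW only sets
- // previousWasNEW, the DUP is swallowed (case DUP below), and the constructor call becomes
- // `newobj` (case CALL_METHOD below).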
-
- var lastLineNr: Int = 0
- var lastPos: Position = NoPosition
-
-
- // EndExceptionBlock must happen before MarkLabel because it adds the
- // Leave instruction. Otherwise, labels(block) points to the Leave
- // (inside the catch) instead of the instruction afterwards.
- for (handlers <- endExBlock.get(block); exh <- handlers) {
- currentHandlers.pop()
- for (l <- endFinallyLabels.get(exh))
- mcode.MarkLabel(l)
- mcode.EndExceptionBlock()
- }
-
- mcode.MarkLabel(labels(block))
- debuglog("Generating code for block: " + block)
-
- for (handler <- beginCatchBlock.get(block)) {
- if (!currentHandlers.isEmpty && currentHandlers.top.covered == handler.covered) {
- currentHandlers.pop()
- currentHandlers.push(handler)
- }
- if (handler.cls == NoSymbol) {
- // `finally` blocks are represented the same as `catch`, but with no catch-type
- mcode.BeginFinallyBlock()
- } else {
- val t = getType(handler.cls)
- mcode.BeginCatchBlock(t)
- }
- }
- for (handlers <- beginExBlock.get(block); exh <- handlers) {
- currentHandlers.push(exh)
- mcode.BeginExceptionBlock()
- }
-
- for (instr <- block) {
- try {
- val currentLineNr = instr.pos.line
- val skip = if(instr.pos.isRange) instr.pos.sameRange(lastPos) else (currentLineNr == lastLineNr);
- if(!skip || !dbFilenameSeen) {
- val fileName = if(dbFilenameSeen) "" else {dbFilenameSeen = true; ilasmFileName(clasz)};
- if(instr.pos.isRange) {
- val startLine = instr.pos.focusStart.line
- val endLine = instr.pos.focusEnd.line
- val startCol = instr.pos.focusStart.column
- val endCol = instr.pos.focusEnd.column
- mcode.setPosition(startLine, endLine, startCol, endCol, fileName)
- } else {
- mcode.setPosition(instr.pos.line, fileName)
- }
- lastLineNr = currentLineNr
- lastPos = instr.pos
- }
- } catch { case _: UnsupportedOperationException => () }
-
- if (previousWasNEW)
- assert(instr.isInstanceOf[DUP], block)
-
- instr match {
- case THIS(clasz) =>
- mcode.Emit(OpCodes.Ldarg_0)
-
- case CONSTANT(const) =>
- const.tag match {
- case UnitTag => ()
- case BooleanTag => mcode.Emit(if (const.booleanValue) OpCodes.Ldc_I4_1
- else OpCodes.Ldc_I4_0)
- case ByteTag => loadI4(const.byteValue, mcode)
- case ShortTag => loadI4(const.shortValue, mcode)
- case CharTag => loadI4(const.charValue, mcode)
- case IntTag => loadI4(const.intValue, mcode)
- case LongTag => mcode.Emit(OpCodes.Ldc_I8, const.longValue)
- case FloatTag => mcode.Emit(OpCodes.Ldc_R4, const.floatValue)
- case DoubleTag => mcode.Emit(OpCodes.Ldc_R8, const.doubleValue)
- case StringTag => mcode.Emit(OpCodes.Ldstr, const.stringValue)
- case NullTag => mcode.Emit(OpCodes.Ldnull)
- case ClazzTag =>
- mcode.Emit(OpCodes.Ldtoken, msilType(const.typeValue))
- mcode.Emit(OpCodes.Call, TYPE_FROM_HANDLE)
- case _ => abort("Unknown constant value: " + const)
- }
-
- case LOAD_ARRAY_ITEM(kind) =>
- (kind: @unchecked) match {
- case BOOL => mcode.Emit(OpCodes.Ldelem_I1)
- case BYTE => mcode.Emit(OpCodes.Ldelem_I1) // I1 for System.SByte, i.e. a scala.Byte
- case SHORT => mcode.Emit(OpCodes.Ldelem_I2)
- case CHAR => mcode.Emit(OpCodes.Ldelem_U2)
- case INT => mcode.Emit(OpCodes.Ldelem_I4)
- case LONG => mcode.Emit(OpCodes.Ldelem_I8)
- case FLOAT => mcode.Emit(OpCodes.Ldelem_R4)
- case DOUBLE => mcode.Emit(OpCodes.Ldelem_R8)
- case REFERENCE(cls) => mcode.Emit(OpCodes.Ldelem_Ref)
- case ARRAY(elem) => mcode.Emit(OpCodes.Ldelem_Ref)
-
- // case UNIT is not possible: an Array[Unit] will be an
- // Array[scala.runtime.BoxedUnit] (-> case REFERENCE)
- }
-
- case LOAD_LOCAL(local) => loadLocalOrAddress(local, "load_local", false)
-
- case CIL_LOAD_LOCAL_ADDRESS(local) => loadLocalOrAddress(local, "cil_load_local_address", true)
-
- case LOAD_FIELD(field, isStatic) => loadFieldOrAddress(field, isStatic, "load_field", false)
-
- case CIL_LOAD_FIELD_ADDRESS(field, isStatic) => loadFieldOrAddress(field, isStatic, "cil_load_field_address", true)
-
- case CIL_LOAD_ARRAY_ITEM_ADDRESS(kind) => mcode.Emit(OpCodes.Ldelema, msilType(kind))
-
- case CIL_NEWOBJ(msym) =>
- assert(msym.isClassConstructor)
- val constructorInfo: ConstructorInfo = getConstructor(msym)
- mcode.Emit(OpCodes.Newobj, constructorInfo)
-
- case LOAD_MODULE(module) =>
- debuglog("Generating LOAD_MODULE for: " + showsym(module))
- mcode.Emit(OpCodes.Ldsfld, getModuleInstanceField(module))
-
- case STORE_ARRAY_ITEM(kind) =>
- (kind: @unchecked) match {
- case BOOL => mcode.Emit(OpCodes.Stelem_I1)
- case BYTE => mcode.Emit(OpCodes.Stelem_I1)
- case SHORT => mcode.Emit(OpCodes.Stelem_I2)
- case CHAR => mcode.Emit(OpCodes.Stelem_I2)
- case INT => mcode.Emit(OpCodes.Stelem_I4)
- case LONG => mcode.Emit(OpCodes.Stelem_I8)
- case FLOAT => mcode.Emit(OpCodes.Stelem_R4)
- case DOUBLE => mcode.Emit(OpCodes.Stelem_R8)
- case REFERENCE(cls) => mcode.Emit(OpCodes.Stelem_Ref)
- case ARRAY(elem) => mcode.Emit(OpCodes.Stelem_Ref) // @TODO: test this! (occurs when calling an Array[Object]* vararg param method)
-
- // case UNIT not possible (see comment at LOAD_ARRAY_ITEM)
- }
-
- case STORE_LOCAL(local) =>
- val isArg = local.arg
- val i = local.index
- debuglog("store_local for " + local + ", index " + i)
-
- // there are some locals defined by the compiler that
- // are isArg and need to be stored.
- if (isArg) {
- if (i >= -128 && i <= 127)
- mcode.Emit(OpCodes.Starg_S, i)
- else
- mcode.Emit(OpCodes.Starg, i)
- } else {
- i match {
- case 0 => mcode.Emit(OpCodes.Stloc_0)
- case 1 => mcode.Emit(OpCodes.Stloc_1)
- case 2 => mcode.Emit(OpCodes.Stloc_2)
- case 3 => mcode.Emit(OpCodes.Stloc_3)
- case _ =>
- if (i >= -128 && i <= 127)
- mcode.Emit(OpCodes.Stloc_S, localBuilders(local))
- else
- mcode.Emit(OpCodes.Stloc, localBuilders(local))
- }
- }
-
- case STORE_THIS(_) =>
- // this only works for impl classes because the self parameter comes first
- // in the method signature. If that changes, this code has to be revisited.
- mcode.Emit(OpCodes.Starg_S, 0)
-
- case STORE_FIELD(field, isStatic) =>
- val fieldInfo = fields.get(field) match {
- case Some(fInfo) => fInfo
- case None =>
- val fInfo = getType(field.owner).GetField(msilName(field))
- fields(field) = fInfo
- fInfo
- }
- mcode.Emit(if (isStatic) OpCodes.Stsfld else OpCodes.Stfld, fieldInfo)
-
- case CALL_PRIMITIVE(primitive) =>
- genPrimitive(primitive, instr.pos)
-
- case CALL_METHOD(msym, style) =>
- if (msym.isClassConstructor) {
- val constructorInfo: ConstructorInfo = getConstructor(msym)
- (style: @unchecked) match {
- // normal constructor calls are Static..
- case Static(_) =>
- if (method.symbol.isClassConstructor && method.symbol.owner == msym.owner)
- // we're generating a constructor (method: IMethod is a constructor), and we're
- // calling another constructor of the same class.
-
- // @LUC TODO: this can probably break, namely when having: class A { def this() { new A() } }
- // instead, the CALL_METHOD should carry additional information so that we know whether
- // it's an instance-creation constructor call or not.
- mcode.Emit(OpCodes.Call, constructorInfo)
- else
- mcode.Emit(OpCodes.Newobj, constructorInfo)
- case SuperCall(_) =>
- mcode.Emit(OpCodes.Call, constructorInfo)
- if (isStaticModule(clasz.symbol) &&
- notInitializedModules.contains(clasz.symbol) &&
- method.symbol.isClassConstructor)
- {
- notInitializedModules -= clasz.symbol
- mcode.Emit(OpCodes.Ldarg_0)
- mcode.Emit(OpCodes.Stsfld, getModuleInstanceField(clasz.symbol))
- }
- }
-
- } else {
-
- var doEmit = true
- getTypeOpt(msym.owner) match {
- case Some(typ) if (typ.IsEnum) => {
- def negBool() = {
- mcode.Emit(OpCodes.Ldc_I4_0)
- mcode.Emit(OpCodes.Ceq)
- }
- doEmit = false
- val name = msym.name
- if (name eq nme.EQ) { mcode.Emit(OpCodes.Ceq) }
- else if (name eq nme.NE) { mcode.Emit(OpCodes.Ceq); negBool }
- else if (name eq nme.LT) { mcode.Emit(OpCodes.Clt) }
- else if (name eq nme.LE) { mcode.Emit(OpCodes.Cgt); negBool }
- else if (name eq nme.GT) { mcode.Emit(OpCodes.Cgt) }
- else if (name eq nme.GE) { mcode.Emit(OpCodes.Clt); negBool }
- else if (name eq nme.OR) { mcode.Emit(OpCodes.Or) }
- else if (name eq nme.AND) { mcode.Emit(OpCodes.And) }
- else if (name eq nme.XOR) { mcode.Emit(OpCodes.Xor) }
- else
- doEmit = true
- }
- case _ => ()
- }
-
- // method: implicit view(FunctionX[PType0, PType1, ...,PTypeN, ResType]):DelegateType
- val (isDelegateView, paramType, resType) = beforeTyper {
- msym.tpe match {
- case MethodType(params, resultType)
- if (params.length == 1 && msym.name == nme.view_) =>
- val paramType = params(0).tpe
- val isDel = definitions.isCorrespondingDelegate(resultType, paramType)
- (isDel, paramType, resultType)
- case _ => (false, null, null)
- }
- }
- if (doEmit && isDelegateView) {
- doEmit = false
- createDelegateCaller(paramType, resType)
- }
-
- if (doEmit &&
- (msym.name == nme.PLUS || msym.name == nme.MINUS)
- && clrTypes.isDelegateType(msilType(msym.owner.tpe)))
- {
- doEmit = false
- val methodInfo: MethodInfo = getMethod(msym)
- // call it as a static method, even if the compiler (symbol) thinks it's virtual
- mcode.Emit(OpCodes.Call, methodInfo)
- mcode.Emit(OpCodes.Castclass, msilType(msym.owner.tpe))
- }
-
- if (doEmit && definitions.Delegate_scalaCallers.contains(msym)) {
- doEmit = false
- val methodSym: Symbol = definitions.Delegate_scalaCallerTargets(msym)
- val delegateType: Type = msym.tpe match {
- case MethodType(_, retType) => retType
- case _ => abort("not a method type: " + msym.tpe)
- }
- val methodInfo: MethodInfo = getMethod(methodSym)
- val delegCtor = msilType(delegateType).GetConstructor(Array(MOBJECT, INT_PTR))
- if (methodSym.isStatic) {
- mcode.Emit(OpCodes.Ldftn, methodInfo)
- } else {
- mcode.Emit(OpCodes.Dup)
- mcode.Emit(OpCodes.Ldvirtftn, methodInfo)
- }
- mcode.Emit(OpCodes.Newobj, delegCtor)
- }
-
- if (doEmit) {
- val methodInfo: MethodInfo = getMethod(msym)
- (style: @unchecked) match {
- case SuperCall(_) =>
- mcode.Emit(OpCodes.Call, methodInfo)
- case Dynamic =>
- // methodInfo.DeclaringType is null for global methods
- val isValuetypeMethod = (methodInfo.DeclaringType ne null) && (methodInfo.DeclaringType.IsValueType)
- val isValuetypeVirtualMethod = isValuetypeMethod && (methodInfo.IsVirtual)
- if (dynToStatMapped(msym)) {
- mcode.Emit(OpCodes.Call, methodInfo)
- } else if (isValuetypeVirtualMethod) {
- mcode.Emit(OpCodes.Constrained, methodInfo.DeclaringType)
- mcode.Emit(OpCodes.Callvirt, methodInfo)
- } else if (isValuetypeMethod) {
- // otherwise error "Callvirt on a value type method" ensues
- mcode.Emit(OpCodes.Call, methodInfo)
- } else {
- mcode.Emit(OpCodes.Callvirt, methodInfo)
- }
- case Static(_) =>
- if(methodInfo.IsVirtual && !mcode.Ldarg0WasJustEmitted) {
- mcode.Emit(OpCodes.Callvirt, methodInfo)
- } else mcode.Emit(OpCodes.Call, methodInfo)
- }
- }
- }
-
- case BOX(boxType) =>
- emitBox(mcode, boxType)
-
- case UNBOX(boxType) =>
- emitUnbox(mcode, boxType)
-
- case CIL_UNBOX(boxType) =>
- mcode.Emit(OpCodes.Unbox, msilType(boxType))
-
- case CIL_INITOBJ(valueType) =>
- mcode.Emit(OpCodes.Initobj, msilType(valueType))
-
- case NEW(REFERENCE(cls)) =>
- // the next instruction must be a DUP, see comment on `var previousWasNEW`
- previousWasNEW = true
-
- // works also for arrays and reference-types
- case CREATE_ARRAY(elem, dims) =>
- // TODO: handle multi dimensional arrays
- assert(dims == 1, "Can't handle multi dimensional arrays")
- mcode.Emit(OpCodes.Newarr, msilType(elem))
-
- // works for arrays and reference-types
- case IS_INSTANCE(tpe) =>
- mcode.Emit(OpCodes.Isinst, msilType(tpe))
- mcode.Emit(OpCodes.Ldnull)
- mcode.Emit(OpCodes.Ceq)
- mcode.Emit(OpCodes.Ldc_I4_0)
- mcode.Emit(OpCodes.Ceq)
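- // isinst leaves either the reference or null on the stack; the comparison against null
- // followed by the comparison against 0 turns that into the boolean IS_INSTANCE expects:
- // non-null -> 1, null -> 0.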
-
- // works for arrays and reference-types
- // part from the scala reference: "S <: T does not imply
- // Array[S] <: Array[T] in Scala. However, it is possible
- // to cast an array of S to an array of T if such a cast
- // is permitted in the host environment."
- case CHECK_CAST(tpknd) =>
- val tMSIL = msilType(tpknd)
- mcode.Emit(OpCodes.Castclass, tMSIL)
-
- // no SWITCH is generated when there's
- // - a default case ("case _ => ...") in the matching expr
- // - OR is used ("case 1 | 2 => ...")
- case SWITCH(tags, branches) =>
- // tags is List[List[Int]]; a list of integers for every label.
- // if the int on stack is 4, and 4 is in the second list => jump
- // to second label
- // branches is List[BasicBlock]
- // the labels to jump to (the last one is the default one)
-
- val switchLocal = mcode.DeclareLocal(MINT)
- // several switch variables will appear with the same name in the
- // assembly code, but this causes no trouble
- switchLocal.SetLocalSymInfo("$switch_var")
-
- mcode.Emit(OpCodes.Stloc, switchLocal)
- var i = 0
- for (l <- tags) {
- var targetLabel = labels(branches(i))
- for (i <- l) {
- mcode.Emit(OpCodes.Ldloc, switchLocal)
- loadI4(i, mcode)
- mcode.Emit(OpCodes.Beq, targetLabel)
- }
- i += 1
- }
- val defaultTarget = labels(branches(i))
- if (next != branches(i))
- mcode.Emit(OpCodes.Br, defaultTarget)
-
- case JUMP(whereto) =>
- val (leaveHandler, leaveFinally, lfTarget) = leavesHandler(block, whereto)
- if (leaveHandler) {
- if (leaveFinally) {
- if (lfTarget.isDefined) mcode.Emit(OpCodes.Leave, lfTarget.get)
- else mcode.Emit(OpCodes.Endfinally)
- } else
- mcode.Emit(OpCodes.Leave, labels(whereto))
- } else if (next != whereto)
- mcode.Emit(OpCodes.Br, labels(whereto))
-
- case CJUMP(success, failure, cond, kind) =>
- // cond is TestOp (see Primitives.scala), and can take
- // values EQ, NE, LT, GE LE, GT
- // kind is TypeKind
- val isFloat = kind == FLOAT || kind == DOUBLE
- val emit = (c: TestOp, l: Label) => emitBr(c, l, isFloat)
- emitCondBr(block, cond, success, failure, next, emit)
-
- case CZJUMP(success, failure, cond, kind) =>
- emitCondBr(block, cond, success, failure, next, emitBrBool(_, _))
-
- case RETURN(kind) =>
- if (currentHandlers.isEmpty)
- mcode.Emit(OpCodes.Ret)
- else {
- val (local, label) = returnFromHandler(kind)
- if (kind != UNIT)
- mcode.Emit(OpCodes.Stloc, local)
- mcode.Emit(OpCodes.Leave, label)
- }
-
- case THROW(_) =>
- mcode.Emit(OpCodes.Throw)
-
- case DROP(kind) =>
- mcode.Emit(OpCodes.Pop)
-
- case DUP(kind) =>
- // see comment on `var previousWasNEW`
- if (!previousWasNEW)
- mcode.Emit(OpCodes.Dup)
- else
- previousWasNEW = false
-
- case MONITOR_ENTER() =>
- mcode.Emit(OpCodes.Call, MMONITOR_ENTER)
-
- case MONITOR_EXIT() =>
- mcode.Emit(OpCodes.Call, MMONITOR_EXIT)
-
- case SCOPE_ENTER(_) | SCOPE_EXIT(_) | LOAD_EXCEPTION(_) =>
- ()
- }
-
- } // end for (instr <- b) { .. }
- } // end genBlock
-
- def genPrimitive(primitive: Primitive, pos: Position) {
- primitive match {
- case Negation(kind) =>
- kind match {
- // CHECK: is it possible to get this for BOOL? If so, verify.
- case BOOL | BYTE | CHAR | SHORT | INT | LONG | FLOAT | DOUBLE =>
- mcode.Emit(OpCodes.Neg)
-
- case _ => abort("Impossible to negate a " + kind)
- }
-
- case Arithmetic(op, kind) =>
- op match {
- case ADD => mcode.Emit(OpCodes.Add)
- case SUB => mcode.Emit(OpCodes.Sub)
- case MUL => mcode.Emit(OpCodes.Mul)
- case DIV => mcode.Emit(OpCodes.Div)
- case REM => mcode.Emit(OpCodes.Rem)
- case NOT => mcode.Emit(OpCodes.Not) //bitwise complement (one's complement)
- case _ => abort("Unknown arithmetic primitive " + primitive )
- }
-
- case Logical(op, kind) => op match {
- case AND => mcode.Emit(OpCodes.And)
- case OR => mcode.Emit(OpCodes.Or)
- case XOR => mcode.Emit(OpCodes.Xor)
- }
-
- case Shift(op, kind) => op match {
- case LSL => mcode.Emit(OpCodes.Shl)
- case ASR => mcode.Emit(OpCodes.Shr)
- case LSR => mcode.Emit(OpCodes.Shr_Un)
- }
-
- case Conversion(src, dst) =>
- debuglog("Converting from: " + src + " to: " + dst)
-
- dst match {
- case BYTE => mcode.Emit(OpCodes.Conv_I1) // I1 for System.SByte, i.e. a scala.Byte
- case SHORT => mcode.Emit(OpCodes.Conv_I2)
- case CHAR => mcode.Emit(OpCodes.Conv_U2)
- case INT => mcode.Emit(OpCodes.Conv_I4)
- case LONG => mcode.Emit(OpCodes.Conv_I8)
- case FLOAT => mcode.Emit(OpCodes.Conv_R4)
- case DOUBLE => mcode.Emit(OpCodes.Conv_R8)
- case _ =>
- Console.println("Illegal conversion at: " + clasz +
- " at: " + pos.source + ":" + pos.line)
- }
-
- case ArrayLength(_) =>
- mcode.Emit(OpCodes.Ldlen)
-
- case StartConcat =>
- mcode.Emit(OpCodes.Newobj, MSTRING_BUILDER_CONSTR)
-
-
- case StringConcat(el) =>
- val elemType : MsilType = el match {
- case REFERENCE(_) | ARRAY(_) => MOBJECT
- case _ => msilType(el)
- }
-
- val argTypes:Array[MsilType] = Array(elemType)
- val stringBuilderAppend = MSTRING_BUILDER.GetMethod("Append", argTypes )
- mcode.Emit(OpCodes.Callvirt, stringBuilderAppend)
-
- case EndConcat =>
- mcode.Emit(OpCodes.Callvirt, MSTRING_BUILDER_TOSTRING)
-
- case _ =>
- abort("Unimplemented primitive " + primitive)
- }
- } // end genPrimitive
-
-
- ////////////////////// loading ///////////////////////
-
- def loadI4(value: Int, code: ILGenerator): Unit = value match {
- case -1 => code.Emit(OpCodes.Ldc_I4_M1)
- case 0 => code.Emit(OpCodes.Ldc_I4_0)
- case 1 => code.Emit(OpCodes.Ldc_I4_1)
- case 2 => code.Emit(OpCodes.Ldc_I4_2)
- case 3 => code.Emit(OpCodes.Ldc_I4_3)
- case 4 => code.Emit(OpCodes.Ldc_I4_4)
- case 5 => code.Emit(OpCodes.Ldc_I4_5)
- case 6 => code.Emit(OpCodes.Ldc_I4_6)
- case 7 => code.Emit(OpCodes.Ldc_I4_7)
- case 8 => code.Emit(OpCodes.Ldc_I4_8)
- case _ =>
- if (value >= -128 && value <= 127)
- code.Emit(OpCodes.Ldc_I4_S, value)
- else
- code.Emit(OpCodes.Ldc_I4, value)
- }
-
- def loadArg(code: ILGenerator, loadAddr: Boolean)(i: Int) =
- if (loadAddr) {
- if (i >= -128 && i <= 127)
- code.Emit(OpCodes.Ldarga_S, i)
- else
- code.Emit(OpCodes.Ldarga, i)
- } else {
- i match {
- case 0 => code.Emit(OpCodes.Ldarg_0)
- case 1 => code.Emit(OpCodes.Ldarg_1)
- case 2 => code.Emit(OpCodes.Ldarg_2)
- case 3 => code.Emit(OpCodes.Ldarg_3)
- case _ =>
- if (i >= -128 && i <= 127)
- code.Emit(OpCodes.Ldarg_S, i)
- else
- code.Emit(OpCodes.Ldarg, i)
- }
- }
-
- def loadLocal(i: Int, local: Local, code: ILGenerator, loadAddr: Boolean) =
- if (loadAddr) {
- if (i >= -128 && i <= 127)
- code.Emit(OpCodes.Ldloca_S, localBuilders(local))
- else
- code.Emit(OpCodes.Ldloca, localBuilders(local))
- } else {
- i match {
- case 0 => code.Emit(OpCodes.Ldloc_0)
- case 1 => code.Emit(OpCodes.Ldloc_1)
- case 2 => code.Emit(OpCodes.Ldloc_2)
- case 3 => code.Emit(OpCodes.Ldloc_3)
- case _ =>
- if (i >= -128 && i <= 127)
- code.Emit(OpCodes.Ldloc_S, localBuilders(local))
- else
- code.Emit(OpCodes.Ldloc, localBuilders(local))
- }
- }
-
- ////////////////////// branches ///////////////////////
-
- /** Returns a Triple (Boolean, Boolean, Option[Label])
- * - whether the jump leaves some exception block (try / catch / finally)
- * - whether it leaves a finally handler (finally block, but not its try / catch)
- * - a label where to jump for leaving the finally handler
- * . None to leave directly using `endfinally`
- * . Some(label) to emit `leave label` (for try / catch inside a finally handler)
- */
- def leavesHandler(from: BasicBlock, to: BasicBlock): (Boolean, Boolean, Option[Label]) =
- if (currentHandlers.isEmpty) (false, false, None)
- else {
- val h = currentHandlers.head
- val leaveHead = { h.covers(from) != h.covers(to) ||
- h.blocks.contains(from) != h.blocks.contains(to) }
- if (leaveHead) {
- // we leave the innermost exception block.
- // find out if we also leave some `finally` handler
- currentHandlers.find(e => {
- e.cls == NoSymbol && e.blocks.contains(from) != e.blocks.contains(to)
- }) match {
- case Some(finallyHandler) =>
- if (h == finallyHandler) {
- // the finally handler is the innermost, so we can emit `endfinally` directly
- (true, true, None)
- } else {
- // we need to `Leave` to the `endfinally` of the next outer finally handler
- val l = endFinallyLabels.getOrElseUpdate(finallyHandler, mcode.DefineLabel())
- (true, true, Some(l))
- }
- case None =>
- (true, false, None)
- }
- } else (false, false, None)
- }
-
- def emitCondBr(block: BasicBlock, cond: TestOp, success: BasicBlock, failure: BasicBlock,
- next: BasicBlock, emitBrFun: (TestOp, Label) => Unit) {
- val (sLeaveHandler, sLeaveFinally, slfTarget) = leavesHandler(block, success)
- val (fLeaveHandler, fLeaveFinally, flfTarget) = leavesHandler(block, failure)
-
- if (sLeaveHandler || fLeaveHandler) {
- val sLabelOpt = if (sLeaveHandler) {
- val leaveSLabel = mcode.DefineLabel()
- emitBrFun(cond, leaveSLabel)
- Some(leaveSLabel)
- } else {
- emitBrFun(cond, labels(success))
- None
- }
-
- if (fLeaveHandler) {
- if (fLeaveFinally) {
- if (flfTarget.isDefined) mcode.Emit(OpCodes.Leave, flfTarget.get)
- else mcode.Emit(OpCodes.Endfinally)
- } else
- mcode.Emit(OpCodes.Leave, labels(failure))
- } else
- mcode.Emit(OpCodes.Br, labels(failure))
-
- sLabelOpt.map(l => {
- mcode.MarkLabel(l)
- if (sLeaveFinally) {
- if (slfTarget.isDefined) mcode.Emit(OpCodes.Leave, slfTarget.get)
- else mcode.Emit(OpCodes.Endfinally)
- } else
- mcode.Emit(OpCodes.Leave, labels(success))
- })
- } else {
- if (next == success) {
- emitBrFun(cond.negate, labels(failure))
- } else {
- emitBrFun(cond, labels(success))
- if (next != failure) {
- mcode.Emit(OpCodes.Br, labels(failure))
- }
- }
- }
- }
-
- def emitBr(condition: TestOp, dest: Label, isFloat: Boolean) {
- condition match {
- case EQ => mcode.Emit(OpCodes.Beq, dest)
- case NE => mcode.Emit(OpCodes.Bne_Un, dest)
- case LT => mcode.Emit(if (isFloat) OpCodes.Blt_Un else OpCodes.Blt, dest)
- case GE => mcode.Emit(if (isFloat) OpCodes.Bge_Un else OpCodes.Bge, dest)
- case LE => mcode.Emit(if (isFloat) OpCodes.Ble_Un else OpCodes.Ble, dest)
- case GT => mcode.Emit(if (isFloat) OpCodes.Bgt_Un else OpCodes.Bgt, dest)
- }
- }
-
- def emitBrBool(cond: TestOp, dest: Label) {
- (cond: @unchecked) match {
- // EQ -> Brfalse, NE -> Brtrue; this is because we come from
- // a CZJUMP. If the value on the stack is 0 (e.g. a boolean
- // method returned false), and we are in the case EQ, then
- // we need to emit Brfalse (EQ Zero means false). vice versa
- case EQ => mcode.Emit(OpCodes.Brfalse, dest)
- case NE => mcode.Emit(OpCodes.Brtrue, dest)
- }
- }
-
- ////////////////////// local vars ///////////////////////
-
- /**
- * Compute the indexes of each local variable of the given
- * method.
- */
- def computeLocalVarsIndex(m: IMethod) {
- var idx = if (m.symbol.isStaticMember) 0 else 1
-
- val params = m.params
- for (l <- params) {
- debuglog("Index value for parameter " + l + ": " + idx)
- l.index = idx
- idx += 1 // sizeOf(l.kind)
- }
-
- val locvars = m.locals filterNot (params contains)
- idx = 0
-
- for (l <- locvars) {
- debuglog("Index value for local variable " + l + ": " + idx)
- l.index = idx
- idx += 1 // sizeOf(l.kind)
- }
-
- }
-
- ////////////////////// Utilities ////////////////////////
-
- /** Return a name of this symbol that can be used on the .NET
- * platform. It removes spaces from names.
- *
- * Special handling: scala.Nothing and scala.Null are 'erased' to
- * scala.runtime.Nothing$ and scala.runtime.Null$. This is needed because they are
- * not real classes, and they mean 'abrupt termination upon evaluation
- * of that expression' or 'null' respectively. This handling is
- * done already in GenICode, but here we need to remove references
- * from method signatures to these types, because such classes can
- * not exist in the classpath: the type checker will be very confused.
- */
- def msilName(sym: Symbol): String = {
- val suffix = sym.moduleSuffix
- // Flags.JAVA: "symbol was not defined by a scala-class" (java, or .net-class)
-
- if (sym == definitions.NothingClass)
- return "scala.runtime.Nothing$"
- else if (sym == definitions.NullClass)
- return "scala.runtime.Null$"
-
- (if (sym.isClass || (sym.isModule && !sym.isMethod)) {
- if (sym.isNestedClass) sym.simpleName
- else sym.fullName
- } else
- sym.simpleName.toString.trim()) + suffix
- }
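-
- // Illustrative examples (hypothetical symbols): a top-level class pkg.Foo maps to
- // "pkg.Foo" and its module class to "pkg.Foo$" (module suffix); a nested class contributes
- // only its simple name, since nesting is expressed through the enclosing TypeBuilder
- // (see createTypeBuilder).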
-
-
- ////////////////////// flags ///////////////////////
-
- def msilTypeFlags(sym: Symbol): Int = {
- var mf: Int = TypeAttributes.AutoLayout | TypeAttributes.AnsiClass
-
- if(sym.isNestedClass) {
- mf = mf | (if (sym hasFlag Flags.PRIVATE) TypeAttributes.NestedPrivate else TypeAttributes.NestedPublic)
- } else {
- mf = mf | (if (sym hasFlag Flags.PRIVATE) TypeAttributes.NotPublic else TypeAttributes.Public)
- }
- mf = mf | (if (sym hasFlag Flags.ABSTRACT) TypeAttributes.Abstract else 0)
- mf = mf | (if (sym.isTrait && !sym.isImplClass) TypeAttributes.Interface else TypeAttributes.Class)
- mf = mf | (if (sym isFinal) TypeAttributes.Sealed else 0)
-
- sym.annotations foreach { a => a match {
- case AnnotationInfo(SerializableAttr, _, _) =>
- // TODO: add the Serializable TypeAttribute also if the annotation
- // System.SerializableAttribute is present (.net annotation, not scala)
- // Best way to do it: compare with
- // definitions.getClass("System.SerializableAttribute").tpe
- // when frontend available
- mf = mf | TypeAttributes.Serializable
- case _ => ()
- }}
-
- mf
- // static: not possible (or?)
- }
-
- def msilMethodFlags(sym: Symbol): Short = {
- var mf: Int = MethodAttributes.HideBySig |
- (if (sym hasFlag Flags.PRIVATE) MethodAttributes.Private
- else MethodAttributes.Public)
-
- if (!sym.isClassConstructor) {
- if (sym.isStaticMember)
- mf = mf | FieldAttributes.Static // coincidentally, same value as for MethodAttributes.Static ...
- else {
- mf = mf | MethodAttributes.Virtual
- if (sym.isFinal && !getType(sym.owner).IsInterface)
- mf = mf | MethodAttributes.Final
- if (sym.isDeferred || getType(sym.owner).IsInterface)
- mf = mf | MethodAttributes.Abstract
- }
- }
-
- if (sym.isStaticMember) {
- mf = mf | MethodAttributes.Static
- }
-
- // constructors of module classes should be private
- if (sym.isPrimaryConstructor && isTopLevelModule(sym.owner)) {
- mf |= MethodAttributes.Private
- mf &= ~(MethodAttributes.Public)
- }
-
- mf.toShort
- }
-
- def msilFieldFlags(sym: Symbol): Short = {
- var mf: Int =
- if (sym hasFlag Flags.PRIVATE) FieldAttributes.Private
- else if (sym hasFlag Flags.PROTECTED) FieldAttributes.FamORAssem
- else FieldAttributes.Public
-
- if (sym hasFlag Flags.FINAL)
- mf = mf | FieldAttributes.InitOnly
-
- if (sym.isStaticMember)
- mf = mf | FieldAttributes.Static
-
- // TRANSIENT: "not serialized", VOLATILE: doesn't exist on .net
- // TODO: add this annotation also if the class has the custom attribute
- // System.NotSerializedAttribute
- sym.annotations.foreach( a => a match {
- case AnnotationInfo(TransientAtt, _, _) =>
- mf = mf | FieldAttributes.NotSerialized
- case _ => ()
- })
-
- mf.toShort
- }
-
- ////////////////////// builders, types ///////////////////////
-
- var entryPoint: Symbol = _
-
- val notInitializedModules = mutable.HashSet[Symbol]()
-
- // TODO: create fields also in def createType, and not in genClass,
- // add a getField method (it only works as it is because fields are never
- // accessed from outside a class)
-
- val localBuilders = mutable.HashMap[Local, LocalBuilder]()
-
- private[GenMSIL] def findEntryPoint(cls: IClass) {
-
- def isEntryPoint(sym: Symbol):Boolean = {
- if (isStaticModule(sym.owner) && msilName(sym) == "main")
- if (sym.tpe.paramTypes.length == 1) {
- toTypeKind(sym.tpe.paramTypes(0)) match {
- case ARRAY(elem) =>
- if (elem.toType.typeSymbol == definitions.StringClass) {
- return true
- }
- case _ => ()
- }
- }
- false
- }
-
- if((entryPoint == null) && opt.showClass.isDefined) { // TODO introduce dedicated setting instead
- val entryclass = opt.showClass.get.toString
- val cfn = cls.symbol.fullName
- if(cfn == entryclass) {
- for (m <- cls.methods; if isEntryPoint(m.symbol)) { entryPoint = m.symbol }
- if(entryPoint == null) { warning("Couldn't find main method in class " + cfn) }
- }
- }
-
- if (firstSourceName == "")
- if (cls.symbol.sourceFile != null) // is null for nested classes
- firstSourceName = cls.symbol.sourceFile.name
- }
-
- // #####################################################################
- // get and create types
-
- private def msilType(t: TypeKind): MsilType = (t: @unchecked) match {
- case UNIT => MVOID
- case BOOL => MBOOL
- case BYTE => MBYTE
- case SHORT => MSHORT
- case CHAR => MCHAR
- case INT => MINT
- case LONG => MLONG
- case FLOAT => MFLOAT
- case DOUBLE => MDOUBLE
- case REFERENCE(cls) => getType(cls)
- case ARRAY(elem) =>
- msilType(elem) match {
- // For type builders, cannot call "clrTypes.mkArrayType" because this looks up
- // the type "tp" in the assembly (not in the HashMap "types" of the backend).
- // This can fail for nested types because the builders are not complete yet.
- case tb: TypeBuilder => tb.MakeArrayType()
- case tp: MsilType => clrTypes.mkArrayType(tp)
- }
- }
-
- private def msilType(tpe: Type): MsilType = msilType(toTypeKind(tpe))
-
- private def msilParamTypes(sym: Symbol): Array[MsilType] = {
- sym.tpe.paramTypes.map(msilType).toArray
- }
-
- def getType(sym: Symbol) = getTypeOpt(sym).getOrElse(abort(showsym(sym)))
-
- /**
- * Get an MSIL type from a symbol. First look in the clrTypes.types map, then
- * lookup the name using clrTypes.getType
- */
- def getTypeOpt(sym: Symbol): Option[MsilType] = {
- val tmp = types.get(sym)
- tmp match {
- case typ @ Some(_) => typ
- case None =>
- def typeString(sym: Symbol): String = {
- val s = if (sym.isNestedClass) typeString(sym.owner) +"+"+ sym.simpleName
- else sym.fullName
- if (sym.isModuleClass && !sym.isTrait) s + "$" else s
- }
- val name = typeString(sym)
- val typ = clrTypes.getType(name)
- if (typ == null)
- None
- else {
- types(sym) = typ
- Some(typ)
- }
- }
- }
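-
- // Illustrative example of `typeString` above (hypothetical symbols): a class D nested in
- // object a.b.C is looked up as "a.b.C$+D" -- nested types use '+' as the separator and
- // module classes get the '$' suffix.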
-
- def mapType(sym: Symbol, mType: MsilType) {
- assert(mType != null, showsym(sym))
- types(sym) = mType
- }
-
- def createTypeBuilder(iclass: IClass) {
- /**
- * First look in the clrTypes.types map, if that fails check if it's a class being compiled, otherwise
- * lookup by name (clrTypes.getType calls the static method msil.Type.GetType(fullname)).
- */
- def msilTypeFromSym(sym: Symbol): MsilType = {
- types.get(sym).getOrElse {
- classes.get(sym) match {
- case Some(iclass) =>
- msilTypeBuilderFromSym(sym)
- case None =>
- getType(sym)
- }
- }
- }
-
- def msilTypeBuilderFromSym(sym: Symbol): TypeBuilder = {
- if(!(types.contains(sym) && types(sym).isInstanceOf[TypeBuilder])){
- val iclass = classes(sym)
- assert(iclass != null)
- createTypeBuilder(iclass)
- }
- types(sym).asInstanceOf[TypeBuilder]
- }
-
- val sym = iclass.symbol
- if (types.contains(sym) && types(sym).isInstanceOf[TypeBuilder])
- return
-
- def isInterface(s: Symbol) = s.isTrait && !s.isImplClass
- val parents: List[Type] =
- if (sym.info.parents.isEmpty) List(definitions.ObjectClass.tpe)
- else sym.info.parents.distinct
-
- val superType : MsilType = if (isInterface(sym)) null else msilTypeFromSym(parents.head.typeSymbol)
- debuglog("super type: " + parents(0).typeSymbol + ", msil type: " + superType)
-
- val interfaces: Array[MsilType] =
- parents.tail.map(p => msilTypeFromSym(p.typeSymbol)).toArray
- if (parents.length > 1) {
- if (settings.debug.value) {
- log("interfaces:")
- for (i <- 0.until(interfaces.length)) {
- log(" type: " + parents(i + 1).typeSymbol + ", msil type: " + interfaces(i))
- }
- }
- }
-
- val tBuilder = if (sym.isNestedClass) {
- val ownerT = msilTypeBuilderFromSym(sym.owner).asInstanceOf[TypeBuilder]
- ownerT.DefineNestedType(msilName(sym), msilTypeFlags(sym), superType, interfaces)
- } else {
- mmodule.DefineType(msilName(sym), msilTypeFlags(sym), superType, interfaces)
- }
- mapType(sym, tBuilder)
- } // createTypeBuilder
-
- def createClassMembers(iclass: IClass) {
- try {
- createClassMembers0(iclass)
- }
- catch {
- case e: Throwable =>
- java.lang.System.err.println(showsym(iclass.symbol))
- java.lang.System.err.println("with methods = " + iclass.methods)
- throw e
- }
- }
-
- def createClassMembers0(iclass: IClass) {
-
- val mtype = getType(iclass.symbol).asInstanceOf[TypeBuilder]
-
- for (ifield <- iclass.fields) {
- val sym = ifield.symbol
- debuglog("Adding field: " + sym.fullName)
-
- var attributes = msilFieldFlags(sym)
- val fieldTypeWithCustomMods =
- new PECustomMod(msilType(sym.tpe),
- customModifiers(sym.annotations))
- val fBuilder = mtype.DefineField(msilName(sym),
- fieldTypeWithCustomMods,
- attributes)
- fields(sym) = fBuilder
- addAttributes(fBuilder, sym.annotations)
- } // all iclass.fields iterated over
-
- if (isStaticModule(iclass.symbol)) {
- val sc = iclass.lookupStaticCtor
- if (sc.isDefined) {
- val m = sc.get
- val oldLastBlock = m.lastBlock
- val lastBlock = m.newBlock()
- oldLastBlock.replaceInstruction(oldLastBlock.length - 1, JUMP(lastBlock))
- // call object's private ctor from static ctor
- lastBlock.emit(CIL_NEWOBJ(iclass.symbol.primaryConstructor))
- lastBlock.emit(DROP(toTypeKind(iclass.symbol.tpe)))
- lastBlock emit RETURN(UNIT)
- lastBlock.close
- }
- }
-
- if (iclass.symbol != definitions.ArrayClass) {
- for (m: IMethod <- iclass.methods) {
- val sym = m.symbol
- debuglog("Creating MethodBuilder for " + Flags.flagsToString(sym.flags) + " " +
- sym.owner.fullName + "::" + sym.name)
-
- val ownerType = getType(sym.enclClass).asInstanceOf[TypeBuilder]
- assert(mtype == ownerType, "mtype = " + mtype + "; ownerType = " + ownerType)
- var paramTypes = msilParamTypes(sym)
- val attr = msilMethodFlags(sym)
-
- if (m.symbol.isClassConstructor) {
- val constr =
- ownerType.DefineConstructor(attr, CallingConventions.Standard, paramTypes)
- for (i <- 0.until(paramTypes.length)) {
- constr.DefineParameter(i, ParameterAttributes.None, msilName(m.params(i).sym))
- }
- mapConstructor(sym, constr)
- addAttributes(constr, sym.annotations)
- } else {
- var resType = msilType(m.returnType)
- val method =
- ownerType.DefineMethod(msilName(sym), attr, resType, paramTypes)
- for (i <- 0.until(paramTypes.length)) {
- method.DefineParameter(i, ParameterAttributes.None, msilName(m.params(i).sym))
- }
- if (!methods.contains(sym))
- mapMethod(sym, method)
- addAttributes(method, sym.annotations)
- debuglog("\t created MethodBuilder " + method)
- }
- }
- } // method builders created for non-array iclass
-
- if (isStaticModule(iclass.symbol)) {
- addModuleInstanceField(iclass.symbol)
- notInitializedModules += iclass.symbol
- if (iclass.lookupStaticCtor.isEmpty) {
- addStaticInit(iclass.symbol)
- }
- }
-
- } // createClassMembers0
-
- private def isTopLevelModule(sym: Symbol): Boolean =
- beforeRefchecks {
- sym.isModuleClass && !sym.isImplClass && !sym.isNestedClass
- }
-
- // if the module is lifted it does not need to be initialized in
- // its static constructor, and the MODULE$ field is not required.
- // the outer class will care about it.
- private def isStaticModule(sym: Symbol): Boolean = {
- // .net inner classes: removed '!sym.hasFlag(Flags.LIFTED)', added
- // 'sym.isStatic'. -> no longer compatible without skipping flatten!
- sym.isModuleClass && sym.isStatic && !sym.isImplClass
- }
-
- private def isCloneable(sym: Symbol): Boolean = {
- !sym.annotations.forall( a => a match {
- case AnnotationInfo(CloneableAttr, _, _) => false
- case _ => true
- })
- }
-
- private def addModuleInstanceField(sym: Symbol) {
- debuglog("Adding Module-Instance Field for " + showsym(sym))
- val tBuilder = getType(sym).asInstanceOf[TypeBuilder]
- val fb = tBuilder.DefineField(MODULE_INSTANCE_NAME,
- tBuilder,
- (FieldAttributes.Public |
- //FieldAttributes.InitOnly |
- FieldAttributes.Static).toShort)
- fields(sym) = fb
- }
-
-
- // the symbol may be a object-symbol (module-symbol), or a module-class-symbol
- private def getModuleInstanceField(sym: Symbol): FieldInfo = {
- assert(sym.isModule || sym.isModuleClass, "Expected module: " + showsym(sym))
-
- // when called by LOAD_MODULE, the corresponding type maybe doesn't
- // exist yet -> make a getType
- val moduleClassSym = if (sym.isModule) sym.moduleClass else sym
-
- // TODO: get module field for modules not defined in the
- // source currently compiling (e.g. Console)
-
- fields get moduleClassSym match {
- case Some(sym) => sym
- case None =>
- //val mclass = types(moduleClassSym)
- val nameInMetadata = nestingAwareFullClassname(moduleClassSym)
- val mClass = clrTypes.getType(nameInMetadata)
- val mfield = mClass.GetField("MODULE$")
- assert(mfield ne null, "module not found " + showsym(moduleClassSym))
- fields(moduleClassSym) = mfield
- mfield
- }
-
- //fields(moduleClassSym)
- }
-
- def nestingAwareFullClassname(csym: Symbol) : String = {
- val suffix = csym.moduleSuffix
- val res = if (csym.isNestedClass)
- nestingAwareFullClassname(csym.owner) + "+" + csym.encodedName
- else
- csym.fullName
- res + suffix
- }
-
- /** Adds a static initializer which creates an instance of the module
- * class (calls the primary constructor). A special primary constructor
- * will be generated (notInitializedModules) which stores the new instance
- * in the MODULE$ field right after the super call.
- */
- private def addStaticInit(sym: Symbol) {
- val tBuilder = getType(sym).asInstanceOf[TypeBuilder]
-
- val staticInit = tBuilder.DefineConstructor(
- (MethodAttributes.Static | MethodAttributes.Public).toShort,
- CallingConventions.Standard,
- MsilType.EmptyTypes)
-
- val sicode = staticInit.GetILGenerator()
-
- val instanceConstructor = constructors(sym.primaryConstructor)
-
- // there are no constructor parameters. assuming the constructor takes no parameter
- // is fine: we call (in the static constructor) the constructor of the module class,
- // which takes no arguments - an object definition cannot take constructor arguments.
- sicode.Emit(OpCodes.Newobj, instanceConstructor)
- // the stsfld is done in the instance constructor, just after the super call.
- sicode.Emit(OpCodes.Pop)
-
- sicode.Emit(OpCodes.Ret)
- }
-
- private def generateMirrorClass(sym: Symbol) {
- val tBuilder = getType(sym)
- assert(sym.isModuleClass, "Can't generate Mirror-Class for the Non-Module class " + sym)
- debuglog("Dumping mirror class for object: " + sym)
- val moduleName = msilName(sym)
- val mirrorName = moduleName.substring(0, moduleName.length() - 1)
- val mirrorTypeBuilder = mmodule.DefineType(mirrorName,
- TypeAttributes.Class |
- TypeAttributes.Public |
- TypeAttributes.Sealed,
- MOBJECT,
- MsilType.EmptyTypes)
-
- val iclass = classes(sym)
-
- for (m <- sym.tpe.nonPrivateMembers
- if m.owner != definitions.ObjectClass && !m.isProtected &&
- m.isMethod && !m.isClassConstructor && !m.isStaticMember && !m.isCase &&
- !m.isDeferred)
- {
- debuglog(" Mirroring method: " + m)
- val paramTypes = msilParamTypes(m)
- val paramNames: Array[String] = new Array[String](paramTypes.length)
- for (i <- 0 until paramTypes.length)
- paramNames(i) = "x_" + i
-
- // CHECK: verify if getMethodName is better than msilName
- val mirrorMethod = mirrorTypeBuilder.DefineMethod(msilName(m),
- (MethodAttributes.Public |
- MethodAttributes.Static).toShort,
- msilType(m.tpe.resultType),
- paramTypes)
-
- var i = 0
- while (i < paramTypes.length) {
- mirrorMethod.DefineParameter(i, ParameterAttributes.None, paramNames(i))
- i += 1
- }
-
- val mirrorCode = mirrorMethod.GetILGenerator()
- mirrorCode.Emit(OpCodes.Ldsfld, getModuleInstanceField(sym))
- val mInfo = getMethod(m)
- for (paramidx <- 0.until(paramTypes.length)) {
- val mInfoParams = mInfo.GetParameters
- val loadAddr = mInfoParams(paramidx).ParameterType.IsByRef
- loadArg(mirrorCode, loadAddr)(paramidx)
- }
-
- mirrorCode.Emit(OpCodes.Callvirt, getMethod(m))
- mirrorCode.Emit(OpCodes.Ret)
- }
-
- addSymtabAttribute(sym.sourceModule, mirrorTypeBuilder)
-
- mirrorTypeBuilder.CreateType()
- mirrorTypeBuilder.setSourceFilepath(iclass.cunit.source.file.path)
- }
-
-
- // #####################################################################
- // delegate callers
-
- var delegateCallers: TypeBuilder = _
- var nbDelegateCallers: Int = 0
-
- private def initDelegateCallers() = {
- delegateCallers = mmodule.DefineType("$DelegateCallers", TypeAttributes.Public |
- TypeAttributes.Sealed)
- }
-
- private def createDelegateCaller(functionType: Type, delegateType: Type) = {
- if (delegateCallers == null)
- initDelegateCallers()
- // create a field and store the function-object
- val mFunctionType: MsilType = msilType(functionType)
- val anonfunField: FieldBuilder = delegateCallers.DefineField(
- "$anonfunField$$" + nbDelegateCallers, mFunctionType,
- (FieldAttributes.InitOnly | FieldAttributes.Public | FieldAttributes.Static).toShort)
- mcode.Emit(OpCodes.Stsfld, anonfunField)
-
-
- // create the static caller method and the delegate object
- val (params, returnType) = delegateType.member(nme.apply).tpe match {
- case MethodType(delParams, delReturn) => (delParams, delReturn)
- case _ => abort("not a delegate type: " + delegateType)
- }
- val caller: MethodBuilder = delegateCallers.DefineMethod(
- "$delegateCaller$$" + nbDelegateCallers,
- (MethodAttributes.Final | MethodAttributes.Public | MethodAttributes.Static).toShort,
- msilType(returnType), (params map (_.tpe)).map(msilType).toArray)
- for (i <- 0 until params.length)
- caller.DefineParameter(i, ParameterAttributes.None, "arg" + i) // FIXME: use name of parameter symbol
- val delegCtor = msilType(delegateType).GetConstructor(Array(MOBJECT, INT_PTR))
- mcode.Emit(OpCodes.Ldnull)
- mcode.Emit(OpCodes.Ldftn, caller)
- mcode.Emit(OpCodes.Newobj, delegCtor)
-
-
- // create the static caller method body
- val functionApply: MethodInfo = getMethod(functionType.member(nme.apply))
- val dcode: ILGenerator = caller.GetILGenerator()
- dcode.Emit(OpCodes.Ldsfld, anonfunField)
- for (i <- 0 until params.length) {
- loadArg(dcode, false /* TODO confirm whether passing actual as-is to formal is correct wrt the ByRef attribute of the param */)(i)
- emitBox(dcode, toTypeKind(params(i).tpe))
- }
- dcode.Emit(OpCodes.Callvirt, functionApply)
- emitUnbox(dcode, toTypeKind(returnType))
- dcode.Emit(OpCodes.Ret)
-
- nbDelegateCallers = nbDelegateCallers + 1
-
- } //def createDelegateCaller
-
- def emitBox(code: ILGenerator, boxType: TypeKind) = (boxType: @unchecked) match {
- // doesn't make sense, unit as parameter..
- case UNIT => code.Emit(OpCodes.Ldsfld, boxedUnit)
- case BOOL | BYTE | SHORT | CHAR | INT | LONG | FLOAT | DOUBLE =>
- code.Emit(OpCodes.Box, msilType(boxType))
- case REFERENCE(cls) if clrTypes.isValueType(cls) =>
- code.Emit(OpCodes.Box, (msilType(boxType)))
- case REFERENCE(_) | ARRAY(_) =>
- warning("Tried to BOX a non-valuetype.")
- ()
- }
-
- def emitUnbox(code: ILGenerator, boxType: TypeKind) = (boxType: @unchecked) match {
- case UNIT => code.Emit(OpCodes.Pop)
- /* (1) it's essential to keep the code emitted here (as of now plain calls to System.Convert.ToBlaBla methods)
- behaviorally.equiv.wrt. BoxesRunTime.unboxToBlaBla methods
- (case null: that's easy, case boxed: track changes to unboxBlaBla)
- (2) See also: asInstanceOf to cast from Any to number,
- tracked in http://lampsvn.epfl.ch/trac/scala/ticket/4437 */
- case BOOL => code.Emit(OpCodes.Call, toBool)
- case BYTE => code.Emit(OpCodes.Call, toSByte)
- case SHORT => code.Emit(OpCodes.Call, toShort)
- case CHAR => code.Emit(OpCodes.Call, toChar)
- case INT => code.Emit(OpCodes.Call, toInt)
- case LONG => code.Emit(OpCodes.Call, toLong)
- case FLOAT => code.Emit(OpCodes.Call, toFloat)
- case DOUBLE => code.Emit(OpCodes.Call, toDouble)
- case REFERENCE(cls) if clrTypes.isValueType(cls) =>
- code.Emit(OpCodes.Unbox, msilType(boxType))
- code.Emit(OpCodes.Ldobj, msilType(boxType))
- case REFERENCE(_) | ARRAY(_) =>
- warning("Tried to UNBOX a non-valuetype.")
- ()
- }
-
- // #####################################################################
- // get and create methods / constructors
-
- def getConstructor(sym: Symbol): ConstructorInfo = constructors.get(sym) match {
- case Some(constr) => constr
- case None =>
- val mClass = getType(sym.owner)
- val constr = mClass.GetConstructor(msilParamTypes(sym))
- if (constr eq null) {
- java.lang.System.out.println("Cannot find constructor " + sym.owner + "::" + sym.name)
- java.lang.System.out.println("scope = " + sym.owner.tpe.decls)
- abort(sym.fullName)
- }
- else {
- mapConstructor(sym, constr)
- constr
- }
- }
-
- def mapConstructor(sym: Symbol, cInfo: ConstructorInfo) = {
- constructors(sym) = cInfo
- }
-
- private def getMethod(sym: Symbol): MethodInfo = {
-
- methods.get(sym) match {
- case Some(method) => method
- case None =>
- val mClass = getType(sym.owner)
- try {
- val method = mClass.GetMethod(msilName(sym), msilParamTypes(sym),
- msilType(sym.tpe.resultType))
- if (method eq null) {
- java.lang.System.out.println("Cannot find method " + sym.owner + "::" + msilName(sym))
- java.lang.System.out.println("scope = " + sym.owner.tpe.decls)
- abort(sym.fullName)
- }
- else {
- mapMethod(sym, method)
- method
- }
- }
- catch {
- case e: Exception =>
- Console.println("While looking up " + mClass + "::" + sym.nameString)
- Console.println("\t" + showsym(sym))
- throw e
- }
- }
- }
-
- /*
- * add a mapping between sym and mInfo
- */
- private def mapMethod(sym: Symbol, mInfo: MethodInfo) {
- assert (mInfo != null, mInfo)
- methods(sym) = mInfo
- }
-
- /*
- * add mapping between sym and method with newName, paramTypes of newClass
- */
- private def mapMethod(sym: Symbol, newClass: MsilType, newName: String, paramTypes: Array[MsilType]) {
- val methodInfo = newClass.GetMethod(newName, paramTypes)
- assert(methodInfo != null, "Can't find mapping for " + sym + " -> " +
- newName + "(" + paramTypes + ")")
- mapMethod(sym, methodInfo)
- if (methodInfo.IsStatic)
- dynToStatMapped += sym
- }
-
- /*
- * add mapping between method with name and paramTypes of clazz to
- * method with newName and newParamTypes of newClass (used for instance
- * for "wait")
- */
- private def mapMethod(
- clazz: Symbol, name: Name, paramTypes: Array[Type],
- newClass: MsilType, newName: String, newParamTypes: Array[MsilType]) {
- val methodSym = lookupMethod(clazz, name, paramTypes)
- assert(methodSym != null, "cannot find method " + name + "(" +
- paramTypes + ")" + " in class " + clazz)
- mapMethod(methodSym, newClass, newName, newParamTypes)
- }
-
- /*
- * add mapping for member with name and paramTypes to member
- * newName of newClass (same parameters)
- */
- private def mapMethod(
- clazz: Symbol, name: Name, paramTypes: Array[Type],
- newClass: MsilType, newName: String) {
- mapMethod(clazz, name, paramTypes, newClass, newName, paramTypes map msilType)
- }
-
- /*
- * add mapping for all methods with name of clazz to the corresponding
- * method (same parameters) with newName of newClass
- */
- private def mapMethod(
- clazz: Symbol, name: Name,
- newClass: MsilType, newName: String) {
- val memberSym: Symbol = clazz.tpe.member(name)
- memberSym.tpe match {
- // alternatives: List[Symbol]
- case OverloadedType(_, alternatives) =>
- alternatives.foreach(s => mapMethod(s, newClass, newName, msilParamTypes(s)))
-
- // paramTypes: List[Type], resType: Type
- case MethodType(params, resType) =>
- mapMethod(memberSym, newClass, newName, msilParamTypes(memberSym))
-
- case _ =>
- abort("member not found: " + clazz + ", " + name)
- }
- }
-
-
- /*
- * find the method in clazz with name and paramTypes
- */
- private def lookupMethod(clazz: Symbol, name: Name, paramTypes: Array[Type]): Symbol = {
- val memberSym = clazz.tpe.member(name)
- memberSym.tpe match {
- case OverloadedType(_, alternatives) =>
- alternatives.find(s => {
- var i: Int = 0
- var typesOK: Boolean = true
- if (paramTypes.length == s.tpe.paramTypes.length) {
- while(i < paramTypes.length) {
- if (paramTypes(i) != s.tpe.paramTypes(i))
- typesOK = false
- i += 1
- }
- } else {
- typesOK = false
- }
- typesOK
- }) match {
- case Some(sym) => sym
- case None => abort("member of " + clazz + ", " + name + "(" +
- paramTypes + ") not found")
- }
-
- case MethodType(_, _) => memberSym
-
- case _ => abort("member not found: " + name + " of " + clazz)
- }
- }
-
- private def showsym(sym: Symbol): String = (sym.toString +
- "\n symbol = " + Flags.flagsToString(sym.flags) + " " + sym +
- "\n owner = " + Flags.flagsToString(sym.owner.flags) + " " + sym.owner
- )
-
- } // class BytecodeGenerator
-
-} // class GenMSIL
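The addStaticInit/addModuleInstanceField machinery removed above emits, for every Scala object, a static MODULE$ field plus a static constructor that instantiates the module. For readers unfamiliar with that lowering, here is a minimal source-level sketch; the object name Config is invented for illustration and none of this code is part of the commit.

// A plain Scala object ...
object Config {
  val verbose: Boolean = false
}

// Conceptually, the backend lowers this to a class with a static MODULE$ field;
// the static constructor news up the instance, and the generated primary constructor
// stores `this` into MODULE$ right after its super call. Accessing Config.verbose
// then amounts to a static field load of MODULE$ followed by an instance call.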
diff --git a/src/compiler/scala/tools/nsc/backend/opt/ClosureElimination.scala b/src/compiler/scala/tools/nsc/backend/opt/ClosureElimination.scala
index 23f932b5b4..bde17b28fc 100644
--- a/src/compiler/scala/tools/nsc/backend/opt/ClosureElimination.scala
+++ b/src/compiler/scala/tools/nsc/backend/opt/ClosureElimination.scala
@@ -7,7 +7,6 @@ package scala.tools.nsc
package backend.opt
import scala.tools.nsc.backend.icode.analysis.LubException
-import scala.tools.nsc.symtab._
/**
* @author Iulian Dragos
@@ -72,8 +71,10 @@ abstract class ClosureElimination extends SubComponent {
def name = phaseName
val closser = new ClosureElim
- override def apply(c: IClass): Unit =
- closser analyzeClass c
+ override def apply(c: IClass): Unit = {
+ if (closser ne null)
+ closser analyzeClass c
+ }
}
/**
@@ -83,7 +84,7 @@ abstract class ClosureElimination extends SubComponent {
*
*/
class ClosureElim {
- def analyzeClass(cls: IClass): Unit = if (settings.Xcloselim.value) {
+ def analyzeClass(cls: IClass): Unit = if (settings.Xcloselim) {
log(s"Analyzing ${cls.methods.size} methods in $cls.")
cls.methods foreach { m =>
analyzeMethod(m)
@@ -97,7 +98,7 @@ abstract class ClosureElimination extends SubComponent {
/* Some embryonic copy propagation. */
def analyzeMethod(m: IMethod): Unit = try {if (m.hasCode) {
cpp.init(m)
- cpp.run
+ cpp.run()
m.linearizedBlocks() foreach { bb =>
var info = cpp.in(bb)
@@ -109,7 +110,7 @@ abstract class ClosureElimination extends SubComponent {
val t = info.getBinding(l)
t match {
case Deref(This) | Const(_) =>
- bb.replaceInstruction(i, valueToInstruction(t));
+ bb.replaceInstruction(i, valueToInstruction(t))
debuglog(s"replaced $i with $t")
case _ =>
@@ -120,7 +121,7 @@ abstract class ClosureElimination extends SubComponent {
case LOAD_FIELD(f, false) /* if accessible(f, m.symbol) */ =>
def replaceFieldAccess(r: Record) {
- val Record(cls, bindings) = r
+ val Record(cls, _) = r
info.getFieldNonRecordValue(r, f) foreach { v =>
bb.replaceInstruction(i, DROP(REFERENCE(cls)) :: valueToInstruction(v) :: Nil)
debuglog(s"replaced $i with $v")
@@ -188,28 +189,20 @@ abstract class ClosureElimination extends SubComponent {
case Boxed(LocalVar(v)) =>
LOAD_LOCAL(v)
}
-
- /** is field 'f' accessible from method 'm'? */
- def accessible(f: Symbol, m: Symbol): Boolean =
- f.isPublic || (f.isProtected && (f.enclosingPackageClass == m.enclosingPackageClass))
} /* class ClosureElim */
/** Peephole optimization. */
abstract class PeepholeOpt {
-
- private var method: IMethod = NoIMethod
-
/** Concrete implementations will perform their optimizations here */
def peep(bb: BasicBlock, i1: Instruction, i2: Instruction): Option[List[Instruction]]
var liveness: global.icodes.liveness.LivenessAnalysis = null
def apply(m: IMethod): Unit = if (m.hasCode) {
- method = m
liveness = new global.icodes.liveness.LivenessAnalysis
liveness.init(m)
- liveness.run
+ liveness.run()
m foreachBlock transformBlock
}
@@ -235,7 +228,7 @@ abstract class ClosureElimination extends SubComponent {
h = t.head
t = t.tail
}
- } while (redo);
+ } while (redo)
b fromList newInstructions
}
}
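The copy-propagation change above replaces a LOAD_LOCAL whose reaching definition is a constant or `this` with that value directly. A source-level sketch of the effect (method names are invented for illustration; this is not code from the commit):

def before(): Int = { val x = 1; x + 2 } // the load of x has the known binding Const(1)
def after(): Int  = 1 + 2                // LOAD_LOCAL(x) replaced by CONSTANT(1)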
diff --git a/src/compiler/scala/tools/nsc/backend/opt/ConstantOptimization.scala b/src/compiler/scala/tools/nsc/backend/opt/ConstantOptimization.scala
new file mode 100644
index 0000000000..43c8527f41
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/backend/opt/ConstantOptimization.scala
@@ -0,0 +1,620 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author James Iry
+ */
+
+package scala
+package tools.nsc
+package backend.opt
+
+import scala.tools.nsc.backend.icode.analysis.LubException
+import scala.annotation.tailrec
+
+/**
+ * ConstantOptimization uses abstract interpretation to approximate for
+ * each instruction what constants a variable or stack slot might hold
+ * or cannot hold. From this it will eliminate conditionals where only
+ * one branch is reachable, e.g. to eliminate unnecessary null checks.
+ *
+ * With some more work it could be extended to
+ * - cache stable values (final fields, modules) in locals
+ * - replace the copy propagation in ClosureElimination
+ * - fold constants
+ * - eliminate unnecessary stores and loads
+ * - propagate knowledge gathered from conditionals for further optimization
+ */
+abstract class ConstantOptimization extends SubComponent {
+ import global._
+ import icodes._
+ import icodes.opcodes._
+
+ val phaseName = "constopt"
+
+ /** Create a new phase */
+ override def newPhase(p: Phase) = new ConstantOptimizationPhase(p)
+
+ /**
+ * The constant optimization phase.
+ */
+ class ConstantOptimizationPhase(prev: Phase) extends ICodePhase(prev) {
+
+ def name = phaseName
+
+ override def apply(c: IClass) {
+ if (settings.YconstOptimization) {
+ val analyzer = new ConstantOptimizer
+ analyzer optimizeClass c
+ }
+ }
+ }
+
+ class ConstantOptimizer {
+ def optimizeClass(cls: IClass) {
+ log(s"Analyzing ${cls.methods.size} methods in $cls.")
+ cls.methods foreach { m =>
+ optimizeMethod(m)
+ }
+ }
+
+ def optimizeMethod(m: IMethod) {
+ if (m.hasCode) {
+ log(s"Analyzing ${m.symbol}")
+ val replacementInstructions = interpretMethod(m)
+ for (block <- m.blocks) {
+ if (replacementInstructions contains block) {
+ val instructions = replacementInstructions(block)
+ block.replaceInstruction(block.lastInstruction, instructions)
+ }
+ }
+ }
+ }
+
+ /**
+ * A single possible (or impossible) datum that can be held in Contents
+ */
+ private sealed abstract class Datum
+ /**
+ * A constant datum
+ */
+ private case class Const(c: Constant) extends Datum {
+ def isIntAssignable = c.tag >= BooleanTag && c.tag <= IntTag
+ def toInt = c.tag match {
+ case BooleanTag => if (c.booleanValue) 1 else 0
+ case _ => c.intValue
+ }
+
+ /**
+ * True if this constant would compare to other as true under primitive eq
+ */
+ override def equals(other: Any) = other match {
+ case oc @ Const(o) => (this eq oc) || (if (this.isIntAssignable && oc.isIntAssignable) this.toInt == oc.toInt else c.value == o.value)
+ case _ => false
+ }
+
+ /**
+ * Hash code consistent with equals
+ */
+ override def hashCode = if (this.isIntAssignable) this.toInt else c.hashCode
+
+ }
+ /**
+ * A datum that has been Boxed via a BOX instruction
+ */
+ private case class Boxed(c: Datum) extends Datum
+
+ /**
+ * The knowledge we have about the abstract state of one location in terms
+ * of what constants it might or cannot hold. Forms a lower
+ * lattice where lower elements in the lattice indicate less knowledge.
+ *
+ * With the following partial ordering (where '>' indicates more precise knowledge)
+ *
+ * Possible(xs) > Possible(xs + y)
+ * Possible(xs) > Impossible(ys)
+ * Impossible(xs + y) > Impossible(xs)
+ *
+ * and the following merges, which indicate merging knowledge from two paths through
+ * the code,
+ *
+ * // if left must be 1 or 2 and right must be 2 or 3, then we must have a 1, 2 or 3
+ * Possible(xs) merge Possible(ys) => Possible(xs union ys)
+ *
+ * // Left says can't be 2 or 3, right says can't be 3 or 4
+ * // then it's not 3 (it could be 2 from the right or 4 from the left)
+ * Impossible(xs) merge Impossible(ys) => Impossible(xs intersect ys)
+ *
+ * // Left says it can't be 2 or 3, right says it must be 3 or 4, then
+ * // it can't be 2 (both paths rule out 2, but the right path still allows 3)
+ * Impossible(xs) merge Possible(ys) => Impossible(xs -- ys)
+ *
+ * Intuitively, Possible(empty) says that a location can't hold anything:
+ * it's uninitialized. However, Possible(empty) never appears in the code.
+ *
+ * Conversely, Impossible(empty) says nothing is impossible, it could be
+ * anything. Impossible(empty) is given a synonym UNKNOWN and is used
+ * for, e.g., the result of an arbitrary method call.
+ */
+ private sealed abstract class Contents {
+ /**
+ * Join this Contents with another coming from another path. Join enforces
+ * the lattice structure. It is symmetrical and never moves upward in the
+ * lattice
+ */
+ final def merge(other: Contents): Contents = if (this eq other) this else (this, other) match {
+ case (Possible(possible1), Possible(possible2)) =>
+ Possible(possible1 union possible2)
+ case (Impossible(impossible1), Impossible(impossible2)) =>
+ Impossible(impossible1 intersect impossible2)
+ case (Impossible(impossible), Possible(possible)) =>
+ Impossible(impossible -- possible)
+ case (Possible(possible), Impossible(impossible)) =>
+ Impossible(impossible -- possible)
+ }
+ // TODO we could have more fine-grained knowledge, e.g. know that 0 < x < 3. But for now equality/inequality is a good start.
+ def mightEqual(other: Contents): Boolean
+ def mightNotEqual(other: Contents): Boolean
+ }
+ private def SingleImpossible(x: Datum) = new Impossible(Set(x))
+
+ /**
+ * The location is known to have one of a set of values.
+ */
+ private case class Possible(possible: Set[Datum]) extends Contents {
+ assert(possible.nonEmpty, "Contradiction: had an empty possible set indicating an uninitialized location")
+ def mightEqual(other: Contents): Boolean = (this eq other) || (other match {
+ // two Possibles might be equal if they have any possible members in common
+ case Possible(possible2) => (possible intersect possible2).nonEmpty
+ // a possible can be equal to an impossible if the impossible doesn't rule
+ // out all the possibilities
+ case Impossible(possible2) => (possible -- possible2).nonEmpty
+ })
+ def mightNotEqual(other: Contents): Boolean = (this ne other) && (other match {
+ // two Possibles might not be equal if either has possible members that the other doesn't
+ case Possible(possible2) => (possible -- possible2).nonEmpty || (possible2 -- possible).nonEmpty
+ case Impossible(_) => true
+ })
+ }
+ private def SinglePossible(x: Datum) = new Possible(Set(x))
+
+ /**
+ * The location is known to not have any of a set of values (e.g. null).
+ */
+ private case class Impossible(impossible: Set[Datum]) extends Contents {
+ def mightEqual(other: Contents): Boolean = (this eq other) || (other match {
+ case Possible(_) => other mightEqual this
+ case _ => true
+ })
+ def mightNotEqual(other: Contents): Boolean = (this eq other) || (other match {
+ case Possible(_) => other mightNotEqual this
+ case _ => true
+ })
+ }
+
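To make the merge rules documented above concrete, here is a self-contained sketch over plain Int sets. The names mirror Possible/Impossible, but these are standalone toy classes for illustration, not the compiler's private ones.

sealed trait Facts
case class Possible(xs: Set[Int]) extends Facts
case class Impossible(xs: Set[Int]) extends Facts

def merge(a: Facts, b: Facts): Facts = (a, b) match {
  case (Possible(x), Possible(y))     => Possible(x union y)
  case (Impossible(x), Impossible(y)) => Impossible(x intersect y)
  case (Impossible(x), Possible(y))   => Impossible(x -- y)
  case (Possible(x), Impossible(y))   => Impossible(y -- x)
}

merge(Possible(Set(1, 2)), Possible(Set(2, 3)))     // Possible(Set(1, 2, 3))
merge(Impossible(Set(2, 3)), Impossible(Set(3, 4))) // Impossible(Set(3))
merge(Impossible(Set(2, 3)), Possible(Set(3, 4)))   // Impossible(Set(2))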
+ /**
+ * Our entire knowledge about the contents of all variables and the stack. It forms
+ * a lattice primarily driven by the lattice structure of Contents.
+ *
+ * In addition to the rules of contents, State has the following properties:
+ * - The merge of two sets of locals holds the merges of locals found in the intersection
+ * of the two sets of locals. Locals not found in a
+ * locals map are thus possibly uninitialized and attempting to load them results
+ * in an error.
+ * - The stack heights of two states must match otherwise it's an error to merge them
+ *
+ * State is immutable in order to aid in structure sharing of local maps and stacks
+ */
+ private case class State(locals: Map[Local, Contents], stack: List[Contents]) {
+ def mergeLocals(olocals: Map[Local, Contents]): Map[Local, Contents] = if (locals eq olocals) locals else Map((for {
+ key <- (locals.keySet intersect olocals.keySet).toSeq
+ } yield (key, locals(key) merge olocals(key))): _*)
+
+ def merge(other: State): State = if (this eq other) this else {
+ @tailrec def mergeStacks(l: List[Contents], r: List[Contents], out: List[Contents]): List[Contents] = (l, r) match {
+ case (Nil, Nil) => out.reverse
+ case (l, r) if l eq r => out.reverse ++ l
+ case (lhead :: ltail, rhead :: rtail) => mergeStacks(ltail, rtail, (lhead merge rhead) :: out)
+ case _ => sys.error("Mismatched stack heights")
+ }
+
+ val newLocals = mergeLocals(other.locals)
+
+ val newStack = if (stack eq other.stack) stack else mergeStacks(stack, other.stack, Nil)
+ State(newLocals, newStack)
+ }
+
+ /**
+ * Peek at the nth slot down from the top of the stack without modifying it. Error if the stack is too shallow
+ */
+ def peek(n: Int): Contents = stack(n)
+ /**
+ * Push contents onto a stack
+ */
+ def push(contents: Contents): State = this copy (stack = contents :: stack)
+ /**
+ * Drop n elements from the stack
+ */
+ def drop(number: Int): State = this copy (stack = stack drop number)
+ /**
+ * Store the top of the stack into the specified local. An error if the stack
+ * is empty
+ */
+ def store(variable: Local): State = {
+ val contents = stack.head
+ val newVariables = locals + ((variable, contents))
+ new State(newVariables, stack.tail)
+ }
+ /**
+ * Load the specified local onto the top of the stack. An error if the local is uninitialized.
+ */
+ def load(variable: Local): State = {
+ val contents: Contents = locals.getOrElse(variable, sys.error(s"$variable is not initialized"))
+ push(contents)
+ }
+ /**
+ * A copy of this State with an empty stack
+ */
+ def cleanStack: State = if (stack.isEmpty) this else this copy (stack = Nil)
+ }
+
+ // some precomputed constants
+ private val NULL = Const(Constant(null: Any))
+ private val UNKNOWN = Impossible(Set.empty)
+ private val NOT_NULL = SingleImpossible(NULL)
+ private val CONST_UNIT = SinglePossible(Const(Constant(())))
+ private val CONST_FALSE = SinglePossible(Const(Constant(false)))
+ private val CONST_ZERO_BYTE = SinglePossible(Const(Constant(0: Byte)))
+ private val CONST_ZERO_SHORT = SinglePossible(Const(Constant(0: Short)))
+ private val CONST_ZERO_CHAR = SinglePossible(Const(Constant(0: Char)))
+ private val CONST_ZERO_INT = SinglePossible(Const(Constant(0: Int)))
+ private val CONST_ZERO_LONG = SinglePossible(Const(Constant(0: Long)))
+ private val CONST_ZERO_FLOAT = SinglePossible(Const(Constant(0.0f)))
+ private val CONST_ZERO_DOUBLE = SinglePossible(Const(Constant(0.0d)))
+ private val CONST_NULL = SinglePossible(NULL)
+
+ /**
+ * Given a TypeKind, figure out what '0' means for it, in order to interpret CZJUMP
+ */
+ private def getZeroOf(k: TypeKind): Contents = k match {
+ case UNIT => CONST_UNIT
+ case BOOL => CONST_FALSE
+ case BYTE => CONST_ZERO_BYTE
+ case SHORT => CONST_ZERO_SHORT
+ case CHAR => CONST_ZERO_CHAR
+ case INT => CONST_ZERO_INT
+ case LONG => CONST_ZERO_LONG
+ case FLOAT => CONST_ZERO_FLOAT
+ case DOUBLE => CONST_ZERO_DOUBLE
+ case REFERENCE(_) => CONST_NULL
+ case ARRAY(_) => CONST_NULL
+ case BOXED(_) => CONST_NULL
+ case ConcatClass => abort("no zero of ConcatClass")
+ }
+
+ // normal locals can't be null, so we use null to mean the magic 'this' local
+ private val THIS_LOCAL: Local = null
+
+ /**
+ * interpret a single instruction to find its impact on the abstract state
+ */
+ private def interpretInst(in: State, inst: Instruction): State = {
+ // pop the consumed number of values off the `in` state's stack, producing a new state
+ def dropConsumed: State = in drop inst.consumed
+
+ inst match {
+ case THIS(_) =>
+ in load THIS_LOCAL
+
+ case CONSTANT(k) =>
+ // treat NaN as UNKNOWN because NaN must never equal NaN
+ val const = if (k.isNaN) UNKNOWN
+ else SinglePossible(Const(k))
+ in push const
+
+ case LOAD_ARRAY_ITEM(_) | LOAD_FIELD(_, _) | CALL_PRIMITIVE(_) =>
+ dropConsumed push UNKNOWN
+
+ case LOAD_LOCAL(local) =>
+ // TODO if a local is known to hold a constant then we can replace this instruction with a push of that constant
+ in load local
+
+ case STORE_LOCAL(local) =>
+ in store local
+
+ case STORE_THIS(_) =>
+ // if a local is already known to have a constant and we're replacing with the same constant then we can
+ // replace this with a drop
+ in store THIS_LOCAL
+
+ case CALL_METHOD(_, _) =>
+ // TODO we could special case implementations of equals that are known, e.g. String#equals
+ // We could turn Possible(string constants).equals(Possible(string constants) into an eq check
+ // We could turn nonConstantString.equals(constantString) into constantString.equals(nonConstantString)
+ // and eliminate the null check that likely precedes this call
+ val initial = dropConsumed
+ (0 until inst.produced).foldLeft(initial) { case (know, _) => know push UNKNOWN }
+
+ case BOX(_) =>
+ val value = in peek 0
+ // we simulate boxing by, um, boxing the possible/impossible contents
+ // so if we have Possible(1,2) originally then we'll end up with
+ // a Possible(Boxed(1), Boxed(2))
+ // Similarly, if we know the input is not a 0 then we'll know the
+ // output is not a Boxed(0)
+ val newValue = value match {
+ case Possible(values) => Possible(values map Boxed)
+ case Impossible(values) => Impossible(values map Boxed)
+ }
+ dropConsumed push newValue
+
+ case UNBOX(_) =>
+ val value = in peek 0
+ val newValue = value match {
+ // if we have a Possible, then all the possibilities
+ // should themselves be Boxes. In that
+ // case we can merge them to figure out what the UNBOX will produce
+ case Possible(inners) =>
+ assert(inners.nonEmpty, "Empty possible set indicating an uninitialized location")
+ val sanitized: Set[Contents] = (inners map {
+ case Boxed(content) => SinglePossible(content)
+ case _ => UNKNOWN
+ })
+ sanitized reduce (_ merge _)
+ // if we have an impossible then the thing that's impossible
+ // should be a box. We'll unbox that to see what we get
+ case unknown@Impossible(inners) =>
+ if (inners.isEmpty) {
+ unknown
+ } else {
+ val sanitized: Set[Contents] = (inners map {
+ case Boxed(content) => SingleImpossible(content)
+ case _ => UNKNOWN
+ })
+ sanitized reduce (_ merge _)
+ }
+ }
+ dropConsumed push newValue
+
+ case LOAD_MODULE(_) | NEW(_) | LOAD_EXCEPTION(_) =>
+ in push NOT_NULL
+
+ case CREATE_ARRAY(_, _) =>
+ dropConsumed push NOT_NULL
+
+ case IS_INSTANCE(_) =>
+ // TODO IS_INSTANCE is going to be followed by a C(Z)JUMP
+ // and the branch taken when IS_INSTANCE/C(Z)JUMP succeeds ("true") can
+ // know that whatever was checked was not null
+ // see the TODO on CJUMP for more information about propagating null
+ // information
+ // TODO if the top of stack is guaranteed null then we can eliminate this IS_INSTANCE check and
+ // replace with a constant false, but how often is a knowable null checked for instanceof?
+ // TODO we could track type information and statically know to eliminate IS_INSTANCE
+ // which might be a nice win under specialization
+ dropConsumed push UNKNOWN // it's actually a Possible(true, false), but since the following instruction
+ // will be a conditional jump comparing to true or false, there's
+ // nothing to be gained by being more precise
+
+ case CHECK_CAST(_) =>
+ // TODO we could track type information and statically know to eliminate CHECK_CAST
+ // but that's probably not a huge win
+ in
+
+ case DUP(_) =>
+ val value = in peek 0
+ in push value
+
+ case DROP(_) | MONITOR_ENTER() | MONITOR_EXIT() | STORE_ARRAY_ITEM(_) | STORE_FIELD(_, _) =>
+ dropConsumed
+
+ case SCOPE_ENTER(_) | SCOPE_EXIT(_) =>
+ in
+
+ case JUMP(_) | CJUMP(_, _, _, _) | CZJUMP(_, _, _, _) | RETURN(_) | THROW(_) | SWITCH(_, _) =>
+ dumpClassesAndAbort("Unexpected block ending instruction: " + inst)
+ }
+ }
+ /**
+ * interpret the last instruction of a block, which will be a jump, a conditional branch, a throw, or a return.
+ * It will result in a map from target blocks to the input state computed for that block. It
+ * also computes a replacement list of instructions
+ */
+ private def interpretLast(in: State, inst: Instruction): (Map[BasicBlock, State], List[Instruction]) = {
+ def canSwitch(in1: Contents, tagSet: List[Int]) = {
+ in1 mightEqual Possible(tagSet.toSet map { tag: Int => Const(Constant(tag)) })
+ }
+
+ /* common code for interpreting CJUMP and CZJUMP */
+ def interpretConditional(kind: TypeKind, val1: Contents, val2: Contents, success: BasicBlock, failure: BasicBlock, cond: TestOp): (Map[BasicBlock, State], List[Instruction]) = {
+ // TODO use reaching analysis to update the state in the two branches
+ // e.g. if the comparison was checking null equality on local x
+ // then in the success branch we know x is null and
+ // on the failure branch we know it is not
+ // in fact, with copy propagation we could propagate that knowledge
+ // back through a chain of locations
+ //
+ // TODO if we do all that we need to be careful in the
+ // case that success and failure are the same target block
+ // because we're using a Map and don't want one possible state to clobber the other
+ // alternatively, maybe we should just replace the conditional with a jump if both targets are the same
+
+ def mightEqual = val1 mightEqual val2
+ def mightNotEqual = val1 mightNotEqual val2
+ def guaranteedEqual = mightEqual && !mightNotEqual
+
+ def succPossible = cond match {
+ case EQ => mightEqual
+ case NE => mightNotEqual
+ case LT | GT => !guaranteedEqual // if the two are guaranteed to be equal then they can't be LT/GT
+ case LE | GE => true
+ }
+
+ def failPossible = cond match {
+ case EQ => mightNotEqual
+ case NE => mightEqual
+ case LT | GT => true
+ case LE | GE => !guaranteedEqual // if the two are guaranteed to be equal then they must be LE/GE
+ }
+
+ val out = in drop inst.consumed
+
+ var result = Map[BasicBlock, State]()
+ if (succPossible) {
+ result += ((success, out))
+ }
+
+ if (failPossible) {
+ result += ((failure, out))
+ }
+
+ val replacements = if (result.size == 1) List.fill(inst.consumed)(DROP(kind)) :+ JUMP(result.keySet.head)
+ else inst :: Nil
+
+ (result, replacements)
+ }
+
+ inst match {
+ case JUMP(whereto) =>
+ (Map((whereto, in)), inst :: Nil)
+
+ case CJUMP(success, failure, cond, kind) =>
+ val in1 = in peek 0
+ val in2 = in peek 1
+ interpretConditional(kind, in1, in2, success, failure, cond)
+
+ case CZJUMP(success, failure, cond, kind) =>
+ val in1 = in peek 0
+ val in2 = getZeroOf(kind)
+ interpretConditional(kind, in1, in2, success, failure, cond)
+
+ case SWITCH(tags, labels) =>
+ val in1 = in peek 0
+ val reachableNormalLabels = tags zip labels collect { case (tagSet, label) if canSwitch(in1, tagSet) => label }
+ val reachableLabels = if (labels.lengthCompare(tags.length) > 0) {
+ // if we've got an extra label then it's the default
+ val defaultLabel = labels.last
+ // see if the default is reachable by seeing if the input might be out of the set
+ // of all tags
+ val allTags = Possible(tags.flatten.toSet map { tag: Int => Const(Constant(tag)) })
+ if (in1 mightNotEqual allTags) {
+ reachableNormalLabels :+ defaultLabel
+ } else {
+ reachableNormalLabels
+ }
+ } else {
+ reachableNormalLabels
+ }
+ // TODO similar to the comment in interpretConditional, we should update the State going into each
+ // branch based on which tag is being matched. Also, just like interpretConditional, if target blocks
+ // are the same we need to merge State rather than clobber
+
+ // alternatively, maybe we should simplify the SWITCH to not have the same target labels
+ val newState = in drop inst.consumed
+ val result = Map(reachableLabels map { label => (label, newState) }: _*)
+ if (reachableLabels.size == 1) (result, DROP(INT) :: JUMP(reachableLabels.head) :: Nil)
+ else (result, inst :: Nil)
+
+ // these instructions don't have target blocks
+ // (exceptions are assumed to be reachable from all instructions)
+ case RETURN(_) | THROW(_) =>
+ (Map.empty, inst :: Nil)
+
+ case _ =>
+ dumpClassesAndAbort("Unexpected non-block ending instruction: " + inst)
+ }
+ }
+
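The succPossible/failPossible logic above decides which jump targets remain reachable; when only one does, the conditional is rewritten into DROPs followed by an unconditional JUMP. A standalone sketch of the EQ/NE case, with plain booleans standing in for the lattice queries (illustration only, not compiler code):

sealed trait TestOp
case object EQ extends TestOp
case object NE extends TestOp

// returns (success reachable, failure reachable) given what the lattice says about the operands
def reachable(cond: TestOp, mightEqual: Boolean, mightNotEqual: Boolean): (Boolean, Boolean) =
  cond match {
    case EQ => (mightEqual, mightNotEqual)
    case NE => (mightNotEqual, mightEqual)
  }

// A null check on a value already known to be NOT_NULL: mightEqual = false, mightNotEqual = true.
reachable(EQ, mightEqual = false, mightNotEqual = true)
// -> (false, true): only the failure branch survives, so the jump is replaced by DROP(kind) :: JUMP(failure)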
+ /**
+ * Analyze a single block to find how it transforms an input state into states for its successor blocks.
+ * Also computes a list of instructions to be used to replace its last instruction
+ */
+ private def interpretBlock(in: State, block: BasicBlock): (Map[BasicBlock, State], Map[BasicBlock, State], List[Instruction]) = {
+ debuglog(s"interpreting block $block")
+ // number of instructions excluding the last one
+ val normalCount = block.size - 1
+
+ var exceptionState = in.cleanStack
+ var normalExitState = in
+ var idx = 0
+ while (idx < normalCount) {
+ val inst = block(idx)
+ normalExitState = interpretInst(normalExitState, inst)
+ if (normalExitState.locals ne exceptionState.locals)
+ exceptionState = exceptionState.copy(locals = exceptionState mergeLocals normalExitState.locals)
+ idx += 1
+ }
+
+ val pairs = block.exceptionSuccessors map { b => (b, exceptionState) }
+ val exceptionMap = Map(pairs: _*)
+
+ val (normalExitMap, newInstructions) = interpretLast(normalExitState, block.lastInstruction)
+
+ (normalExitMap, exceptionMap, newInstructions)
+ }
+
+ /**
+ * Analyze a single method to find replacement instructions
+ */
+ private def interpretMethod(m: IMethod): Map[BasicBlock, List[Instruction]] = {
+ import scala.collection.mutable.{ Set => MSet, Map => MMap }
+
+ debuglog(s"interpreting method $m")
+ var iterations = 0
+
+ // initially we know that 'this' is not null and the params are initialized to some unknown value
+ val initThis: Iterator[(Local, Contents)] = if (m.isStatic) Iterator.empty else Iterator.single((THIS_LOCAL, NOT_NULL))
+ val initOtherLocals: Iterator[(Local, Contents)] = m.params.iterator map { param => (param, UNKNOWN) }
+ val initialLocals: Map[Local, Contents] = Map((initThis ++ initOtherLocals).toSeq: _*)
+ val initialState = State(initialLocals, Nil)
+
+ // worklist of basic blocks to process, initially the start block
+ val worklist = MSet(m.startBlock)
+ // worklist of exception basic blocks. They're kept in a separate set so they can be
+ // processed after normal flow basic blocks. That's because exception basic blocks
+ // are more likely to have multiple predecessors and queueing them for later
+ // increases the chances that they'll only need to be interpreted once
+ val exceptionlist = MSet[BasicBlock]()
+ // our current best guess at what the input state is for each block
+ // initially we only know about the start block
+ val inputState = MMap[BasicBlock, State]((m.startBlock, initialState))
+
+ // update the inputState map based on new information from interpreting a block
+ // When the input state of a block changes, add it back to the work list to be
+ // reinterpreted
+ def updateInputStates(outputStates: Map[BasicBlock, State], worklist: MSet[BasicBlock]) {
+ for ((block, newState) <- outputStates) {
+ val oldState = inputState get block
+ val updatedState = oldState map (x => x merge newState) getOrElse newState
+ if (oldState != Some(updatedState)) {
+ worklist add block
+ inputState(block) = updatedState
+ }
+ }
+ }
+
+ // the instructions to be used as the last instructions on each block
+ val replacements = MMap[BasicBlock, List[Instruction]]()
+
+ while (worklist.nonEmpty || exceptionlist.nonEmpty) {
+ if (worklist.isEmpty) {
+ // once the worklist is empty, start processing exception blocks
+ val block = exceptionlist.head
+ exceptionlist remove block
+ worklist add block
+ } else {
+ iterations += 1
+ val block = worklist.head
+ worklist remove block
+ val (normalExitMap, exceptionMap, newInstructions) = interpretBlock(inputState(block), block)
+
+ updateInputStates(normalExitMap, worklist)
+ updateInputStates(exceptionMap, exceptionlist)
+ replacements(block) = newInstructions
+ }
+ }
+
+ debuglog(s"method $m with ${m.blocks.size} reached fixpoint in $iterations iterations")
+ replacements.toMap
+ }
+ }
+}
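interpretMethod above is a standard worklist fixpoint over basic blocks: whenever the merged input state of a block changes, the block is queued to be interpreted again. The shape of that loop on a toy graph of Int nodes with Set[Int] states, where merge is plain set union (names and the lattice are assumptions for illustration, not part of the commit):

import scala.collection.mutable

def fixpoint(start: Int,
             succ: Int => List[Int],
             transfer: (Int, Set[Int]) => Set[Int]): Map[Int, Set[Int]] = {
  val in   = mutable.Map(start -> Set.empty[Int])   // best-known input state per node
  val work = mutable.Set(start)                     // nodes whose input state changed
  while (work.nonEmpty) {
    val b = work.head
    work -= b
    val out = transfer(b, in(b))
    for (s <- succ(b)) {
      val merged = in.get(s).fold(out)(_ union out) // merge with whatever we already believed
      if (!in.get(s).contains(merged)) {            // the input grew: reinterpret the successor
        in(s) = merged
        work += s
      }
    }
  }
  in.toMap
}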
diff --git a/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala b/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala
index 1beed3f420..483bff6467 100644
--- a/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala
+++ b/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala
@@ -8,7 +8,6 @@ package scala.tools.nsc
package backend.opt
import scala.collection.{ mutable, immutable }
-import symtab._
/**
*/
@@ -34,7 +33,7 @@ abstract class DeadCodeElimination extends SubComponent {
val dce = new DeadCode()
override def apply(c: IClass) {
- if (settings.Xdce.value)
+ if (settings.Xdce && (dce ne null))
dce.analyzeClass(c)
}
}
@@ -55,7 +54,7 @@ abstract class DeadCodeElimination extends SubComponent {
}
}
- val rdef = new reachingDefinitions.ReachingDefinitionsAnalysis;
+ val rdef = new reachingDefinitions.ReachingDefinitionsAnalysis
/** Use-def chain: give the reaching definitions at the beginning of given instruction. */
var defs: immutable.Map[InstrLoc, immutable.Set[rdef.lattice.Definition]] = immutable.HashMap.empty
@@ -83,7 +82,7 @@ abstract class DeadCodeElimination extends SubComponent {
def dieCodeDie(m: IMethod) {
if (m.hasCode) {
- debuglog("dead code elimination on " + m);
+ debuglog("dead code elimination on " + m)
dropOf.clear()
localStores.clear()
clobbers.clear()
@@ -105,17 +104,17 @@ abstract class DeadCodeElimination extends SubComponent {
/** collect reaching definitions and initial useful instructions for this method. */
def collectRDef(m: IMethod): Unit = if (m.hasCode) {
- defs = immutable.HashMap.empty; worklist.clear(); useful.clear();
- rdef.init(m);
- rdef.run;
+ defs = immutable.HashMap.empty; worklist.clear(); useful.clear()
+ rdef.init(m)
+ rdef.run()
m foreachBlock { bb =>
useful(bb) = new mutable.BitSet(bb.size)
- var rd = rdef.in(bb);
+ var rd = rdef.in(bb)
for (Pair(i, idx) <- bb.toList.zipWithIndex) {
// utility for adding to worklist
- def moveToWorkList() = moveToWorkListIf(true)
+ def moveToWorkList() = moveToWorkListIf(cond = true)
// utility for (conditionally) adding to worklist
def moveToWorkListIf(cond: Boolean) =
@@ -131,7 +130,7 @@ abstract class DeadCodeElimination extends SubComponent {
case LOAD_LOCAL(_) =>
defs = defs + Pair(((bb, idx)), rd.vars)
- moveToWorkListIf(false)
+ moveToWorkListIf(cond = false)
case STORE_LOCAL(l) =>
/* SI-4935 Check whether a module is stack top, if so mark the instruction that loaded it
@@ -182,8 +181,10 @@ abstract class DeadCodeElimination extends SubComponent {
}
}
moveToWorkListIf(necessary)
+ case LOAD_MODULE(sym) if isLoadNeeded(sym) =>
+ moveToWorkList() // SI-4859 Module initialization might side-effect.
case _ => ()
- moveToWorkListIf(false)
+ moveToWorkListIf(cond = false)
}
rd = rdef.interpret(bb, idx, rd)
}
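The new LOAD_MODULE case above keeps module loads alive because merely referencing an object can run its initializer. A source-level sketch of why that matters (the object name is invented; this is not code from the commit):

object Registry {
  println("registering handlers") // runs the first time Registry is referenced
}

def touch(): Unit = {
  Registry // the value is unused, but the initialization side effect must still happen,
  ()       // so dead code elimination must not drop this LOAD_MODULE
}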
@@ -217,7 +218,7 @@ abstract class DeadCodeElimination extends SubComponent {
// worklist so we also mark their reaching defs as useful - see SI-7060
if (!useful(bb)(idx)) {
useful(bb) += idx
- dropOf.get(bb, idx) foreach {
+ dropOf.get((bb, idx)) foreach {
for ((bb1, idx1) <- _) {
/*
* SI-7060: A drop that we now mark as useful can be reached via several paths,
@@ -339,13 +340,13 @@ abstract class DeadCodeElimination extends SubComponent {
m foreachBlock { bb =>
debuglog(bb + ":")
val oldInstr = bb.toList
- bb.open
- bb.clear
+ bb.open()
+ bb.clear()
for (Pair(i, idx) <- oldInstr.zipWithIndex) {
if (useful(bb)(idx)) {
debuglog(" * " + i + " is useful")
bb.emit(i, i.pos)
- compensations.get(bb, idx) match {
+ compensations.get((bb, idx)) match {
case Some(is) => is foreach bb.emit
case None => ()
}
@@ -373,7 +374,7 @@ abstract class DeadCodeElimination extends SubComponent {
}
}
- if (bb.nonEmpty) bb.close
+ if (bb.nonEmpty) bb.close()
else log(s"empty block encountered in $m")
}
}
@@ -412,13 +413,6 @@ abstract class DeadCodeElimination extends SubComponent {
compensations
}
- private def withClosed[a](bb: BasicBlock)(f: => a): a = {
- if (bb.nonEmpty) bb.close
- val res = f
- if (bb.nonEmpty) bb.open
- res
- }
-
private def findInstruction(bb: BasicBlock, i: Instruction): InstrLoc = {
for (b <- linearizer.linearizeAt(method, bb)) {
val idx = b.toList indexWhere (_ eq i)
diff --git a/src/compiler/scala/tools/nsc/backend/opt/InlineExceptionHandlers.scala b/src/compiler/scala/tools/nsc/backend/opt/InlineExceptionHandlers.scala
index ab238af239..cecabda171 100644
--- a/src/compiler/scala/tools/nsc/backend/opt/InlineExceptionHandlers.scala
+++ b/src/compiler/scala/tools/nsc/backend/opt/InlineExceptionHandlers.scala
@@ -4,7 +4,6 @@
package scala.tools.nsc
package backend.opt
-import scala.util.control.Breaks._
/**
* This optimization phase inlines the exception handlers so that further phases can optimize the code better
@@ -53,7 +52,7 @@ abstract class InlineExceptionHandlers extends SubComponent {
import icodes._
import icodes.opcodes._
- val phaseName = "inlineExceptionHandlers"
+ val phaseName = "inlinehandlers"
/** Create a new phase */
override def newPhase(p: Phase) = new InlineExceptionHandlersPhase(p)
@@ -70,9 +69,9 @@ abstract class InlineExceptionHandlers extends SubComponent {
* -some exception handler duplicates expect the exception on the stack while others expect it in a local
* => Option[Local]
*/
- private val handlerCopies = perRunCaches.newMap[BasicBlock, Option[(Option[Local], BasicBlock)]]
+ private val handlerCopies = perRunCaches.newMap[BasicBlock, Option[(Option[Local], BasicBlock)]]()
/* This map is the inverse of handlerCopies, used to compute the stack of duplicate blocks */
- private val handlerCopiesInverted = perRunCaches.newMap[BasicBlock, (BasicBlock, TypeKind)]
+ private val handlerCopiesInverted = perRunCaches.newMap[BasicBlock, (BasicBlock, TypeKind)]()
private def handlerLocal(bb: BasicBlock): Option[Local] =
for (v <- handlerCopies get bb ; (local, block) <- v ; l <- local) yield l
@@ -89,7 +88,7 @@ abstract class InlineExceptionHandlers extends SubComponent {
/** Apply exception handler inlining to a class */
override def apply(c: IClass): Unit =
- if (settings.inlineHandlers.value) {
+ if (settings.inlineHandlers) {
val startTime = System.currentTimeMillis
currentClass = c
@@ -263,7 +262,7 @@ abstract class InlineExceptionHandlers extends SubComponent {
if (analyzedMethod eq NoIMethod) {
analyzedMethod = bblock.method
tfa.init(bblock.method)
- tfa.run
+ tfa.run()
log(" performed tfa on method: " + bblock.method)
for (block <- bblock.method.blocks.sortBy(_.label))
@@ -358,7 +357,7 @@ abstract class InlineExceptionHandlers extends SubComponent {
}
val caughtException = toTypeKind(caughtClass.tpe)
// copy the exception handler code once again, dropping the LOAD_EXCEPTION
- val copy = handler.code.newBlock
+ val copy = handler.code.newBlock()
copy.emitOnly((handler.iterator drop dropCount).toSeq: _*)
// extend the handlers of the handler to the copy
diff --git a/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala b/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala
index 498db78636..a6eedbd07e 100644
--- a/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala
+++ b/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala
@@ -50,6 +50,7 @@ abstract class Inliners extends SubComponent {
val phaseName = "inliner"
/** Debug - for timing the inliner. */
+ /****
private def timed[T](s: String, body: => T): T = {
val t1 = System.currentTimeMillis()
val res = body
@@ -60,6 +61,7 @@ abstract class Inliners extends SubComponent {
res
}
+ ****/
/** Look up implementation of method 'sym in 'clazz'.
*/
@@ -193,7 +195,7 @@ abstract class Inliners extends SubComponent {
private var currentIClazz: IClass = _
private def warn(pos: Position, msg: String) = currentIClazz.cunit.inlinerWarning(pos, msg)
- private def ownedName(sym: Symbol): String = afterUncurry {
+ private def ownedName(sym: Symbol): String = exitingUncurry {
val count = (
if (!sym.isMethod) 1
else if (sym.owner.isAnonymousFunction) 3
@@ -230,7 +232,7 @@ abstract class Inliners extends SubComponent {
val hasRETURN = containsRETURN(incm.code.blocksList) || (incm.exh exists { eh => containsRETURN(eh.blocks) })
var a: analysis.MethodTFA = null
- if(hasRETURN) { a = new analysis.MethodTFA(incm); a.run }
+ if(hasRETURN) { a = new analysis.MethodTFA(incm); a.run() }
if(forceable) { recentTFAs.put(incm.symbol, (hasRETURN, a)) }
@@ -240,7 +242,7 @@ abstract class Inliners extends SubComponent {
def clearCaches() {
// methods
NonPublicRefs.usesNonPublics.clear()
- recentTFAs.clear
+ recentTFAs.clear()
tfa.knownUnsafe.clear()
tfa.knownSafe.clear()
tfa.knownNever.clear()
@@ -263,7 +265,7 @@ abstract class Inliners extends SubComponent {
}
def analyzeClass(cls: IClass): Unit =
- if (settings.inline.value) {
+ if (settings.inline) {
inlineLog("class", s"${cls.symbol.decodedName}", s"analyzing ${cls.methods.size} methods in $cls")
this.currentIClazz = cls
@@ -279,7 +281,7 @@ abstract class Inliners extends SubComponent {
}
val tfa = new analysis.MTFAGrowable()
- tfa.stat = global.opt.printStats
+ tfa.stat = global.settings.Ystatistics.value
val staleOut = new mutable.ListBuffer[BasicBlock]
val splicedBlocks = mutable.Set.empty[BasicBlock]
val staleIn = mutable.Set.empty[BasicBlock]
@@ -317,11 +319,11 @@ abstract class Inliners extends SubComponent {
* */
def analyzeMethod(m: IMethod): Unit = {
// m.normalize
- if (settings.debug.value)
+ if (settings.debug)
inlineLog("caller", ownedName(m.symbol), "in " + m.symbol.owner.fullName)
- var sizeBeforeInlining = m.code.blockCount
- var instrBeforeInlining = m.code.instructionCount
+ val sizeBeforeInlining = m.code.blockCount
+ val instrBeforeInlining = m.code.instructionCount
var retry = false
var count = 0
@@ -340,7 +342,7 @@ abstract class Inliners extends SubComponent {
inlineWithoutTFA(inputBlocks, callsites)
}
- /**
+ /*
* Inline straightforward callsites (those that can be inlined without a TFA).
*
* To perform inlining, all we need to know is listed as formal params in `analyzeInc()`:
@@ -361,7 +363,7 @@ abstract class Inliners extends SubComponent {
assert(ocm.method.isEffectivelyFinal && ocm.method.owner.isEffectivelyFinal)
if(analyzeInc(ocm, x, ocm.method.owner, -1, ocm.method)) {
inlineCount += 1
- break
+ break()
}
}
}
@@ -370,7 +372,7 @@ abstract class Inliners extends SubComponent {
inlineCount
}
- /**
+ /*
* Decides whether it's feasible and desirable to inline the body of the method given by `concreteMethod`
* at the program point given by `i` (a callsite). The boolean result indicates whether inlining was performed.
*
@@ -380,8 +382,8 @@ abstract class Inliners extends SubComponent {
val shouldWarn = hasInline(i.method)
def warnNoInline(reason: String): Boolean = {
- def msg = "Could not inline required method %s because %s.".format(i.method.originalName.decode, reason)
- if (settings.debug.value)
+ def msg = "Could not inline required method %s because %s.".format(i.method.unexpandedName.decode, reason)
+ if (settings.debug)
inlineLog("fail", i.method.fullName, reason)
if (shouldWarn)
warn(i.pos, msg)
@@ -477,9 +479,9 @@ abstract class Inliners extends SubComponent {
* As a whole, both `preInline()` invocations amount to priming the inlining process,
* so that the first TFA that is run afterwards is able to gain more information as compared to a cold-start.
*/
- val totalPreInlines = {
- val firstRound = preInline(true)
- if(firstRound == 0) 0 else (firstRound + preInline(false))
+ /*val totalPreInlines = */ { // Val name commented out to emphasize it is never used
+ val firstRound = preInline(isFirstRound = true)
+ if(firstRound == 0) 0 else (firstRound + preInline(isFirstRound = false))
}
staleOut.clear()
splicedBlocks.clear()
@@ -511,7 +513,7 @@ abstract class Inliners extends SubComponent {
for (cm <- cms; if tfa.remainingCALLs.isDefinedAt(cm)) {
val analysis.CallsiteInfo(_, receiver, stackLength, concreteMethod) = tfa.remainingCALLs(cm)
if (analyzeInc(cm, bb, receiver, stackLength, concreteMethod)) {
- break
+ break()
}
}
}
@@ -563,13 +565,12 @@ abstract class Inliners extends SubComponent {
while (retry && count < MAX_INLINE_RETRY)
for(inlFail <- tfa.warnIfInlineFails) {
- warn(inlFail.pos, "At the end of the day, could not inline @inline-marked method " + inlFail.method.originalName.decode)
+ warn(inlFail.pos, "At the end of the day, could not inline @inline-marked method " + inlFail.method.unexpandedName.decode)
}
- m.normalize
+ m.normalize()
if (sizeBeforeInlining > 0) {
val instrAfterInlining = m.code.instructionCount
- val prefix = if ((instrAfterInlining > 2 * instrBeforeInlining) && (instrAfterInlining > 200)) "!!" else ""
val inlinings = caller.inlinedCalls
if (inlinings > 0) {
val s1 = s"instructions $instrBeforeInlining -> $instrAfterInlining"
@@ -584,7 +585,7 @@ abstract class Inliners extends SubComponent {
private def isHigherOrderMethod(sym: Symbol) = (
sym.isMethod
- && beforeExplicitOuter(sym.info.paramTypes exists isFunctionType) // was "at erasurePhase.prev"
+ && enteringExplicitOuter(sym.info.paramTypes exists isFunctionType) // was "at erasurePhase.prev"
)
/** Should method 'sym' being called in 'receiver' be loaded from disk? */
@@ -601,7 +602,6 @@ abstract class Inliners extends SubComponent {
override def toString = m.toString
val sym = m.symbol
- val name = sym.name
def owner = sym.owner
def paramTypes = sym.info.paramTypes
def minimumStack = paramTypes.length + 1
@@ -617,13 +617,11 @@ abstract class Inliners extends SubComponent {
def length = blocks.length
def openBlocks = blocks filterNot (_.closed)
def instructions = m.code.instructions
- // def linearized = linearizer linearize m
def isSmall = (length <= SMALL_METHOD_SIZE) && blocks(0).length < 10
def isLarge = length > MAX_INLINE_SIZE
def isRecursive = m.recursive
def hasHandlers = handlers.nonEmpty || m.bytecodeHasEHs
- def hasClosureParam = paramTypes exists (tp => isByNameParamType(tp) || isFunctionType(tp))
def isSynchronized = sym.hasFlag(Flags.SYNCHRONIZED)
def hasNonFinalizerHandler = handlers exists {
@@ -731,7 +729,6 @@ abstract class Inliners extends SubComponent {
*/
sealed abstract class InlineSafetyInfo {
def isSafe = false
- def isUnsafe = !isSafe
}
case object NeverSafeToInline extends InlineSafetyInfo
case object InlineableAtThisCaller extends InlineSafetyInfo { override def isSafe = true }
@@ -791,7 +788,7 @@ abstract class Inliners extends SubComponent {
val varsInScope = mutable.HashSet[Local]() ++= block.varsInScope
- /** Side effects varsInScope when it sees SCOPE_ENTERs. */
+ /* Side effects varsInScope when it sees SCOPE_ENTERs. */
def instrBeforeFilter(i: Instruction): Boolean = {
i match { case SCOPE_ENTER(l) => varsInScope += l ; case _ => () }
i ne instr
@@ -804,7 +801,7 @@ abstract class Inliners extends SubComponent {
// store the '$this' into the special local
val inlinedThis = newLocal("$inlThis", REFERENCE(ObjectClass))
- /** buffer for the returned value */
+ /* buffer for the returned value */
val retVal = inc.m.returnType match {
case UNIT => null
case x => newLocal("$retVal", x)
@@ -812,9 +809,9 @@ abstract class Inliners extends SubComponent {
val inlinedLocals = mutable.HashMap.empty[Local, Local]
- /** Add a new block in the current context. */
+ /* Add a new block in the current context. */
def newBlock() = {
- val b = caller.m.code.newBlock
+ val b = caller.m.code.newBlock()
activeHandlers foreach (_ addCoveredBlock b)
if (retVal ne null) b.varsInScope += retVal
b.varsInScope += inlinedThis
@@ -829,7 +826,7 @@ abstract class Inliners extends SubComponent {
handler
}
- /** alfa-rename `l` in caller's context. */
+ /* alpha-rename `l` in caller's context. */
def dupLocal(l: Local): Local = {
val sym = caller.sym.newVariable(freshName(l.sym.name.toString), l.sym.pos)
// sym.setInfo(l.sym.tpe)
@@ -840,10 +837,10 @@ abstract class Inliners extends SubComponent {
val afterBlock = newBlock()
- /** Map from nw.init instructions to their matching NEW call */
+ /* Map from nw.init instructions to their matching NEW call */
val pending: mutable.Map[Instruction, NEW] = new mutable.HashMap
- /** Map an instruction from the callee to one suitable for the caller. */
+ /* Map an instruction from the callee to one suitable for the caller. */
def map(i: Instruction): Instruction = {
def assertLocal(l: Local) = {
assert(caller.locals contains l, "Could not find local '" + l + "' in locals, nor in inlinedLocals: " + inlinedLocals)
@@ -872,7 +869,7 @@ abstract class Inliners extends SubComponent {
r
case CALL_METHOD(meth, Static(true)) if meth.isClassConstructor =>
- CALL_METHOD(meth, Static(true))
+ CALL_METHOD(meth, Static(onInstance = true))
case _ => i.clone()
}
@@ -893,8 +890,8 @@ abstract class Inliners extends SubComponent {
}
// re-emit the instructions before the call
- block.open
- block.clear
+ block.open()
+ block.clear()
block emit instrBefore
// store the arguments into special locals
@@ -903,7 +900,7 @@ abstract class Inliners extends SubComponent {
// jump to the start block of the callee
blockEmit(JUMP(inlinedBlock(inc.m.startBlock)))
- block.close
+ block.close()
// duplicate the other blocks in the callee
val calleeLin = inc.m.linearizedBlocks()
@@ -926,11 +923,11 @@ abstract class Inliners extends SubComponent {
emitInlined(map(i))
info = if(hasRETURN) a.interpret(info, i) else null
}
- inlinedBlock(bb).close
+ inlinedBlock(bb).close()
}
afterBlock emit instrAfter
- afterBlock.close
+ afterBlock.close()
staleIn += afterBlock
splicedBlocks ++= (calleeLin map inlinedBlock)
@@ -938,7 +935,7 @@ abstract class Inliners extends SubComponent {
// add exception handlers of the callee
caller addHandlers (inc.handlers map translateExh)
assert(pending.isEmpty, "Pending NEW elements: " + pending)
- if (settings.debug.value) icodes.checkValid(caller.m)
+ if (settings.debug) icodes.checkValid(caller.m)
}
def isStampedForInlining(stackLength: Int): InlineSafetyInfo = {
@@ -973,7 +970,7 @@ abstract class Inliners extends SubComponent {
}
if(sameSymbols) { // TODO but this also amounts to recursive, ie should lead to adding to tfa.knownNever, right?
- tfa.knownUnsafe += inc.sym;
+ tfa.knownUnsafe += inc.sym
return DontInlineHere("sameSymbols (ie caller == callee)")
}
@@ -1032,7 +1029,6 @@ abstract class Inliners extends SubComponent {
case Public => true
}
private def sameSymbols = caller.sym == inc.sym
- private def sameOwner = caller.owner == inc.owner
/** Gives green light for inlining (which may still be vetoed later). Heuristics:
* - it's bad to make the caller larger (> SMALL_METHOD_SIZE) if it was small
@@ -1048,9 +1044,9 @@ abstract class Inliners extends SubComponent {
if (caller.isInClosure) score -= 2
else if (caller.inlinedCalls < 1) score -= 1 // only monadic methods can trigger the first inline
- if (inc.isSmall) score += 1;
+ if (inc.isSmall) score += 1
// if (inc.hasClosureParam) score += 2
- if (inc.isLarge) score -= 1;
+ if (inc.isLarge) score -= 1
if (caller.isSmall && isLargeSum) {
score -= 1
debuglog(s"inliner score decreased to $score because small caller $caller would become large")
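
The hunks above repeatedly turn a bare `break` into `break()`: `break` here is presumably the control-flow helper from scala.util.control.Breaks (the same mechanism the deleted RefinedBuildManager further down imports via `scala.util.control.Breaks._`), and the explicit parentheses mark it as a side-effecting call. A minimal, self-contained sketch of the pattern; the loop and predicate are invented for illustration:

    import scala.util.control.Breaks.{breakable, break}

    object BreakDemo {
      // Stop scanning at the first element that satisfies a predicate.
      // break() throws a control exception that the enclosing breakable
      // block catches, so the for-loop is abandoned immediately.
      def firstNegative(xs: List[Int]): Option[Int] = {
        var result: Option[Int] = None
        breakable {
          for (x <- xs)
            if (x < 0) { result = Some(x); break() }
        }
        result
      }

      def main(args: Array[String]): Unit =
        println(firstNegative(List(3, 1, -4, 1, -5)))   // prints Some(-4)
    }
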
diff --git a/src/compiler/scala/tools/nsc/dependencies/Changes.scala b/src/compiler/scala/tools/nsc/dependencies/Changes.scala
index 7f5f412a20..c341d33a62 100644
--- a/src/compiler/scala/tools/nsc/dependencies/Changes.scala
+++ b/src/compiler/scala/tools/nsc/dependencies/Changes.scala
@@ -52,7 +52,7 @@ abstract class Changes {
private val changedTypeParams = new mutable.HashSet[String]
private def sameParameterSymbolNames(sym1: Symbol, sym2: Symbol): Boolean =
- sameSymbol(sym1, sym2, true) || sym2.encodedName.startsWith(sym1.encodedName + nme.NAME_JOIN_STRING) // see #3140
+ sameSymbol(sym1, sym2, simple = true) || sym2.encodedName.startsWith(sym1.encodedName + nme.NAME_JOIN_STRING) // see #3140
private def sameSymbol(sym1: Symbol, sym2: Symbol, simple: Boolean = false): Boolean =
if (simple) sym1.encodedName == sym2.encodedName else sym1.fullName == sym2.fullName
private def sameFlags(sym1: Symbol, sym2: Symbol): Boolean =
@@ -61,12 +61,7 @@ abstract class Changes {
annotationsChecked.forall(a =>
(sym1.hasAnnotation(a) == sym2.hasAnnotation(a)))
- private def sameType(tp1: Type, tp2: Type)(implicit strict: Boolean) = {
- def typeOf(tp: Type): String = tp.toString + "[" + tp.getClass + "]"
- val res = sameType0(tp1, tp2)
- //if (!res) println("\t different types: " + typeOf(tp1) + " : " + typeOf(tp2))
- res
- }
+ private def sameType(tp1: Type, tp2: Type)(implicit strict: Boolean) = sameType0(tp1, tp2)
private def sameType0(tp1: Type, tp2: Type)(implicit strict: Boolean): Boolean = ((tp1, tp2) match {
/*case (ErrorType, _) => false
@@ -95,11 +90,11 @@ abstract class Changes {
} else
!sym1.isTypeParameter || !changedTypeParams.contains(sym1.fullName)
+ // @M! normalize reduces higher-kinded case to PolyType's
testSymbols && sameType(pre1, pre2) &&
(sym1.variance == sym2.variance) &&
((tp1.isHigherKinded && tp2.isHigherKinded && tp1.normalize =:= tp2.normalize) ||
sameTypes(args1, args2))
- // @M! normalize reduces higher-kinded case to PolyType's
case (RefinedType(parents1, ref1), RefinedType(parents2, ref2)) =>
def isSubScope(s1: Scope, s2: Scope): Boolean = s2.toList.forall {
@@ -126,7 +121,7 @@ abstract class Changes {
case (NullaryMethodType(res1), NullaryMethodType(res2)) =>
sameType(res1, res2)
case (ExistentialType(tparams1, res1), ExistentialType(tparams2, res2)) =>
- sameTypeParams(tparams1, tparams2)(false) && sameType(res1, res2)(false)
+ sameTypeParams(tparams1, tparams2)(strict = false) && sameType(res1, res2)(strict = false)
case (TypeBounds(lo1, hi1), TypeBounds(lo2, hi2)) =>
sameType(lo1, lo2) && sameType(hi1, hi2)
case (BoundedWildcardType(bounds), _) =>
@@ -170,17 +165,16 @@ abstract class Changes {
/** Return the list of changes between 'from' and 'toSym.info'.
*/
def changeSet(from: Type, toSym: Symbol): List[Change] = {
- implicit val defaultReason = "types"
implicit val defaultStrictTypeRefTest = true
val to = toSym.info
- changedTypeParams.clear
+ changedTypeParams.clear()
def omitSymbols(s: Symbol): Boolean = !s.hasFlag(LOCAL | LIFTED | PRIVATE | SYNTHETIC)
val cs = new mutable.ListBuffer[Change]
if ((from.parents zip to.parents) exists { case (t1, t2) => !sameType(t1, t2) })
cs += Changed(toEntity(toSym))(from.parents.zip(to.parents).toString)
- if (!sameTypeParams(from.typeParams, to.typeParams)(false))
+ if (!sameTypeParams(from.typeParams, to.typeParams)(strict = false))
cs += Changed(toEntity(toSym))(" tparams: " + from.typeParams.zip(to.typeParams))
// new members not yet visited
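
The Changes.scala hunks above convert positional Boolean arguments into named ones (`simple = true`, `strict = false`) while keeping the `(implicit strict: Boolean)` parameter that threads a default comparison mode through the recursive `sameType` calls. A small stand-alone sketch of that combination, with a toy comparison in place of the real type test:

    object ImplicitFlagDemo {
      // "Strict" by default via an implicit value in scope, relaxed per call
      // by supplying the implicit argument explicitly and by name.
      def sameName(a: String, b: String)(implicit strict: Boolean): Boolean =
        if (strict) a == b else a.equalsIgnoreCase(b)

      def main(args: Array[String]): Unit = {
        implicit val defaultStrict: Boolean = true      // picked up implicitly
        println(sameName("Foo", "foo"))                 // false: strict comparison
        println(sameName("Foo", "foo")(strict = false)) // true: explicit override
      }
    }
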
diff --git a/src/compiler/scala/tools/nsc/dependencies/DependencyAnalysis.scala b/src/compiler/scala/tools/nsc/dependencies/DependencyAnalysis.scala
deleted file mode 100644
index cdde768274..0000000000
--- a/src/compiler/scala/tools/nsc/dependencies/DependencyAnalysis.scala
+++ /dev/null
@@ -1,254 +0,0 @@
-package scala.tools.nsc
-package dependencies
-
-import io.Path
-import scala.collection._
-import symtab.Flags
-import scala.tools.nsc.io.AbstractFile
-import scala.reflect.internal.util.SourceFile
-
-trait DependencyAnalysis extends SubComponent with Files {
- import global._
-
- val phaseName = "dependencyAnalysis"
-
- def off = settings.make.isDefault || settings.make.value == "all"
- def shouldCheckClasspath = settings.make.value != "transitivenocp"
-
- def newPhase(prev: Phase) = new AnalysisPhase(prev)
-
- private def depPath = Path(settings.dependenciesFile.value)
- def loadDependencyAnalysis(): Boolean = (
- depPath.path != "none" && depPath.isFile && loadFrom(
- AbstractFile.getFile(depPath),
- path => AbstractFile.getFile(depPath.parent resolve Path(path))
- )
- )
- def saveDependencyAnalysis(): Unit = {
- if (!depPath.exists)
- dependenciesFile = AbstractFile.getFile(depPath.createFile())
-
- /** The directory where file lookup should start */
- val rootPath = depPath.parent.normalize
- saveDependencies(
- file => rootPath.relativize(Path(file.file).normalize).path
- )
- }
-
- lazy val maxDepth = settings.make.value match {
- case "changed" => 0
- case "immediate" => 1
- case _ => Int.MaxValue
- }
-
- // todo: order insensible checking and, also checking timestamp?
- def validateClasspath(cp1: String, cp2: String): Boolean = cp1 == cp2
-
- def nameToFile(src: AbstractFile, name: String) =
- settings.outputDirs.outputDirFor(src)
- .lookupPathUnchecked(name.toString.replace(".", java.io.File.separator) + ".class", false)
-
- private var depFile: Option[AbstractFile] = None
-
- def dependenciesFile_=(file: AbstractFile) {
- assert(file ne null)
- depFile = Some(file)
- }
-
- def dependenciesFile: Option[AbstractFile] = depFile
-
- def classpath = settings.classpath.value
- def newDeps = new FileDependencies(classpath)
-
- var dependencies = newDeps
-
- def managedFiles = dependencies.dependencies.keySet
-
- /** Top level definitions per source file. */
- val definitions: mutable.Map[AbstractFile, List[Symbol]] =
- new mutable.HashMap[AbstractFile, List[Symbol]] {
- override def default(f: AbstractFile) = Nil
- }
-
- /** External references used by source file. */
- val references: mutable.Map[AbstractFile, immutable.Set[String]] =
- new mutable.HashMap[AbstractFile, immutable.Set[String]] {
- override def default(f: AbstractFile) = immutable.Set()
- }
-
- /** External references for inherited members used in the source file */
- val inherited: mutable.Map[AbstractFile, immutable.Set[Inherited]] =
- new mutable.HashMap[AbstractFile, immutable.Set[Inherited]] {
- override def default(f: AbstractFile) = immutable.Set()
- }
-
- /** Write dependencies to the current file. */
- def saveDependencies(fromFile: AbstractFile => String) =
- if(dependenciesFile.isDefined)
- dependencies.writeTo(dependenciesFile.get, fromFile)
-
- /** Load dependencies from the given file and save the file reference for
- * future saves.
- */
- def loadFrom(f: AbstractFile, toFile: String => AbstractFile): Boolean = {
- dependenciesFile = f
- FileDependencies.readFrom(f, toFile) match {
- case Some(fd) =>
- val success = if (shouldCheckClasspath) validateClasspath(fd.classpath, classpath) else true
- dependencies = if (success) fd else {
- if (settings.debug.value)
- println("Classpath has changed. Nuking dependencies")
- newDeps
- }
-
- success
- case None => false
- }
- }
-
- def calculateFiles(files: List[SourceFile]): List[SourceFile] =
- if (off) files
- else if (dependencies.isEmpty) {
- println("No known dependencies. Compiling " +
- (if (settings.debug.value) files.mkString(", ") else "everything"))
- files
- } else {
- val (direct, indirect) = dependencies.invalidatedFiles(maxDepth);
- val filtered = files.filter(x => {
- val f = x.file.absolute
- direct(f) || indirect(f) || !dependencies.containsFile(f);
- })
- filtered match {
- case Nil => println("No changes to recompile");
- case x => println("Recompiling " + (
- if(settings.debug.value) x.mkString(", ") else x.length + " files")
- )
- }
- filtered
- }
-
- case class Inherited(qualifier: String, member: Name)
-
- class AnalysisPhase(prev: Phase) extends StdPhase(prev) {
-
- override def cancelled(unit: CompilationUnit) =
- super.cancelled(unit) && !unit.isJava
-
- def apply(unit : global.CompilationUnit) {
- val f = unit.source.file.file
- // When we're passed strings by the interpreter
- // they have no source file. We simply ignore this case
- // as irrelevant to dependency analysis.
- if (f != null){
- val source: AbstractFile = unit.source.file;
- for (d <- unit.icode){
- val name = d.toString
- d.symbol match {
- case s : ModuleClassSymbol =>
- val isTopLevelModule = afterPickler { !s.isImplClass && !s.isNestedClass }
-
- if (isTopLevelModule && (s.companionModule != NoSymbol)) {
- dependencies.emits(source, nameToFile(unit.source.file, name))
- }
- dependencies.emits(source, nameToFile(unit.source.file, name + "$"))
- case _ =>
- dependencies.emits(source, nameToFile(unit.source.file, name))
- }
- }
-
- dependencies.reset(source)
- for (d <- unit.depends; if (d.sourceFile != null)){
- dependencies.depends(source, d.sourceFile)
- }
- }
-
- // find all external references in this compilation unit
- val file = unit.source.file
- references += file -> immutable.Set.empty[String]
- inherited += file -> immutable.Set.empty[Inherited]
-
- val buf = new mutable.ListBuffer[Symbol]
-
- (new Traverser {
- override def traverse(tree: Tree) {
- if ((tree.symbol ne null)
- && (tree.symbol != NoSymbol)
- && (!tree.symbol.isPackage)
- && (!tree.symbol.isJavaDefined)
- && (!tree.symbol.tpe.isError)
- && ((tree.symbol.sourceFile eq null)
- || (tree.symbol.sourceFile.path != file.path))
- && (!tree.symbol.isClassConstructor)) {
- updateReferences(tree.symbol.fullName)
- // was "at uncurryPhase.prev", which is actually non-deterministic
- // because the continuations plugin may or may not supply uncurry's
- // immediately preceding phase.
- beforeRefchecks(checkType(tree.symbol.tpe))
- }
-
- tree match {
- case cdef: ClassDef if !cdef.symbol.hasPackageFlag &&
- !cdef.symbol.isAnonymousFunction =>
- if (cdef.symbol != NoSymbol) buf += cdef.symbol
- // was "at erasurePhase.prev"
- beforeExplicitOuter {
- for (s <- cdef.symbol.info.decls)
- s match {
- case ts: TypeSymbol if !ts.isClass =>
- checkType(s.tpe)
- case _ =>
- }
- }
- super.traverse(tree)
-
- case ddef: DefDef =>
- // was "at typer.prev"
- beforeTyper { checkType(ddef.symbol.tpe) }
- super.traverse(tree)
- case a @ Select(q, n) if ((a.symbol != NoSymbol) && (q.symbol != null)) => // #2556
- if (!a.symbol.isConstructor &&
- !a.symbol.owner.isPackageClass &&
- !isSameType(q.tpe, a.symbol.owner.tpe))
- inherited += file ->
- (inherited(file) + Inherited(q.symbol.tpe.resultType.safeToString, n))
- super.traverse(tree)
- case _ =>
- super.traverse(tree)
- }
- }
-
- def checkType(tpe: Type): Unit =
- tpe match {
- case t: MethodType =>
- checkType(t.resultType)
- for (s <- t.params) checkType(s.tpe)
-
- case t: TypeRef =>
- if (t.sym.isAliasType) {
- updateReferences(t.typeSymbolDirect.fullName)
- checkType(t.typeSymbolDirect.info)
- }
- updateReferences(t.typeSymbol.fullName)
- for (tp <- t.args) checkType(tp)
-
- case t: PolyType =>
- checkType(t.resultType)
- updateReferences(t.typeSymbol.fullName)
-
- case t: NullaryMethodType =>
- checkType(t.resultType)
- updateReferences(t.typeSymbol.fullName)
-
- case t =>
- updateReferences(t.typeSymbol.fullName)
- }
-
- def updateReferences(s: String): Unit =
- references += file -> (references(file) + s)
-
- }).apply(unit.body)
-
- definitions(unit.source.file) = buf.toList
- }
- }
-}
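
The deleted calculateFiles above keeps a source file in the compilation set when it was invalidated directly, invalidated transitively, or is not tracked by the dependency store at all. A stripped-down sketch of that selection rule, with plain Strings standing in for SourceFile/AbstractFile:

    object CalculateFilesDemo {
      def selectForRecompile(candidates: List[String],
                             direct: Set[String],
                             indirect: Set[String],
                             tracked: Set[String]): List[String] =
        // keep a file if it changed, something it depends on changed,
        // or we have no dependency information for it yet
        candidates.filter(f => direct(f) || indirect(f) || !tracked(f))

      def main(args: Array[String]): Unit = {
        val picked = selectForRecompile(
          candidates = List("A.scala", "B.scala", "C.scala"),
          direct     = Set("A.scala"),
          indirect   = Set("B.scala"),
          tracked    = Set("A.scala", "B.scala", "C.scala"))
        println(picked)   // List(A.scala, B.scala)
      }
    }
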
diff --git a/src/compiler/scala/tools/nsc/dependencies/Files.scala b/src/compiler/scala/tools/nsc/dependencies/Files.scala
deleted file mode 100644
index 194351a13f..0000000000
--- a/src/compiler/scala/tools/nsc/dependencies/Files.scala
+++ /dev/null
@@ -1,177 +0,0 @@
-package scala.tools.nsc
-package dependencies
-
-import java.io.{InputStream, OutputStream, PrintStream, InputStreamReader, BufferedReader}
-import io.{AbstractFile, PlainFile, VirtualFile}
-
-import scala.collection._
-
-
-trait Files { self : SubComponent =>
-
- class FileDependencies(val classpath: String) {
- import FileDependencies._
-
- class Tracker extends mutable.OpenHashMap[AbstractFile, mutable.Set[AbstractFile]] {
- override def default(key: AbstractFile) = {
- this(key) = new mutable.HashSet[AbstractFile]
- this(key)
- }
- }
-
- val dependencies = new Tracker
- val targets = new Tracker
-
- def isEmpty = dependencies.isEmpty && targets.isEmpty
-
- def emits(source: AbstractFile, result: AbstractFile) =
- targets(source) += result
- def depends(from: AbstractFile, on: AbstractFile) =
- dependencies(from) += on
-
- def reset(file: AbstractFile) = dependencies -= file
-
- def cleanEmpty = {
- dependencies foreach {case (_, value) =>
- value retain (x => x.exists && (x ne removedFile))}
- dependencies retain ((key, value) => key.exists && !value.isEmpty)
- targets foreach {case (_, value) => value retain (_.exists)}
- targets retain ((key, value) => key.exists && !value.isEmpty)
- }
-
- def containsFile(f: AbstractFile) = targets.contains(f.absolute)
-
- def invalidatedFiles(maxDepth: Int) = {
- val direct = new mutable.HashSet[AbstractFile]
-
- for ((file, products) <- targets) {
- // This looks a bit odd. It may seem like one should invalidate a file
- // if *any* of its dependencies are older than it. The forall is there
- // to deal with the fact that a) Some results might have been orphaned
- // and b) Some files might not need changing.
- direct(file) ||= products.forall(d => d.lastModified < file.lastModified)
- }
-
- val indirect = dependentFiles(maxDepth, direct)
-
- for ((source, targets) <- targets
- if direct(source) || indirect(source) || (source eq removedFile)) {
- targets foreach (_.delete)
- targets -= source
- }
-
- (direct, indirect)
- }
-
- /** Return the set of files that depend on the given changed files.
- * It computes the transitive closure up to the given depth.
- */
- def dependentFiles(depth: Int, changed: Set[AbstractFile]): Set[AbstractFile] = {
- val indirect = new mutable.HashSet[AbstractFile]
- val newInvalidations = new mutable.HashSet[AbstractFile]
-
- def invalid(file: AbstractFile) =
- indirect(file) || changed(file) || (file eq removedFile)
-
- def go(i: Int) : Unit = if(i > 0) {
- newInvalidations.clear
- for((target, depends) <- dependencies if !invalid(target);
- d <- depends)
- newInvalidations(target) ||= invalid(d)
-
- indirect ++= newInvalidations
- if (!newInvalidations.isEmpty) go(i - 1)
- }
-
- go(depth)
-
- indirect --= changed
- }
-
- def writeTo(file: AbstractFile, fromFile: AbstractFile => String): Unit =
- writeToFile(file)(out => writeTo(new PrintStream(out), fromFile))
-
- def writeTo(print: PrintStream, fromFile: AbstractFile => String): Unit = {
- def emit(tracker: Tracker) =
- for ((f, ds) <- tracker; d <- ds) print.println(fromFile(f) + arrow + fromFile(d))
-
- cleanEmpty
- print.println(classpath)
- print.println(separator)
- emit(dependencies)
- print.println(separator)
- emit(targets)
- }
- }
-
- object FileDependencies {
- private val separator:String = "-------"
- private val arrow = " -> "
- private val removedFile = new VirtualFile("removed")
-
- private def validLine(l: String) = (l != null) && (l != separator)
-
- def readFrom(file: AbstractFile, toFile: String => AbstractFile): Option[FileDependencies] =
- readFromFile(file) { in =>
- val reader = new BufferedReader(new InputStreamReader(in))
- val it = new FileDependencies(reader.readLine)
-
- def readLines(valid: Boolean)(f: (AbstractFile, AbstractFile) => Unit): Boolean = {
- var continue = valid
- var line: String = null
- while (continue && {line = reader.readLine; validLine(line)}) {
- line.split(arrow) match {
- case Array(from, on) => f(toFile(from), toFile(on))
- case _ =>
- global.inform("Parse error: Unrecognised string " + line)
- continue = false
- }
- }
- continue
- }
-
- reader.readLine
-
- val dResult = readLines(true)(
- (_, _) match {
- case (null, _) => // fromFile is removed, it's ok
- case (fromFile, null) =>
- // onFile is removed, should recompile fromFile
- it.depends(fromFile, removedFile)
- case (fromFile, onFile) => it.depends(fromFile, onFile)
- })
-
- readLines(dResult)(
- (_, _) match {
- case (null, null) =>
- // source and target are all removed, it's ok
- case (null, targetFile) =>
- // source is removed, should remove relative target later
- it.emits(removedFile, targetFile)
- case (_, null) =>
- // it may have been cleaned outside, or removed during the last phase
- case (sourceFile, targetFile) => it.emits(sourceFile, targetFile)
- })
-
- Some(it)
- }
- }
-
- def writeToFile[T](file: AbstractFile)(f: OutputStream => T) : T = {
- val out = file.bufferedOutput
- try {
- f(out)
- } finally {
- out.close
- }
- }
-
- def readFromFile[T](file: AbstractFile)(f: InputStream => T) : T = {
- val in = file.input
- try{
- f(in)
- } finally {
- in.close
- }
- }
-}
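
FileDependencies.dependentFiles above computes a bounded transitive closure: starting from the changed files, each round marks every tracked file that depends on something already invalid, and it stops after `depth` rounds or when a round adds nothing. A compact sketch of the same computation over an immutable map, with Strings in place of AbstractFile:

    object DependentFilesDemo {
      // dependencies(target) lists what target depends on; a target becomes
      // invalid as soon as one of its dependencies is invalid.
      def dependentFiles(depth: Int,
                         changed: Set[String],
                         dependencies: Map[String, Set[String]]): Set[String] = {
        var indirect = Set.empty[String]
        def invalid(f: String) = indirect(f) || changed(f)

        def go(i: Int): Unit = if (i > 0) {
          val newInvalidations =
            for ((target, deps) <- dependencies.toSet if !invalid(target) && deps.exists(invalid))
              yield target
          if (newInvalidations.nonEmpty) { indirect ++= newInvalidations; go(i - 1) }
        }

        go(depth)
        indirect -- changed
      }

      def main(args: Array[String]): Unit = {
        val deps = Map("B" -> Set("A"), "C" -> Set("B"), "D" -> Set("X"))
        println(dependentFiles(Int.MaxValue, Set("A"), deps))   // Set(B, C)
      }
    }
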
diff --git a/src/compiler/scala/tools/nsc/interactive/BuildManager.scala b/src/compiler/scala/tools/nsc/interactive/BuildManager.scala
deleted file mode 100644
index 3e7ac573e9..0000000000
--- a/src/compiler/scala/tools/nsc/interactive/BuildManager.scala
+++ /dev/null
@@ -1,93 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2009-2013 Typesafe/Scala Solutions and LAMP/EPFL
- * @author Iulian Dragos
- * @author Hubert Plocinicak
- */
-package scala.tools.nsc
-package interactive
-
-import scala.collection._
-
-import scala.tools.nsc.reporters.{Reporter, ConsoleReporter}
-import scala.reflect.internal.util.FakePos
-
-import dependencies._
-import io.AbstractFile
-import scala.language.implicitConversions
-
-trait BuildManager {
-
- /** Add the given source files to the managed build process. */
- def addSourceFiles(files: Set[AbstractFile])
-
- /** Remove the given files from the managed build process. */
- def removeFiles(files: Set[AbstractFile])
-
- /** The given files have been modified by the user. Recompile
- * them and their dependent files.
- */
- def update(added: Set[AbstractFile], removed: Set[AbstractFile])
-
- /** Notification that the supplied set of files is being built */
- def buildingFiles(included: Set[AbstractFile]) {}
-
- /** Load saved dependency information. */
- def loadFrom(file: AbstractFile, toFile: String => AbstractFile) : Boolean
-
- /** Save dependency information to `file`. */
- def saveTo(file: AbstractFile, fromFile: AbstractFile => String)
-
- def compiler: scala.tools.nsc.Global
-
- /** Delete classfiles derived from the supplied set of sources */
- def deleteClassfiles(sources : Set[AbstractFile]) {
- val targets = compiler.dependencyAnalysis.dependencies.targets
- for(source <- sources; cf <- targets(source))
- cf.delete
- }
-}
-
-
-/** Simple driver for testing the build manager. It presents
- * the user with a 'resident compiler' prompt. Each line is
- * interpreted as a set of files that have changed. The builder
- * then derives the dependent files and recompiles them.
- */
-object BuildManagerTest extends EvalLoop {
-
- def prompt = "builder > "
-
- private def buildError(msg: String) {
- println(msg + "\n scalac -help gives more information")
- }
-
- def main(args: Array[String]) {
- implicit def filesToSet(fs: List[String]): Set[AbstractFile] = {
- def partition(s: String, r: Tuple2[List[AbstractFile], List[String]])= {
- val v = AbstractFile.getFile(s)
- if (v == null) (r._1, s::r._2) else (v::r._1, r._2)
- }
- val result = fs.foldRight((List[AbstractFile](), List[String]()))(partition)
- if (!result._2.isEmpty)
- Console.err.println("No such file(s): " + result._2.mkString(","))
- Set.empty ++ result._1
- }
-
- val settings = new Settings(buildError)
- settings.Ybuildmanagerdebug.value = true
- val command = new CompilerCommand(args.toList, settings)
-// settings.make.value = "off"
-// val buildManager: BuildManager = new SimpleBuildManager(settings)
- val buildManager: BuildManager = new RefinedBuildManager(settings)
-
- buildManager.addSourceFiles(command.files)
-
- // enter resident mode
- loop { line =>
- val args = line.split(' ').toList
- val command = new CompilerCommand(args, settings)
- buildManager.update(command.files, Set.empty)
- }
-
- }
-}
diff --git a/src/compiler/scala/tools/nsc/interactive/RangePositions.scala b/src/compiler/scala/tools/nsc/interactive/RangePositions.scala
deleted file mode 100644
index b95f1fa7ca..0000000000
--- a/src/compiler/scala/tools/nsc/interactive/RangePositions.scala
+++ /dev/null
@@ -1,285 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2009-2013 Typesafe/Scala Solutions and LAMP/EPFL
- * @author Martin Odersky
- */
-package scala.tools.nsc
-package interactive
-
-import ast.Trees
-import ast.Positions
-import scala.reflect.internal.util.{SourceFile, Position, RangePosition, NoPosition}
-import scala.tools.nsc.util.WorkScheduler
-import scala.collection.mutable.ListBuffer
-
-/** Handling range positions
- * atPos, the main method in this trait, will add positions to a tree,
- * and will ensure the following properties:
- *
- * 1. All nodes between the root of the tree and nodes that already have positions
- * will be assigned positions.
- * 2. No node which already has a position will be assigned a different range; however
- * a RangePosition might become a TransparentPosition.
- * 3. The position of each assigned node includes the positions of each of its children.
- * 4. The positions of all solid descendants of children of an assigned node
- * are mutually non-overlapping.
- *
- * Here, the solid descendant of a node are:
- *
- * If the node has a TransparentPosition, the solid descendants of all its children
- * Otherwise, the singleton consisting of the node itself.
- */
-trait RangePositions extends Trees with Positions {
-self: scala.tools.nsc.Global =>
-
- case class Range(pos: Position, tree: Tree) {
- def isFree = tree == EmptyTree
- }
-
- override def rangePos(source: SourceFile, start: Int, point: Int, end: Int) =
- new RangePosition(source, start, point, end)
-
- /** A position that wraps a set of trees.
- * The point of the wrapping position is the point of the default position.
- * If some of the trees are ranges, returns a range position enclosing all ranges
- * Otherwise returns default position that is either focused or not.
- */
- override def wrappingPos(default: Position, trees: List[Tree], focus: Boolean): Position = {
- val ranged = trees filter (_.pos.isRange)
- if (ranged.isEmpty) if (focus) default.focus else default
- else new RangePosition(default.source, (ranged map (_.pos.start)).min, default.point, (ranged map (_.pos.end)).max)
- }
-
- /** A position that wraps a non-empty set of trees.
- * The point of the wrapping position is the point of the first trees' position.
- * If some of the trees are ranges, returns a range position enclosing all ranges
- * Otherwise returns first tree's position.
- */
- override def wrappingPos(trees: List[Tree]): Position = {
- val headpos = trees.head.pos
- if (headpos.isDefined) wrappingPos(headpos, trees) else headpos
- }
-
- // -------------- ensuring no overlaps -------------------------------
-
- /** Ensure that given tree has no positions that overlap with
- * any of the positions of `others`. This is done by
- * shortening the range, assigning TransparentPositions
- * to some of the nodes in `tree` or focusing on the position.
- */
- override def ensureNonOverlapping(tree: Tree, others: List[Tree], focus: Boolean) {
- def isOverlapping(pos: Position) =
- pos.isRange && (others exists (pos overlaps _.pos))
- if (isOverlapping(tree.pos)) {
- val children = tree.children
- children foreach (ensureNonOverlapping(_, others, focus))
- if (tree.pos.isOpaqueRange) {
- val wpos = wrappingPos(tree.pos, children, focus)
- tree setPos (if (isOverlapping(wpos)) tree.pos.makeTransparent else wpos)
- }
- }
- }
-
- def solidDescendants(tree: Tree): List[Tree] =
- if (tree.pos.isTransparent) tree.children flatMap solidDescendants
- else List(tree)
-
- /** A free range from `lo` to `hi` */
- private def free(lo: Int, hi: Int): Range =
- Range(new RangePosition(null, lo, lo, hi), EmptyTree)
-
- /** The maximal free range */
- private lazy val maxFree: Range = free(0, Int.MaxValue)
-
- /** A singleton list of a non-empty range from `lo` to `hi`, or else the empty List */
- private def maybeFree(lo: Int, hi: Int) =
- if (lo < hi) List(free(lo, hi))
- else List()
-
- /** Insert `pos` into ranges `rs` if possible;
- * otherwise add conflicting trees to `conflicting`.
- */
- private def insert(rs: List[Range], t: Tree, conflicting: ListBuffer[Tree]): List[Range] = rs match {
- case List() =>
- assert(conflicting.nonEmpty)
- rs
- case r :: rs1 =>
- assert(!t.pos.isTransparent)
- if (r.isFree && (r.pos includes t.pos)) {
-// println("subdividing "+r+"/"+t.pos)
- maybeFree(t.pos.end, r.pos.end) ::: List(Range(t.pos, t)) ::: maybeFree(r.pos.start, t.pos.start) ::: rs1
- } else {
- if (!r.isFree && (r.pos overlaps t.pos)) conflicting += r.tree
- r :: insert(rs1, t, conflicting)
- }
- }
-
- /** Replace elem `t` of `ts` by `replacement` list. */
- private def replace(ts: List[Tree], t: Tree, replacement: List[Tree]): List[Tree] =
- if (ts.head == t) replacement ::: ts.tail
- else ts.head :: replace(ts.tail, t, replacement)
-
- /** Does given list of trees have mutually non-overlapping positions?
- * pre: None of the trees is transparent
- */
- def findOverlapping(cts: List[Tree]): List[(Tree, Tree)] = {
- var ranges = List(maxFree)
- for (ct <- cts) {
- if (ct.pos.isOpaqueRange) {
- val conflicting = new ListBuffer[Tree]
- ranges = insert(ranges, ct, conflicting)
- if (conflicting.nonEmpty) return conflicting.toList map (t => (t, ct))
- }
- }
- List()
- }
-
- // -------------- setting positions -------------------------------
-
- /** Set position of all children of a node
- * @param pos A target position.
- * Uses the point of the position as the point of all positions it assigns.
- * Uses the start of this position as an Offset position for unpositioned trees
- * without children.
- * @param trees The children to position. All children must be positionable.
- */
- private def setChildrenPos(pos: Position, trees: List[Tree]): Unit = try {
- for (tree <- trees) {
- if (!tree.isEmpty && tree.pos == NoPosition) {
- val children = tree.children
- if (children.isEmpty) {
- tree setPos pos.focus
- } else {
- setChildrenPos(pos, children)
- tree setPos wrappingPos(pos, children)
- }
- }
- }
- } catch {
- case ex: Exception =>
- println("error while set children pos "+pos+" of "+trees)
- throw ex
- }
-
- /** Position a tree.
- * This means: Set position of a node and position all its unpositioned children.
- */
- override def atPos[T <: Tree](pos: Position)(tree: T): T = {
- if (pos.isOpaqueRange) {
- if (!tree.isEmpty && tree.pos == NoPosition) {
- tree.setPos(pos)
- val children = tree.children
- if (children.nonEmpty) {
- if (children.tail.isEmpty) atPos(pos)(children.head)
- else setChildrenPos(pos, children)
- }
- }
- tree
- } else {
- super.atPos(pos)(tree)
- }
- }
-
- // ---------------- Validating positions ----------------------------------
-
- override def validatePositions(tree: Tree) {
- def reportTree(prefix : String, tree : Tree) {
- val source = if (tree.pos.isDefined) tree.pos.source else ""
- inform("== "+prefix+" tree ["+tree.id+"] of type "+tree.productPrefix+" at "+tree.pos.show+source)
- inform("")
- inform(treeStatus(tree))
- inform("")
- }
-
- def positionError(msg: String)(body : => Unit) {
- inform("======= Position error\n" + msg)
- body
- inform("\nWhile validating #" + tree.id)
- inform(treeStatus(tree))
- inform("\nChildren:")
- tree.children map (t => " " + treeStatus(t, tree)) foreach inform
- inform("=======")
- throw new ValidateException(msg)
- }
-
- def validate(tree: Tree, encltree: Tree): Unit = {
-
- if (!tree.isEmpty) {
- if (settings.Yposdebug.value && (settings.verbose.value || settings.Yrangepos.value))
- println("[%10s] %s".format("validate", treeStatus(tree, encltree)))
-
- if (!tree.pos.isDefined)
- positionError("Unpositioned tree #"+tree.id) {
- inform("%15s %s".format("unpositioned", treeStatus(tree, encltree)))
- inform("%15s %s".format("enclosing", treeStatus(encltree)))
- encltree.children foreach (t => inform("%15s %s".format("sibling", treeStatus(t, encltree))))
- }
- if (tree.pos.isRange) {
- if (!encltree.pos.isRange)
- positionError("Synthetic tree ["+encltree.id+"] contains nonsynthetic tree ["+tree.id+"]") {
- reportTree("Enclosing", encltree)
- reportTree("Enclosed", tree)
- }
- if (!(encltree.pos includes tree.pos))
- positionError("Enclosing tree ["+encltree.id+"] does not include tree ["+tree.id+"]") {
- reportTree("Enclosing", encltree)
- reportTree("Enclosed", tree)
- }
-
- findOverlapping(tree.children flatMap solidDescendants) match {
- case List() => ;
- case xs => {
- positionError("Overlapping trees "+xs.map { case (x, y) => (x.id, y.id) }.mkString("", ", ", "")) {
- reportTree("Ancestor", tree)
- for((x, y) <- xs) {
- reportTree("First overlapping", x)
- reportTree("Second overlapping", y)
- }
- }
- }
- }
- }
- for (ct <- tree.children flatMap solidDescendants) validate(ct, tree)
- }
- }
-
- if (phase.id <= currentRun.typerPhase.id)
- validate(tree, tree)
- }
-
- class ValidateException(msg : String) extends Exception(msg)
-
- // ---------------- Locating trees ----------------------------------
-
- /** A locator for trees with given positions.
- * Given a position `pos`, locator.apply returns
- * the smallest tree that encloses `pos`.
- */
- class Locator(pos: Position) extends Traverser {
- var last: Tree = _
- def locateIn(root: Tree): Tree = {
- this.last = EmptyTree
- traverse(root)
- this.last
- }
- protected def isEligible(t: Tree) = !t.pos.isTransparent
- override def traverse(t: Tree) {
- t match {
- case tt : TypeTree if tt.original != null && (tt.pos includes tt.original.pos) =>
- traverse(tt.original)
- case _ =>
- if (t.pos includes pos) {
- if (isEligible(t)) last = t
- super.traverse(t)
- } else t match {
- case mdef: MemberDef =>
- traverseTrees(mdef.mods.annotations)
- case _ =>
- }
- }
- }
- }
-
- class TypedLocator(pos: Position) extends Locator(pos) {
- override protected def isEligible(t: Tree) = super.isEligible(t) && t.tpe != null
- }
-}
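
Most of the deleted RangePositions machinery reduces to interval arithmetic on (start, end) offsets: wrappingPos takes the minimum start and maximum end of the children that actually carry ranges, and falls back to the default position otherwise. A tiny sketch of that core calculation (points, focusing and transparency are deliberately left out):

    object WrappingPosDemo {
      type Range = (Int, Int)   // (start, end) offsets within a source file

      // Smallest range enclosing all child ranges, or the default when none exist.
      def wrapping(children: List[Range], default: Range): Range =
        if (children.isEmpty) default
        else (children.map(_._1).min, children.map(_._2).max)

      def main(args: Array[String]): Unit = {
        println(wrapping(List((4, 10), (12, 20), (2, 3)), default = (0, 0)))   // (2,20)
        println(wrapping(Nil, default = (5, 5)))                               // (5,5)
      }
    }
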
diff --git a/src/compiler/scala/tools/nsc/interactive/RefinedBuildManager.scala b/src/compiler/scala/tools/nsc/interactive/RefinedBuildManager.scala
deleted file mode 100644
index b2ef45a7d8..0000000000
--- a/src/compiler/scala/tools/nsc/interactive/RefinedBuildManager.scala
+++ /dev/null
@@ -1,355 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2009-2013 Typesafe/Scala Solutions and LAMP/EPFL
- * @author Iulian Dragos
- * @author Hubert Plocinicak
- */
-package scala.tools.nsc
-package interactive
-
-import scala.collection._
-import scala.tools.nsc.reporters.{Reporter, ConsoleReporter}
-import scala.util.control.Breaks._
-import scala.tools.nsc.symtab.Flags
-
-import dependencies._
-import scala.reflect.internal.util.FakePos
-import util.ClassPath
-import io.AbstractFile
-import scala.tools.util.PathResolver
-
-/** A more refined build manager, based on change sets. For each
- * updated source file, it computes the set of changes to its
- * definitions, then checks all dependent units to see if the
- * changes require a compilation. It repeats this process until
- * a fixpoint is reached.
- */
-@deprecated("Use sbt incremental compilation mechanism", "2.10.0")
-class RefinedBuildManager(val settings: Settings) extends Changes with BuildManager {
-
- class BuilderGlobal(settings: Settings, reporter : Reporter) extends scala.tools.nsc.Global(settings, reporter) {
-
- def this(settings: Settings) =
- this(settings, new ConsoleReporter(settings))
-
- override def computeInternalPhases() {
- super.computeInternalPhases
- phasesSet += dependencyAnalysis
- }
- lazy val _classpath = new NoSourcePathPathResolver(settings).result
- override def classPath = _classpath.asInstanceOf[ClassPath[platform.BinaryRepr]]
- // See discussion in JavaPlatForm for why we need a cast here.
-
- def newRun() = new Run()
- }
-
- class NoSourcePathPathResolver(settings: Settings) extends PathResolver(settings) {
- override def containers = Calculated.basis.dropRight(1).flatten.distinct
- }
-
- protected def newCompiler(settings: Settings) = new BuilderGlobal(settings)
-
- val compiler = newCompiler(settings)
- import compiler.{ Symbol, Type, beforeErasure }
- import compiler.dependencyAnalysis.Inherited
-
- private case class SymWithHistory(sym: Symbol, befErasure: Type)
-
- /** Managed source files. */
- private val sources: mutable.Set[AbstractFile] = new mutable.HashSet[AbstractFile]
-
- private val definitions: mutable.Map[AbstractFile, List[SymWithHistory]] =
- new mutable.HashMap[AbstractFile, List[SymWithHistory]] {
- override def default(key: AbstractFile) = Nil
- }
-
- /** External references used by source file. */
- private var references: mutable.Map[AbstractFile, immutable.Set[String]] = _
-
- /** External references for inherited members */
- private var inherited: mutable.Map[AbstractFile, immutable.Set[Inherited]] = _
-
- /** Reverse of definitions, used for caching */
- private var classes: mutable.Map[String, AbstractFile] =
- new mutable.HashMap[String, AbstractFile] {
- override def default(key: String) = null
- }
-
- /** Add the given source files to the managed build process. */
- def addSourceFiles(files: Set[AbstractFile]) {
- sources ++= files
- update(files)
- }
-
- /** Remove the given files from the managed build process. */
- def removeFiles(files: Set[AbstractFile]) {
- sources --= files
- deleteClassfiles(files)
- update(invalidatedByRemove(files))
- }
-
- /** Return the set of invalidated files caused by removing the given files.
- */
- private def invalidatedByRemove(files: Set[AbstractFile]): Set[AbstractFile] = {
- val changes = new mutable.HashMap[Symbol, List[Change]]
- for (f <- files; SymWithHistory(sym, _) <- definitions(f))
- changes += sym -> List(Removed(Class(sym.fullName)))
- invalidated(files, changes)
- }
-
- def update(added: Set[AbstractFile], removed: Set[AbstractFile]) {
- sources --= removed
- deleteClassfiles(removed)
- update(added ++ invalidatedByRemove(removed))
- }
-
- /** The given files have been modified by the user. Recompile
- * them and all files that depend on them. Only files that
- * have been previously added as source files are recompiled.
- * Files that were already compiled are taken out from the result
- * of the dependency analysis.
- */
- private def update(files: Set[AbstractFile]) = {
- val coll: mutable.Map[AbstractFile, immutable.Set[AbstractFile]] =
- mutable.HashMap[AbstractFile, immutable.Set[AbstractFile]]()
- compiler.reporter.reset()
-
- // See if we really have corresponding symbols, not just those
- // which share the name
- def isCorrespondingSym(from: Symbol, to: Symbol): Boolean =
- (from.hasFlag(Flags.TRAIT) == to.hasFlag(Flags.TRAIT)) && // has to run in 2.8, so no hasTraitFlag
- (from.hasFlag(Flags.MODULE) == to.hasFlag(Flags.MODULE))
-
- // For testing purposes only, order irrelevant for compilation
- def toStringSet(set: Set[AbstractFile]): String =
- set.toList sortBy (_.name) mkString("Set(", ", ", ")")
-
- def update0(files: Set[AbstractFile]): Unit = if (!files.isEmpty) {
- deleteClassfiles(files)
- val run = compiler.newRun()
- if (settings.Ybuildmanagerdebug.value)
- compiler.inform("compiling " + toStringSet(files))
- buildingFiles(files)
-
- run.compileFiles(files.toList)
- if (compiler.reporter.hasErrors) {
- return
- }
-
- // Deterministic behaviour required by partest
- val changesOf = new mutable.HashMap[Symbol, List[Change]] {
- override def toString: String = {
- val changesOrdered =
- toList.map(e => {
- e._1.toString + " -> " +
- e._2.sortBy(_.toString).mkString("List(", ", ", ")")
- })
- changesOrdered.sorted.mkString("Map(", ", ", ")")
- }
- }
- val additionalDefs: mutable.HashSet[AbstractFile] = mutable.HashSet.empty
-
- val defs = compiler.dependencyAnalysis.definitions
- for (src <- files) {
- if (definitions(src).isEmpty)
- additionalDefs ++= compiler.dependencyAnalysis.
- dependencies.dependentFiles(1, mutable.Set(src))
- else {
- val syms = defs(src)
- for (sym <- syms) {
- definitions(src).find(
- s => (s.sym.fullName == sym.fullName) &&
- isCorrespondingSym(s.sym, sym)) match {
- case Some(SymWithHistory(oldSym, info)) =>
- val changes = changeSet(oldSym.info, sym)
- val changesErasure = beforeErasure(changeSet(info, sym))
-
- changesOf(oldSym) = (changes ++ changesErasure).distinct
- case _ =>
- // a new top level definition
- changesOf(sym) = sym.parentSymbols filter (_.isSealed) map (p =>
- changeChangeSet(p, sym+" extends a sealed "+p))
- }
- }
- // Create a change for the top level classes that were removed
- val removed = definitions(src) filterNot ((s:SymWithHistory) =>
- syms.find(_.fullName == (s.sym.fullName)) != None)
- for (s <- removed) {
- changesOf(s.sym) = List(removeChangeSet(s.sym))
- }
- }
- }
- if (settings.Ybuildmanagerdebug.value)
- compiler.inform("Changes: " + changesOf)
- updateDefinitions(files)
- val invalid = invalidated(files, changesOf, additionalDefs)
- update0(checkCycles(invalid, files, coll))
- }
-
- update0(files)
- // remove the current run in order to save some memory
- compiler.dropRun()
- }
-
- // Attempt to break cyclic reference deps as soon as possible and reduce
- // the number of compilations to a minimum without having too coarse-grained rules
- private def checkCycles(files: Set[AbstractFile], initial: Set[AbstractFile],
- collect: mutable.Map[AbstractFile, immutable.Set[AbstractFile]]):
- Set[AbstractFile] = {
- def followChain(set: Set[AbstractFile], rest: immutable.Set[AbstractFile]):
- immutable.Set[AbstractFile] = {
- val deps:Set[AbstractFile] = set.flatMap(
- s => collect.get(s) match {
- case Some(x) => x
- case _ => Set[AbstractFile]()
- })
- val newDeps = deps -- rest
- if (newDeps.isEmpty) rest else followChain(newDeps, rest ++ newDeps)
- }
- var res:Set[AbstractFile] = mutable.Set()
- files.foreach( f =>
- if (collect contains f) {
- val chain = followChain(Set(f), immutable.Set()) ++ files
- chain.foreach((fc: AbstractFile) => collect += fc -> chain)
- res ++= chain
- } else
- res += f
- )
-
- initial.foreach((f: AbstractFile) => collect += (f -> (collect.getOrElse(f, immutable.Set()) ++ res)))
- if (res.subsetOf(initial)) Set() else res
- }
-
- /** Return the set of source files that are invalidated by the given changes. */
- def invalidated(files: Set[AbstractFile], changesOf: scala.collection.Map[Symbol, List[Change]],
- processed: Set[AbstractFile] = Set.empty):
- Set[AbstractFile] = {
- val buf = new mutable.HashSet[AbstractFile]
- val newChangesOf = new mutable.HashMap[Symbol, List[Change]]
- var directDeps =
- compiler.dependencyAnalysis.dependencies.dependentFiles(1, files)
-
- def invalidate(file: AbstractFile, reason: String, change: Change) = {
- if (settings.Ybuildmanagerdebug.value)
- compiler.inform("invalidate " + file + " because " + reason + " [" + change + "]")
- buf += file
- directDeps -= file
- for (syms <- definitions(file)) // fixes #2557
- newChangesOf(syms.sym) = List(change, parentChangeSet(syms.sym))
- break
- }
-
- for ((oldSym, changes) <- changesOf; change <- changes) {
- def checkParents(cls: Symbol, file: AbstractFile) {
- val parentChange = cls.parentSymbols exists (_.fullName == oldSym.fullName)
- // if (settings.buildmanagerdebug.value)
- // compiler.inform("checkParents " + cls + " oldSym: " + oldSym + " parentChange: " + parentChange + " " + cls.info.parents)
- change match {
- case Changed(Class(_)) if parentChange =>
- invalidate(file, "parents have changed", change)
-
- case Changed(Definition(_)) if parentChange =>
- invalidate(file, "inherited method changed", change)
-
- case Added(Definition(_)) if parentChange =>
- invalidate(file, "inherited new method", change)
-
- case Removed(Definition(_)) if parentChange =>
- invalidate(file, "inherited method removed", change)
-
- case _ => ()
- }
- }
-
- def checkInterface(cls: Symbol, file: AbstractFile) {
- change match {
- case Added(Definition(name)) =>
- if (cls.info.decls.iterator.exists(_.fullName == name))
- invalidate(file, "of new method with existing name", change)
- case Changed(Class(name)) =>
- if (cls.info.typeSymbol.fullName == name)
- invalidate(file, "self type changed", change)
- case _ =>
- ()
- }
- }
-
- def checkReferences(file: AbstractFile) {
- //if (settings.buildmanagerdebug.value)
- // compiler.inform(file + ":" + references(file))
- val refs = references(file)
- if (refs.isEmpty)
- invalidate(file, "it is a direct dependency and we don't yet have finer-grained dependency information", change)
- else {
- change match {
- case Removed(Definition(name)) if refs(name) =>
- invalidate(file, "it references deleted definition", change)
- case Removed(Class(name)) if (refs(name)) =>
- invalidate(file, "it references deleted class", change)
- case Changed(Class(name)) if (refs(name)) =>
- invalidate(file, "it references changed class", change)
- case Changed(Definition(name)) if (refs(name)) =>
- invalidate(file, "it references changed definition", change)
- case Added(Definition(name)) if (refs(name)) =>
- invalidate(file, "it references added definition", change)
- case _ => ()
- }
- }
- }
-
- def checkInheritedReferences(file: AbstractFile) {
- val refs = inherited(file)
- if (!refs.isEmpty)
- change match {
- case ParentChanged(Class(name)) =>
- for (Inherited(q, member) <- refs.find(p => (p != null && p.qualifier == name));
- classFile <- classes.get(q);
- defs <- definitions.get(classFile);
- s <- defs.find(p => p.sym.fullName == q)
- if ((s.sym).tpe.nonPrivateMember(member) == compiler.NoSymbol))
- invalidate(file, "it references invalid (no longer inherited) definition", change)
- ()
- case _ => ()
- }
- }
-
- for (file <- directDeps) {
- breakable {
- for (cls <- definitions(file)) checkParents(cls.sym, file)
- for (cls <- definitions(file)) checkInterface(cls.sym, file)
- checkReferences(file)
- checkInheritedReferences(file)
- }
- }
- }
- if (buf.isEmpty)
- processed
- else
- invalidated(buf.clone() --= processed, newChangesOf, processed ++ buf)
- }
-
- /** Update the map of definitions per source file */
- private def updateDefinitions(files: Set[AbstractFile]) {
- for (src <- files; localDefs = compiler.dependencyAnalysis.definitions(src)) {
- definitions(src) = (localDefs map (s => {
- this.classes += s.fullName -> src
- SymWithHistory(s.cloneSymbol, beforeErasure(s.info.cloneInfo(s)))
- }))
- }
- this.references = compiler.dependencyAnalysis.references
- this.inherited = compiler.dependencyAnalysis.inherited
- }
-
- /** Load saved dependency information. */
- def loadFrom(file: AbstractFile, toFile: String => AbstractFile) : Boolean = {
- val success = compiler.dependencyAnalysis.loadFrom(file, toFile)
- if (success)
- sources ++= compiler.dependencyAnalysis.managedFiles
- success
- }
-
- /** Save dependency information to `file`. */
- def saveTo(file: AbstractFile, fromFile: AbstractFile => String) {
- compiler.dependencyAnalysis.dependenciesFile = file
- compiler.dependencyAnalysis.saveDependencies(fromFile)
- }
-}
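
RefinedBuildManager's update/invalidated pair above is a fixpoint iteration: compile the changed files, derive the files their change sets invalidate, and repeat with the newly invalidated files until a round produces nothing new. The skeleton of that loop, with the change-set analysis abstracted into a step function (in the real code it inspects parents, interfaces, references and inherited members):

    object FixpointInvalidationDemo {
      def fixpoint(initial: Set[String], step: Set[String] => Set[String]): Set[String] = {
        @annotation.tailrec
        def loop(acc: Set[String], frontier: Set[String]): Set[String] = {
          val next = step(frontier) -- acc      // only genuinely new invalidations
          if (next.isEmpty) acc else loop(acc ++ next, next)
        }
        loop(initial, initial)
      }

      def main(args: Array[String]): Unit = {
        // A -> Set("B") means B depends on A, and so on down the chain.
        val dependsOnMe = Map("A" -> Set("B"), "B" -> Set("C"), "C" -> Set.empty[String])
        val step = (fs: Set[String]) => fs.flatMap(f => dependsOnMe.getOrElse(f, Set.empty[String]))
        println(fixpoint(Set("A"), step))   // Set(A, B, C)
      }
    }
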
diff --git a/src/compiler/scala/tools/nsc/interactive/SimpleBuildManager.scala b/src/compiler/scala/tools/nsc/interactive/SimpleBuildManager.scala
deleted file mode 100644
index 465dcaaf1c..0000000000
--- a/src/compiler/scala/tools/nsc/interactive/SimpleBuildManager.scala
+++ /dev/null
@@ -1,103 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2009-2013 Typesafe/Scala Solutions and LAMP/EPFL
- * @author Martin Odersky
- */
-package scala.tools.nsc
-package interactive
-
-import scala.collection._
-
-import scala.tools.nsc.reporters.{Reporter, ConsoleReporter}
-import dependencies._
-
-import scala.reflect.internal.util.FakePos
-import io.AbstractFile
-
-/** A simple build manager, using the default scalac dependency tracker.
- * The transitive closure of all dependent files on a modified file
- * is recompiled at once.
- *
- * It is equivalent to using a resident compiler mode with the
- * '-make:transitive' option.
- */
-class SimpleBuildManager(val settings: Settings) extends BuildManager {
-
- class BuilderGlobal(settings: Settings, reporter : Reporter) extends scala.tools.nsc.Global(settings, reporter) {
-
- def this(settings: Settings) =
- this(settings, new ConsoleReporter(settings))
-
- def newRun() = new Run()
- }
-
- protected def newCompiler(settings: Settings) = new BuilderGlobal(settings)
-
- val compiler = newCompiler(settings)
-
- /** Managed source files. */
- private val sources: mutable.Set[AbstractFile] = new mutable.HashSet[AbstractFile]
-
- /** Add the given source files to the managed build process. */
- def addSourceFiles(files: Set[AbstractFile]) {
- sources ++= files
- update(files)
- }
-
- /** Remove the given files from the managed build process. */
- def removeFiles(files: Set[AbstractFile]) {
- sources --= files
- deleteClassfiles(files)
- update(invalidatedByRemove(files))
- }
-
-
- /** Return the set of invalidated files caused by removing the given files. */
- private def invalidatedByRemove(files: Set[AbstractFile]): Set[AbstractFile] = {
- val deps = compiler.dependencyAnalysis.dependencies
- deps.dependentFiles(Int.MaxValue, files)
- }
-
- def update(added: Set[AbstractFile], removed: Set[AbstractFile]) {
- sources --= removed
- deleteClassfiles(removed)
- update(added ++ invalidatedByRemove(removed))
- }
-
- /** The given files have been modified by the user. Recompile
- * them and all files that depend on them. Only files that
- * have been previously added as source files are recompiled.
- */
- def update(files: Set[AbstractFile]) {
- deleteClassfiles(files)
-
- val deps = compiler.dependencyAnalysis.dependencies
- val run = compiler.newRun()
- compiler.inform("compiling " + files)
-
- val toCompile =
- (files ++ deps.dependentFiles(Int.MaxValue, files)) intersect sources
-
-
- compiler.inform("Recompiling " +
- (if(settings.debug.value) toCompile.mkString(", ")
- else toCompile.size + " files"))
-
- buildingFiles(toCompile)
-
- run.compileFiles(files.toList)
- }
-
- /** Load saved dependency information. */
- def loadFrom(file: AbstractFile, toFile: String => AbstractFile) : Boolean = {
- val success = compiler.dependencyAnalysis.loadFrom(file, toFile)
- if (success)
- sources ++= compiler.dependencyAnalysis.managedFiles
- success
- }
-
- /** Save dependency information to `file`. */
- def saveTo(file: AbstractFile, fromFile: AbstractFile => String) {
- compiler.dependencyAnalysis.dependenciesFile = file
- compiler.dependencyAnalysis.saveDependencies(fromFile)
- }
-}
diff --git a/src/compiler/scala/tools/nsc/interpreter/CodeHandlers.scala b/src/compiler/scala/tools/nsc/interpreter/CodeHandlers.scala
deleted file mode 100644
index 1741a82775..0000000000
--- a/src/compiler/scala/tools/nsc/interpreter/CodeHandlers.scala
+++ /dev/null
@@ -1,50 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools.nsc
-package interpreter
-
-import CodeHandlers.NoSuccess
-import scala.util.control.ControlThrowable
-
-/**
- * The start of a simpler interface for utilizing the compiler with piecemeal
- * code strings. The "T" here could potentially be a Tree, a Type, a Symbol,
- * a Boolean, or something even more exotic.
- */
-trait CodeHandlers[T] {
- self =>
-
- // Expressions are composed of operators and operands.
- def expr(code: String): T
-
- // Statements occur as parts of blocks and templates.
- // A statement can be an import, a definition or an expression, or it can be empty.
- // Statements used in the template of a class definition can also be declarations.
- def stmt(code: String): T
- def stmts(code: String): Seq[T]
-
- object opt extends CodeHandlers[Option[T]] {
- val handler: PartialFunction[Throwable, Option[T]] = {
- case _: NoSuccess => None
- }
- val handlerSeq: PartialFunction[Throwable, Seq[Option[T]]] = {
- case _: NoSuccess => Nil
- }
-
- def expr(code: String) = try Some(self.expr(code)) catch handler
- def stmt(code: String) = try Some(self.stmt(code)) catch handler
- def stmts(code: String) = try (self.stmts(code) map (x => Some(x))) catch handlerSeq
- }
-}
-
-object CodeHandlers {
- def incomplete() = throw CodeIncomplete
- def fail(msg: String) = throw new CodeException(msg)
-
- trait NoSuccess extends ControlThrowable
- class CodeException(msg: String) extends RuntimeException(msg) with NoSuccess { }
- object CodeIncomplete extends CodeException("CodeIncomplete")
-}
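
The deleted CodeHandlers.opt wrapper shows a small but reusable pattern: a throwing API is lifted into an Option-returning one by catching only the library's own marker failure type (a ControlThrowable mixin) and letting every other exception escape. A toy version of the same shape, written against the Scala 2.10/2.12-era library where ControlThrowable is still a trait; the "parser" here is just an Int reader:

    import scala.util.control.ControlThrowable

    object OptWrapperDemo {
      trait NoSuccess extends ControlThrowable
      class ParseFailure(msg: String) extends RuntimeException(msg) with NoSuccess

      // A deliberately throwing front end...
      def parse(code: String): Int =
        try code.trim.toInt
        catch { case _: NumberFormatException => throw new ParseFailure("not a number: " + code) }

      // ...and its Option-returning wrapper: only NoSuccess is swallowed.
      val handler: PartialFunction[Throwable, Option[Int]] = { case _: NoSuccess => None }
      def parseOpt(code: String): Option[Int] = try Some(parse(code)) catch handler

      def main(args: Array[String]): Unit = {
        println(parseOpt("42"))    // Some(42)
        println(parseOpt("nope"))  // None
      }
    }
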
diff --git a/src/compiler/scala/tools/nsc/interpreter/ILoopInit.scala b/src/compiler/scala/tools/nsc/interpreter/ILoopInit.scala
deleted file mode 100644
index e3c0494fa3..0000000000
--- a/src/compiler/scala/tools/nsc/interpreter/ILoopInit.scala
+++ /dev/null
@@ -1,125 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools.nsc
-package interpreter
-
-import scala.reflect.internal.util.Position
-import scala.util.control.Exception.ignoring
-import scala.tools.nsc.util.stackTraceString
-
-/**
- * Machinery for the asynchronous initialization of the repl.
- */
-trait ILoopInit {
- self: ILoop =>
-
- /** Print a welcome message */
- def printWelcome() {
- import Properties._
- val welcomeMsg =
- """|Welcome to Scala %s (%s, Java %s).
- |Type in expressions to have them evaluated.
- |Type :help for more information.""" .
- stripMargin.format(versionString, javaVmName, javaVersion)
- echo(welcomeMsg)
- replinfo("[info] started at " + new java.util.Date)
- }
-
- protected def asyncMessage(msg: String) {
- if (isReplInfo || isReplPower)
- echoAndRefresh(msg)
- }
-
- private val initLock = new java.util.concurrent.locks.ReentrantLock()
- private val initCompilerCondition = initLock.newCondition() // signal the compiler is initialized
- private val initLoopCondition = initLock.newCondition() // signal the whole repl is initialized
- private val initStart = System.nanoTime
-
- private def withLock[T](body: => T): T = {
- initLock.lock()
- try body
- finally initLock.unlock()
- }
- // a condition used to ensure serial access to the compiler.
- @volatile private var initIsComplete = false
- @volatile private var initError: String = null
- private def elapsed() = "%.3f".format((System.nanoTime - initStart).toDouble / 1000000000L)
-
- // the method to be called when the interpreter is initialized.
- // Very important this method does nothing synchronous (i.e. do
- // not try to use the interpreter) because until it returns, the
- // repl's lazy val `global` is still locked.
- protected def initializedCallback() = withLock(initCompilerCondition.signal())
-
- // Spins off a thread which awaits a single message once the interpreter
- // has been initialized.
- protected def createAsyncListener() = {
- io.spawn {
- withLock(initCompilerCondition.await())
- asyncMessage("[info] compiler init time: " + elapsed() + " s.")
- postInitialization()
- }
- }
-
- // called from main repl loop
- protected def awaitInitialized(): Boolean = {
- if (!initIsComplete)
- withLock { while (!initIsComplete) initLoopCondition.await() }
- if (initError != null) {
- println("""
- |Failed to initialize the REPL due to an unexpected error.
- |This is a bug, please, report it along with the error diagnostics printed below.
- |%s.""".stripMargin.format(initError)
- )
- false
- } else true
- }
- // private def warningsThunks = List(
- // () => intp.bind("lastWarnings", "" + typeTag[List[(Position, String)]], intp.lastWarnings _),
- // )
-
- protected def postInitThunks = List[Option[() => Unit]](
- Some(intp.setContextClassLoader _),
- if (isReplPower) Some(() => enablePowerMode(true)) else None
- ).flatten
- // ++ (
- // warningsThunks
- // )
- // called once after init condition is signalled
- protected def postInitialization() {
- try {
- postInitThunks foreach (f => addThunk(f()))
- runThunks()
- } catch {
- case ex: Throwable =>
- initError = stackTraceString(ex)
- throw ex
- } finally {
- initIsComplete = true
-
- if (isAsync) {
- asyncMessage("[info] total init time: " + elapsed() + " s.")
- withLock(initLoopCondition.signal())
- }
- }
- }
- // code to be executed only after the interpreter is initialized
- // and the lazy val `global` can be accessed without risk of deadlock.
- private var pendingThunks: List[() => Unit] = Nil
- protected def addThunk(body: => Unit) = synchronized {
- pendingThunks :+= (() => body)
- }
- protected def runThunks(): Unit = synchronized {
- if (pendingThunks.nonEmpty)
- repldbg("Clearing " + pendingThunks.size + " thunks.")
-
- while (pendingThunks.nonEmpty) {
- val thunk = pendingThunks.head
- pendingThunks = pendingThunks.tail
- thunk()
- }
- }
-}
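The deleted ILoopInit above coordinates asynchronous start-up with a ReentrantLock, conditions, and volatile flags. A minimal sketch of that hand-off pattern, detached from the repl (class and method names are made up):

    import java.util.concurrent.locks.ReentrantLock

    class AsyncInit {
      private val lock  = new ReentrantLock()
      private val ready = lock.newCondition()
      @volatile private var done = false

      private def withLock[T](body: => T): T = {
        lock.lock()
        try body finally lock.unlock()
      }

      // Called once by the initializing thread when start-up work has finished.
      def markInitialized(): Unit = withLock { done = true; ready.signalAll() }

      // Called by consumers; the while loop guards against spurious wake-ups.
      def awaitInitialized(): Unit = withLock { while (!done) ready.await() }
    }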
diff --git a/src/compiler/scala/tools/nsc/interpreter/package.scala b/src/compiler/scala/tools/nsc/interpreter/package.scala
deleted file mode 100644
index e3440c9f8b..0000000000
--- a/src/compiler/scala/tools/nsc/interpreter/package.scala
+++ /dev/null
@@ -1,49 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools.nsc
-
-import scala.language.implicitConversions
-
-/** The main REPL related classes and values are as follows.
- * In addition to standard compiler classes Global and Settings, there are:
- *
- * History: an interface for session history.
- * Completion: an interface for tab completion.
- * ILoop (formerly InterpreterLoop): The umbrella class for a session.
- * IMain (formerly Interpreter): Handles the evolving state of the session
- * and handles submitting code to the compiler and handling the output.
- * InteractiveReader: how ILoop obtains input.
- * History: an interface for session history.
- * Completion: an interface for tab completion.
- * Power: a repository for more advanced/experimental features.
- *
- * ILoop contains { in: InteractiveReader, intp: IMain, settings: Settings, power: Power }
- * InteractiveReader contains { history: History, completion: Completion }
- * IMain contains { global: Global }
- */
-package object interpreter extends ReplConfig with ReplStrings {
- type JFile = java.io.File
- type JClass = java.lang.Class[_]
- type JList[T] = java.util.List[T]
- type JCollection[T] = java.util.Collection[T]
- type JPrintWriter = java.io.PrintWriter
- type InputStream = java.io.InputStream
- type OutputStream = java.io.OutputStream
-
- val IR = Results
-
- implicit def postfixOps = scala.language.postfixOps // make all postfix ops in this package compile without warning
-
- private[interpreter] implicit def javaCharSeqCollectionToScala(xs: JCollection[_ <: CharSequence]): List[String] = {
- import scala.collection.JavaConverters._
- xs.asScala.toList map ("" + _)
- }
-
- private[nsc] implicit def enrichClass[T](clazz: Class[T]) = new RichClass[T](clazz)
- private[nsc] implicit def enrichAnyRefWithTap[T](x: T) = new TapMaker(x)
- private[nsc] def tracing[T](msg: String)(x: T): T = x.tapTrace(msg)
- private[nsc] def debugging[T](msg: String)(x: T) = x.tapDebug(msg)
-}
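The deleted package object above bridges Java collections of CharSequence to List[String] via JavaConverters. The same conversion as a standalone helper (hypothetical object name):

    import scala.collection.JavaConverters._

    object JavaBridges {
      def charSeqsToStrings(xs: java.util.Collection[_ <: CharSequence]): List[String] =
        xs.asScala.toList map (cs => "" + cs)
    }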
diff --git a/src/compiler/scala/tools/nsc/io/Fileish.scala b/src/compiler/scala/tools/nsc/io/Fileish.scala
deleted file mode 100644
index 7b4e385dd8..0000000000
--- a/src/compiler/scala/tools/nsc/io/Fileish.scala
+++ /dev/null
@@ -1,33 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools.nsc
-package io
-
-import java.io.{ InputStream }
-import java.util.jar.JarEntry
-
-/** A common interface for File-based things and Stream-based things.
- * (In particular, io.File and JarEntry.)
- */
-class Fileish(val path: Path, val input: () => InputStream) extends Streamable.Chars {
- def inputStream() = input()
-
- def parent = path.parent
- def name = path.name
- def isSourceFile = path.hasExtension("java", "scala")
-
- private lazy val pkgLines = lines() collect { case x if x startsWith "package " => x stripPrefix "package" trim }
- lazy val pkgFromPath = parent.path.replaceAll("""[/\\]""", ".")
- lazy val pkgFromSource = pkgLines map (_ stripSuffix ";") mkString "."
-
- override def toString = path.path
-}
-
-object Fileish {
- def apply(f: File): Fileish = new Fileish(f, () => f.inputStream())
- def apply(f: JarEntry, in: () => InputStream): Fileish = new Fileish(Path(f.getName), in)
- def apply(path: String, in: () => InputStream): Fileish = new Fileish(Path(path), in)
-}
diff --git a/src/compiler/scala/tools/nsc/io/Jar.scala b/src/compiler/scala/tools/nsc/io/Jar.scala
index e919621338..2967f67e9c 100644
--- a/src/compiler/scala/tools/nsc/io/Jar.scala
+++ b/src/compiler/scala/tools/nsc/io/Jar.scala
@@ -10,8 +10,7 @@ import java.io.{ InputStream, OutputStream, IOException, FileNotFoundException,
import java.util.jar._
import scala.collection.JavaConverters._
import Attributes.Name
-import util.ClassPath
-import scala.language.implicitConversions
+import scala.language.{ implicitConversions, postfixOps }
// Attributes.Name instances:
//
@@ -37,9 +36,6 @@ class Jar(file: File) extends Iterable[JarEntry] {
def this(jfile: JFile) = this(File(jfile))
def this(path: String) = this(File(path))
- protected def errorFn(msg: String): Unit = Console println msg
-
- lazy val jarFile = new JarFile(file.jfile)
lazy val manifest = withJarInput(s => Option(s.getManifest))
def mainClass = manifest map (f => f(Name.MAIN_CLASS))
@@ -51,6 +47,20 @@ class Jar(file: File) extends Iterable[JarEntry] {
case _ => Nil
}
+ /** Invoke f with input for named jar entry (or None). */
+ def withEntryStream[A](name: String)(f: Option[InputStream] => A) = {
+ val jarFile = new JarFile(file.jfile)
+ def apply() =
+ jarFile getEntry name match {
+ case null => f(None)
+ case entry =>
+ val in = Some(jarFile getInputStream entry)
+ try f(in)
+ finally in map (_.close())
+ }
+ try apply() finally jarFile.close()
+ }
+
def withJarInput[T](f: JarInputStream => T): T = {
val in = new JarInputStream(file.inputStream())
try f(in)
@@ -64,12 +74,6 @@ class Jar(file: File) extends Iterable[JarEntry] {
Iterator continually in.getNextJarEntry() takeWhile (_ != null) foreach f
}
override def iterator: Iterator[JarEntry] = this.toList.iterator
- def fileishIterator: Iterator[Fileish] = jarFile.entries.asScala map (x => Fileish(x, () => getEntryStream(x)))
-
- private def getEntryStream(entry: JarEntry) = jarFile getInputStream entry match {
- case null => errorFn("No such entry: " + entry) ; null
- case x => x
- }
override def toString = "" + file
}
@@ -111,9 +115,9 @@ class JarWriter(val file: File, val manifest: Manifest) {
val buf = new Array[Byte](10240)
def loop(): Unit = in.read(buf, 0, buf.length) match {
case -1 => in.close()
- case n => out.write(buf, 0, n) ; loop
+ case n => out.write(buf, 0, n) ; loop()
}
- loop
+ loop()
}
def close() = out.close()
@@ -131,7 +135,6 @@ object Jar {
m
}
def apply(manifest: JManifest): WManifest = new WManifest(manifest)
- implicit def unenrichManifest(x: WManifest): JManifest = x.underlying
}
class WManifest(manifest: JManifest) {
for ((k, v) <- initialMainAttrs)
@@ -148,12 +151,7 @@ object Jar {
}
def apply(name: Attributes.Name): String = attrs(name)
- def apply(name: String): String = apply(new Attributes.Name(name))
def update(key: Attributes.Name, value: String) = attrs.put(key, value)
- def update(key: String, value: String) = attrs.put(new Attributes.Name(key), value)
-
- def mainClass: String = apply(Name.MAIN_CLASS)
- def mainClass_=(value: String) = update(Name.MAIN_CLASS, value)
}
// See http://download.java.net/jdk7/docs/api/java/nio/file/Path.html
@@ -161,7 +159,7 @@ object Jar {
private val ZipMagicNumber = List[Byte](80, 75, 3, 4)
private def magicNumberIsZip(f: Path) = f.isFile && (f.toFile.bytes().take(4).toList == ZipMagicNumber)
- def isJarOrZip(f: Path): Boolean = isJarOrZip(f, true)
+ def isJarOrZip(f: Path): Boolean = isJarOrZip(f, examineFile = true)
def isJarOrZip(f: Path, examineFile: Boolean): Boolean =
f.hasExtension("zip", "jar") || (examineFile && magicNumberIsZip(f))
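The withEntryStream method added above opens the JarFile per call and closes both the entry stream and the jar in finally blocks. The same pattern as a standalone helper over plain java.util.jar (object, method, and parameter names here are illustrative):

    import java.io.InputStream
    import java.util.jar.JarFile

    object JarEntries {
      def withEntryStream[A](jarPath: String, entryName: String)(f: Option[InputStream] => A): A = {
        val jarFile = new JarFile(jarPath)
        try {
          jarFile.getEntry(entryName) match {
            case null  => f(None)
            case entry =>
              val in = jarFile.getInputStream(entry)
              try f(Some(in)) finally in.close()
          }
        } finally jarFile.close()
      }
    }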
diff --git a/src/compiler/scala/tools/nsc/io/Lexer.scala b/src/compiler/scala/tools/nsc/io/Lexer.scala
index 5ffb5b4d4f..1c926aff6b 100644
--- a/src/compiler/scala/tools/nsc/io/Lexer.scala
+++ b/src/compiler/scala/tools/nsc/io/Lexer.scala
@@ -1,16 +1,14 @@
package scala.tools.nsc.io
-import java.io.{Reader, Writer, StringReader, StringWriter}
-import scala.collection.mutable.{Buffer, ArrayBuffer}
-import scala.math.BigInt
+import java.io.Reader
/** Companion object of class `Lexer` which defines tokens and some utility concepts
* used for tokens and lexers
*/
object Lexer {
- /** An exception raised if a if input does not correspond to what's expected
- * @param rdr the lexer form which the bad input is read
+ /** An exception raised if an input does not correspond to what's expected
+ * @param rdr the lexer from which the bad input is read
* @param msg the error message
*/
class MalformedInput(val rdr: Lexer, val msg: String) extends Exception("Malformed JSON input at "+rdr.tokenPos+": "+msg)
@@ -25,7 +23,7 @@ object Lexer {
/** A subclass of `Token` representing single-character delimiters
* @param char the delimiter character making up this token
*/
- case class Delim(char: Char) extends Token("'"+char.toString+"'")
+ case class Delim(char: Char) extends Token(s"'$char'")
/** A subclass of token representing integer literals */
case class IntLit(override val str: String) extends Token(str)
@@ -50,7 +48,7 @@ object Lexer {
/** The '`(`' token */
val LParen = new Delim('(')
- /** The '`(`' token */
+ /** The '`)`' token */
val RParen = new Delim(')')
/** The '`{`' token */
@@ -90,7 +88,7 @@ object Lexer {
case '\\' => buf ++= "\\\\"
case _ =>
if (' ' <= ch && ch < 128) buf += ch
- else buf ++= "\\u" += toUDigit(ch >>> 12) += toUDigit(ch >>> 8) += toUDigit(ch >>> 4) += toUDigit(ch)
+ else buf ++= "\\u" += toUDigit(ch >>> 12) += toUDigit(ch >>> 8) += toUDigit(ch >>> 4) += toUDigit(ch.toInt)
}
}
@@ -280,7 +278,7 @@ class Lexer(rd: Reader) {
/** The current token is a delimiter consisting of given character, reads next token,
* otherwise raises an error.
- * @param c the given delimiter character to compare current token with
+ * @param ch the given delimiter character to compare current token with
* @throws MalformedInput if the current token `token` is not a delimiter, or
* consists of a character different from `c`.
*/
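The escaping hunk above writes characters outside the printable ASCII range as \uXXXX sequences built from four hex digits. A standalone sketch of that helper pair (hypothetical names, same logic):

    object Escapes {
      private def toUDigit(x: Int): Char = {
        val d = x & 0xF
        (if (d < 10) d + '0' else d - 10 + 'A').toChar
      }

      def escapeTo(buf: StringBuilder, ch: Char): Unit =
        if (' ' <= ch && ch < 128) buf += ch
        else buf ++= "\\u" += toUDigit(ch >> 12) += toUDigit(ch >> 8) += toUDigit(ch >> 4) += toUDigit(ch)
    }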
diff --git a/src/compiler/scala/tools/nsc/io/MsilFile.scala b/src/compiler/scala/tools/nsc/io/MsilFile.scala
deleted file mode 100644
index 2f0a71fc60..0000000000
--- a/src/compiler/scala/tools/nsc/io/MsilFile.scala
+++ /dev/null
@@ -1,18 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools.nsc
-package io
-
-import ch.epfl.lamp.compiler.msil.{ Type => MsilType, _ }
-
-/** This class wraps an MsilType. It exists only so
- * ClassPath can treat all of JVM/MSIL/bin/src files
- * uniformly, as AbstractFiles.
- */
-class MsilFile(val msilType: MsilType) extends VirtualFile(msilType.FullName, msilType.Namespace) {
-}
-
-object NoMsilFile extends MsilFile(null) { }
diff --git a/src/compiler/scala/tools/nsc/io/Pickler.scala b/src/compiler/scala/tools/nsc/io/Pickler.scala
index b03a921e87..0e7da37c52 100644
--- a/src/compiler/scala/tools/nsc/io/Pickler.scala
+++ b/src/compiler/scala/tools/nsc/io/Pickler.scala
@@ -1,6 +1,5 @@
package scala.tools.nsc.io
-import scala.annotation.unchecked
import Lexer._
import java.io.Writer
import scala.language.implicitConversions
@@ -19,7 +18,7 @@ import scala.reflect.ClassTag
* Subclasses of `Pickler` each can write and read individual classes
* of values.
*
- * @param T the type of values handled by this pickler.
+ * @tparam T the type of values handled by this pickler.
*
* These Picklers build on the work of Andrew Kennedy. They are most closely inspired by
* Iulian Dragos' picklers for Scala to XML. See:
@@ -71,17 +70,9 @@ abstract class Pickler[T] {
*/
def wrapped [U] (in: T => U)(out: U => T): Pickler[U] = wrappedPickler(this)(in)(out)
- /** A pickler obtained from the current pickler by also admitting `null` as
- * a handled value, represented as the token `null`.
- *
- * @param fromNull an implicit evidence parameter ensuring that the type of values
- * handled by this pickler contains `null`.
- */
- def orNull(implicit fromNull: Null <:< T): Pickler[T] = nullablePickler(this)
-
/** A conditional pickler obtained from the current pickler.
- * @param cond the condition to test to find out whether pickler can handle
- * some Scala value.
+ * @param p the condition to test to find out whether pickler can handle
+ * some Scala value.
*/
def cond(p: Any => Boolean): CondPickler[T] = conditionalPickler(this, p)
@@ -93,13 +84,10 @@ abstract class Pickler[T] {
}
object Pickler {
-
- var picklerDebugMode = false
-
/** A base class representing unpickler result. It has two subclasses:
* `UnpickleSucess` for successful unpicklings and `UnpickleFailure` for failures,
* where a value of the given type `T` could not be unpickled from input.
- * @param T the type of unpickled values in case of success.
+ * @tparam T the type of unpickled values in case of success.
*/
abstract class Unpickled[+T] {
/** Transforms success values to success values using given function,
@@ -137,7 +125,7 @@ object Pickler {
}
/** A class representing successful unpicklings
- * @param T the type of the unpickled value
+ * @tparam T the type of the unpickled value
* @param result the unpickled value
*/
case class UnpickleSuccess[+T](result: T) extends Unpickled[T]
@@ -175,22 +163,11 @@ object Pickler {
def ~ [T](y: T): S ~ T = new ~ (x, y)
}
- /** A converter from binary functions to functions over `~`-pairs
- */
- implicit def fromTilde[T1, T2, R](f: (T1, T2) => R): T1 ~ T2 => R = { case x1 ~ x2 => f(x1, x2) }
-
- /** An converter from unctions returning Options over pair to functions returning `~`-pairs
- * The converted function will raise a `MatchError` where the original function returned
- * a `None`. This converter is useful for turning `unapply` methods of case classes
- * into wrapper methods that can be passed as second argument to `wrap`.
- */
- implicit def toTilde[T1, T2, S](f: S => Option[(T1, T2)]): S => T1 ~ T2 = { x => (f(x): @unchecked) match { case Some((x1, x2)) => x1 ~ x2 } }
-
/** Same as `p.labelled(label)`.
*/
def labelledPickler[T](label: String, p: Pickler[T]): Pickler[T] = new Pickler[T] {
def pickle(wr: Writer, x: T) = {
- wr.write(quoted(label));
+ wr.write(quoted(label))
wr.write("(")
p.pickle(wr, x)
wr.write(")")
@@ -249,16 +226,6 @@ object Pickler {
def unpickle(rd: Lexer) = p.unpickle(rd) orElse qq.unpickle(rd)
}
- /** Same as `p.orNull`
- */
- def nullablePickler[T](p: Pickler[T])(implicit fromNull: Null <:< T): Pickler[T] = new Pickler[T] {
- def pickle(wr: Writer, x: T) =
- if (x == null) wr.write("null") else p.pickle(wr, x)
- def unpickle(rd: Lexer): Unpickled[T] =
- if (rd.token == NullLit) nextSuccess(rd, fromNull(null))
- else p.unpickle(rd)
- }
-
/** A conditional pickler for singleton objects. It represents these
* with the object's underlying class as a label.
* Example: Object scala.None would be represented as `scala.None$()`.
@@ -330,22 +297,9 @@ object Pickler {
implicit val longPickler: Pickler[Long] =
tokenPickler("integer literal") { case IntLit(s) => s.toLong }
- /** A pickler for values of type `Double`, represented as floating point literals */
- implicit val doublePickler: Pickler[Double] =
- tokenPickler("floating point literal") { case FloatLit(s) => s.toDouble }
-
- /** A pickler for values of type `Byte`, represented as integer literals */
- implicit val bytePickler: Pickler[Byte] = longPickler.wrapped { _.toByte } { _.toLong }
-
- /** A pickler for values of type `Short`, represented as integer literals */
- implicit val shortPickler: Pickler[Short] = longPickler.wrapped { _.toShort } { _.toLong }
-
/** A pickler for values of type `Int`, represented as integer literals */
implicit val intPickler: Pickler[Int] = longPickler.wrapped { _.toInt } { _.toLong }
- /** A pickler for values of type `Float`, represented as floating point literals */
- implicit val floatPickler: Pickler[Float] = doublePickler.wrapped { _.toFloat } { _.toLong }
-
/** A conditional pickler for the boolean value `true` */
private val truePickler =
tokenPickler("boolean literal") { case TrueLit => true } cond { _ == true }
@@ -373,11 +327,6 @@ object Pickler {
}
}
- /** A pickler for values of type `Char`, represented as string literals of length 1 */
- implicit val charPickler: Pickler[Char] =
- stringPickler
- .wrapped { s => require(s.length == 1, "single character string literal expected, but "+quoted(s)+" found"); s(0) } { _.toString }
-
/** A pickler for pairs, represented as `~`-pairs */
implicit def tuple2Pickler[T1: Pickler, T2: Pickler]: Pickler[(T1, T2)] =
(pkl[T1] ~ pkl[T2])
@@ -390,35 +339,9 @@ object Pickler {
.wrapped { case x1 ~ x2 ~ x3 => (x1, x2, x3) } { case (x1, x2, x3) => x1 ~ x2 ~ x3 }
.labelled ("tuple3")
- /** A pickler for 4-tuples, represented as `~`-tuples */
- implicit def tuple4Pickler[T1, T2, T3, T4](implicit p1: Pickler[T1], p2: Pickler[T2], p3: Pickler[T3], p4: Pickler[T4]): Pickler[(T1, T2, T3, T4)] =
- (p1 ~ p2 ~ p3 ~ p4)
- .wrapped { case x1 ~ x2 ~ x3 ~ x4 => (x1, x2, x3, x4) } { case (x1, x2, x3, x4) => x1 ~ x2 ~ x3 ~ x4 }
- .labelled ("tuple4")
-
- /** A conditional pickler for the `scala.None` object */
- implicit val nonePickler = singletonPickler(None)
-
- /** A conditional pickler for instances of class `scala.Some` */
- implicit def somePickler[T: Pickler]: CondPickler[Some[T]] =
- pkl[T]
- .wrapped { Some(_) } { _.get }
- .asClass (classOf[Some[T]])
-
- /** A pickler for optional values */
- implicit def optionPickler[T: Pickler]: Pickler[Option[T]] = nonePickler | somePickler[T]
-
/** A pickler for list values */
implicit def listPickler[T: Pickler]: Pickler[List[T]] =
iterPickler[T] .wrapped { _.toList } { _.iterator } .labelled ("scala.List")
-
- /** A pickler for vector values */
- implicit def vectorPickler[T: Pickler]: Pickler[Vector[T]] =
- iterPickler[T] .wrapped { Vector() ++ _ } { _.iterator } .labelled ("scala.Vector")
-
- /** A pickler for array values */
- implicit def array[T : ClassTag : Pickler]: Pickler[Array[T]] =
- iterPickler[T] .wrapped { _.toArray} { _.iterator } .labelled ("scala.Array")
}
/** A subclass of Pickler can indicate whether a particular value can be pickled by instances
@@ -444,8 +367,8 @@ abstract class CondPickler[T](val canPickle: Any => Boolean) extends Pickler[T]
* To unpickle a value, this unpickler is tried first. If it cannot read
* the input (as indicated by a `UnpickleFailure` result), then the
* alternative pickler is tried.
- * @param V The handled type of the returned pickler.
- * @param U The handled type of the alternative pickler.
+ * @tparam V The handled type of the returned pickler.
+ * @tparam U The handled type of the alternative pickler.
* @param that The alternative pickler.
*/
def | [V >: T, U <: V] (that: => CondPickler[U]): CondPickler[V] =
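The tuple picklers kept above compose primitive picklers with ~, then adapt and label them with wrapped and labelled. An illustrative pickler for a made-up case class in the same style (Point and the object wrapper are not part of the patch; this assumes the Pickler combinators shown in the surrounding hunks):

    import scala.tools.nsc.io.Pickler
    import Pickler._

    object PointPickling {
      case class Point(x: Int, y: Int)

      implicit val pointPickler: Pickler[Point] =
        (pkl[Int] ~ pkl[Int])
          .wrapped { case x ~ y => Point(x, y) } { p => p.x ~ p.y }
          .labelled ("Point")
    }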
diff --git a/src/compiler/scala/tools/nsc/io/PrettyWriter.scala b/src/compiler/scala/tools/nsc/io/PrettyWriter.scala
index acd4847469..11d3703983 100644
--- a/src/compiler/scala/tools/nsc/io/PrettyWriter.scala
+++ b/src/compiler/scala/tools/nsc/io/PrettyWriter.scala
@@ -16,7 +16,7 @@ class PrettyWriter(wr: Writer) extends Writer {
str(off) match {
case '{' | '[' | '(' =>
indent += 1
- wr.write(str(off))
+ wr.write(str(off).toInt)
newLine()
wr.write(str, off + 1, len - 1)
case '}' | ']' | ')' =>
diff --git a/src/compiler/scala/tools/nsc/io/Replayer.scala b/src/compiler/scala/tools/nsc/io/Replayer.scala
index 5cb61b6cb1..e3dc8939a3 100644
--- a/src/compiler/scala/tools/nsc/io/Replayer.scala
+++ b/src/compiler/scala/tools/nsc/io/Replayer.scala
@@ -3,7 +3,7 @@ package scala.tools.nsc.io
import java.io.{Reader, Writer}
import Pickler._
-import Lexer.{Token, EOF}
+import Lexer.EOF
abstract class LogReplay {
def logreplay(event: String, x: => Boolean): Boolean
diff --git a/src/compiler/scala/tools/nsc/io/Socket.scala b/src/compiler/scala/tools/nsc/io/Socket.scala
index e766c1b2fd..4925c50d85 100644
--- a/src/compiler/scala/tools/nsc/io/Socket.scala
+++ b/src/compiler/scala/tools/nsc/io/Socket.scala
@@ -28,13 +28,10 @@ object Socket {
private val optHandler = handlerFn[Option[T]](_ => None)
private val eitherHandler = handlerFn[Either[Throwable, T]](x => Left(x))
- def getOrElse[T1 >: T](alt: T1): T1 = opt getOrElse alt
def either: Either[Throwable, T] = try Right(f()) catch eitherHandler
def opt: Option[T] = try Some(f()) catch optHandler
}
- def newIPv4Server(port: Int = 0) = new Box(() => preferringIPv4(new ServerSocket(0)))
- def newServer(port: Int = 0) = new Box(() => new ServerSocket(0))
def localhost(port: Int) = apply(InetAddress.getLocalHost(), port)
def apply(host: InetAddress, port: Int) = new Box(() => new Socket(new JSocket(host, port)))
def apply(host: String, port: Int) = new Box(() => new Socket(new JSocket(host, port)))
@@ -62,4 +59,4 @@ class Socket(jsocket: JSocket) extends Streamable.Bytes with Closeable {
out.close()
}
}
-}
\ No newline at end of file
+}
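Socket.Box above now keeps only the opt and either accessors around a deferred socket constructor. The core shape as a standalone sketch (simplified: it catches all Throwables, unlike the handlerFn filter in the real class):

    class Box[T](f: () => T) {
      def either: Either[Throwable, T] = try Right(f()) catch { case t: Throwable => Left(t) }
      def opt: Option[T]               = either.right.toOption
    }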
diff --git a/src/compiler/scala/tools/nsc/io/SourceReader.scala b/src/compiler/scala/tools/nsc/io/SourceReader.scala
index 569270f530..3220c2e2b2 100644
--- a/src/compiler/scala/tools/nsc/io/SourceReader.scala
+++ b/src/compiler/scala/tools/nsc/io/SourceReader.scala
@@ -9,7 +9,7 @@ package io
import java.io.{ FileInputStream, InputStream, IOException }
import java.nio.{ByteBuffer, CharBuffer}
-import java.nio.channels.{FileChannel, ReadableByteChannel, Channels}
+import java.nio.channels.{ ReadableByteChannel, Channels }
import java.nio.charset.{CharsetDecoder, CoderResult}
import scala.tools.nsc.reporters._
@@ -33,9 +33,6 @@ class SourceReader(decoder: CharsetDecoder, reporter: Reporter) {
"Please try specifying another one using the -encoding option")
}
- /** Reads the file with the specified name. */
- def read(filename: String): Array[Char]= read(new JFile(filename))
-
/** Reads the specified file. */
def read(file: JFile): Array[Char] = {
val c = new FileInputStream(file).getChannel
@@ -77,7 +74,7 @@ class SourceReader(decoder: CharsetDecoder, reporter: Reporter) {
protected def read(bytes: ByteBuffer): Array[Char] = {
val decoder: CharsetDecoder = this.decoder.reset()
val chars: CharBuffer = this.chars; chars.clear()
- terminate(flush(decoder, decode(decoder, bytes, chars, true)))
+ terminate(flush(decoder, decode(decoder, bytes, chars, endOfInput = true)))
}
//########################################################################
diff --git a/src/compiler/scala/tools/nsc/io/package.scala b/src/compiler/scala/tools/nsc/io/package.scala
index 711696bb6e..0b2db115fb 100644
--- a/src/compiler/scala/tools/nsc/io/package.scala
+++ b/src/compiler/scala/tools/nsc/io/package.scala
@@ -7,7 +7,6 @@ package scala.tools.nsc
import java.util.concurrent.{ Future, Callable }
import java.util.{ Timer, TimerTask }
-import java.util.jar.{ Attributes }
import scala.language.implicitConversions
package object io {
@@ -21,14 +20,10 @@ package object io {
type Path = scala.reflect.io.Path
val Path = scala.reflect.io.Path
type PlainFile = scala.reflect.io.PlainFile
- val PlainFile = scala.reflect.io.PlainFile
val Streamable = scala.reflect.io.Streamable
type VirtualDirectory = scala.reflect.io.VirtualDirectory
type VirtualFile = scala.reflect.io.VirtualFile
- val ZipArchive = scala.reflect.io.ZipArchive
type ZipArchive = scala.reflect.io.ZipArchive
-
- implicit def postfixOps = scala.language.postfixOps // make all postfix ops in this package compile without warning
type JManifest = java.util.jar.Manifest
type JFile = java.io.File
@@ -39,23 +34,11 @@ package object io {
def runnable(body: => Unit): Runnable = new Runnable { override def run() = body }
def callable[T](body: => T): Callable[T] = new Callable[T] { override def call() = body }
def spawn[T](body: => T): Future[T] = daemonThreadPool submit callable(body)
- def submit(runnable: Runnable) = daemonThreadPool submit runnable
- // Create, start, and return a daemon thread
- def daemonize(body: => Unit): Thread = newThread(_ setDaemon true)(body)
def newThread(f: Thread => Unit)(body: => Unit): Thread = {
val thread = new Thread(runnable(body))
f(thread)
thread.start
thread
}
-
- // Set a timer to execute the given code.
- def timer(seconds: Int)(body: => Unit): Timer = {
- val alarm = new Timer(true) // daemon
- val tt = new TimerTask { def run() = body }
-
- alarm.schedule(tt, seconds * 1000)
- alarm
- }
}
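The trimmed package object above still exposes runnable, callable, spawn, and newThread. An illustrative use of newThread (the printed body is made up; this assumes the compiler classes are on the classpath):

    import scala.tools.nsc.io._

    object ThreadDemo {
      def main(args: Array[String]): Unit = {
        val t = newThread(_ setDaemon true) { println("background work") }
        t.join()
      }
    }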
diff --git a/src/compiler/scala/tools/nsc/javac/JavaParsers.scala b/src/compiler/scala/tools/nsc/javac/JavaParsers.scala
index 0779e648cd..2a799acbc7 100644
--- a/src/compiler/scala/tools/nsc/javac/JavaParsers.scala
+++ b/src/compiler/scala/tools/nsc/javac/JavaParsers.scala
@@ -35,7 +35,6 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
abstract class JavaParser extends ParserCommon {
val in: JavaScanner
- protected def posToReport: Int = in.currentPos
def freshName(prefix : String): Name
protected implicit def i2p(offset : Int) : Position
private implicit def p2i(pos : Position): Int = if (pos.isDefined) pos.point else -1
@@ -75,7 +74,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
nbraces += 1
case _ =>
}
- in.nextToken
+ in.nextToken()
}
}
@@ -94,11 +93,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
if (skipIt)
skip()
}
- def warning(msg: String) : Unit = warning(in.currentPos, msg)
-
def errorTypeTree = TypeTree().setType(ErrorType) setPos in.currentPos
- def errorTermTree = Literal(Constant(null)) setPos in.currentPos
- def errorPatternTree = blankExpr setPos in.currentPos
// --------- tree building -----------------------------
@@ -130,7 +125,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
def makeSyntheticParam(count: Int, tpt: Tree): ValDef =
makeParam(nme.syntheticParamName(count), tpt)
def makeParam(name: String, tpt: Tree): ValDef =
- makeParam(newTypeName(name), tpt)
+ makeParam(name: TermName, tpt)
def makeParam(name: TermName, tpt: Tree): ValDef =
ValDef(Modifiers(Flags.JAVA | Flags.PARAM), name, tpt, EmptyTree)
@@ -153,7 +148,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
nbraces += 1
case _ =>
}
- in.nextToken
+ in.nextToken()
in.token match {
case RPAREN =>
nparens -= 1
@@ -168,7 +163,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
while (!(tokens contains in.token) && in.token != EOF) {
if (in.token == LBRACE) { skipAhead(); accept(RBRACE) }
else if (in.token == LPAREN) { skipAhead(); accept(RPAREN) }
- else in.nextToken
+ else in.nextToken()
}
}
@@ -178,18 +173,14 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
def accept(token: Int): Int = {
val pos = in.currentPos
if (in.token != token) {
- val posToReport =
- //if (in.currentPos.line(unit.source).get(0) > in.lastPos.line(unit.source).get(0))
- // in.lastPos
- //else
- in.currentPos
+ val posToReport = in.currentPos
val msg =
JavaScannerConfiguration.token2string(token) + " expected but " +
JavaScannerConfiguration.token2string(in.token) + " found."
- syntaxError(posToReport, msg, true)
+ syntaxError(posToReport, msg, skipIt = true)
}
- if (in.token == token) in.nextToken
+ if (in.token == token) in.nextToken()
pos
}
@@ -209,7 +200,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
def ident(): Name =
if (in.token == IDENTIFIER) {
val name = in.name
- in.nextToken
+ in.nextToken()
name
} else {
accept(IDENTIFIER)
@@ -219,7 +210,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
def repsep[T <: Tree](p: () => T, sep: Int): List[T] = {
val buf = ListBuffer[T](p())
while (in.token == sep) {
- in.nextToken
+ in.nextToken()
buf += p()
}
buf.toList
@@ -233,7 +224,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
case AppliedTypeTree(_, _) | ExistentialTypeTree(_, _) | SelectFromTypeTree(_, _) =>
tree
case _ =>
- syntaxError(tree.pos, "identifier expected", false)
+ syntaxError(tree.pos, "identifier expected", skipIt = false)
errorTypeTree
}
}
@@ -243,7 +234,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
def qualId(): RefTree = {
var t: RefTree = atPos(in.currentPos) { Ident(ident()) }
while (in.token == DOT) {
- in.nextToken
+ in.nextToken()
t = atPos(in.currentPos) { Select(t, ident()) }
}
t
@@ -252,7 +243,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
def optArrayBrackets(tpt: Tree): Tree =
if (in.token == LBRACKET) {
val tpt1 = atPos(in.pos) { arrayOf(tpt) }
- in.nextToken
+ in.nextToken()
accept(RBRACKET)
optArrayBrackets(tpt1)
} else tpt
@@ -260,21 +251,21 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
def basicType(): Tree =
atPos(in.pos) {
in.token match {
- case BYTE => in.nextToken; TypeTree(ByteClass.tpe)
- case SHORT => in.nextToken; TypeTree(ShortClass.tpe)
- case CHAR => in.nextToken; TypeTree(CharClass.tpe)
- case INT => in.nextToken; TypeTree(IntClass.tpe)
- case LONG => in.nextToken; TypeTree(LongClass.tpe)
- case FLOAT => in.nextToken; TypeTree(FloatClass.tpe)
- case DOUBLE => in.nextToken; TypeTree(DoubleClass.tpe)
- case BOOLEAN => in.nextToken; TypeTree(BooleanClass.tpe)
- case _ => syntaxError("illegal start of type", true); errorTypeTree
+ case BYTE => in.nextToken(); TypeTree(ByteTpe)
+ case SHORT => in.nextToken(); TypeTree(ShortTpe)
+ case CHAR => in.nextToken(); TypeTree(CharTpe)
+ case INT => in.nextToken(); TypeTree(IntTpe)
+ case LONG => in.nextToken(); TypeTree(LongTpe)
+ case FLOAT => in.nextToken(); TypeTree(FloatTpe)
+ case DOUBLE => in.nextToken(); TypeTree(DoubleTpe)
+ case BOOLEAN => in.nextToken(); TypeTree(BooleanTpe)
+ case _ => syntaxError("illegal start of type", skipIt = true); errorTypeTree
}
}
def typ(): Tree =
optArrayBrackets {
- if (in.token == FINAL) in.nextToken
+ if (in.token == FINAL) in.nextToken()
if (in.token == IDENTIFIER) {
var t = typeArgs(atPos(in.currentPos)(Ident(ident())))
// typeSelect generates Select nodes is the lhs is an Ident or Select,
@@ -287,7 +278,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
case _ => SelectFromTypeTree(t, name.toTypeName)
}
while (in.token == DOT) {
- in.nextToken
+ in.nextToken()
t = typeArgs(atPos(in.currentPos)(typeSelect(t, ident())))
}
convertToTypeId(t)
@@ -301,16 +292,9 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
def typeArg(): Tree =
if (in.token == QMARK) {
val pos = in.currentPos
- in.nextToken
- var lo: Tree = TypeTree(NothingClass.tpe)
- var hi: Tree = TypeTree(AnyClass.tpe)
- if (in.token == EXTENDS) {
- in.nextToken
- hi = typ()
- } else if (in.token == SUPER) {
- in.nextToken
- lo = typ()
- }
+ in.nextToken()
+ val hi = if (in.token == EXTENDS) { in.nextToken() ; typ() } else EmptyTree
+ val lo = if (in.token == SUPER) { in.nextToken() ; typ() } else EmptyTree
val tdef = atPos(pos) {
TypeDef(
Modifiers(Flags.JAVA | Flags.DEFERRED),
@@ -324,7 +308,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
typ()
}
if (in.token == LT) {
- in.nextToken
+ in.nextToken()
val t1 = convertToTypeId(t)
val args = repsep(typeArg, COMMA)
acceptClosingAngle()
@@ -339,7 +323,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
def annotations(): List[Tree] = {
//var annots = new ListBuffer[Tree]
while (in.token == AT) {
- in.nextToken
+ in.nextToken()
annotation()
}
List() // don't pass on annotations for now
@@ -348,46 +332,10 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
/** Annotation ::= TypeName [`(` AnnotationArgument {`,` AnnotationArgument} `)`]
*/
def annotation() {
- val pos = in.currentPos
- var t = qualId()
+ qualId()
if (in.token == LPAREN) { skipAhead(); accept(RPAREN) }
else if (in.token == LBRACE) { skipAhead(); accept(RBRACE) }
}
-/*
- def annotationArg() = {
- val pos = in.token
- if (in.token == IDENTIFIER && in.lookaheadToken == ASSIGN) {
- val name = ident()
- accept(ASSIGN)
- atPos(pos) {
- ValDef(Modifiers(Flags.JAVA), name, TypeTree(), elementValue())
- }
- } else {
- elementValue()
- }
- }
-
- def elementValue(): Tree =
- if (in.token == AT) annotation()
- else if (in.token == LBRACE) elementValueArrayInitializer()
- else expression1()
-
- def elementValueArrayInitializer() = {
- accept(LBRACE)
- val buf = new ListBuffer[Tree]
- def loop() =
- if (in.token != RBRACE) {
- buf += elementValue()
- if (in.token == COMMA) {
- in.nextToken
- loop()
- }
- }
- loop()
- accept(RBRACE)
- buf.toList
- }
- */
def modifiers(inInterface: Boolean): Modifiers = {
var flags: Long = Flags.JAVA
@@ -399,41 +347,41 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
while (true) {
in.token match {
case AT if (in.lookaheadToken != INTERFACE) =>
- in.nextToken
+ in.nextToken()
annotation()
case PUBLIC =>
isPackageAccess = false
- in.nextToken
+ in.nextToken()
case PROTECTED =>
flags |= Flags.PROTECTED
- in.nextToken
+ in.nextToken()
case PRIVATE =>
isPackageAccess = false
flags |= Flags.PRIVATE
- in.nextToken
+ in.nextToken()
case STATIC =>
flags |= Flags.STATIC
- in.nextToken
+ in.nextToken()
case ABSTRACT =>
flags |= Flags.ABSTRACT
- in.nextToken
+ in.nextToken()
case FINAL =>
flags |= Flags.FINAL
- in.nextToken
+ in.nextToken()
case DEFAULT =>
flags |= Flags.DEFAULTMETHOD
in.nextToken()
case NATIVE =>
addAnnot(NativeAttr)
- in.nextToken
+ in.nextToken()
case TRANSIENT =>
addAnnot(TransientAttr)
- in.nextToken
+ in.nextToken()
case VOLATILE =>
addAnnot(VolatileAttr)
- in.nextToken
+ in.nextToken()
case SYNCHRONIZED | STRICTFP =>
- in.nextToken
+ in.nextToken()
case _ =>
val privateWithin: TypeName =
if (isPackageAccess && !inInterface) thisPackageName
@@ -447,7 +395,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
def typeParams(): List[TypeDef] =
if (in.token == LT) {
- in.nextToken
+ in.nextToken()
val tparams = repsep(typeParam, COMMA)
acceptClosingAngle()
tparams
@@ -456,22 +404,15 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
def typeParam(): TypeDef =
atPos(in.currentPos) {
val name = identForType()
- val hi =
- if (in.token == EXTENDS) {
- in.nextToken
- bound()
- } else {
- scalaDot(tpnme.Any)
- }
- TypeDef(Modifiers(Flags.JAVA | Flags.DEFERRED | Flags.PARAM), name, List(),
- TypeBoundsTree(scalaDot(tpnme.Nothing), hi))
+ val hi = if (in.token == EXTENDS) { in.nextToken() ; bound() } else EmptyTree
+ TypeDef(Modifiers(Flags.JAVA | Flags.DEFERRED | Flags.PARAM), name, Nil, TypeBoundsTree(EmptyTree, hi))
}
def bound(): Tree =
atPos(in.currentPos) {
val buf = ListBuffer[Tree](typ())
while (in.token == AMP) {
- in.nextToken
+ in.nextToken()
buf += typ()
}
val ts = buf.toList
@@ -487,21 +428,21 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
}
def formalParam(): ValDef = {
- if (in.token == FINAL) in.nextToken
+ if (in.token == FINAL) in.nextToken()
annotations()
var t = typ()
if (in.token == DOTDOTDOT) {
- in.nextToken
+ in.nextToken()
t = atPos(t.pos) {
AppliedTypeTree(scalaDot(tpnme.JAVA_REPEATED_PARAM_CLASS_NAME), List(t))
}
}
- varDecl(in.currentPos, Modifiers(Flags.JAVA | Flags.PARAM), t, ident())
+ varDecl(in.currentPos, Modifiers(Flags.JAVA | Flags.PARAM), t, ident().toTermName)
}
def optThrows() {
if (in.token == THROWS) {
- in.nextToken
+ in.nextToken()
repsep(typ, COMMA)
}
}
@@ -520,8 +461,8 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
val isVoid = in.token == VOID
var rtpt =
if (isVoid) {
- in.nextToken
- TypeTree(UnitClass.tpe) setPos in.pos
+ in.nextToken()
+ TypeTree(UnitTpe) setPos in.pos
} else typ()
var pos = in.currentPos
val rtptName = rtpt match {
@@ -555,9 +496,9 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
if (parentToken == AT && in.token == DEFAULT) {
val annot =
atPos(pos) {
- New(Select(scalaDot(nme.runtime), tpnme.AnnotationDefaultATTR), ListOfNil)
+ New(Select(scalaDot(nme.runtime), tpnme.AnnotationDefaultATTR), Nil)
}
- mods1 = mods1 withAnnotations List(annot)
+ mods1 = mods1 withAnnotations annot :: Nil
skipTo(SEMI)
accept(SEMI)
blankExpr
@@ -591,18 +532,18 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
* these potential definitions are real or not.
*/
def fieldDecls(pos: Position, mods: Modifiers, tpt: Tree, name: Name): List[Tree] = {
- val buf = ListBuffer[Tree](varDecl(pos, mods, tpt, name))
+ val buf = ListBuffer[Tree](varDecl(pos, mods, tpt, name.toTermName))
val maybe = new ListBuffer[Tree] // potential variable definitions.
while (in.token == COMMA) {
- in.nextToken
+ in.nextToken()
if (in.token == IDENTIFIER) { // if there's an ident after the comma ...
val name = ident()
if (in.token == ASSIGN || in.token == SEMI) { // ... followed by a `=` or `;`, we know it's a real variable definition
buf ++= maybe
- buf += varDecl(in.currentPos, mods, tpt.duplicate, name)
+ buf += varDecl(in.currentPos, mods, tpt.duplicate, name.toTermName)
maybe.clear()
} else if (in.token == COMMA) { // ... if there's a comma after the ident, it could be a real vardef or not.
- maybe += varDecl(in.currentPos, mods, tpt.duplicate, name)
+ maybe += varDecl(in.currentPos, mods, tpt.duplicate, name.toTermName)
} else { // ... if there's something else we were still in the initializer of the
// previous var def; skip to next comma or semicolon.
skipTo(COMMA, SEMI)
@@ -675,25 +616,25 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
def collectIdents() : Int = {
if (in.token == ASTERISK) {
val starOffset = in.pos
- in.nextToken
+ in.nextToken()
buf += nme.WILDCARD
starOffset
} else {
val nameOffset = in.pos
buf += ident()
if (in.token == DOT) {
- in.nextToken
+ in.nextToken()
collectIdents()
} else nameOffset
}
}
- if (in.token == STATIC) in.nextToken
+ if (in.token == STATIC) in.nextToken()
else buf += nme.ROOTPKG
val lastnameOffset = collectIdents()
accept(SEMI)
val names = buf.toList
if (names.length < 2) {
- syntaxError(pos, "illegal import", false)
+ syntaxError(pos, "illegal import", skipIt = false)
List()
} else {
val qual = ((Ident(names.head): Tree) /: names.tail.init) (Select(_, _))
@@ -708,7 +649,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
def interfacesOpt() =
if (in.token == IMPLEMENTS) {
- in.nextToken
+ in.nextToken()
repsep(typ, COMMA)
} else {
List()
@@ -721,7 +662,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
val tparams = typeParams()
val superclass =
if (in.token == EXTENDS) {
- in.nextToken
+ in.nextToken()
typ()
} else {
javaLangObject()
@@ -740,10 +681,10 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
val tparams = typeParams()
val parents =
if (in.token == EXTENDS) {
- in.nextToken
+ in.nextToken()
repsep(typ, COMMA)
} else {
- List(javaLangObject)
+ List(javaLangObject())
}
val (statics, body) = typeBody(INTERFACE, name)
addCompanionObject(statics, atPos(pos) {
@@ -770,7 +711,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
skipAhead() // skip init block, we just assume we have seen only static
accept(RBRACE)
} else if (in.token == SEMI) {
- in.nextToken
+ in.nextToken()
} else {
if (in.token == ENUM || definesInterface(in.token)) mods |= Flags.STATIC
val decls = memberDecl(mods, parentToken)
@@ -822,7 +763,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
if (in.token != RBRACE && in.token != SEMI && in.token != EOF) {
buf += enumConst(enumType)
if (in.token == COMMA) {
- in.nextToken
+ in.nextToken()
parseEnumConsts()
}
}
@@ -831,7 +772,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
val consts = buf.toList
val (statics, body) =
if (in.token == SEMI) {
- in.nextToken
+ in.nextToken()
typeBodyDecls(ENUM, name)
} else {
(List(), List())
@@ -844,7 +785,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
blankExpr),
DefDef(
Modifiers(Flags.JAVA | Flags.STATIC), nme.valueOf, List(),
- List(List(makeParam("x", TypeTree(StringClass.tpe)))),
+ List(List(makeParam("x", TypeTree(StringTpe)))),
enumType,
blankExpr))
accept(RBRACE)
@@ -873,7 +814,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
// The STABLE flag is to signal to namer that this was read from a
// java enum, and so should be given a Constant type (thereby making
// it usable in annotations.)
- ValDef(Modifiers(Flags.STABLE | Flags.JAVA | Flags.STATIC), name, enumType, blankExpr)
+ ValDef(Modifiers(Flags.STABLE | Flags.JAVA | Flags.STATIC), name.toTermName, enumType, blankExpr)
}
}
@@ -882,13 +823,13 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
case INTERFACE => interfaceDecl(mods)
case AT => annotationDecl(mods)
case CLASS => classDecl(mods)
- case _ => in.nextToken; syntaxError("illegal start of type declaration", true); List(errorTypeTree)
+ case _ => in.nextToken(); syntaxError("illegal start of type declaration", skipIt = true); List(errorTypeTree)
}
/** CompilationUnit ::= [package QualId semi] TopStatSeq
*/
def compilationUnit(): Tree = {
- var pos = in.currentPos;
+ var pos = in.currentPos
val pkg: RefTree =
if (in.token == AT || in.token == PACKAGE) {
annotations()
@@ -908,9 +849,9 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
while (in.token == IMPORT)
buf ++= importDecl()
while (in.token != EOF && in.token != RBRACE) {
- while (in.token == SEMI) in.nextToken
+ while (in.token == SEMI) in.nextToken()
if (in.token != EOF)
- buf ++= typeDecl(modifiers(false))
+ buf ++= typeDecl(modifiers(inInterface = false))
}
accept(EOF)
atPos(pos) {
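Throughout this file the patch switches positional Booleans to named arguments (syntaxError(..., skipIt = true), modifiers(inInterface = false)). A standalone sketch of why that reads better at the call site (the method below is made up):

    object NamedBooleans {
      def report(pos: Int, msg: String, skipIt: Boolean): Unit =
        println(s"error at $pos: $msg" + (if (skipIt) " (skipping ahead)" else ""))

      // The call site documents the flag instead of passing a bare `false`.
      def demo(): Unit = report(42, "identifier expected", skipIt = false)
    }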
diff --git a/src/compiler/scala/tools/nsc/javac/JavaScanners.scala b/src/compiler/scala/tools/nsc/javac/JavaScanners.scala
index e230585a8b..e987b6de2f 100644
--- a/src/compiler/scala/tools/nsc/javac/JavaScanners.scala
+++ b/src/compiler/scala/tools/nsc/javac/JavaScanners.scala
@@ -10,7 +10,7 @@ import scala.tools.nsc.util.JavaCharArrayReader
import scala.reflect.internal.util._
import scala.reflect.internal.Chars._
import JavaTokens._
-import scala.annotation.switch
+import scala.annotation.{ switch, tailrec }
import scala.language.implicitConversions
// Todo merge these better with Scanners
@@ -57,23 +57,14 @@ trait JavaScanners extends ast.parser.ScannersCommon {
/** ...
*/
abstract class AbstractJavaScanner extends AbstractJavaTokenData {
- implicit def p2g(pos: Position): ScanPosition
implicit def g2p(pos: ScanPosition): Position
- /** the last error position
- */
- var errpos: ScanPosition
- var lastPos: ScanPosition
- def skipToken: ScanPosition
def nextToken(): Unit
def next: AbstractJavaTokenData
def intVal(negated: Boolean): Long
def floatVal(negated: Boolean): Double
- def intVal: Long = intVal(false)
- def floatVal: Double = floatVal(false)
- //def token2string(token : Int) : String = configuration.token2string(token)
- /** return recent scala doc, if any */
- def flushDoc: DocComment
+ def intVal: Long = intVal(negated = false)
+ def floatVal: Double = floatVal(negated = false)
def currentPos: Position
}
@@ -227,17 +218,9 @@ trait JavaScanners extends ast.parser.ScannersCommon {
abstract class JavaScanner extends AbstractJavaScanner with JavaTokenData with Cloneable with ScannerCommon {
override def intVal = super.intVal// todo: needed?
override def floatVal = super.floatVal
- override var errpos: Int = NoPos
def currentPos: Position = g2p(pos - 1)
-
var in: JavaCharArrayReader = _
- def dup: JavaScanner = {
- val dup = clone().asInstanceOf[JavaScanner]
- dup.in = in.dup
- dup
- }
-
/** character buffer for literals
*/
val cbuf = new StringBuilder()
@@ -252,22 +235,6 @@ trait JavaScanners extends ast.parser.ScannersCommon {
cbuf.setLength(0)
}
- /** buffer for the documentation comment
- */
- var docBuffer: StringBuilder = null
-
- def flushDoc: DocComment = {
- val ret = if (docBuffer != null) DocComment(docBuffer.toString, NoPosition) else null
- docBuffer = null
- ret
- }
-
- /** add the given character to the documentation buffer
- */
- protected def putDocChar(c: Char) {
- if (docBuffer ne null) docBuffer.append(c)
- }
-
private class JavaTokenData0 extends JavaTokenData
/** we need one token lookahead
@@ -277,13 +244,6 @@ trait JavaScanners extends ast.parser.ScannersCommon {
// Get next token ------------------------------------------------------------
- /** read next token and return last position
- */
- def skipToken: Int = {
- val p = pos; nextToken
- p - 1
- }
-
def nextToken() {
if (next.token == EMPTY) {
fetchToken()
@@ -296,7 +256,7 @@ trait JavaScanners extends ast.parser.ScannersCommon {
def lookaheadToken: Int = {
prev copyFrom this
- nextToken
+ nextToken()
val t = token
next copyFrom this
this copyFrom prev
@@ -308,11 +268,10 @@ trait JavaScanners extends ast.parser.ScannersCommon {
private def fetchToken() {
if (token == EOF) return
lastPos = in.cpos - 1
- //var index = bp
while (true) {
in.ch match {
case ' ' | '\t' | CR | LF | FF =>
- in.next
+ in.next()
case _ =>
pos = in.cpos
(in.ch: @switch) match {
@@ -329,47 +288,47 @@ trait JavaScanners extends ast.parser.ScannersCommon {
'u' | 'v' | 'w' | 'x' | 'y' |
'z' =>
putChar(in.ch)
- in.next
- getIdentRest
+ in.next()
+ getIdentRest()
return
case '0' =>
putChar(in.ch)
- in.next
+ in.next()
if (in.ch == 'x' || in.ch == 'X') {
- in.next
+ in.next()
base = 16
} else {
base = 8
}
- getNumber
+ getNumber()
return
case '1' | '2' | '3' | '4' |
'5' | '6' | '7' | '8' | '9' =>
base = 10
- getNumber
+ getNumber()
return
case '\"' =>
- in.next
+ in.next()
while (in.ch != '\"' && (in.isUnicode || in.ch != CR && in.ch != LF && in.ch != SU)) {
getlitch()
}
if (in.ch == '\"') {
token = STRINGLIT
setName()
- in.next
+ in.next()
} else {
syntaxError("unclosed string literal")
}
return
case '\'' =>
- in.next
+ in.next()
getlitch()
if (in.ch == '\'') {
- in.next
+ in.next()
token = CHARLIT
setName()
} else {
@@ -379,31 +338,31 @@ trait JavaScanners extends ast.parser.ScannersCommon {
case '=' =>
token = ASSIGN
- in.next
+ in.next()
if (in.ch == '=') {
token = EQEQ
- in.next
+ in.next()
}
return
case '>' =>
token = GT
- in.next
+ in.next()
if (in.ch == '=') {
token = GTEQ
- in.next
+ in.next()
} else if (in.ch == '>') {
token = GTGT
- in.next
+ in.next()
if (in.ch == '=') {
token = GTGTEQ
- in.next
+ in.next()
} else if (in.ch == '>') {
token = GTGTGT
- in.next
+ in.next()
if (in.ch == '=') {
token = GTGTGTEQ
- in.next
+ in.next()
}
}
}
@@ -411,145 +370,145 @@ trait JavaScanners extends ast.parser.ScannersCommon {
case '<' =>
token = LT
- in.next
+ in.next()
if (in.ch == '=') {
token = LTEQ
- in.next
+ in.next()
} else if (in.ch == '<') {
token = LTLT
- in.next
+ in.next()
if (in.ch == '=') {
token = LTLTEQ
- in.next
+ in.next()
}
}
return
case '!' =>
token = BANG
- in.next
+ in.next()
if (in.ch == '=') {
token = BANGEQ
- in.next
+ in.next()
}
return
case '~' =>
token = TILDE
- in.next
+ in.next()
return
case '?' =>
token = QMARK
- in.next
+ in.next()
return
case ':' =>
token = COLON
- in.next
+ in.next()
return
case '@' =>
token = AT
- in.next
+ in.next()
return
case '&' =>
token = AMP
- in.next
+ in.next()
if (in.ch == '&') {
token = AMPAMP
- in.next
+ in.next()
} else if (in.ch == '=') {
token = AMPEQ
- in.next
+ in.next()
}
return
case '|' =>
token = BAR
- in.next
+ in.next()
if (in.ch == '|') {
token = BARBAR
- in.next
+ in.next()
} else if (in.ch == '=') {
token = BAREQ
- in.next
+ in.next()
}
return
case '+' =>
token = PLUS
- in.next
+ in.next()
if (in.ch == '+') {
token = PLUSPLUS
- in.next
+ in.next()
} else if (in.ch == '=') {
token = PLUSEQ
- in.next
+ in.next()
}
return
case '-' =>
token = MINUS
- in.next
+ in.next()
if (in.ch == '-') {
token = MINUSMINUS
- in.next
+ in.next()
} else if (in.ch == '=') {
token = MINUSEQ
- in.next
+ in.next()
}
return
case '*' =>
token = ASTERISK
- in.next
+ in.next()
if (in.ch == '=') {
token = ASTERISKEQ
- in.next
+ in.next()
}
return
case '/' =>
- in.next
+ in.next()
if (!skipComment()) {
token = SLASH
- in.next
+ in.next()
if (in.ch == '=') {
token = SLASHEQ
- in.next
+ in.next()
}
return
}
case '^' =>
token = HAT
- in.next
+ in.next()
if (in.ch == '=') {
token = HATEQ
- in.next
+ in.next()
}
return
case '%' =>
token = PERCENT
- in.next
+ in.next()
if (in.ch == '=') {
token = PERCENTEQ
- in.next
+ in.next()
}
return
case '.' =>
token = DOT
- in.next
+ in.next()
if ('0' <= in.ch && in.ch <= '9') {
- putChar('.'); getFraction
+ putChar('.'); getFraction()
} else if (in.ch == '.') {
- in.next
+ in.next()
if (in.ch == '.') {
- in.next
+ in.next()
token = DOTDOTDOT
} else syntaxError("`.' character expected")
}
@@ -557,60 +516,60 @@ trait JavaScanners extends ast.parser.ScannersCommon {
case ';' =>
token = SEMI
- in.next
+ in.next()
return
case ',' =>
token = COMMA
- in.next
+ in.next()
return
case '(' =>
token = LPAREN
- in.next
+ in.next()
return
case '{' =>
token = LBRACE
- in.next
+ in.next()
return
case ')' =>
token = RPAREN
- in.next
+ in.next()
return
case '}' =>
token = RBRACE
- in.next
+ in.next()
return
case '[' =>
token = LBRACKET
- in.next
+ in.next()
return
case ']' =>
token = RBRACKET
- in.next
+ in.next()
return
case SU =>
if (!in.hasNext) token = EOF
else {
syntaxError("illegal character")
- in.next
+ in.next()
}
return
case _ =>
if (Character.isUnicodeIdentifierStart(in.ch)) {
putChar(in.ch)
- in.next
- getIdentRest
+ in.next()
+ getIdentRest()
} else {
syntaxError("illegal character: "+in.ch.toInt)
- in.next
+ in.next()
}
return
}
@@ -618,33 +577,20 @@ trait JavaScanners extends ast.parser.ScannersCommon {
}
}
- private def skipComment(): Boolean = {
- if (in.ch == '/') {
- do {
- in.next
- } while ((in.ch != CR) && (in.ch != LF) && (in.ch != SU))
- true
- } else if (in.ch == '*') {
- docBuffer = null
- in.next
- val scalaDoc = ("/**", "*/")
- if (in.ch == '*' && forScaladoc)
- docBuffer = new StringBuilder(scalaDoc._1)
- do {
- do {
- if (in.ch != '*' && in.ch != SU) {
- in.next; putDocChar(in.ch)
- }
- } while (in.ch != '*' && in.ch != SU)
- while (in.ch == '*') {
- in.next; putDocChar(in.ch)
- }
- } while (in.ch != '/' && in.ch != SU)
- if (in.ch == '/') in.next
- else incompleteInputError("unclosed comment")
- true
- } else {
- false
+ protected def skipComment(): Boolean = {
+ @tailrec def skipLineComment(): Unit = in.ch match {
+ case CR | LF | SU =>
+ case _ => in.next; skipLineComment()
+ }
+ @tailrec def skipJavaComment(): Unit = in.ch match {
+ case SU => incompleteInputError("unclosed comment")
+ case '*' => in.next; if (in.ch == '/') in.next else skipJavaComment()
+ case _ => in.next; skipJavaComment()
+ }
+ in.ch match {
+ case '/' => in.next ; skipLineComment() ; true
+ case '*' => in.next ; skipJavaComment() ; true
+ case _ => false
}
}
@@ -668,12 +614,12 @@ trait JavaScanners extends ast.parser.ScannersCommon {
'0' | '1' | '2' | '3' | '4' |
'5' | '6' | '7' | '8' | '9' =>
putChar(in.ch)
- in.next
+ in.next()
case '_' =>
putChar(in.ch)
- in.next
- getIdentRest
+ in.next()
+ getIdentRest()
return
case SU =>
setName()
@@ -682,7 +628,7 @@ trait JavaScanners extends ast.parser.ScannersCommon {
case _ =>
if (Character.isUnicodeIdentifierPart(in.ch)) {
putChar(in.ch)
- in.next
+ in.next()
} else {
setName()
token = JavaScannerConfiguration.name2token(name)
@@ -698,17 +644,17 @@ trait JavaScanners extends ast.parser.ScannersCommon {
*/
protected def getlitch() =
if (in.ch == '\\') {
- in.next
+ in.next()
if ('0' <= in.ch && in.ch <= '7') {
val leadch: Char = in.ch
var oct: Int = digit2int(in.ch, 8)
- in.next
+ in.next()
if ('0' <= in.ch && in.ch <= '7') {
oct = oct * 8 + digit2int(in.ch, 8)
- in.next
+ in.next()
if (leadch <= '3' && '0' <= in.ch && in.ch <= '7') {
oct = oct * 8 + digit2int(in.ch, 8)
- in.next
+ in.next()
}
}
putChar(oct.asInstanceOf[Char])
@@ -726,11 +672,11 @@ trait JavaScanners extends ast.parser.ScannersCommon {
syntaxError(in.cpos - 1, "invalid escape character")
putChar(in.ch)
}
- in.next
+ in.next()
}
} else {
putChar(in.ch)
- in.next
+ in.next()
}
/** read fractional part and exponent of floating point number
@@ -740,35 +686,35 @@ trait JavaScanners extends ast.parser.ScannersCommon {
token = DOUBLELIT
while ('0' <= in.ch && in.ch <= '9') {
putChar(in.ch)
- in.next
+ in.next()
}
if (in.ch == 'e' || in.ch == 'E') {
val lookahead = in.copy
- lookahead.next
+ lookahead.next()
if (lookahead.ch == '+' || lookahead.ch == '-') {
- lookahead.next
+ lookahead.next()
}
if ('0' <= lookahead.ch && lookahead.ch <= '9') {
putChar(in.ch)
- in.next
+ in.next()
if (in.ch == '+' || in.ch == '-') {
putChar(in.ch)
- in.next
+ in.next()
}
while ('0' <= in.ch && in.ch <= '9') {
putChar(in.ch)
- in.next
+ in.next()
}
}
token = DOUBLELIT
}
if (in.ch == 'd' || in.ch == 'D') {
putChar(in.ch)
- in.next
+ in.next()
token = DOUBLELIT
} else if (in.ch == 'f' || in.ch == 'F') {
putChar(in.ch)
- in.next
+ in.next()
token = FLOATLIT
}
setName()
@@ -778,7 +724,7 @@ trait JavaScanners extends ast.parser.ScannersCommon {
*/
def intVal(negated: Boolean): Long = {
if (token == CHARLIT && !negated) {
- if (name.length > 0) name.charAt(0) else 0
+ if (name.length > 0) name.charAt(0).toLong else 0
} else {
var value: Long = 0
val divider = if (base == 10) 1 else 2
@@ -828,23 +774,23 @@ trait JavaScanners extends ast.parser.ScannersCommon {
protected def getNumber() {
while (digit2int(in.ch, if (base < 10) 10 else base) >= 0) {
putChar(in.ch)
- in.next
+ in.next()
}
token = INTLIT
if (base <= 10 && in.ch == '.') {
val lookahead = in.copy
- lookahead.next
+ lookahead.next()
lookahead.ch match {
case '0' | '1' | '2' | '3' | '4' | '5' | '6' | '7' |
'8' | '9' | 'd' | 'D' | 'e' | 'E' | 'f' | 'F' =>
putChar(in.ch)
- in.next
- return getFraction
+ in.next()
+ return getFraction()
case _ =>
if (!isIdentifierStart(lookahead.ch)) {
putChar(in.ch)
- in.next
- return getFraction
+ in.next()
+ return getFraction()
}
}
}
@@ -852,11 +798,11 @@ trait JavaScanners extends ast.parser.ScannersCommon {
(in.ch == 'e' || in.ch == 'E' ||
in.ch == 'f' || in.ch == 'F' ||
in.ch == 'd' || in.ch == 'D')) {
- return getFraction
+ return getFraction()
}
setName()
if (in.ch == 'l' || in.ch == 'L') {
- in.next
+ in.next()
token = LONGLIT
}
}
@@ -868,7 +814,6 @@ trait JavaScanners extends ast.parser.ScannersCommon {
def syntaxError(pos: Int, msg: String) {
error(pos, msg)
token = ERROR
- errpos = pos
}
/** generate an error at the current token position
@@ -879,7 +824,6 @@ trait JavaScanners extends ast.parser.ScannersCommon {
def incompleteInputError(msg: String) {
incompleteInputError(pos, msg)
token = EOF
- errpos = pos
}
override def toString() = token match {
@@ -908,21 +852,17 @@ trait JavaScanners extends ast.parser.ScannersCommon {
/** INIT: read lookahead character and token.
*/
def init() {
- in.next
- nextToken
+ in.next()
+ nextToken()
}
}
- /** ...
- */
class JavaUnitScanner(unit: CompilationUnit) extends JavaScanner {
in = new JavaCharArrayReader(unit.source.content, !settings.nouescape.value, syntaxError)
- init
- def warning(pos: Int, msg: String) = unit.warning(pos, msg)
+ init()
def error (pos: Int, msg: String) = unit. error(pos, msg)
def incompleteInputError(pos: Int, msg: String) = unit.incompleteInputError(pos, msg)
def deprecationWarning(pos: Int, msg: String) = unit.deprecationWarning(pos, msg)
- implicit def p2g(pos: Position): Int = if (pos.isDefined) pos.point else -1
implicit def g2p(pos: Int): Position = new OffsetPosition(unit.source, pos)
}
}
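skipComment above is rewritten with local @tailrec helpers so the compiler verifies the loops stay in tail position. The same structure over a plain String index (standalone, hypothetical names):

    import scala.annotation.tailrec

    object SkipComments {
      // Skip to the end of a // comment: stop at CR, LF, or end of input.
      def skipLineComment(s: String, i: Int): Int = {
        @tailrec def loop(j: Int): Int =
          if (j >= s.length || s(j) == '\n' || s(j) == '\r') j else loop(j + 1)
        loop(i)
      }

      // Skip past the closing */ of a block comment; return s.length if unclosed.
      def skipBlockComment(s: String, i: Int): Int = {
        @tailrec def loop(j: Int): Int =
          if (j + 1 >= s.length) s.length
          else if (s(j) == '*' && s(j + 1) == '/') j + 2
          else loop(j + 1)
        loop(i)
      }
    }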
diff --git a/src/compiler/scala/tools/nsc/javac/JavaTokens.scala b/src/compiler/scala/tools/nsc/javac/JavaTokens.scala
index a562de291d..953a3c6d82 100644
--- a/src/compiler/scala/tools/nsc/javac/JavaTokens.scala
+++ b/src/compiler/scala/tools/nsc/javac/JavaTokens.scala
@@ -68,9 +68,6 @@ object JavaTokens extends ast.parser.Tokens {
final val VOLATILE = 68
final val WHILE = 69
- def isKeyword(code : Int) =
- code >= ABSTRACT && code <= WHILE
-
/** special symbols */
final val COMMA = 70
final val SEMI = 71
@@ -115,9 +112,6 @@ object JavaTokens extends ast.parser.Tokens {
final val GTGTEQ = 113
final val GTGTGTEQ = 114
- def isSymbol(code : Int) =
- code >= COMMA && code <= GTGTGTEQ
-
/** parenthesis */
final val LPAREN = 115
final val RPAREN = 116
diff --git a/src/compiler/scala/tools/nsc/matching/MatchSupport.scala b/src/compiler/scala/tools/nsc/matching/MatchSupport.scala
deleted file mode 100644
index 5ca9fd5062..0000000000
--- a/src/compiler/scala/tools/nsc/matching/MatchSupport.scala
+++ /dev/null
@@ -1,138 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * Author: Paul Phillips
- */
-
-package scala.tools.nsc
-package matching
-
-import transform.ExplicitOuter
-import ast.{ Printers, Trees }
-import java.io.{ StringWriter, PrintWriter }
-import scala.annotation.elidable
-import scala.language.postfixOps
-
-/** Ancillary bits of ParallelMatching which are better off
- * out of the way.
- */
-trait MatchSupport extends ast.TreeDSL { self: ParallelMatching =>
-
- import global.{ typer => _, _ }
- import CODE._
-
- /** Debugging support: enable with -Ypmat-debug **/
- private final def trace = settings.Ypmatdebug.value
-
- def impossible: Nothing = abort("this never happens")
-
- def treeCollect[T](tree: Tree, pf: PartialFunction[Tree, T]): List[T] =
- tree filter (pf isDefinedAt _) map (x => pf(x))
-
- object Types {
- import definitions._
-
- val subrangeTypes = Set[Symbol](ByteClass, ShortClass, CharClass, IntClass)
-
- implicit class RichType(undecodedTpe: Type) {
- def tpe = decodedEqualsType(undecodedTpe)
- def isAnyRef = tpe <:< AnyRefClass.tpe
-
- // These tests for final classes can inspect the typeSymbol
- private def is(s: Symbol) = tpe.typeSymbol eq s
- def isByte = is(ByteClass)
- def isShort = is(ShortClass)
- def isInt = is(IntClass)
- def isChar = is(CharClass)
- def isBoolean = is(BooleanClass)
- def isNothing = is(NothingClass)
- def isArray = is(ArrayClass)
- }
- }
-
- object Debug {
- def typeToString(t: Type): String = t match {
- case NoType => "x"
- case x => x.toString
- }
- def symbolToString(s: Symbol): String = s match {
- case x => x.toString
- }
- def treeToString(t: Tree): String = treeInfo.unbind(t) match {
- case EmptyTree => "?"
- case WILD() => "_"
- case Literal(Constant(x)) => "LIT(%s)".format(x)
- case Apply(fn, args) => "%s(%s)".format(treeToString(fn), args map treeToString mkString ",")
- case Typed(expr, tpt) => "%s: %s".format(treeToString(expr), treeToString(tpt))
- case x => x.toString + " (" + x.getClass + ")"
- }
-
- // Formatting for some error messages
- private val NPAD = 15
- def pad(s: String): String = "%%%ds" format (NPAD-1) format s
- def pad(s: Any): String = pad(s match {
- case x: Tree => treeToString(x)
- case x => x.toString
- })
-
- // pretty print for debugging
- def pp(x: Any): String = pp(x, false)
- def pp(x: Any, newlines: Boolean): String = {
- val stripStrings = List("""java\.lang\.""", """\$iw\.""")
-
- def clean(s: String): String =
- stripStrings.foldLeft(s)((s, x) => s.replaceAll(x, ""))
-
- def pplist(xs: List[Any]): String =
- if (newlines) (xs map (" " + _ + "\n")).mkString("\n", "", "")
- else xs.mkString("(", ", ", ")")
-
- pp(x match {
- case s: String => return clean(s)
- case x: Tree => asCompactString(x)
- case xs: List[_] => pplist(xs map pp)
- case x: Tuple2[_,_] => "%s -> %s".format(pp(x._1), pp(x._2))
- case x => x.toString
- })
- }
-
- @elidable(elidable.FINE) def TRACE(f: String, xs: Any*): Unit = {
- if (trace) {
- val msg = if (xs.isEmpty) f else f.format(xs map pp: _*)
- println(msg)
- }
- }
- @elidable(elidable.FINE) def traceCategory(cat: String, f: String, xs: Any*) = {
- if (trace)
- TRACE("[" + """%10s""".format(cat) + "] " + f, xs: _*)
- }
- def tracing[T](s: String)(x: T): T = {
- if (trace)
- println(("[" + """%10s""".format(s) + "] %s") format pp(x))
-
- x
- }
- private[nsc] def printing[T](fmt: String, xs: Any*)(x: T): T = {
- println(fmt.format(xs: _*) + " == " + x)
- x
- }
- private[nsc] def debugging[T](fmt: String, xs: Any*)(x: T): T = {
- if (settings.debug.value) printing(fmt, xs: _*)(x)
- else x
- }
-
- def indent(s: Any) = s.toString() split "\n" map (" " + _) mkString "\n"
- def indentAll(s: Seq[Any]) = s map (" " + _.toString() + "\n") mkString
- }
-
- /** Drops the 'i'th element of a list.
- */
- def dropIndex[T](xs: List[T], n: Int) = {
- val (l1, l2) = xs splitAt n
- l1 ::: (l2 drop 1)
- }
-
- /** Extract the nth element of a list and return it and the remainder.
- */
- def extractIndex[T](xs: List[T], n: Int): (T, List[T]) =
- (xs(n), dropIndex(xs, n))
-}
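The dropIndex/extractIndex helpers at the tail of the deleted MatchSupport.scala are the list surgery the matrix code relies on; here is a standalone restatement with example results (the enclosing object name is made up for illustration).

object IndexOpsSketch {
  /** Drops the n'th element of a list. */
  def dropIndex[T](xs: List[T], n: Int): List[T] = {
    val (l1, l2) = xs splitAt n
    l1 ::: (l2 drop 1)
  }
  /** Extracts the n'th element and returns it with the remainder. */
  def extractIndex[T](xs: List[T], n: Int): (T, List[T]) =
    (xs(n), dropIndex(xs, n))

  // dropIndex(List('a', 'b', 'c', 'd'), 2)    == List('a', 'b', 'd')
  // extractIndex(List('a', 'b', 'c', 'd'), 2) == ('c', List('a', 'b', 'd'))
}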
diff --git a/src/compiler/scala/tools/nsc/matching/Matrix.scala b/src/compiler/scala/tools/nsc/matching/Matrix.scala
deleted file mode 100644
index daefe4c545..0000000000
--- a/src/compiler/scala/tools/nsc/matching/Matrix.scala
+++ /dev/null
@@ -1,259 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * Author: Paul Phillips
- */
-
-package scala.tools.nsc
-package matching
-
-import transform.ExplicitOuter
-import symtab.Flags
-import scala.collection.mutable
-import scala.language.implicitConversions
-
-trait Matrix extends MatrixAdditions {
- self: ExplicitOuter with ParallelMatching =>
-
- import global.{ typer => _, _ }
- import analyzer.Typer
- import CODE._
- import Debug._
- import Flags.{ SYNTHETIC, MUTABLE }
-
- private[matching] val NO_EXHAUSTIVE = Flags.TRANS_FLAG
-
- /** Translation of match expressions.
- *
- * `p`: pattern
- * `g`: guard
- * `bx`: body index
- *
- * internal representation is (tvars:List[Symbol], rows:List[Row])
- *
- * tmp1 tmp_n
- * Row( p_11 ... p_1n g_1 b_1 ) + subst
- *
- * Row( p_m1 ... p_mn g_m b_m ) + subst
- *
- * Implementation based on the algorithm described in
- *
- * "A Term Pattern-Match Compiler Inspired by Finite Automata Theory"
- * Mikael Pettersson
- * ftp://ftp.ida.liu.se/pub/labs/pelab/papers/cc92pmc.ps.gz
- *
- * @author Burak Emir
- */
-
- /** "The Mixture Rule"
-
- {v=pat1, pats1 .. } {q1}
- match {.. } {..}
- {v=patn, patsn .. } {qn}
-
- This is the real work-horse of the algorithm. There is some column whose top-most pattern is a
- constructor. (For simplicity, it is depicted above as the left-most column, but any column will do.)
- The goal is to build a test state with the variable v and some outgoing arcs (one for each construc-
- tor and possibly a default arc). For each constructor in the selected column, its arc is defined as
- follows:
-
- Let {i1,...,ij} be the row indices of the patterns in the column that match c. Since the pat-
- terns are viewed as regular expressions, this will be the indices of the patterns that either
- have the same constructor c, or are wildcards.
-
- Let {pat1,...,patj} be the patterns in the column corresponding to the indices computed
- above, and let n be the arity of the constructor c, i.e. the number of sub-patterns it has. For
- each pat i, its n sub-patterns are extracted; if pat i is a wildcard, n wildcards are produced
- instead, each tagged with the right path variable. This results in a pattern matrix with n
- columns and j rows. This matrix is then appended to the result of selecting, from each col-
- umn in the rest of the original matrix, those rows whose indices are in {i1,...,ij}. Finally
- the indices are used to select the corresponding final states that go with these rows. Note
- that the order of the indices is significant; selected rows do not change their relative orders.
- The arc for the constructor c is now defined as (c’, state), where c’ is c with any
- immediate sub-patterns replaced by their path variables (thus c’ is a simple pattern), and
- state is the result of recursively applying match to the new matrix and the new sequence
- of final states.
-
- Finally, the possibility for matching failure is considered. If the set of constructors is exhaustive,
- then no more arcs are computed. Otherwise, a default arc (_, state) is the last arc. If there are
- any wildcard patterns in the selected column, then their rows are selected from the rest of the
- matrix and the final states, and the state is the result of applying match to the new matrix and
- states. Otherwise, the error state is used after its reference count has been incremented.
- **/
-
- /** Handles all translation of pattern matching.
- */
- def handlePattern(
- selector: Tree, // tree being matched upon (called scrutinee after this)
- cases: List[CaseDef], // list of cases in the match
- isChecked: Boolean, // whether exhaustiveness checking is enabled (disabled with @unchecked)
- context: MatrixContext): Tree =
- {
- import context._
- TRACE("handlePattern", "(%s: %s) match { %s cases }", selector, selector.tpe, cases.size)
-
- val matrixInit: MatrixInit = {
- val v = copyVar(selector, isChecked, selector.tpe, "temp")
- MatrixInit(List(v), cases, atPos(selector.pos)(MATCHERROR(v.ident)))
- }
- val matrix = new MatchMatrix(context) { lazy val data = matrixInit }
- val mch = typer typed matrix.expansion.toTree
- val dfatree = typer typed Block(matrix.data.valDefs, mch)
-
- // redundancy check
- matrix.targets filter (_.unreached) foreach (cs => cunit.error(cs.body.pos, "unreachable code"))
- // optimize performs squeezing and resets any remaining NO_EXHAUSTIVE
- tracing("handlePattern")(matrix optimize dfatree)
- }
-
- case class MatrixContext(
- cunit: CompilationUnit, // current unit
- handleOuter: Tree => Tree, // for outer pointer
- typer: Typer, // a local typer
- owner: Symbol, // the current owner
- matchResultType: Type) // the expected result type of the whole match
- extends Squeezer
- {
- private def ifNull[T](x: T, alt: T) = if (x == null) alt else x
-
- // NO_EXHAUSTIVE communicates there should be no exhaustiveness checking
- private def flags(checked: Boolean) = if (checked) Nil else List(NO_EXHAUSTIVE)
-
- // Recording the symbols of the synthetics we create so we don't go clearing
- // anyone else's mutable flags.
- private val _syntheticSyms = mutable.HashSet[Symbol]()
- def clearSyntheticSyms() = {
- _syntheticSyms foreach (_ resetFlag (NO_EXHAUSTIVE|MUTABLE))
- debuglog("Cleared NO_EXHAUSTIVE/MUTABLE on " + _syntheticSyms.size + " synthetic symbols.")
- _syntheticSyms.clear()
- }
- def recordSyntheticSym(sym: Symbol): Symbol = {
- _syntheticSyms += sym
- if (_syntheticSyms.size > 25000) {
- cunit.error(owner.pos, "Sanity check failed: over 25000 symbols created for pattern match.")
- abort("This is a bug in the pattern matcher.")
- }
- sym
- }
-
- case class MatrixInit(
- roots: List[PatternVar],
- cases: List[CaseDef],
- default: Tree
- ) {
- def tvars = roots map (_.lhs)
- def valDefs = roots map (_.valDef)
- override def toString() = "MatrixInit(roots = %s, %d cases)".format(pp(roots), cases.size)
- }
-
- implicit def pvlist2pvgroup(xs: List[PatternVar]): PatternVarGroup =
- PatternVarGroup(xs)
-
- object PatternVarGroup {
- def apply(xs: PatternVar*) = new PatternVarGroup(xs.toList)
- def apply(xs: List[PatternVar]) = new PatternVarGroup(xs)
-
- // XXX - transitional
- def fromBindings(vlist: List[Binding], freeVars: List[Symbol] = Nil) = {
- def vmap(v: Symbol): Option[Binding] = vlist find (_.pvar eq v)
- val info =
- if (freeVars.isEmpty) vlist
- else (freeVars map vmap).flatten
-
- val xs =
- for (Binding(lhs, rhs) <- info) yield
- new PatternVar(lhs, Ident(rhs) setType lhs.tpe, !(rhs hasFlag NO_EXHAUSTIVE))
-
- new PatternVarGroup(xs)
- }
- }
-
- val emptyPatternVarGroup = PatternVarGroup()
- class PatternVarGroup(val pvs: List[PatternVar]) {
- def syms = pvs map (_.sym)
- def valDefs = pvs map (_.valDef)
- def idents = pvs map (_.ident)
-
- def extractIndex(index: Int): (PatternVar, PatternVarGroup) = {
- val (t, ts) = self.extractIndex(pvs, index)
- (t, PatternVarGroup(ts))
- }
-
- def isEmpty = pvs.isEmpty
- def size = pvs.size
- def head = pvs.head
- def ::(t: PatternVar) = PatternVarGroup(t :: pvs)
- def :::(ts: List[PatternVar]) = PatternVarGroup(ts ::: pvs)
- def ++(other: PatternVarGroup) = PatternVarGroup(pvs ::: other.pvs)
-
- def apply(i: Int) = pvs(i)
- def zipWithIndex = pvs.zipWithIndex
- def indices = pvs.indices
- def map[T](f: PatternVar => T) = pvs map f
- def filter(p: PatternVar => Boolean) = PatternVarGroup(pvs filter p)
-
- override def toString() = pp(pvs)
- }
-
- /** Every temporary variable allocated is put in a PatternVar.
- */
- class PatternVar(val lhs: Symbol, val rhs: Tree, val checked: Boolean) {
- def sym = lhs
- def tpe = lhs.tpe
- if (checked)
- lhs resetFlag NO_EXHAUSTIVE
- else
- lhs setFlag NO_EXHAUSTIVE
-
- // See #1427 for an example of a crash which occurs unless we retype:
- // in that instance there is an existential in the pattern.
- lazy val ident = typer typed Ident(lhs)
- lazy val valDef = typer typedValDef ValDef(lhs, rhs)
-
- override def toString() = "%s: %s = %s".format(lhs, tpe, rhs)
- }
-
- /** Given a tree, creates a new synthetic variable of the same type
- * and assigns the tree to it.
- */
- def copyVar(
- root: Tree,
- checked: Boolean,
- _tpe: Type = null,
- label: String = "temp"): PatternVar =
- {
- val tpe = ifNull(_tpe, root.tpe)
- val name = cunit.freshTermName(label)
- val sym = newVar(root.pos, tpe, flags(checked), name)
-
- tracing("copy")(new PatternVar(sym, root, checked))
- }
-
- /** Creates a new synthetic variable of the specified type and
- * assigns the result of f(symbol) to it.
- */
- def createVar(tpe: Type, f: Symbol => Tree, checked: Boolean) = {
- val lhs = newVar(owner.pos, tpe, flags(checked))
- val rhs = f(lhs)
-
- tracing("create")(new PatternVar(lhs, rhs, checked))
- }
- def createLazy(tpe: Type, f: Symbol => Tree, checked: Boolean) = {
- val lhs = newVar(owner.pos, tpe, Flags.LAZY :: flags(checked))
- val rhs = f(lhs)
-
- tracing("createLazy")(new PatternVar(lhs, rhs, checked))
- }
-
- private def newVar(
- pos: Position,
- tpe: Type,
- flags: List[Long] = Nil,
- name: TermName = null): Symbol =
- {
- val n = if (name == null) cunit.freshTermName("temp") else name
- // careful: pos has special meaning
- val flagsLong = (SYNTHETIC.toLong /: flags)(_|_)
- recordSyntheticSym(owner.newVariable(n, pos, flagsLong) setInfo tpe)
- }
- }
-}
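The mixture-rule comment quoted in the deleted Matrix.scala above is dense; here is a toy model of the constructor-arc construction it describes, using plain case classes instead of compiler Trees and Symbols (all names in this sketch are illustrative, not the compiler's).

sealed trait Pat
case object Wild extends Pat
final case class Ctor(name: String, args: List[Pat]) extends Pat

object MixtureRuleSketch {
  // Rows of the arc for constructor `c` (arity `arity`) at column `col`:
  // a row with the same constructor exposes its sub-patterns, a wildcard row
  // expands to `arity` fresh wildcards, and a row with a different
  // constructor drops out of this arc entirely.
  def arcRows(rows: List[List[Pat]], col: Int, c: String, arity: Int): List[List[Pat]] =
    rows.flatMap { row =>
      row(col) match {
        case Ctor(`c`, args) => Some(row.patch(col, args, 1))
        case Wild            => Some(row.patch(col, List.fill(arity)(Wild), 1))
        case _               => None
      }
    }
}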
diff --git a/src/compiler/scala/tools/nsc/matching/MatrixAdditions.scala b/src/compiler/scala/tools/nsc/matching/MatrixAdditions.scala
deleted file mode 100644
index 7220253003..0000000000
--- a/src/compiler/scala/tools/nsc/matching/MatrixAdditions.scala
+++ /dev/null
@@ -1,193 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * Author: Paul Phillips
- */
-
-package scala.tools.nsc
-package matching
-
-import transform.ExplicitOuter
-import PartialFunction._
-
-/** Traits which are mixed into MatchMatrix, but separated out as
- * (somewhat) independent components to keep them on the sidelines.
- */
-trait MatrixAdditions extends ast.TreeDSL {
- self: ExplicitOuter with ParallelMatching =>
-
- import global.{ typer => _, _ }
- import symtab.Flags
- import CODE._
- import Debug._
- import treeInfo._
- import definitions.{ isPrimitiveValueClass }
-
- /** The Squeezer, responsible for all the squeezing.
- */
- private[matching] trait Squeezer {
- self: MatrixContext =>
-
- private val settings_squeeze = !settings.Ynosqueeze.value
-
- class RefTraverser(vd: ValDef) extends Traverser {
- private val targetSymbol = vd.symbol
- private var safeRefs = 0
- private var isSafe = true
-
- def canDrop = isSafe && safeRefs == 0
- def canInline = isSafe && safeRefs == 1
-
- override def traverse(tree: Tree): Unit = tree match {
- case t: Ident if t.symbol eq targetSymbol =>
- // target symbol's owner should match currentOwner
- if (targetSymbol.owner == currentOwner) safeRefs += 1
- else isSafe = false
-
- case LabelDef(_, params, rhs) =>
- if (params exists (_.symbol eq targetSymbol)) // cannot substitute this one
- isSafe = false
-
- traverse(rhs)
- case _ if safeRefs > 1 => ()
- case _ =>
- super.traverse(tree)
- }
- }
-
- /** Compresses multiple Blocks. */
- private def combineBlocks(stats: List[Tree], expr: Tree): Tree = expr match {
- case Block(stats1, expr1) if stats.isEmpty => combineBlocks(stats1, expr1)
- case _ => Block(stats, expr)
- }
- def squeezedBlock(vds: List[Tree], exp: Tree): Tree =
- if (settings_squeeze) combineBlocks(Nil, squeezedBlock1(vds, exp))
- else combineBlocks(vds, exp)
-
- private def squeezedBlock1(vds: List[Tree], exp: Tree): Tree = {
- lazy val squeezedTail = squeezedBlock(vds.tail, exp)
- def default = squeezedTail match {
- case Block(vds2, exp2) => Block(vds.head :: vds2, exp2)
- case exp2 => Block(vds.head :: Nil, exp2)
- }
-
- if (vds.isEmpty) exp
- else vds.head match {
- case vd: ValDef =>
- val rt = new RefTraverser(vd)
- rt.atOwner(owner)(rt traverse squeezedTail)
-
- if (rt.canDrop)
- squeezedTail
- else if (isConstantType(vd.symbol.tpe) || rt.canInline)
- new TreeSubstituter(List(vd.symbol), List(vd.rhs)) transform squeezedTail
- else
- default
- case _ => default
- }
- }
- }
-
- /** The Optimizer, responsible for some of the optimizing.
- */
- private[matching] trait MatchMatrixOptimizer {
- self: MatchMatrix =>
-
- import self.context._
-
- final def optimize(tree: Tree): Tree = {
- // Uses treeInfo extractors rather than looking at trees directly
- // because the many Blocks obscure our vision.
- object lxtt extends Transformer {
- override def transform(tree: Tree): Tree = tree match {
- case Block(stats, ld @ LabelDef(_, _, body)) if targets exists (_ shouldInline ld.symbol) =>
- squeezedBlock(transformStats(stats, currentOwner), body)
- case IsIf(cond, IsTrue(), IsFalse()) =>
- transform(cond)
- case IsIf(cond1, IsIf(cond2, thenp, elsep1), elsep2) if elsep1 equalsStructure elsep2 =>
- transform(typer typed If(gen.mkAnd(cond1, cond2), thenp, elsep2))
- case If(cond1, IsIf(cond2, thenp, Apply(jmp, Nil)), ld: LabelDef) if jmp.symbol eq ld.symbol =>
- transform(typer typed If(gen.mkAnd(cond1, cond2), thenp, ld))
- case _ =>
- super.transform(tree)
- }
- }
- try lxtt transform tree
- finally clearSyntheticSyms()
- }
- }
-
- /** The Exhauster.
- */
- private[matching] trait MatrixExhaustiveness {
- self: MatchMatrix =>
-
- import self.context._
-
- /** Exhaustiveness checking requires looking for sealed classes
- * and if found, making sure all children are covered by a pattern.
- */
- class ExhaustivenessChecker(rep: Rep, matchPos: Position) {
- val Rep(tvars, rows) = rep
-
- import Flags.{ MUTABLE, ABSTRACT, SEALED }
-
- private case class Combo(index: Int, sym: Symbol) { }
-
- /* True if the patterns in 'row' cover the given type symbol combination, and the row has no guard. */
- private def rowCoversCombo(row: Row, combos: List[Combo]) =
- row.guard.isEmpty && combos.forall(c => row.pats(c.index) covers c.sym)
-
- private def requiresExhaustive(sym: Symbol) = {
- (sym.isMutable) && // indicates that we have not yet checked exhaustivity
- !(sym hasFlag NO_EXHAUSTIVE) && // indicates @unchecked
- (sym.tpe.typeSymbol.isSealed) &&
- !isPrimitiveValueClass(sym.tpe.typeSymbol) // make sure it's not a primitive, else (5: Byte) match { case 5 => ... } sees no Byte
- }
-
- private lazy val inexhaustives: List[List[Combo]] = {
- // let's please not get too clever side-effecting the mutable flag.
- val toCollect = tvars.zipWithIndex filter { case (pv, i) => requiresExhaustive(pv.sym) }
- val collected = toCollect map { case (pv, i) =>
- // okay, now reset the flag
- pv.sym resetFlag MUTABLE
-
- i -> (
- pv.tpe.typeSymbol.sealedDescendants.toList sortBy (_.sealedSortName)
- // symbols which are both sealed and abstract need not be covered themselves, because
- // all of their children must be and they cannot otherwise be created.
- filterNot (x => x.isSealed && x.isAbstractClass && !isPrimitiveValueClass(x))
- // have to filter out children which cannot match: see ticket #3683 for an example
- filter (_.tpe matchesPattern pv.tpe)
- )
- }
-
- val folded =
- collected.foldRight(List[List[Combo]]())((c, xs) => {
- val (i, syms) = c match { case (i, set) => (i, set.toList) }
- xs match {
- case Nil => syms map (s => List(Combo(i, s)))
- case _ => for (s <- syms ; rest <- xs) yield Combo(i, s) :: rest
- }
- })
-
- folded filterNot (combo => rows exists (r => rowCoversCombo(r, combo)))
- }
-
- private def mkPad(xs: List[Combo], i: Int): String = xs match {
- case Nil => pad("*")
- case Combo(j, sym) :: rest => if (j == i) pad(sym.name.toString) else mkPad(rest, i)
- }
- private def mkMissingStr(open: List[Combo]) =
- "missing combination %s\n" format tvars.indices.map(mkPad(open, _)).mkString
-
- /** The only public method. */
- def check = {
- def errMsg = (inexhaustives map mkMissingStr).mkString
- if (inexhaustives.nonEmpty)
- cunit.warning(matchPos, "match is not exhaustive!\n" + errMsg)
-
- rep
- }
- }
- }
-}
\ No newline at end of file
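The ExhaustivenessChecker in the deleted MatrixAdditions.scala enumerates the cartesian product of sealed children per checked column and keeps the combinations that no row covers. A small sketch of that core computation, with Strings standing in for Symbols (names and signatures are illustrative only):

object ExhaustivenessSketch {
  // sealedChildren(i): candidate constructors for checked column i
  // rowCoverage(j)(i): constructors covered by row j's pattern in column i (a wildcard covers all)
  def missingCombos(sealedChildren: List[List[String]],
                    rowCoverage: List[List[Set[String]]]): List[List[String]] = {
    val combos = sealedChildren.foldRight(List(List.empty[String])) { (syms, rest) =>
      for (s <- syms; r <- rest) yield s :: r
    }
    combos filterNot { combo =>
      rowCoverage exists (row => combo.zip(row) forall { case (s, covered) => covered(s) })
    }
  }
}
// missingCombos(List(List("Nil", "Cons")), List(List(Set("Nil"))))
//   == List(List("Cons"))   // the warning would report the missing Cons case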
diff --git a/src/compiler/scala/tools/nsc/matching/ParallelMatching.scala b/src/compiler/scala/tools/nsc/matching/ParallelMatching.scala
deleted file mode 100644
index dbb9b7a003..0000000000
--- a/src/compiler/scala/tools/nsc/matching/ParallelMatching.scala
+++ /dev/null
@@ -1,870 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * Copyright 2007 Google Inc. All Rights Reserved.
- * Author: bqe@google.com (Burak Emir)
- */
-
-package scala.tools.nsc
-package matching
-
-import PartialFunction._
-import scala.collection.{ mutable }
-import scala.reflect.internal.util.Position
-import transform.ExplicitOuter
-import symtab.Flags
-import mutable.ListBuffer
-import scala.annotation.elidable
-import scala.language.postfixOps
-import scala.tools.nsc.settings.ScalaVersion
-
-trait ParallelMatching extends ast.TreeDSL
- with MatchSupport
- with Matrix
- with Patterns
- with PatternBindings
-{
- self: ExplicitOuter =>
-
- import global.{ typer => _, _ }
- import definitions.{
- AnyRefClass, IntClass, BooleanClass, SomeClass, OptionClass,
- getProductArgs, productProj, Object_eq, Any_asInstanceOf
- }
- import CODE._
- import Types._
- import Debug._
-
- /** Transition **/
- def toPats(xs: List[Tree]): List[Pattern] = xs map Pattern.apply
-
- /** The umbrella matrix class. **/
- abstract class MatchMatrix(val context: MatrixContext) extends MatchMatrixOptimizer with MatrixExhaustiveness {
- import context._
-
- def data: MatrixContext#MatrixInit
-
- lazy val MatrixInit(roots, cases, failTree) = data
- lazy val (rows, targets) = expand(roots, cases).unzip
- lazy val expansion: Rep = make(roots, rows)
-
- private val shortCuts = perRunCaches.newMap[Int, Symbol]()
-
- final def createShortCut(theLabel: Symbol): Int = {
- val key = shortCuts.size + 1
- shortCuts(key) = theLabel
- -key
- }
- def createLabelDef(namePrefix: String, body: Tree, params: List[Symbol] = Nil, restpe: Type = matchResultType) = {
- val labelName = cunit.freshTermName(namePrefix)
- val labelSym = owner.newLabel(labelName, owner.pos)
- val labelInfo = MethodType(params, restpe)
-
- LabelDef(labelSym setInfo labelInfo, params, body setType restpe)
- }
-
- /** This is the recursive focal point for translating the current
- * list of pattern variables and a list of pattern match rows into
- * a tree suitable for entering erasure.
- *
- * The first time it is called, the variables are (copies of) the
- * original pattern matcher roots, and the rows correspond to the
- * original casedefs.
- */
- final def make(roots1: PatternVarGroup, rows1: List[Row]): Rep = {
- traceCategory("New Match", "%sx%s (%s)", roots1.size, rows1.size, roots1.syms.mkString(", "))
- def classifyPat(opat: Pattern, j: Int): Pattern = opat simplify roots1(j)
-
- val newRows = rows1 flatMap (_ expandAlternatives classifyPat)
- if (rows1.length != newRows.length) make(roots1, newRows) // recursive call if any change
- else {
- val rep = Rep(roots1, newRows)
- new ExhaustivenessChecker(rep, roots.head.sym.pos).check
- rep
- }
- }
-
- override def toString() = "MatchMatrix(%s) { %s }".format(matchResultType, indentAll(targets))
-
- /**
- * Encapsulates a symbol being matched on. It is created from a
- * PatternVar, which encapsulates the symbol's creation and assignment.
- *
- * We never match on trees directly - a temporary variable is created
- * (in a PatternVar) for any expression being matched on.
- */
- class Scrutinee(val pv: PatternVar) {
- import definitions._
-
- // presenting a face of our symbol
- def sym = pv.sym
- def tpe = sym.tpe
- def pos = sym.pos
- def id = ID(sym) setPos pos // attributed ident
-
- def accessors = if (isCaseClass) sym.caseFieldAccessors else Nil
- def accessorTypes = accessors map (x => (tpe memberType x).resultType)
-
- lazy val accessorPatternVars = PatternVarGroup(
- for ((accessor, tpe) <- accessors zip accessorTypes) yield
- createVar(tpe, _ => fn(id, accessor))
- )
-
- private def extraValDefs = if (pv.rhs.isEmpty) Nil else List(pv.valDef)
- def allValDefs = extraValDefs ::: accessorPatternVars.valDefs
-
- // tests
- def isDefined = sym ne NoSymbol
- def isSubrangeType = subrangeTypes(tpe.typeSymbol)
- def isCaseClass = tpe.typeSymbol.isCase
-
- // sequences
- def seqType = tpe.widen baseType SeqClass
- def elemType = tpe typeArgs 0
-
- private def elemAt(i: Int) = (id DOT (tpe member nme.apply))(LIT(i))
- private def createElemVar(i: Int) = createVar(elemType, _ => elemAt(i))
- private def createSeqVar(drop: Int) = createVar(seqType, _ => id DROP drop)
-
- def createSequenceVars(count: Int): List[PatternVar] =
- (0 to count).toList map (i => if (i < count) createElemVar(i) else createSeqVar(i))
-
- // for propagating "unchecked" to synthetic vars
- def isChecked = !(sym hasFlag NO_EXHAUSTIVE)
- def flags: List[Long] = List(NO_EXHAUSTIVE) filter (sym hasFlag _)
-
- // this is probably where this actually belongs
- def createVar(tpe: Type, f: Symbol => Tree) = context.createVar(tpe, f, isChecked)
-
- def castedTo(headType: Type) =
- if (tpe =:= headType) this
- else new Scrutinee(createVar(headType, lhs => gen.mkAsInstanceOf(id, lhs.tpe)))
-
- override def toString() = "(%s: %s)".format(id, tpe)
- }
-
- def isPatternSwitch(scrut: Scrutinee, ps: List[Pattern]): Option[PatternSwitch] = {
- def isSwitchableConst(x: Pattern) = cond(x) { case x: LiteralPattern if x.isSwitchable => true }
- def isSwitchableDefault(x: Pattern) = isSwitchableConst(x) || x.isDefault
-
- // TODO - scala> (5: Any) match { case 5 => 5 ; case 6 => 7 }
- // ... should compile to a switch. It doesn't because the scrut isn't Int/Char, but
- // that could be handled in an if/else since every pattern requires an Int.
- // More immediately, Byte and Short scruts should also work.
- if (!scrut.isSubrangeType) None
- else {
- val (_lits, others) = ps span isSwitchableConst
- val lits = _lits collect { case x: LiteralPattern => x }
-
- condOpt(others) {
- case Nil => new PatternSwitch(scrut, lits, None)
- // TODO: This needs to also allow the case that the last is a compatible type pattern.
- case List(x) if isSwitchableDefault(x) => new PatternSwitch(scrut, lits, Some(x))
- }
- }
- }
-
- class PatternSwitch(
- scrut: Scrutinee,
- override val ps: List[LiteralPattern],
- val defaultPattern: Option[Pattern]
- ) extends PatternMatch(scrut, ps) {
- require(scrut.isSubrangeType && (ps forall (_.isSwitchable)))
- }
-
- case class PatternMatch(scrut: Scrutinee, ps: List[Pattern]) {
- def head = ps.head
- def tail = ps.tail
- def size = ps.length
-
- def headType = head.necessaryType
- private val dummyCount = if (head.isCaseClass) headType.typeSymbol.caseFieldAccessors.length else 0
- def dummies = emptyPatterns(dummyCount)
-
- def apply(i: Int): Pattern = ps(i)
- def pzip() = ps.zipWithIndex
- def pzip[T](others: List[T]) = {
- assert(ps.size == others.size, "Internal error: ps = %s, others = %s".format(ps, others))
- ps zip others
- }
-
- // Any unapply - returns Some(true) if a type test is needed before the unapply can
- // be called (e.g. def unapply(x: Foo) = { ... } but our scrutinee is type Any.)
- object AnyUnapply {
- def unapply(x: Pattern): Option[Boolean] = condOpt(x.tree) {
- case UnapplyParamType(tpe) => !(scrut.tpe <:< tpe)
- }
- }
-
- def mkRule(rest: Rep): RuleApplication = {
- tracing("Rule")(head match {
- case x if isEquals(x.tree.tpe) => new MixEquals(this, rest)
- case x: SequencePattern => new MixSequence(this, rest, x)
- case AnyUnapply(false) => new MixUnapply(this, rest)
- case _ =>
- isPatternSwitch(scrut, ps) match {
- case Some(x) => new MixLiteralInts(x, rest)
- case _ => new MixTypes(this, rest)
- }
- })
- }
- override def toString() = "%s match {%s}".format(scrut, indentAll(ps))
- } // PatternMatch
-
- /***** Rule Applications *****/
-
- sealed abstract class RuleApplication {
- def pmatch: PatternMatch
- def rest: Rep
- def cond: Tree
- def success: Tree
- def failure: Tree
-
- lazy val PatternMatch(scrut, patterns) = pmatch
- lazy val head = pmatch.head
- lazy val codegen: Tree = IF (cond) THEN (success) ELSE (failure)
-
- def mkFail(xs: List[Row]): Tree =
- if (xs.isEmpty) failTree
- else remake(xs).toTree
-
- def remake(
- rows: List[Row],
- pvgroup: PatternVarGroup = emptyPatternVarGroup,
- includeScrut: Boolean = true): Rep =
- {
- val scrutpvs = if (includeScrut) List(scrut.pv) else Nil
- make(pvgroup.pvs ::: scrutpvs ::: rest.tvars, rows)
- }
-
- /** translate outcome of the rule application into code (possibly involving recursive application of rewriting) */
- def tree(): Tree
-
- override def toString =
- "Rule/%s (%s =^= %s)".format(getClass.getSimpleName, scrut, head)
- }
-
- /** {case ... if guard => bx} else {guardedRest} */
- /** VariableRule: The top-most row has only variable (non-constructor) patterns. */
- case class VariableRule(subst: Bindings, guard: Tree, guardedRest: Rep, bx: Int) extends RuleApplication {
- def pmatch: PatternMatch = impossible
- def rest: Rep = guardedRest
-
- private lazy val (valDefs, successTree) = targets(bx) applyBindings subst.toMap
- lazy val cond = guard
- lazy val success = successTree
- lazy val failure = guardedRest.toTree
-
- final def tree(): Tree =
- if (bx < 0) REF(shortCuts(-bx))
- else squeezedBlock(
- valDefs,
- if (cond.isEmpty) success else codegen
- )
-
- override def toString = "(case %d) {\n Bindings: %s\n\n if (%s) { %s }\n else { %s }\n}".format(
- bx, subst, guard, success, guardedRest
- )
- }
-
- class MixLiteralInts(val pmatch: PatternSwitch, val rest: Rep) extends RuleApplication {
- val literals = pmatch.ps
- val defaultPattern = pmatch.defaultPattern
-
- private lazy val casted: Tree =
- if (!scrut.tpe.isInt) scrut.id DOT nme.toInt else scrut.id
-
- // creates a row transformer for injecting the default case bindings at a given index
- private def addDefaultVars(index: Int): Row => Row =
- if (defaultVars.isEmpty) identity
- else rebindAll(_, pmatch(index).boundVariables, scrut.sym)
-
- // add bindings for all the given vs to the given tvar
- private def rebindAll(r: Row, vs: Iterable[Symbol], tvar: Symbol) =
- r rebind r.subst.add(vs, tvar)
-
- private def bindVars(Tag: Int, orig: Bindings): Bindings = {
- def myBindVars(rest: List[(Int, List[Symbol])], bnd: Bindings): Bindings = rest match {
- case Nil => bnd
- case (Tag,vs)::xs => myBindVars(xs, bnd.add(vs, scrut.sym))
- case (_, vs)::xs => myBindVars(xs, bnd)
- }
- myBindVars(varMap, orig)
- }
-
- // bound vars and rows for default pattern (only one row, but a list is easier to use later)
- lazy val (defaultVars, defaultRows) = defaultPattern match {
- case None => (Nil, Nil)
- case Some(p) => (p.boundVariables, List(rebindAll(rest rows literals.size, p.boundVariables, scrut.sym)))
- }
-
- // literalMap is a map from each literal to a list of row indices.
- // varMap is a list pairing each literal with the list of its defined vars.
- lazy val (litPairs, varMap) = (
- literals.zipWithIndex map {
- case (lit, index) =>
- val tag = lit.intValue
- (tag -> index, tag -> lit.boundVariables)
- } unzip
- )
- def literalMap = litPairs groupBy (_._1) map {
- case (k, vs) => (k, vs map (_._2))
- }
-
- lazy val cases =
- for ((tag, indices) <- literalMap.toList.sortBy(_._1)) yield {
- val newRows = indices map (i => addDefaultVars(i)(rest rows i))
- val r = remake(newRows ++ defaultRows, includeScrut = false)
- val r2 = make(r.tvars, r.rows map (x => x rebind bindVars(tag, x.subst)))
-
- CASE(Literal(Constant(tag))) ==> r2.toTree
- }
-
- lazy val defaultTree = remake(defaultRows, includeScrut = false).toTree
- def defaultCase = CASE(WILD(IntClass.tpe)) ==> defaultTree
-
- // cond/success/failure only used if there is exactly one case.
- lazy val cond = scrut.id MEMBER_== cases.head.pat
- lazy val success = cases.head.body
- lazy val failure = defaultTree
-
- // only one case becomes if/else, otherwise match
- def tree() =
- if (cases.size == 1) codegen
- else casted MATCH (cases :+ defaultCase: _*)
- }
-
- /** mixture rule for unapply pattern
- */
- class MixUnapply(val pmatch: PatternMatch, val rest: Rep) extends RuleApplication {
- val Pattern(UnApply(unMethod, unArgs)) = head
- val Apply(unTarget, _ :: trailing) = unMethod
-
- object SameUnapplyCall {
- def isSame(t: Tree) = isEquivalentTree(unTarget, t)
- def unapply(x: Pattern) = /*tracing("SameUnapplyCall (%s vs. %s)".format(unTarget, x))*/(x match {
- case Pattern(UnApply(Apply(fn, _), args)) if isSame(fn) => Some(args)
- case _ => None
- })
- }
- object SameUnapplyPattern {
- def isSame(t: Tree) = isEquivalentTree(unMethod, t)
- def apply(x: Pattern) = unapply(x).isDefined
- def unapply(x: Pattern) = /*tracing("SameUnapplyPattern (%s vs. %s)".format(unMethod, x))*/(x match {
- case Pattern(UnApply(t, _)) if isSame(t) => Some(unArgs)
- case _ => None
- })
- }
-
- private lazy val zipped = pmatch pzip rest.rows
-
- lazy val unapplyResult: PatternVar =
- scrut.createVar(unMethod.tpe, Apply(unTarget, scrut.id :: trailing) setType _.tpe)
-
- lazy val cond: Tree = unapplyResult.tpe.normalize match {
- case TypeRef(_, BooleanClass, _) => unapplyResult.ident
- case TypeRef(_, SomeClass, _) => TRUE
- case _ => NOT(unapplyResult.ident DOT nme.isEmpty)
- }
-
- lazy val failure =
- mkFail(zipped.tail filterNot (x => SameUnapplyPattern(x._1)) map { case (pat, r) => r insert pat })
-
- private def doSuccess: (List[PatternVar], List[PatternVar], List[Row]) = {
- // pattern variable for the unapply result of Some(x).get
- def unMethodTypeArg = unMethod.tpe.baseType(OptionClass).typeArgs match {
- case Nil => log("No type argument for unapply result! " + unMethod.tpe) ; NoType
- case arg :: _ => arg
- }
- lazy val pv = scrut.createVar(unMethodTypeArg, _ => fn(ID(unapplyResult.lhs), nme.get))
- def tuple = pv.lhs
-
- // at this point it's Some[T1,T2...]
- lazy val tpes = getProductArgs(tuple.tpe)
-
- // one pattern variable per tuple element
- lazy val tuplePVs =
- for ((tpe, i) <- tpes.zipWithIndex) yield
- scrut.createVar(tpe, _ => fn(ID(tuple), productProj(tuple, i + 1)))
-
- // the filter prevents infinite unapply recursion
- def mkNewRows(sameFilter: (List[Tree]) => List[Tree]) = {
- val dum = if (unArgs.length <= 1) unArgs.length else tpes.size
- for ((pat, r) <- zipped) yield pat match {
- case SameUnapplyCall(xs) => r.insert2(toPats(sameFilter(xs)) :+ NoPattern, pat.boundVariables, scrut.sym)
- case _ => r insert (emptyPatterns(dum) :+ pat)
- }
- }
-
- // 0 is Boolean, 1 is Option[T], 2+ is Option[(T1,T2,...)]
- unArgs.length match {
- case 0 => (Nil, Nil, mkNewRows((xs) => Nil))
- case 1 => (List(pv), List(pv), mkNewRows(xs => List(xs.head)))
- case _ => (pv :: tuplePVs, tuplePVs, mkNewRows(identity))
- }
- }
-
- lazy val success = {
- val (squeezePVs, pvs, rows) = doSuccess
- val srep = remake(rows, pvs).toTree
-
- squeezedBlock(squeezePVs map (_.valDef), srep)
- }
-
- final def tree() =
- squeezedBlock(List(handleOuter(unapplyResult.valDef)), codegen)
- }
-
- /** Handle Sequence patterns (including Star patterns.)
- * Note: pivot == head, just better typed.
- */
- sealed class MixSequence(val pmatch: PatternMatch, val rest: Rep, pivot: SequencePattern) extends RuleApplication {
- require(scrut.tpe <:< head.tpe)
-
- def hasStar = pivot.hasStar
- private def pivotLen = pivot.nonStarLength
- private def seqDummies = emptyPatterns(pivot.elems.length + 1)
-
- // Should the given pattern join the expanded pivot in the success matrix? If so,
- // this partial function will be defined for the pattern, and the result of the apply
- // is the expanded sequence of new patterns.
- lazy val successMatrixFn = new PartialFunction[Pattern, List[Pattern]] {
- private def seqIsDefinedAt(x: SequenceLikePattern) = (hasStar, x.hasStar) match {
- case (true, true) => true
- case (true, false) => pivotLen <= x.nonStarLength
- case (false, true) => pivotLen >= x.nonStarLength
- case (false, false) => pivotLen == x.nonStarLength
- }
-
- def isDefinedAt(pat: Pattern) = pat match {
- case x: SequenceLikePattern => seqIsDefinedAt(x)
- case WildcardPattern() => true
- case _ => false
- }
-
- def apply(pat: Pattern): List[Pattern] = pat match {
- case x: SequenceLikePattern =>
- def isSameLength = pivotLen == x.nonStarLength
- def rebound = x.nonStarPatterns :+ (x.elemPatterns.last rebindTo WILD(scrut.seqType))
-
- (pivot.hasStar, x.hasStar, isSameLength) match {
- case (true, true, true) => rebound :+ NoPattern
- case (true, true, false) => (seqDummies drop 1) :+ x
- case (true, false, true) => x.elemPatterns ++ List(NilPattern, NoPattern)
- case (false, true, true) => rebound
- case (false, false, true) => x.elemPatterns :+ NoPattern
- case _ => seqDummies
- }
-
- case _ => seqDummies
- }
- }
-
- // Should the given pattern be in the fail matrix? This is true of any sequences
- // as long as the result of the length test on the pivot doesn't make it impossible:
- // for instance if neither sequence is right ignoring and they are of different
- // lengths, the later one cannot match since its length must be wrong.
- def failureMatrixFn(c: Pattern) = (pivot ne c) && (c match {
- case x: SequenceLikePattern =>
- (hasStar, x.hasStar) match {
- case (_, true) => true
- case (true, false) => pivotLen > x.nonStarLength
- case (false, false) => pivotLen != x.nonStarLength
- }
- case WildcardPattern() => true
- case _ => false
- })
-
- // divide the remaining rows into success/failure branches, expanding subsequences of patterns
- val successRows = pmatch pzip rest.rows collect {
- case (c, row) if successMatrixFn isDefinedAt c => row insert successMatrixFn(c)
- }
- val failRows = pmatch pzip rest.rows collect {
- case (c, row) if failureMatrixFn(c) => row insert c
- }
-
- // the discrimination test for sequences is a call to lengthCompare. Note that
- // this logic must be fully consistent with successMatrixFn and failureMatrixFn above:
- // any inconsistency will (and frequently has) manifested as pattern matcher crashes.
- lazy val cond = {
- // the method call symbol
- val methodOp: Symbol = head.tpe member nme.lengthCompare
-
- // the comparison to perform. If the pivot is right ignoring, then a scrutinee sequence
- // of >= pivot length could match it; otherwise it must be exactly equal.
- val compareOp: (Tree, Tree) => Tree = if (hasStar) _ INT_>= _ else _ INT_== _
-
- // scrutinee.lengthCompare(pivotLength) [== | >=] 0
- val compareFn: Tree => Tree = (t: Tree) => compareOp((t DOT methodOp)(LIT(pivotLen)), ZERO)
-
- // wrapping in a null check on the scrutinee
- // XXX this needs to use the logic in "def condition"
- nullSafe(compareFn, FALSE)(scrut.id)
- // condition(head.tpe, scrut.id, head.boundVariables.nonEmpty)
- }
- lazy val success = {
- // one pattern var per sequence element up to elemCount, and one more for the rest of the sequence
- lazy val pvs = scrut createSequenceVars pivotLen
-
- squeezedBlock(pvs map (_.valDef), remake(successRows, pvs, hasStar).toTree)
- }
- lazy val failure = remake(failRows).toTree
-
- final def tree(): Tree = codegen
- }
-
- class MixEquals(val pmatch: PatternMatch, val rest: Rep) extends RuleApplication {
- private lazy val rhs =
- decodedEqualsType(head.tpe) match {
- case SingleType(pre, sym) => REF(pre, sym)
- case PseudoType(o) => o
- }
- private lazy val labelDef =
- createLabelDef("fail%", remake((rest.rows.tail, pmatch.tail).zipped map (_ insert _)).toTree)
-
- lazy val cond = handleOuter(rhs MEMBER_== scrut.id)
- lazy val successOne = rest.rows.head.insert2(List(NoPattern), head.boundVariables, scrut.sym)
- lazy val successTwo = Row(emptyPatterns(1 + rest.tvars.size), NoBinding, EmptyTree, createShortCut(labelDef.symbol))
- lazy val success = remake(List(successOne, successTwo)).toTree
- lazy val failure = labelDef
-
- final def tree() = codegen
- override def toString() = "MixEquals(%s == %s)".format(scrut, head)
- }
-
- /** Mixture rule for type tests.
- * moreSpecific: more specific patterns
- * subsumed: more general patterns (subsuming current), rows index and subpatterns
- * remaining: remaining, rows index and pattern
- */
- class MixTypes(val pmatch: PatternMatch, val rest: Rep) extends RuleApplication {
- case class Yes(bx: Int, moreSpecific: Pattern, subsumed: List[Pattern])
- case class No(bx: Int, remaining: Pattern)
-
- val (yeses, noes) = {
- val _ys = new ListBuffer[Yes]
- val _ns = new ListBuffer[No]
-
- for ((pattern, j) <- pmatch.pzip()) {
- // scrutinee, head of pattern group
- val (s, p) = (pattern.tpe, head.necessaryType)
-
- def isEquivalent = head.necessaryType =:= pattern.tpe
- def isObjectTest = pattern.isObject && (p =:= pattern.necessaryType)
-
- def sMatchesP = matches(s, p)
- def pMatchesS = matches(p, s)
-
- def ifEquiv(yes: Pattern): Pattern = if (isEquivalent) yes else pattern
-
- def passl(p: Pattern = NoPattern, ps: List[Pattern] = pmatch.dummies) = Some(Yes(j, p, ps))
- def passr() = Some( No(j, pattern))
-
- def typed(pp: Tree) = passl(ifEquiv(Pattern(pp)))
- def subs() = passl(ifEquiv(NoPattern), pattern subpatterns pmatch)
-
- val (oneY, oneN) = pattern match {
- case Pattern(LIT(null)) if !(p =:= s) => (None, passr) // (1)
- case x if isObjectTest => (passl(), None) // (2)
- case Pattern(Typed(pp, _)) if sMatchesP => (typed(pp), None) // (4)
- // The next line used to be this which "fixed" 1697 but introduced
- // numerous regressions including #3136.
- // case Pattern(_: UnApply, _) => (passl(), passr)
- case Pattern(_: UnApply) => (None, passr)
- case x if !x.isDefault && sMatchesP => (subs(), None)
- case x if x.isDefault || pMatchesS => (passl(), passr)
- case _ => (None, passr)
- }
- oneY map (_ys +=)
- oneN map (_ns +=)
- }
- (_ys.toList, _ns.toList)
- }
-
- val moreSpecific = yeses map (_.moreSpecific)
- val subsumed = yeses map (x => (x.bx, x.subsumed))
- val remaining = noes map (x => (x.bx, x.remaining))
-
- private def mkZipped =
- for (Yes(j, moreSpecific, subsumed) <- yeses) yield
- j -> (moreSpecific :: subsumed)
-
- lazy val casted = scrut castedTo pmatch.headType
- lazy val cond = condition(casted.tpe, scrut, head.boundVariables.nonEmpty)
-
- private def isAnyMoreSpecific = yeses exists (x => !x.moreSpecific.isEmpty)
- lazy val (subtests, subtestVars) =
- if (isAnyMoreSpecific) (mkZipped, List(casted.pv))
- else (subsumed, Nil)
-
- lazy val newRows =
- for ((j, ps) <- subtests) yield
- (rest rows j).insert2(ps, pmatch(j).boundVariables, casted.sym)
-
- lazy val success = {
- val srep = remake(newRows, subtestVars ::: casted.accessorPatternVars, includeScrut = false)
- squeezedBlock(casted.allValDefs, srep.toTree)
- }
-
- lazy val failure =
- mkFail(remaining map { case (p1, p2) => rest rows p1 insert p2 })
-
- final def tree(): Tree = codegen
- }
-
- /*** States, Rows, Etc. ***/
-
- case class Row(pats: List[Pattern], subst: Bindings, guard: Tree, bx: Int) {
- private def nobindings = subst.get().isEmpty
- private def bindstr = if (nobindings) "" else pp(subst)
-
- /** Extracts the 'i'th pattern. */
- def extractColumn(i: Int) = {
- val (x, xs) = extractIndex(pats, i)
- (x, copy(pats = xs))
- }
-
- /** Replaces the 'i'th pattern with the argument. */
- def replaceAt(i: Int, p: Pattern) = {
- val newps = (pats take i) ::: p :: (pats drop (i + 1))
- copy(pats = newps)
- }
-
- def insert(h: Pattern) = copy(pats = h :: pats)
- def insert(hs: List[Pattern]) = copy(pats = hs ::: pats) // prepends supplied pattern
- def rebind(b: Bindings) = copy(subst = b) // substitutes for bindings
-
- def insert2(hs: List[Pattern], vs: Iterable[Symbol], tvar: Symbol) =
- tracing("insert2")(copy(pats = hs ::: pats, subst = subst.add(vs, tvar)))
-
- // returns this row with alternatives expanded
- def expandAlternatives(classifyPat: (Pattern, Int) => Pattern): List[Row] = {
- def isNotAlternative(p: Pattern) = !cond(p.tree) { case _: Alternative => true }
-
- // classify all the top level patterns - alternatives come back unaltered
- val newPats: List[Pattern] = pats.zipWithIndex map classifyPat.tupled
- // see if any alternatives were in there
- val (ps, others) = newPats span isNotAlternative
- // make a new row for each alternative, with it spliced into the original position
- if (others.isEmpty) List(copy(pats = ps))
- else extractBindings(others.head) map (x => replaceAt(ps.size, x))
- }
- override def toString() = {
- val bs = if (nobindings) "" else "\n" + bindstr
- "Row(%d)(%s%s)".format(bx, pp(pats), bs)
- }
- }
- abstract class State {
- def bx: Int // index into the list of rows
- def params: List[Symbol] // bound names to be supplied as arguments to labeldef
- def body: Tree // body to execute upon match
- def label: Option[LabelDef] // label definition for this state
-
- // Called with a bindings map when a match is achieved.
- // Returns a list of variable declarations based on the labeldef parameters
- // and the given substitution, and the body to execute.
- protected def applyBindingsImpl(subst: Map[Symbol, Symbol]): (List[ValDef], Tree)
-
- final def applyBindings(subst: Map[Symbol, Symbol]): (List[ValDef], Tree) = {
- _referenceCount += 1
- applyBindingsImpl(subst)
- }
-
- private var _referenceCount = 0
- def referenceCount = _referenceCount
- def unreached = referenceCount == 0
- def shouldInline(sym: Symbol) = referenceCount == 1 && label.exists(_.symbol == sym)
-
- // Creates a simple Ident if the symbol's type conforms to
- // the val definition's type, or a casted Ident if not.
- private def newValIdent(lhs: Symbol, rhs: Symbol) =
- if (rhs.tpe <:< lhs.tpe) Ident(rhs)
- else gen.mkTypeApply(Ident(rhs), Any_asInstanceOf, List(lhs.tpe))
-
- protected def newValDefinition(lhs: Symbol, rhs: Symbol) =
- typer typedValDef ValDef(lhs, newValIdent(lhs, rhs))
-
- protected def newValReference(lhs: Symbol, rhs: Symbol) =
- typer typed newValIdent(lhs, rhs)
-
- protected def valDefsFor(subst: Map[Symbol, Symbol]) = mapSubst(subst)(newValDefinition)
- protected def identsFor(subst: Map[Symbol, Symbol]) = mapSubst(subst)(newValReference)
-
- protected def mapSubst[T](subst: Map[Symbol, Symbol])(f: (Symbol, Symbol) => T): List[T] =
- params flatMap { lhs =>
- subst get lhs map (rhs => f(lhs, rhs)) orElse {
- // This should not happen; the code should be structured so it is
- // impossible, but that still lies ahead.
- cunit.warning(lhs.pos, "No binding")
- None
- }
- }
-
- // typer is not able to digest a body of type Nothing being assigned result type Unit
- protected def caseResultType =
- if (body.tpe.isNothing) body.tpe else matchResultType
- }
-
- case class LiteralState(bx: Int, params: List[Symbol], body: Tree) extends State {
- def label = None
-
- protected def applyBindingsImpl(subst: Map[Symbol, Symbol]) =
- (valDefsFor(subst), body.duplicate setType caseResultType)
- }
-
- case class FinalState(bx: Int, params: List[Symbol], body: Tree) extends State {
- traceCategory("Final State", "(%s) => %s", paramsString, body)
- def label = Some(labelDef)
-
- private lazy val labelDef = createLabelDef("body%" + bx, body, params, caseResultType)
-
- protected def applyBindingsImpl(subst: Map[Symbol, Symbol]) = {
- val tree =
- if (referenceCount > 1) ID(labelDef.symbol) APPLY identsFor(subst)
- else labelDef
-
- (valDefsFor(subst), tree)
- }
-
- private def paramsString = params map (s => s.name + ": " + s.tpe) mkString ", "
- override def toString() = pp("(%s) => %s".format(pp(params), body))
- }
-
- case class Rep(val tvars: PatternVarGroup, val rows: List[Row]) {
- lazy val Row(pats, subst, guard, index) = rows.head
- lazy val guardedRest = if (guard.isEmpty) Rep(Nil, Nil) else make(tvars, rows.tail)
- lazy val (defaults, others) = pats span (_.isDefault)
-
- /** Cut out the column containing the non-default pattern. */
- class Cut(index: Int) {
- /** The first two separate out the 'i'th pattern in each row from the remainder. */
- private val (_column, _rows) = rows map (_ extractColumn index) unzip
-
- /** Now the 'i'th tvar is separated out and used as a new Scrutinee. */
- private val (_pv, _tvars) = tvars extractIndex index
-
- /** The non-default pattern (others.head) replaces the column head. */
- private val (_ncol, _nrep) =
- (others.head :: _column.tail, make(_tvars, _rows))
-
- def mix() = {
- val newScrut = new Scrutinee(new PatternVar(_pv.sym, EmptyTree, _pv.checked))
- PatternMatch(newScrut, _ncol) mkRule _nrep
- }
- }
-
- /** Converts this to a tree - recursively acquires subreps. */
- final def toTree(): Tree = tracing("toTree")(typer typed applyRule())
-
- /** The VariableRule. */
- private def variable() = {
- val binding = (defaults map (_.boundVariables) zip tvars.pvs) .
- foldLeft(subst)((b, pair) => b.add(pair._1, pair._2.lhs))
-
- VariableRule(binding, guard, guardedRest, index)
- }
- /** The MixtureRule: picks a rewrite rule to apply. */
- private def mixture() = new Cut(defaults.size) mix()
-
- /** Applying the rule will result in one of:
- *
- * VariableRule - if all patterns are default patterns
- * MixtureRule - if one or more patterns are not default patterns
- * Error - no rows remaining
- */
- final def applyRule(): Tree =
- if (rows.isEmpty) failTree
- else if (others.isEmpty) variable.tree()
- else mixture.tree()
-
- def ppn(x: Any) = pp(x, newlines = true)
- override def toString() =
- if (tvars.isEmpty) "Rep(%d) = %s".format(rows.size, ppn(rows))
- else "Rep(%dx%d)%s%s".format(tvars.size, rows.size, ppn(tvars), ppn(rows))
- }
-
- /** Expands the patterns recursively. */
- final def expand(roots: List[PatternVar], cases: List[CaseDef]) = tracing("expand") {
- for ((CaseDef(pat, guard, body), bx) <- cases.zipWithIndex) yield {
- val subtrees = pat match {
- case x if roots.length <= 1 => List(x)
- case Apply(_, args) => args
- case WILD() => emptyTrees(roots.length)
- }
- val params = pat filter (_.isInstanceOf[Bind]) map (_.symbol) distinct
- val row = Row(toPats(subtrees), NoBinding, guard, bx)
- val state = body match {
- case x: Literal => LiteralState(bx, params, body)
- case _ => FinalState(bx, params, body)
- }
-
- row -> state
- }
- }
-
- /** returns the condition in "if (cond) k1 else k2"
- */
- final def condition(tpe: Type, scrut: Scrutinee, isBound: Boolean): Tree = {
- assert(scrut.isDefined)
- val cond = handleOuter(condition(tpe, scrut.id, isBound))
-
- if (!needsOuterTest(tpe, scrut.tpe, owner)) cond
- else addOuterCondition(cond, tpe, scrut.id)
- }
-
- final def condition(tpe: Type, scrutTree: Tree, isBound: Boolean): Tree = {
- assert((tpe ne NoType) && (scrutTree.tpe ne NoType))
- def isMatchUnlessNull = scrutTree.tpe <:< tpe && tpe.isAnyRef
- def isRef = scrutTree.tpe.isAnyRef
-
- // See ticket #1503 for the motivation behind checking for a binding.
- // The upshot is that it is unsound to assume equality means the right
- // type, but if the value doesn't appear on the right hand side of the
- // match that's unimportant; so we add an instance check only if there
- // is a binding.
- def bindingWarning() = {
- if (isBound && settings.Xmigration.value < ScalaVersion.twoDotEight) {
- cunit.warning(scrutTree.pos,
- "A bound pattern such as 'x @ Pattern' now matches fewer cases than the same pattern with no binding.")
- }
- }
-
- def genEquals(sym: Symbol): Tree = {
- val t1: Tree = REF(sym) MEMBER_== scrutTree
-
- if (isBound) {
- bindingWarning()
- t1 AND (scrutTree IS tpe.widen)
- }
- else t1
- }
-
- typer typed {
- tpe match {
- case ConstantType(Constant(null)) if isRef => scrutTree OBJ_EQ NULL
- case ConstantType(const) => scrutTree MEMBER_== Literal(const)
- case SingleType(NoPrefix, sym) => genEquals(sym)
- case SingleType(pre, sym) if sym.isStable => genEquals(sym)
- case ThisType(sym) if sym.isModule => genEquals(sym)
- case _ if isMatchUnlessNull => scrutTree OBJ_NE NULL
- case _ => scrutTree IS tpe
- }
- }
- }
-
- /** adds a test comparing the dynamic outer to the static outer */
- final def addOuterCondition(cond: Tree, tpe2test: Type, scrut: Tree) = {
- val TypeRef(prefix, _, _) = tpe2test
- val theRef = handleOuter(prefix match {
- case NoPrefix => abort("assertion failed: NoPrefix")
- case ThisType(clazz) => THIS(clazz)
- case pre => REF(pre.prefix, pre.termSymbol)
- })
- outerAccessor(tpe2test.typeSymbol) match {
- case NoSymbol => ifDebug(cunit.warning(scrut.pos, "no outer acc for " + tpe2test.typeSymbol)) ; cond
- case outerAcc =>
- val casted = gen.mkAsInstanceOf(scrut, tpe2test, any = true, wrapInApply = true)
- cond AND ((casted DOT outerAcc)() OBJ_EQ theRef)
- }
- }
- }
-}
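The MixSequence logic in the deleted ParallelMatching.scala documents its length discrimination in terms of lengthCompare; written as ordinary Scala instead of generated Trees, the test it builds looks roughly like this (a sketch, not the compiler's Tree-building code).

object SeqLengthTestSketch {
  // For a right-ignoring pattern such as Seq(a, b, rest @ _*) a scrutinee of
  // length >= 2 can match; for Seq(a, b) the length must be exactly 2.
  // Both variants are wrapped in a null check on the scrutinee.
  def lengthTest(xs: Seq[Any], nonStarLength: Int, hasStar: Boolean): Boolean =
    if (xs == null) false
    else if (hasStar) xs.lengthCompare(nonStarLength) >= 0
    else xs.lengthCompare(nonStarLength) == 0
}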
diff --git a/src/compiler/scala/tools/nsc/matching/PatternBindings.scala b/src/compiler/scala/tools/nsc/matching/PatternBindings.scala
deleted file mode 100644
index 7b2fcf0e9b..0000000000
--- a/src/compiler/scala/tools/nsc/matching/PatternBindings.scala
+++ /dev/null
@@ -1,137 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * Author: Paul Phillips
- */
-
-package scala.tools.nsc
-package matching
-
-import transform.ExplicitOuter
-import PartialFunction._
-import scala.language.postfixOps
-
-trait PatternBindings extends ast.TreeDSL
-{
- self: ExplicitOuter with ParallelMatching =>
-
- import global.{ typer => _, _ }
- import definitions.{ EqualsPatternClass }
- import CODE._
- import Debug._
-
- /** EqualsPattern **/
- def isEquals(tpe: Type) = tpe.typeSymbol == EqualsPatternClass
- def mkEqualsRef(tpe: Type) = typeRef(NoPrefix, EqualsPatternClass, List(tpe))
- def decodedEqualsType(tpe: Type) =
- if (tpe.typeSymbol == EqualsPatternClass) tpe.typeArgs.head else tpe
-
- // A subtype test which creates fresh existentials for type
- // parameters on the right hand side.
- def matches(arg1: Type, arg2: Type) = decodedEqualsType(arg1) matchesPattern decodedEqualsType(arg2)
-
- // For spotting duplicate unapplies
- def isEquivalentTree(t1: Tree, t2: Tree) = (t1.symbol == t2.symbol) && (t1 equalsStructure t2)
-
- // Reproduce the Bind trees wrapping oldTree around newTree
- def moveBindings(oldTree: Tree, newTree: Tree): Tree = oldTree match {
- case b @ Bind(x, body) => Bind(b.symbol, moveBindings(body, newTree))
- case _ => newTree
- }
-
- // used as argument to `EqualsPatternClass`
- case class PseudoType(o: Tree) extends SimpleTypeProxy {
- override def underlying: Type = o.tpe
- override def safeToString: String = "PseudoType("+o+")"
- }
-
- // If the given pattern contains alternatives, return it as a list of patterns.
- // Makes typed copies of any bindings found so all alternatives point to final state.
- def extractBindings(p: Pattern): List[Pattern] =
- toPats(_extractBindings(p.boundTree, identity))
-
- private def _extractBindings(p: Tree, prevBindings: Tree => Tree): List[Tree] = {
- def newPrev(b: Bind) = (x: Tree) => treeCopy.Bind(b, b.name, x) setType x.tpe
-
- p match {
- case b @ Bind(_, body) => _extractBindings(body, newPrev(b))
- case Alternative(ps) => ps map prevBindings
- }
- }
-
- trait PatternBindingLogic {
- self: Pattern =>
-
- // This is for traversing the pattern tree - pattern types which might have
- // bound variables beneath them return a list of said patterns for flatMapping.
- def subpatternsForVars: List[Pattern] = Nil
-
- // The outermost Bind(x1, Bind(x2, ...)) surrounding the tree.
- private var _boundTree: Tree = tree
- def boundTree = _boundTree
- def setBound(x: Bind): Pattern = {
- _boundTree = x
- this
- }
- def boundVariables = strip(boundTree)
-
- // If a tree has bindings, boundTree looks something like
- // Bind(v3, Bind(v2, Bind(v1, tree)))
- // This takes the given tree and creates a new pattern
- // using the same bindings.
- def rebindTo(t: Tree): Pattern = Pattern(moveBindings(boundTree, t))
-
- // Wrap this pattern's bindings around (_: Type)
- def rebindToType(tpe: Type, ascription: Type = null): Pattern = {
- val aType = if (ascription == null) tpe else ascription
- rebindTo(Typed(WILD(tpe), TypeTree(aType)) setType tpe)
- }
-
- // Wrap them around _
- def rebindToEmpty(tpe: Type): Pattern =
- rebindTo(Typed(EmptyTree, TypeTree(tpe)) setType tpe)
-
- // Wrap them around a singleton type for an EqualsPattern check.
- def rebindToEqualsCheck(): Pattern =
- rebindToType(equalsCheck)
-
- // Like rebindToEqualsCheck, but subtly different. Not trying to be
- // mysterious -- I haven't sorted it all out yet.
- def rebindToObjectCheck(): Pattern =
- rebindToType(mkEqualsRef(sufficientType), sufficientType)
-
- /** Helpers **/
- private def wrapBindings(vs: List[Symbol], pat: Tree): Tree = vs match {
- case Nil => pat
- case x :: xs => Bind(x, wrapBindings(xs, pat)) setType pat.tpe
- }
- private def strip(t: Tree): List[Symbol] = t match {
- case b @ Bind(_, pat) => b.symbol :: strip(pat)
- case _ => Nil
- }
- private def deepstrip(t: Tree): List[Symbol] =
- treeCollect(t, { case x: Bind => x.symbol })
- }
-
- case class Binding(pvar: Symbol, tvar: Symbol) {
- override def toString() = pvar.name + " -> " + tvar.name
- }
-
- class Bindings(private val vlist: List[Binding]) {
- // if (!vlist.isEmpty)
- // traceCategory("Bindings", this.toString)
-
- def get() = vlist
- def toMap = vlist map (x => (x.pvar, x.tvar)) toMap
-
- def add(vs: Iterable[Symbol], tvar: Symbol): Bindings = {
- val newBindings = vs.toList map (v => Binding(v, tvar))
- new Bindings(newBindings ++ vlist)
- }
-
- override def toString() =
- if (vlist.isEmpty) "<none>"
- else vlist.mkString(", ")
- }
-
- val NoBinding: Bindings = new Bindings(Nil)
-}
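
For reference, a minimal standalone sketch of the Bind-rewrapping idea behind moveBindings and rebindTo in the file deleted above, using a simplified tree ADT instead of the compiler's Trees (all names here are illustrative):

object BindRewrapSketch {
  sealed trait Tree
  case class Bind(name: String, body: Tree) extends Tree
  case class Lit(value: Any) extends Tree
  case object Wild extends Tree

  // Reproduce the Bind wrappers of `oldTree` around `newTree`,
  // mirroring the recursion in PatternBindings.moveBindings.
  def moveBindings(oldTree: Tree, newTree: Tree): Tree = oldTree match {
    case Bind(name, body) => Bind(name, moveBindings(body, newTree))
    case _                => newTree
  }

  def main(args: Array[String]): Unit = {
    // The bindings of `x @ (y @ 1)` re-wrapped around a wildcard:
    println(moveBindings(Bind("x", Bind("y", Lit(1))), Wild))   // Bind(x,Bind(y,Wild))
  }
}
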
diff --git a/src/compiler/scala/tools/nsc/matching/Patterns.scala b/src/compiler/scala/tools/nsc/matching/Patterns.scala
deleted file mode 100644
index ef41246af9..0000000000
--- a/src/compiler/scala/tools/nsc/matching/Patterns.scala
+++ /dev/null
@@ -1,499 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * Author: Paul Phillips
- */
-
-package scala.tools.nsc
-package matching
-
-import symtab.Flags
-import PartialFunction._
-
-/** Patterns are wrappers for Trees with enhanced semantics.
- *
- * @author Paul Phillips
- */
-
-trait Patterns extends ast.TreeDSL {
- self: transform.ExplicitOuter =>
-
- import global.{ typer => _, _ }
- import definitions._
- import CODE._
- import Debug._
- import treeInfo.{ unbind, isStar, isVarPattern }
-
- type PatternMatch = MatchMatrix#PatternMatch
- private type PatternVar = MatrixContext#PatternVar
-
- // Fresh patterns
- def emptyPatterns(i: Int): List[Pattern] = List.fill(i)(NoPattern)
- def emptyTrees(i: Int): List[Tree] = List.fill(i)(EmptyTree)
-
- // An empty pattern
- def NoPattern = WildcardPattern()
-
- // The constant null pattern
- def NullPattern = LiteralPattern(NULL)
-
- // The Nil pattern
- def NilPattern = Pattern(gen.mkNil)
-
- // 8.1.1
- case class VariablePattern(tree: Ident) extends NamePattern {
- lazy val Ident(name) = tree
- require(isVarPattern(tree) && name != nme.WILDCARD)
- override def covers(sym: Symbol) = true
- override def description = "%s".format(name)
- }
-
- // 8.1.1 (b)
- case class WildcardPattern() extends Pattern {
- def tree = EmptyTree
- override def covers(sym: Symbol) = true
- override def isDefault = true
- override def description = "_"
- }
-
- // 8.1.2
- case class TypedPattern(tree: Typed) extends Pattern {
- lazy val Typed(expr, tpt) = tree
-
- override def covers(sym: Symbol) = newMatchesPattern(sym, tpt.tpe)
- override def sufficientType = tpt.tpe
- override def subpatternsForVars: List[Pattern] = List(Pattern(expr))
- override def simplify(pv: PatternVar) = Pattern(expr) match {
- case ExtractorPattern(ua) if pv.sym.tpe <:< tpt.tpe => this rebindTo expr
- case _ => this
- }
- override def description = "%s: %s".format(Pattern(expr), tpt)
- }
-
- // 8.1.3
- case class LiteralPattern(tree: Literal) extends Pattern {
- lazy val Literal(const @ Constant(value)) = tree
-
- def isSwitchable = cond(const.tag) { case ByteTag | ShortTag | IntTag | CharTag => true }
- def intValue = const.intValue
- override def description = {
- val s = if (value == null) "null" else value.toString
- "Lit(%s)".format(s)
- }
- }
-
- // 8.1.4 (a)
- case class ApplyIdentPattern(tree: Apply) extends ApplyPattern with NamePattern {
- // XXX - see bug 3411 for code which violates this assumption
- // require (!isVarPattern(fn) && args.isEmpty)
- lazy val ident @ Ident(name) = fn
-
- override def sufficientType = Pattern(ident).equalsCheck
- override def simplify(pv: PatternVar) = this.rebindToObjectCheck()
- override def description = "Id(%s)".format(name)
- }
- // 8.1.4 (b)
- case class ApplySelectPattern(tree: Apply) extends ApplyPattern with SelectPattern {
- require (args.isEmpty)
- lazy val Apply(select: Select, _) = tree
-
- override lazy val sufficientType = qualifier.tpe match {
- case t: ThisType => singleType(t, sym) // this.X
- case _ =>
- qualifier match {
- case _: Apply => PseudoType(tree)
- case _ => singleType(Pattern(qualifier).necessaryType, sym)
- }
- }
-
- override def covers(sym: Symbol) = newMatchesPattern(sym, sufficientType)
- override def simplify(pv: PatternVar) = this.rebindToObjectCheck()
- override def description = backticked match {
- case Some(s) => "this." + s
- case _ => "Sel(%s.%s)".format(Pattern(qualifier), name)
- }
-
- }
- // 8.1.4 (c)
- case class StableIdPattern(tree: Select) extends SelectPattern {
- def select = tree
- override def description = "St(%s)".format(printableSegments.mkString(" . "))
- private def printableSegments =
- pathSegments filter (x => !x.isEmpty && (x.toString != "$iw"))
- }
- // 8.1.4 (d)
- case class ObjectPattern(tree: Apply) extends ApplyPattern { // NamePattern?
- require(!fn.isType && isModule)
-
- override def covers(sym: Symbol) = newMatchesPattern(sym, sufficientType)
- override def sufficientType = tpe.narrow
- override def simplify(pv: PatternVar) = this.rebindToObjectCheck()
- override def description = "Obj(%s)".format(fn)
- }
- // 8.1.4 (e)
- case class SimpleIdPattern(tree: Ident) extends NamePattern {
- val Ident(name) = tree
- override def covers(sym: Symbol) = newMatchesPattern(sym, tpe.narrow)
- override def description = "Id(%s)".format(name)
- }
-
- // 8.1.5
- case class ConstructorPattern(tree: Apply) extends ApplyPattern with NamePattern {
- require(fn.isType && this.isCaseClass, "tree: " + tree + " fn: " + fn)
- def name = tpe.typeSymbol.name
- def cleanName = tpe.typeSymbol.decodedName
- def hasPrefix = tpe.prefix.prefixString != ""
- def prefixedName =
- if (hasPrefix) "%s.%s".format(tpe.prefix.prefixString, cleanName)
- else cleanName
-
- private def isColonColon = cleanName == "::"
-
- override def subpatterns(pm: MatchMatrix#PatternMatch) =
- if (pm.head.isCaseClass) toPats(args)
- else super.subpatterns(pm)
-
- override def simplify(pv: PatternVar) =
- if (args.isEmpty) this rebindToEmpty tree.tpe
- else this
-
- override def covers(sym: Symbol) = {
- debugging("[constructor] Does " + this + " cover " + sym + " ? ") {
- sym.tpe.typeSymbol == this.tpe.typeSymbol
- }
- }
- override def description = {
- if (isColonColon) "%s :: %s".format(Pattern(args(0)), Pattern(args(1)))
- else "%s(%s)".format(name, toPats(args).mkString(", "))
- }
- }
- // 8.1.6
- case class TuplePattern(tree: Apply) extends ApplyPattern {
- override def description = "((%s))".format(args.size, toPats(args).mkString(", "))
- }
-
- // 8.1.7 / 8.1.8 (unapply and unapplySeq calls)
- case class ExtractorPattern(tree: UnApply) extends UnapplyPattern {
- private def uaTyped = Typed(tree, TypeTree(arg.tpe)) setType arg.tpe
-
- override def simplify(pv: PatternVar) = {
- if (pv.tpe <:< arg.tpe) this
- else this rebindTo uaTyped
- }
- override def description = "Unapply(%s => %s)".format(necessaryType, resTypesString)
- }
-
- // Special List handling. It was like that when I got here.
- case class ListExtractorPattern(tree: UnApply, tpt: Tree, elems: List[Tree]) extends UnapplyPattern with SequenceLikePattern {
- // As yet I can't testify this is doing any good relative to using
- // tpt.tpe, but it doesn't seem to hurt either.
- private lazy val packedType = global.typer.computeType(tpt, tpt.tpe)
- private lazy val consRef = appliedType(ConsClass, packedType)
- private lazy val listRef = appliedType(ListClass, packedType)
- private lazy val seqRef = appliedType(SeqClass, packedType)
-
- private def thisSeqRef = {
- val tc = (tree.tpe baseType SeqClass).typeConstructor
- if (tc.typeParams.size == 1) appliedType(tc, List(packedType))
- else seqRef
- }
-
- // Fold a list into a well-typed x :: y :: etc :: tree.
- private def listFolder(hd: Tree, tl: Tree): Tree = unbind(hd) match {
- case t @ Star(_) => moveBindings(hd, WILD(t.tpe))
- case _ =>
- val dummyMethod = NoSymbol.newTermSymbol(newTermName("matching$dummy"))
- val consType = MethodType(dummyMethod newSyntheticValueParams List(packedType, listRef), consRef)
-
- Apply(TypeTree(consType), List(hd, tl)) setType consRef
- }
- private def foldedPatterns = elems.foldRight(gen.mkNil)((x, y) => listFolder(x, y))
- override def necessaryType = if (nonStarPatterns.nonEmpty) consRef else listRef
-
- override def simplify(pv: PatternVar) = {
- if (pv.tpe <:< necessaryType)
- Pattern(foldedPatterns)
- else
- this rebindTo (Typed(tree, TypeTree(necessaryType)) setType necessaryType)
- }
- override def description = "List(%s => %s)".format(packedType, resTypesString)
- }
-
- trait SequenceLikePattern extends Pattern {
- def elems: List[Tree]
- override def hasStar = elems.nonEmpty && isStar(elems.last)
-
- def elemPatterns = toPats(elems)
- def nonStarElems = if (hasStar) elems.init else elems
- def nonStarPatterns = toPats(nonStarElems)
- def nonStarLength = nonStarElems.length
- }
-
- // 8.1.8 (b) (literal ArrayValues)
- case class SequencePattern(tree: ArrayValue) extends Pattern with SequenceLikePattern {
- lazy val ArrayValue(elemtpt, elems) = tree
-
- override def subpatternsForVars: List[Pattern] = elemPatterns
- override def description = "Seq(%s)".format(elemPatterns mkString ", ")
- }
-
- // 8.1.8 (c)
- case class StarPattern(tree: Star) extends Pattern {
- lazy val Star(elem) = tree
- override def description = "_*"
- }
- // XXX temporary?
- case class ThisPattern(tree: This) extends NamePattern {
- lazy val This(name) = tree
- override def description = "this"
- }
-
- // 8.1.9
- // InfixPattern ... subsumed by Constructor/Extractor Patterns
-
- // 8.1.10
- case class AlternativePattern(tree: Alternative) extends Pattern {
- private lazy val Alternative(subtrees) = tree
- private def alts = toPats(subtrees)
- override def description = "Alt(%s)".format(alts mkString " | ")
- }
-
- // 8.1.11
- // XMLPattern ... for now, subsumed by SequencePattern, but if we want
- // to make it work right, it probably needs special handling.
-
- private def abortUnknownTree(tree: Tree) =
- abort("Unknown Tree reached pattern matcher: %s/%s".format(tree, tree.getClass))
-
- object Pattern {
- // a small tree -> pattern cache
- private val cache = perRunCaches.newMap[Tree, Pattern]()
-
- def apply(tree: Tree): Pattern = {
- if (cache contains tree)
- return cache(tree)
-
- val p = tree match {
- case x: Bind => apply(unbind(tree)) setBound x
- case EmptyTree => WildcardPattern()
- case Ident(nme.WILDCARD) => WildcardPattern()
- case x @ Alternative(ps) => AlternativePattern(x)
- case x: Apply => ApplyPattern(x)
- case x: Typed => TypedPattern(x)
- case x: Literal => LiteralPattern(x)
- case x: UnApply => UnapplyPattern(x)
- case x: Ident => if (isVarPattern(x)) VariablePattern(x) else SimpleIdPattern(x)
- case x: ArrayValue => SequencePattern(x)
- case x: Select => StableIdPattern(x)
- case x: Star => StarPattern(x)
- case x: This => ThisPattern(x) // XXX ?
- case _ => abortUnknownTree(tree)
- }
- cache(tree) = p
-
- // limiting the trace output
- p match {
- case WildcardPattern() => p
- case _: LiteralPattern => p
- case _ => tracing("Pattern")(p)
- }
- }
- // matching on Pattern(...) always skips the bindings.
- def unapply(other: Any): Option[Tree] = other match {
- case x: Tree => unapply(Pattern(x))
- case x: Pattern => Some(x.tree)
- case _ => None
- }
- }
-
- object UnapplyPattern {
- private object UnapplySeq {
- def unapply(x: UnApply) = x match {
- case UnApply(
- Apply(TypeApply(Select(qual, nme.unapplySeq), List(tpt)), _),
- List(ArrayValue(_, elems))) =>
- Some((qual.symbol, tpt, elems))
- case _ =>
- None
- }
- }
-
- def apply(x: UnApply): Pattern = x match {
- case UnapplySeq(ListModule, tpt, elems) =>
- ListExtractorPattern(x, tpt, elems)
- case _ =>
- ExtractorPattern(x)
- }
- }
-
- // right now a tree like x @ Apply(fn, Nil) where !fn.isType
- // is handled by creating a singleton type:
- //
- // val stype = Types.singleType(x.tpe.prefix, x.symbol)
- //
- // and then passing that as a type argument to EqualsPatternClass:
- //
- // val tpe = typeRef(NoPrefix, EqualsPatternClass, List(stype))
- //
- // then creating a Typed pattern and rebinding.
- //
- // val newpat = Typed(EmptyTree, TypeTree(tpe)) setType tpe)
- //
- // This is also how Select(qual, name) is handled.
- object ApplyPattern {
- def apply(x: Apply): Pattern = {
- val Apply(fn, args) = x
- def isModule = x.symbol.isModule || x.tpe.termSymbol.isModule
-
- if (fn.isType) {
- if (isTupleType(fn.tpe)) TuplePattern(x)
- else ConstructorPattern(x)
- }
- else if (args.isEmpty) {
- if (isModule) ObjectPattern(x)
- else fn match {
- case _: Ident => ApplyIdentPattern(x)
- case _: Select => ApplySelectPattern(x)
- }
- }
- else abortUnknownTree(x)
- }
- }
-
- /** Some intermediate pattern classes with shared structure **/
-
- sealed trait SelectPattern extends NamePattern {
- def select: Select
- lazy val Select(qualifier, name) = select
- def pathSegments = getPathSegments(tree)
- def backticked: Option[String] = qualifier match {
- case _: This if nme.isVariableName(name) => Some("`%s`".format(name))
- case _ => None
- }
- override def covers(sym: Symbol) = newMatchesPattern(sym, tree.tpe)
- protected def getPathSegments(t: Tree): List[Name] = t match {
- case Select(q, name) => name :: getPathSegments(q)
- case Apply(f, Nil) => getPathSegments(f)
- case _ => Nil
- }
- }
-
- sealed trait NamePattern extends Pattern {
- def name: Name
- override def sufficientType = tpe.narrow
- override def simplify(pv: PatternVar) = this.rebindToEqualsCheck()
- override def description = name.toString
- }
-
- sealed trait UnapplyPattern extends Pattern {
- lazy val UnApply(unfn, args) = tree
- lazy val Apply(fn, _) = unfn
- lazy val MethodType(List(arg, _*), _) = fn.tpe
-
- // Covers if the symbol matches the unapply method's argument type,
- // and the return type of the unapply is Some.
- override def covers(sym: Symbol) = newMatchesPattern(sym, arg.tpe)
-
- // TODO: for alwaysCovers:
- // fn.tpe.finalResultType.typeSymbol == SomeClass
-
- override def necessaryType = arg.tpe
- override def subpatternsForVars = args match {
- case List(ArrayValue(elemtpe, elems)) => toPats(elems)
- case _ => toPats(args)
- }
-
- def resTypes = analyzer.unapplyTypeList(unfn.pos, unfn.symbol, unfn.tpe, args)
- def resTypesString = resTypes match {
- case Nil => "Boolean"
- case xs => xs.mkString(", ")
- }
- }
-
- sealed trait ApplyPattern extends Pattern {
- lazy val Apply(fn, args) = tree
- override def subpatternsForVars: List[Pattern] = toPats(args)
-
- override def dummies =
- if (!this.isCaseClass) Nil
- else emptyPatterns(sufficientType.typeSymbol.caseFieldAccessors.size)
-
- def isConstructorPattern = fn.isType
- override def covers(sym: Symbol) = newMatchesPattern(sym, fn.tpe)
- }
-
- sealed abstract class Pattern extends PatternBindingLogic {
- def tree: Tree
-
- // returns either a simplification of this pattern or identity.
- def simplify(pv: PatternVar): Pattern = this
-
- // the right number of dummies for this pattern
- def dummies: List[Pattern] = Nil
-
- // Is this a default pattern (untyped "_" or an EmptyTree inserted by the matcher)
- def isDefault = false
-
- // what type must a scrutinee have to have any chance of matching this pattern?
- def necessaryType = tpe
-
- // what type could a scrutinee have which would automatically indicate a match?
- // (nullness and guards will still be checked.)
- def sufficientType = tpe
-
- // the subpatterns for this pattern (at the moment, that means constructor arguments)
- def subpatterns(pm: MatchMatrix#PatternMatch): List[Pattern] = pm.dummies
-
- // if this pattern should be considered to cover the given symbol
- def covers(sym: Symbol): Boolean = newMatchesPattern(sym, sufficientType)
- def newMatchesPattern(sym: Symbol, pattp: Type) = {
- debugging("[" + kindString + "] Does " + pattp + " cover " + sym + " ? ") {
- (sym.isModuleClass && (sym.tpe.typeSymbol eq pattp.typeSymbol)) ||
- (sym.tpe.baseTypeSeq exists (_ matchesPattern pattp))
- }
- }
-
- def sym = tree.symbol
- def tpe = tree.tpe
- def isEmpty = tree.isEmpty
-
- def isModule = sym.isModule || tpe.termSymbol.isModule
- def isCaseClass = tpe.typeSymbol.isCase
- def isObject = (sym != null) && (sym != NoSymbol) && tpe.prefix.isStable // XXX not entire logic
-
- def hasStar = false
-
- def setType(tpe: Type): this.type = {
- tree setType tpe
- this
- }
-
- def equalsCheck =
- tracing("equalsCheck")(
- if (sym.isValue) singleType(NoPrefix, sym)
- else tpe.narrow
- )
-
- /** Standard methods **/
- override def equals(other: Any) = other match {
- case x: Pattern => this.boundTree == x.boundTree
- case _ => super.equals(other)
- }
- override def hashCode() = boundTree.hashCode()
- def description = super.toString
-
- final override def toString = description
-
- def toTypeString() = "%s <: x <: %s".format(necessaryType, sufficientType)
- def kindString = ""
- }
-
- /*** Extractors ***/
-
- object UnapplyParamType {
- def unapply(x: Tree): Option[Type] = condOpt(unbind(x)) {
- case UnApply(Apply(fn, _), _) => fn.tpe match {
- case m: MethodType => m.paramTypes.head
- }
- }
- }
-}
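
For reference, a standalone sketch of the shape dispatch and memoization performed by Pattern.apply in the file deleted above, with a simplified tree ADT standing in for the compiler's Trees (names are illustrative):

import scala.collection.mutable

object PatternFactorySketch {
  sealed trait Tree
  case object EmptyTree extends Tree
  case class Ident(name: String) extends Tree
  case class Literal(value: Any) extends Tree

  sealed trait Pattern
  case object WildcardPattern extends Pattern
  case class VariablePattern(name: String) extends Pattern
  case class LiteralPattern(value: Any) extends Pattern

  // A small tree -> pattern cache, like the perRunCaches map above.
  private val cache = mutable.Map.empty[Tree, Pattern]

  def apply(tree: Tree): Pattern = cache.getOrElseUpdate(tree, tree match {
    case EmptyTree | Ident("_") => WildcardPattern
    case Ident(name)            => VariablePattern(name)
    case Literal(value)         => LiteralPattern(value)
  })

  def main(args: Array[String]): Unit =
    println(apply(Ident("xs")))   // VariablePattern(xs)
}
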
diff --git a/src/compiler/scala/tools/nsc/package.scala b/src/compiler/scala/tools/nsc/package.scala
index 00a9f3b39c..761fd79358 100644
--- a/src/compiler/scala/tools/nsc/package.scala
+++ b/src/compiler/scala/tools/nsc/package.scala
@@ -6,9 +6,17 @@
package scala.tools
package object nsc {
+ type Mode = scala.reflect.internal.Mode
+ val Mode = scala.reflect.internal.Mode
+
+ def EXPRmode = Mode.EXPRmode
+
type Phase = scala.reflect.internal.Phase
val NoPhase = scala.reflect.internal.NoPhase
+ type Variance = scala.reflect.internal.Variance
+ val Variance = scala.reflect.internal.Variance
+
type FatalError = scala.reflect.internal.FatalError
val FatalError = scala.reflect.internal.FatalError
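
The type/val pairs added above follow the usual re-export idiom: the type alias exposes the type and the val of the same name exposes its companion, so nsc code can use Mode or Variance without importing scala.reflect.internal. A self-contained sketch with made-up names (internal, demo):

object AliasSketch {
  object internal {
    class Mode(val bits: Int)
    object Mode { val EXPRmode = new Mode(1) }
  }

  object demo {
    type Mode = internal.Mode    // the type itself
    val Mode  = internal.Mode    // its companion, for Mode.EXPRmode etc.
    def EXPRmode = Mode.EXPRmode
  }

  def main(args: Array[String]): Unit =
    println(demo.EXPRmode.bits)  // 1; the real aliases live in the nsc package object
}
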
diff --git a/src/compiler/scala/tools/nsc/plugins/Plugin.scala b/src/compiler/scala/tools/nsc/plugins/Plugin.scala
index 2050ce7ffd..a584a4ed5d 100644
--- a/src/compiler/scala/tools/nsc/plugins/Plugin.scala
+++ b/src/compiler/scala/tools/nsc/plugins/Plugin.scala
@@ -6,13 +6,14 @@
package scala.tools.nsc
package plugins
-import io.{ File, Path, Jar }
-import java.net.URLClassLoader
-import java.util.jar.JarFile
+import scala.tools.nsc.io.{ Jar }
+import scala.tools.nsc.util.ScalaClassLoader
+import scala.reflect.io.{ Directory, File, Path }
+import java.io.InputStream
import java.util.zip.ZipException
-import scala.collection.mutable
-import mutable.ListBuffer
+import scala.collection.mutable.ListBuffer
+import scala.util.{ Try, Success, Failure }
import scala.xml.XML
/** Information about a plugin loaded from a jar file.
@@ -37,11 +38,13 @@ abstract class Plugin {
val description: String
/** The compiler that this plugin uses. This is normally equated
- * to a constructor parameter in the concrete subclass. */
+ * to a constructor parameter in the concrete subclass.
+ */
val global: Global
/** Handle any plugin-specific options. The `-P:plugname:` part
- * will not be present. */
+ * will not be present.
+ */
def processOptions(options: List[String], error: String => Unit) {
if (!options.isEmpty)
error("Error: " + name + " has no options")
@@ -63,90 +66,83 @@ object Plugin {
private val PluginXML = "scalac-plugin.xml"
- /** Create a class loader with the specified file plus
+ /** Create a class loader with the specified locations plus
* the loader that loaded the Scala compiler.
*/
- private def loaderFor(jarfiles: Seq[Path]): ClassLoader = {
+ private def loaderFor(locations: Seq[Path]): ScalaClassLoader = {
val compilerLoader = classOf[Plugin].getClassLoader
- val jarurls = jarfiles map (_.toURL)
+ val urls = locations map (_.toURL)
- new URLClassLoader(jarurls.toArray, compilerLoader)
+ ScalaClassLoader fromURLs (urls, compilerLoader)
}
- /** Try to load a plugin description from the specified
- * file, returning <code>None</code> if it does not work.
+ /** Try to load a plugin description from the specified location.
*/
- private def loadDescription(jarfile: Path): Option[PluginDescription] =
- // XXX Return to this once we have some ARM support
- if (!jarfile.exists) None
- else try {
- val jar = new JarFile(jarfile.jfile)
-
- try {
- jar getEntry PluginXML match {
- case null => None
- case entry =>
- val in = jar getInputStream entry
- val packXML = XML load in
- in.close()
-
- PluginDescription fromXML packXML
- }
- }
- finally jar.close()
- }
- catch {
- case _: ZipException => None
+ private def loadDescriptionFromJar(jarp: Path): Try[PluginDescription] = {
+ // XXX Return to this once we have more ARM support
+ def read(is: Option[InputStream]) = is match {
+ case None => throw new RuntimeException(s"Missing $PluginXML in $jarp")
+ case _ => PluginDescription fromXML (XML load is.get)
}
+ Try(new Jar(jarp.jfile).withEntryStream(PluginXML)(read))
+ }
+
+ private def loadDescriptionFromFile(f: Path): Try[PluginDescription] =
+ Try(XML loadFile f.jfile) map (PluginDescription fromXML _)
type AnyClass = Class[_]
- /** Loads a plugin class from the named jar file.
- *
- * @return `None` if the jar file has no plugin in it or
- * if the plugin is badly formed.
+ /** Use a class loader to load the plugin class.
*/
- def loadFrom(jarfile: Path, loader: ClassLoader): Option[AnyClass] =
- loadDescription(jarfile) match {
- case None =>
- println("Warning: could not load descriptor for plugin %s".format(jarfile))
- None
- case Some(pdesc) =>
- try Some(loader loadClass pdesc.classname) catch {
- case _: Exception =>
- println("Warning: class not found for plugin in %s (%s)".format(jarfile, pdesc.classname))
- None
- }
+ def load(classname: String, loader: ClassLoader): Try[AnyClass] = {
+ Try[AnyClass] {
+ loader loadClass classname
+ } recoverWith {
+ case _: Exception =>
+ Failure(new RuntimeException(s"Warning: class not found: ${classname}"))
}
+ }
- /** Load all plugins found in the argument list, both in the
- * jar files explicitly listed, and in the jar files in the
- * directories specified. Skips all plugins in `ignoring`.
+ /** Load all plugins specified by the arguments.
+ * Each of `jars` must be a valid plugin archive or exploded archive.
+ * Each of `dirs` may be a directory containing arbitrary plugin archives.
+ * Skips all plugins named in `ignoring`.
* A single classloader is created and used to load all of them.
*/
def loadAllFrom(
jars: List[Path],
dirs: List[Path],
- ignoring: List[String]): List[AnyClass] =
+ ignoring: List[String]): List[Try[AnyClass]] =
{
- val alljars = (jars ::: (for {
- dir <- dirs if dir.isDirectory
- entry <- dir.toDirectory.files.toList sortBy (_.name)
-// was: if Path.isJarOrZip(entry)
- if Jar.isJarOrZip(entry)
- pdesc <- loadDescription(entry)
- if !(ignoring contains pdesc.name)
- } yield entry)).distinct
-
- val loader = loaderFor(alljars)
- (alljars map (loadFrom(_, loader))).flatten
+ // List[(jar, Success(descriptor))] in dir
+ def scan(d: Directory) = for {
+ f <- d.files.toList sortBy (_.name)
+ if Jar isJarOrZip f
+ pd = loadDescriptionFromJar(f)
+ if pd.isSuccess
+ } yield (f, pd)
+ // (dir, Try(descriptor))
+ def explode(d: Directory) = d -> loadDescriptionFromFile(d / PluginXML)
+ // (j, Try(descriptor))
+ def required(j: Path) = j -> loadDescriptionFromJar(j)
+
+ type Paired = Pair[Path, Try[PluginDescription]]
+ val included: List[Paired] = (dirs flatMap (_ ifDirectory scan)).flatten
+ val exploded: List[Paired] = jars flatMap (_ ifDirectory explode)
+ val explicit: List[Paired] = jars flatMap (_ ifFile required)
+ def ignored(p: Paired) = p match {
+ case (path, Success(pd)) => ignoring contains pd.name
+ case _ => false
+ }
+ val (locs, pds) = ((explicit ::: exploded ::: included) filterNot ignored).unzip
+ val loader = loaderFor(locs.distinct)
+ (pds filter (_.isSuccess) map (_.get.classname)).distinct map (Plugin load (_, loader))
}
/** Instantiate a plugin class, given the class and
* the compiler it is to be used in.
*/
def instantiate(clazz: AnyClass, global: Global): Plugin = {
- val constructor = clazz getConstructor classOf[Global]
- (constructor newInstance global).asInstanceOf[Plugin]
+ (clazz getConstructor classOf[Global] newInstance global).asInstanceOf[Plugin]
}
}
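
A minimal sketch of the Try-plus-recoverWith style used by Plugin.load above, outside of any compiler machinery (the class names passed in and the object name are arbitrary):

import scala.util.{ Try, Failure }

object TryLoadSketch {
  def load(classname: String, loader: ClassLoader): Try[Class[_]] =
    Try[Class[_]](loader loadClass classname) recoverWith {
      case _: Exception =>
        Failure(new RuntimeException(s"Warning: class not found: $classname"))
    }

  def main(args: Array[String]): Unit = {
    val loader  = getClass.getClassLoader
    val results = List("scala.Option", "does.not.Exist") map (load(_, loader))
    // Callers can keep the successes and report the failures without throwing.
    val (goods, errors) = results partition (_.isSuccess)
    errors foreach (_.failed foreach (e => println(e.getMessage)))
    println(goods map (_.get.getName))   // List(scala.Option)
  }
}
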
diff --git a/src/compiler/scala/tools/nsc/plugins/PluginComponent.scala b/src/compiler/scala/tools/nsc/plugins/PluginComponent.scala
index 4d98b2563c..c6e1af7ea4 100644
--- a/src/compiler/scala/tools/nsc/plugins/PluginComponent.scala
+++ b/src/compiler/scala/tools/nsc/plugins/PluginComponent.scala
@@ -18,8 +18,12 @@ abstract class PluginComponent extends SubComponent {
/** Internal flag to tell external from internal phases */
final override val internal = false
- /** Phases supplied by plugins should not have give the runsRightAfter constraint,
- * but can override it */
+ /** Phases supplied by plugins should not have to supply the
+ * runsRightAfter constraint, but can override it.
+ */
val runsRightAfter: Option[String] = None
+ /** Useful for -Xshow-phases. */
+ def description: String = ""
+
}
diff --git a/src/compiler/scala/tools/nsc/plugins/PluginDescription.scala b/src/compiler/scala/tools/nsc/plugins/PluginDescription.scala
index bd567400fb..27693d1a45 100644
--- a/src/compiler/scala/tools/nsc/plugins/PluginDescription.scala
+++ b/src/compiler/scala/tools/nsc/plugins/PluginDescription.scala
@@ -6,27 +6,22 @@
package scala.tools.nsc
package plugins
-import scala.xml.{Node,NodeSeq}
+import scala.xml.Node
/** A description of a compiler plugin, suitable for serialization
* to XML for inclusion in the plugin's .jar file.
*
* @author Lex Spoon
* @version 1.0, 2007-5-21
+ * @param name A short name of the plugin, used to identify it in
+ * various contexts. The phase defined by the plugin
+ * should have the same name.
+ * @param classname The name of the main Plugin class.
*/
-abstract class PluginDescription {
-
- /** A short name of the compiler, used to identify it in
- * various contexts. The phase defined by the plugin
- * should have the same name.
- */
- val name: String
-
- /** The name of the main class for the plugin */
- val classname: String
+case class PluginDescription(name: String, classname: String) {
/** An XML representation of this description. It can be
- * read back using <code>PluginDescription.fromXML</code>.
+ * read back using `PluginDescription.fromXML`.
* It should be stored inside the jar archive file.
*/
def toXML: Node = {
@@ -44,32 +39,24 @@ abstract class PluginDescription {
*/
object PluginDescription {
- def fromXML(xml: Node): Option[PluginDescription] = {
- // check the top-level tag
- xml match {
- case <plugin>{_*}</plugin> => ()
- case _ => return None
- }
+ def fromXML(xml: Node): PluginDescription = {
// extract one field
def getField(field: String): Option[String] = {
val text = (xml \\ field).text.trim
if (text == "") None else Some(text)
}
-
- // extract the required fields
- val name1 = getField("name") match {
- case None => return None
- case Some(str) => str
+ def extracted = {
+ val name = "name"
+ val claas = "classname"
+ val vs = Map(name -> getField(name), claas -> getField(claas))
+ if (vs.values exists (_.isEmpty)) fail()
+ else PluginDescription(name = vs(name).get, classname = vs(claas).get)
}
- val classname1 = getField("classname") match {
- case None => return None
- case Some(str) => str
+ def fail() = throw new RuntimeException("Bad plugin descriptor.")
+ // check the top-level tag
+ xml match {
+ case <plugin>{_*}</plugin> => extracted
+ case _ => fail()
}
-
- Some(new PluginDescription {
- val name = name1
- val classname = classname1
- })
}
-
}
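
For reference, fromXML above expects a scalac-plugin.xml descriptor whose top-level <plugin> element carries <name> and <classname> children. A standalone sketch of that extraction with scala.xml; the plugin name and class below are made up:

import scala.xml.XML

object DescriptorSketch {
  def main(args: Array[String]): Unit = {
    val xml = XML.loadString(
      """<plugin>
        |  <name>demo</name>
        |  <classname>org.example.DemoPlugin</classname>
        |</plugin>""".stripMargin)

    // Same extraction as getField above: empty text means "missing".
    def field(tag: String): Option[String] = {
      val text = (xml \\ tag).text.trim
      if (text == "") None else Some(text)
    }

    println(field("name") -> field("classname"))
    // (Some(demo),Some(org.example.DemoPlugin))
  }
}
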
diff --git a/src/compiler/scala/tools/nsc/plugins/Plugins.scala b/src/compiler/scala/tools/nsc/plugins/Plugins.scala
index 736bd826e4..8f7794fa90 100644
--- a/src/compiler/scala/tools/nsc/plugins/Plugins.scala
+++ b/src/compiler/scala/tools/nsc/plugins/Plugins.scala
@@ -7,7 +7,8 @@
package scala.tools.nsc
package plugins
-import io.{ File, Path }
+import scala.reflect.io.{ File, Path }
+import scala.tools.util.PathResolver.Defaults
/** Support for run-time loading of compiler plugins.
*
@@ -25,8 +26,13 @@ trait Plugins {
*/
protected def loadRoughPluginsList(): List[Plugin] = {
val jars = settings.plugin.value map Path.apply
- val dirs = (settings.pluginsDir.value split File.pathSeparator).toList map Path.apply
- val classes = Plugin.loadAllFrom(jars, dirs, settings.disable.value)
+ def injectDefault(s: String) = if (s.isEmpty) Defaults.scalaPluginPath else s
+ val dirs = (settings.pluginsDir.value split File.pathSeparator).toList map injectDefault map Path.apply
+ val maybes = Plugin.loadAllFrom(jars, dirs, settings.disable.value)
+ val (goods, errors) = maybes partition (_.isSuccess)
+ // Explicit parameterization of recover to suppress -Xlint warning about inferred Any
+ errors foreach (_.recover[Any] { case e: Exception => inform(e.getMessage) })
+ val classes = goods map (_.get) // flatten
// Each plugin must only be instantiated once. A common pattern
// is to register annotation checkers during object construction, so
@@ -34,7 +40,7 @@ trait Plugins {
classes map (Plugin.instantiate(_, this))
}
- protected lazy val roughPluginsList: List[Plugin] = loadRoughPluginsList
+ protected lazy val roughPluginsList: List[Plugin] = loadRoughPluginsList()
/** Load all available plugins. Skips plugins that
* either have the same name as another one, or which
@@ -55,7 +61,7 @@ trait Plugins {
def withPlug = plug :: pick(tail, plugNames + plug.name, phaseNames ++ plugPhaseNames)
lazy val commonPhases = phaseNames intersect plugPhaseNames
- def note(msg: String): Unit = if (settings.verbose.value) inform(msg format plug.name)
+ def note(msg: String): Unit = if (settings.verbose) inform(msg format plug.name)
def fail(msg: String) = { note(msg) ; withoutPlug }
if (plugNames contains plug.name)
@@ -72,11 +78,11 @@ trait Plugins {
val plugs = pick(roughPluginsList, Set(), (phasesSet map (_.phaseName)).toSet)
- /** Verify requirements are present. */
+ /* Verify requirements are present. */
for (req <- settings.require.value ; if !(plugs exists (_.name == req)))
globalError("Missing required plugin: " + req)
- /** Process plugin options. */
+ /* Process plugin options. */
def namec(plug: Plugin) = plug.name + ":"
def optList(xs: List[String], p: Plugin) = xs filter (_ startsWith namec(p))
def doOpts(p: Plugin): List[String] =
@@ -88,14 +94,14 @@ trait Plugins {
p.processOptions(opts, globalError)
}
- /** Verify no non-existent plugin given with -P */
+ /* Verify no non-existent plugin given with -P */
for (opt <- settings.pluginOptions.value ; if plugs forall (p => optList(List(opt), p).isEmpty))
globalError("bad option: -P:" + opt)
plugs
}
- lazy val plugins: List[Plugin] = loadPlugins
+ lazy val plugins: List[Plugin] = loadPlugins()
/** A description of all the plugins that are loaded */
def pluginDescriptions: String =
@@ -106,7 +112,7 @@ trait Plugins {
* @see phasesSet
*/
protected def computePluginPhases(): Unit =
- phasesSet ++= (plugins flatMap (_.components))
+ for (p <- plugins; c <- p.components) addToPhasesSet(c, c.description)
/** Summary of the options for all loaded plugins */
def pluginOptionsHelp: String =
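
A standalone sketch of the partition-and-report pattern used by loadRoughPluginsList above: keep the successful Trys, turn the failures into messages, and never throw (the sample values are arbitrary):

import scala.util.Try

object TryPartitionSketch {
  def main(args: Array[String]): Unit = {
    val maybes: List[Try[Int]] = List(Try(1), Try(sys.error("no descriptor")), Try(3))

    val (goods, errors) = maybes partition (_.isSuccess)
    // Explicit type parameter on recover avoids inferring Any under -Xlint,
    // as the comment in loadRoughPluginsList notes.
    errors foreach (_.recover[Any] { case e: Exception => println(e.getMessage) })

    val classes = goods map (_.get)   // the "flatten" step
    println(classes)                  // List(1, 3)
  }
}
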
diff --git a/src/compiler/scala/tools/nsc/reporters/AbstractReporter.scala b/src/compiler/scala/tools/nsc/reporters/AbstractReporter.scala
index c7ee11dec0..a67c207820 100644
--- a/src/compiler/scala/tools/nsc/reporters/AbstractReporter.scala
+++ b/src/compiler/scala/tools/nsc/reporters/AbstractReporter.scala
@@ -21,19 +21,15 @@ abstract class AbstractReporter extends Reporter {
private val positions = new mutable.HashMap[Position, Severity]
override def reset() {
- super.reset
- positions.clear
+ super.reset()
+ positions.clear()
}
private def isVerbose = settings.verbose.value
private def noWarnings = settings.nowarnings.value
private def isPromptSet = settings.prompt.value
- protected def info0(pos: Position, msg: String, _severity: Severity, force: Boolean) {
- val severity =
- if (settings.fatalWarnings.value && _severity == WARNING) ERROR
- else _severity
-
+ protected def info0(pos: Position, msg: String, severity: Severity, force: Boolean) {
if (severity == INFO) {
if (isVerbose || force) {
severity.count += 1
@@ -47,13 +43,13 @@ abstract class AbstractReporter extends Reporter {
if (!hidden || isPromptSet) {
severity.count += 1
display(pos, msg, severity)
- } else if (settings.debug.value) {
+ } else if (settings.debug) {
severity.count += 1
display(pos, "[ suppressed ] " + msg, severity)
}
if (isPromptSet)
- displayPrompt
+ displayPrompt()
}
}
}
diff --git a/src/compiler/scala/tools/nsc/reporters/ConsoleReporter.scala b/src/compiler/scala/tools/nsc/reporters/ConsoleReporter.scala
index e847fb5b86..fdb5c72c3d 100644
--- a/src/compiler/scala/tools/nsc/reporters/ConsoleReporter.scala
+++ b/src/compiler/scala/tools/nsc/reporters/ConsoleReporter.scala
@@ -3,7 +3,8 @@
* @author Martin Odersky
*/
-package scala.tools.nsc
+package scala
+package tools.nsc
package reporters
import java.io.{ BufferedReader, IOException, PrintWriter }
@@ -34,9 +35,6 @@ class ConsoleReporter(val settings: Settings, reader: BufferedReader, writer: Pr
}
/** Returns the number of errors issued totally as a string.
- *
- * @param severity ...
- * @return ...
*/
private def getCountString(severity: Severity): String =
StringOps.countElementsAsString((severity).count, label(severity))
@@ -52,17 +50,12 @@ class ConsoleReporter(val settings: Settings, reader: BufferedReader, writer: Pr
printMessage(pos, clabel(severity) + msg)
}
- /**
- * @param pos ...
- */
def printSourceLine(pos: Position) {
printMessage(pos.lineContent.stripLineEnd)
printColumnMarker(pos)
}
/** Prints the column marker of the given position.
- *
- * @param pos ...
*/
def printColumnMarker(pos: Position) =
if (pos.isDefined) { printMessage(" " * (pos.column - 1) + "^") }
@@ -94,6 +87,5 @@ class ConsoleReporter(val settings: Settings, reader: BufferedReader, writer: Pr
}
}
- private def abort(msg: String) = throw new Error(msg)
override def flush() { writer.flush() }
}
diff --git a/src/compiler/scala/tools/nsc/reporters/Reporter.scala b/src/compiler/scala/tools/nsc/reporters/Reporter.scala
index 8871ae6555..0544da5d3c 100644
--- a/src/compiler/scala/tools/nsc/reporters/Reporter.scala
+++ b/src/compiler/scala/tools/nsc/reporters/Reporter.scala
@@ -7,7 +7,6 @@ package scala.tools.nsc
package reporters
import scala.reflect.internal.util._
-import scala.reflect.internal.util.StringOps._
/**
* This interface provides methods to issue information, warning and
@@ -59,15 +58,15 @@ abstract class Reporter {
/** For sending a message which should not be labeled as a warning/error,
* but also shouldn't require -verbose to be visible.
*/
- def echo(msg: String): Unit = info(NoPosition, msg, true)
- def echo(pos: Position, msg: String): Unit = info(pos, msg, true)
+ def echo(msg: String): Unit = info(NoPosition, msg, force = true)
+ def echo(pos: Position, msg: String): Unit = info(pos, msg, force = true)
/** Informational messages, suppressed unless -verbose or force=true. */
def info(pos: Position, msg: String, force: Boolean): Unit = info0(pos, msg, INFO, force)
/** Warnings and errors. */
- def warning(pos: Position, msg: String): Unit = withoutTruncating(info0(pos, msg, WARNING, false))
- def error(pos: Position, msg: String): Unit = withoutTruncating(info0(pos, msg, ERROR, false))
+ def warning(pos: Position, msg: String): Unit = withoutTruncating(info0(pos, msg, WARNING, force = false))
+ def error(pos: Position, msg: String): Unit = withoutTruncating(info0(pos, msg, ERROR, force = false))
def incompleteInputError(pos: Position, msg: String): Unit = {
if (incompleteHandled) incompleteHandler(pos, msg)
else error(pos, msg)
diff --git a/src/compiler/scala/tools/nsc/scratchpad/Mixer.scala b/src/compiler/scala/tools/nsc/scratchpad/Mixer.scala
index 10e9982594..3aecc06b1e 100644
--- a/src/compiler/scala/tools/nsc/scratchpad/Mixer.scala
+++ b/src/compiler/scala/tools/nsc/scratchpad/Mixer.scala
@@ -2,9 +2,6 @@ package scala.tools.nsc.scratchpad
import java.io.{FileInputStream, InputStreamReader, IOException}
-import scala.runtime.ScalaRunTime.stringOf
-import java.lang.reflect.InvocationTargetException
-import scala.reflect.runtime.ReflectionUtils._
import scala.collection.mutable.ArrayBuffer
@deprecated("SI-6458: Instrumentation logic will be moved out of the compiler.","2.10.0")
diff --git a/src/compiler/scala/tools/nsc/scratchpad/SourceInserter.scala b/src/compiler/scala/tools/nsc/scratchpad/SourceInserter.scala
index 01dccd7521..61c1717fea 100644
--- a/src/compiler/scala/tools/nsc/scratchpad/SourceInserter.scala
+++ b/src/compiler/scala/tools/nsc/scratchpad/SourceInserter.scala
@@ -1,8 +1,6 @@
package scala.tools.nsc
package scratchpad
-import java.io.Writer
-import scala.reflect.internal.util.SourceFile
import scala.reflect.internal.Chars._
@deprecated("SI-6458: Instrumentation logic will be moved out of the compiler.","2.10.0")
diff --git a/src/compiler/scala/tools/nsc/settings/AbsScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/AbsScalaSettings.scala
index 783e249931..c9718f711a 100644
--- a/src/compiler/scala/tools/nsc/settings/AbsScalaSettings.scala
+++ b/src/compiler/scala/tools/nsc/settings/AbsScalaSettings.scala
@@ -3,7 +3,8 @@
* @author Paul Phillips
*/
-package scala.tools.nsc
+package scala
+package tools.nsc
package settings
trait AbsScalaSettings {
diff --git a/src/compiler/scala/tools/nsc/settings/AbsSettings.scala b/src/compiler/scala/tools/nsc/settings/AbsSettings.scala
index adabeb02a3..4727e6d867 100644
--- a/src/compiler/scala/tools/nsc/settings/AbsSettings.scala
+++ b/src/compiler/scala/tools/nsc/settings/AbsSettings.scala
@@ -47,8 +47,6 @@ trait AbsSettings extends scala.reflect.internal.settings.AbsSettings {
}
})
- implicit lazy val SettingOrdering: Ordering[Setting] = Ordering.ordered
-
trait AbsSetting extends Ordered[Setting] with AbsSettingValue {
def name: String
def helpDescription: String
@@ -83,14 +81,6 @@ trait AbsSettings extends scala.reflect.internal.settings.AbsSettings {
this
}
- /** If the appearance of the setting should halt argument processing. */
- private var isTerminatorSetting = false
- def shouldStopProcessing = isTerminatorSetting
- def stopProcessing(): this.type = {
- isTerminatorSetting = true
- this
- }
-
/** Issue error and return */
def errorAndValue[T](msg: String, x: T): T = { errorFn(msg) ; x }
@@ -110,6 +100,7 @@ trait AbsSettings extends scala.reflect.internal.settings.AbsSettings {
/** Attempt to set from a properties file style property value.
* Currently used by Eclipse SDT only.
+ * !!! Needs test.
*/
def tryToSetFromPropertyValue(s: String): Unit = tryToSet(s :: Nil)
@@ -133,7 +124,7 @@ trait AbsSettings extends scala.reflect.internal.settings.AbsSettings {
case _ => false
}
override def hashCode() = name.hashCode + value.hashCode
- override def toString() = name + " = " + value
+ override def toString() = name + " = " + (if (value == "") "\"\"" else value)
}
trait InternalSetting extends AbsSetting {
diff --git a/src/compiler/scala/tools/nsc/settings/AdvancedScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/AdvancedScalaSettings.scala
deleted file mode 100644
index 0bec113743..0000000000
--- a/src/compiler/scala/tools/nsc/settings/AdvancedScalaSettings.scala
+++ /dev/null
@@ -1,77 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools.nsc
-package settings
-
-trait AdvancedScalaSettings {
- self: AbsScalaSettings =>
-
- abstract class X extends SettingGroup("-X") {
- val assemextdirs: StringSetting
- val assemname: StringSetting
- val assempath: StringSetting
- val checkinit: BooleanSetting
- val disableassertions: BooleanSetting
- val elidebelow: IntSetting
- val experimental: BooleanSetting
- val future: BooleanSetting
- val generatephasegraph: StringSetting
- val logimplicits: BooleanSetting
- val mainClass: StringSetting
- val migration: BooleanSetting
- val noforwarders: BooleanSetting
- val nojline: BooleanSetting
- val nouescape: BooleanSetting
- val plugin: MultiStringSetting
- val plugindisable: MultiStringSetting
- val pluginlist: BooleanSetting
- val pluginrequire: MultiStringSetting
- val pluginsdir: StringSetting
- val print: PhasesSetting
- val printicode: BooleanSetting
- val printpos: BooleanSetting
- val printtypes: BooleanSetting
- val prompt: BooleanSetting
- val resident: BooleanSetting
- val script: StringSetting
- val showclass: StringSetting
- val showobject: StringSetting
- val showphases: BooleanSetting
- val sourcedir: StringSetting
- val sourcereader: StringSetting
- }
- // def Xexperimental = X.experimental
- // def Xmigration28 = X.migration
- // def Xnojline = X.nojline
- // def Xprint = X.print
- // def Xprintpos = X.printpos
- // def Xshowcls = X.showclass
- // def Xshowobj = X.showobject
- // def assemextdirs = X.assemextdirs
- // def assemname = X.assemname
- // def assemrefs = X.assempath
- // def checkInit = X.checkinit
- // def disable = X.plugindisable
- // def elideLevel = X.elidelevel
- // def future = X.future
- // def genPhaseGraph = X.generatephasegraph
- // def logimplicits = X.logimplicits
- // def noForwarders = X.noforwarders
- // def noassertions = X.disableassertions
- // def nouescape = X.nouescape
- // def plugin = X.plugin
- // def pluginsDir = X.pluginsdir
- // def printtypes = X.printtypes
- // def prompt = X.prompt
- // def require = X.require
- // def resident = X.resident
- // def script = X.script
- // def showPhases = X.showphases
- // def showPlugins = X.pluginlist
- // def sourceReader = X.sourcereader
- // def sourcedir = X.sourcedir
- // def writeICode = X.printicode
-} \ No newline at end of file
diff --git a/src/compiler/scala/tools/nsc/settings/AestheticSettings.scala b/src/compiler/scala/tools/nsc/settings/AestheticSettings.scala
deleted file mode 100644
index da2c89d707..0000000000
--- a/src/compiler/scala/tools/nsc/settings/AestheticSettings.scala
+++ /dev/null
@@ -1,39 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools.nsc
-package settings
-
-/** Taking flag checking to a somewhat higher level. */
-trait AestheticSettings {
- def settings: Settings
-
- // Some(value) if setting has been set by user, None otherwise.
- def optSetting[T](s: Settings#Setting): Option[T] =
- if (s.isDefault) None else Some(s.value.asInstanceOf[T])
-
- def script = optSetting[String](settings.script)
- def encoding = optSetting[String](settings.encoding)
- def sourceReader = optSetting[String](settings.sourceReader)
-
- def debug = settings.debug.value
- def declsOnly = false
- def deprecation = settings.deprecation.value
- def experimental = settings.Xexperimental.value
- def fatalWarnings = settings.fatalWarnings.value
- def feature = settings.feature.value
- def future = settings.future.value
- def logClasspath = settings.Ylogcp.value
- def printStats = settings.Ystatistics.value
- def target = settings.target.value
- def unchecked = settings.unchecked.value
- def verbose = settings.verbose.value
- def virtPatmat = !settings.XoldPatmat.value
-
- /** Derived values */
- def jvm = target startsWith "jvm"
- def msil = target == "msil"
- def verboseDebug = debug && verbose
-}
diff --git a/src/compiler/scala/tools/nsc/settings/FscSettings.scala b/src/compiler/scala/tools/nsc/settings/FscSettings.scala
index 5c852ae07c..34c8e8df9a 100644
--- a/src/compiler/scala/tools/nsc/settings/FscSettings.scala
+++ b/src/compiler/scala/tools/nsc/settings/FscSettings.scala
@@ -8,7 +8,7 @@ package nsc
package settings
import util.ClassPath
-import io.{ Directory, Path, AbstractFile }
+import io.{ Path, AbstractFile }
class FscSettings(error: String => Unit) extends Settings(error) {
outer =>
@@ -38,7 +38,7 @@ class FscSettings(error: String => Unit) extends Settings(error) {
private def holdsPath = Set[Settings#Setting](
d, dependencyfile, pluginsDir, Ygenjavap
)
-
+
override def processArguments(arguments: List[String], processAll: Boolean): (Boolean, List[String]) = {
val (r, args) = super.processArguments(arguments, processAll)
// we need to ensure the files specified with relative locations are absolutized based on the currentDir
diff --git a/src/compiler/scala/tools/nsc/settings/MutableSettings.scala b/src/compiler/scala/tools/nsc/settings/MutableSettings.scala
index e4f99474e1..cd23ad74e4 100644
--- a/src/compiler/scala/tools/nsc/settings/MutableSettings.scala
+++ b/src/compiler/scala/tools/nsc/settings/MutableSettings.scala
@@ -10,7 +10,6 @@ package settings
import io.{ AbstractFile, Jar, Path, PlainFile, VirtualDirectory }
import scala.reflect.internal.util.StringOps
-import scala.collection.mutable.ListBuffer
import scala.io.Source
import scala.reflect.{ ClassTag, classTag }
@@ -63,40 +62,33 @@ class MutableSettings(val errorFn: String => Unit)
(checkDependencies, residualArgs)
case "--" :: xs =>
(checkDependencies, xs)
+ // discard empties, sometimes they appear because of ant or etc.
+ // but discard carefully, because an empty string is valid as an argument
+ // to an option, e.g. -cp "" . So we discard them only when they appear
+ // where an option should be, not where an argument to an option should be.
+ case "" :: xs =>
+ loop(xs, residualArgs)
case x :: xs =>
- val isOpt = x startsWith "-"
- if (isOpt) {
- val newArgs = parseParams(args)
- if (args eq newArgs) {
- errorFn(s"bad option: '$x'")
- (false, args)
+ if (x startsWith "-") {
+ parseParams(args) match {
+ case newArgs if newArgs eq args => errorFn(s"bad option: '$x'") ; (false, args)
+ case newArgs => loop(newArgs, residualArgs)
}
- // discard empties, sometimes they appear because of ant or etc.
- // but discard carefully, because an empty string is valid as an argument
- // to an option, e.g. -cp "" . So we discard them only when they appear
- // in option position.
- else if (x == "") {
- loop(xs, residualArgs)
- }
- else lookupSetting(x) match {
- case Some(s) if s.shouldStopProcessing => (checkDependencies, newArgs)
- case _ => loop(newArgs, residualArgs)
- }
- }
- else {
- if (processAll) loop(xs, residualArgs :+ x)
- else (checkDependencies, args)
}
+ else if (processAll)
+ loop(xs, residualArgs :+ x)
+ else
+ (checkDependencies, args)
}
loop(arguments, Nil)
}
- def processArgumentString(params: String) = processArguments(splitParams(params), true)
+ def processArgumentString(params: String) = processArguments(splitParams(params), processAll = true)
/** Create a new Settings object, copying all user-set values.
*/
def copy(): Settings = {
val s = new Settings()
- s.processArguments(recreateArgs, true)
+ s.processArguments(recreateArgs, processAll = true)
s
}
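
A standalone sketch of the option loop above, illustrating why empty strings are dropped only in option position: an empty argument to an option such as -cp is consumed by the option parser before the loop ever sees it. parseOption stands in for parseParams and is made up for this sketch:

object ArgLoopSketch {
  // Stand-in for parseParams: a recognised option consumes itself (and, for
  // -cp, its argument); an unrecognised option returns the list unchanged.
  def parseOption(args: List[String]): List[String] = args match {
    case "-cp" :: _ :: rest => rest
    case "-verbose" :: rest => rest
    case _                  => args
  }

  def loop(args: List[String], residual: List[String]): (Boolean, List[String]) = args match {
    case Nil      => (true, residual)
    case "" :: xs => loop(xs, residual)              // empties dropped only here
    case x :: _ if x startsWith "-" =>
      parseOption(args) match {
        case newArgs if newArgs eq args => println(s"bad option: '$x'"); (false, args)
        case newArgs                    => loop(newArgs, residual)
      }
    case x :: xs => loop(xs, residual :+ x)          // residual (non-option) argument
  }

  def main(args: Array[String]): Unit =
    println(loop(List("", "-cp", "", "Foo.scala"), Nil))  // (true,List(Foo.scala))
}
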
@@ -134,7 +126,7 @@ class MutableSettings(val errorFn: String => Unit)
// if arg is of form -Xfoo:bar,baz,quux
def parseColonArg(s: String): Option[List[String]] = {
- val (p, args) = StringOps.splitWhere(s, _ == ':', true) getOrElse (return None)
+ val (p, args) = StringOps.splitWhere(s, _ == ':', doDropIndex = true) getOrElse (return None)
// any non-Nil return value means failure and we return s unmodified
tryToSetIfExists(p, (args split ",").toList, (s: Setting) => s.tryToSetColon _)
@@ -184,7 +176,7 @@ class MutableSettings(val errorFn: String => Unit)
* The class loader defining `T` should provide resources `app.class.path`
* and `boot.class.path`. These resources should contain the application
* and boot classpaths in the same form as would be passed on the command line.*/
- def embeddedDefaults[T: ClassTag]: Unit =
+ def embeddedDefaults[T: ClassTag]: Unit = // called from sbt and repl
embeddedDefaults(classTag[T].runtimeClass.getClassLoader)
/** Initializes these settings for embedded use by a class from the given class loader.
@@ -248,7 +240,7 @@ class MutableSettings(val errorFn: String => Unit)
/** Add a destination directory for sources found under srcdir.
* Both directories should exist.
*/
- def add(srcDir: String, outDir: String): Unit =
+ def add(srcDir: String, outDir: String): Unit = // used in ide?
add(checkDir(AbstractFile.getDirectory(srcDir), srcDir),
checkDir(AbstractFile.getDirectory(outDir), outDir))
@@ -256,8 +248,7 @@ class MutableSettings(val errorFn: String => Unit)
private def checkDir(dir: AbstractFile, name: String, allowJar: Boolean = false): AbstractFile = (
if (dir != null && dir.isDirectory)
dir
-// was: else if (allowJar && dir == null && Path.isJarOrZip(name, false))
- else if (allowJar && dir == null && Jar.isJarOrZip(name, false))
+ else if (allowJar && dir == null && Jar.isJarOrZip(name, examineFile = false))
new PlainFile(Path(name))
else
throw new FatalError(name + " does not exist or is not a directory")
@@ -268,7 +259,7 @@ class MutableSettings(val errorFn: String => Unit)
*/
def setSingleOutput(outDir: String) {
val dst = AbstractFile.getDirectory(outDir)
- setSingleOutput(checkDir(dst, outDir, true))
+ setSingleOutput(checkDir(dst, outDir, allowJar = true))
}
def getSingleOutput: Option[AbstractFile] = singleOutDir
@@ -331,12 +322,12 @@ class MutableSettings(val errorFn: String => Unit)
case Some(d) =>
d match {
case _: VirtualDirectory | _: io.ZipArchive => Nil
- case _ => List(d.lookupPathUnchecked(srcPath, false))
+ case _ => List(d.lookupPathUnchecked(srcPath, directory = false))
}
case None =>
(outputs filter (isBelow _).tupled) match {
case Nil => Nil
- case matches => matches.map(_._1.lookupPathUnchecked(srcPath, false))
+ case matches => matches.map(_._1.lookupPathUnchecked(srcPath, directory = false))
}
}
}
@@ -390,7 +381,7 @@ class MutableSettings(val errorFn: String => Unit)
def max = range map (_._2) getOrElse IntMax
override def value_=(s: Int) =
- if (isInputValid(s)) super.value_=(s) else errorMsg
+ if (isInputValid(s)) super.value_=(s) else errorMsg()
// Validate that min and max are consistent
assert(min <= max)
@@ -422,7 +413,7 @@ class MutableSettings(val errorFn: String => Unit)
if (args.isEmpty) errorAndValue("missing argument", None)
else parseArgument(args.head) match {
case Some(i) => value = i ; Some(args.tail)
- case None => errorMsg ; None
+ case None => errorMsg() ; None
}
def unparse: List[String] =
@@ -443,7 +434,7 @@ class MutableSettings(val errorFn: String => Unit)
def tryToSet(args: List[String]) = { value = true ; Some(args) }
def unparse: List[String] = if (value) List(name) else Nil
- override def tryToSetFromPropertyValue(s : String) {
+ override def tryToSetFromPropertyValue(s : String) { // used from ide
value = s.equalsIgnoreCase("true")
}
}
@@ -494,8 +485,6 @@ class MutableSettings(val errorFn: String => Unit)
descr: String,
default: ScalaVersion)
extends Setting(name, descr) {
- import ScalaVersion._
-
type T = ScalaVersion
protected var v: T = NoScalaVersion
@@ -503,14 +492,14 @@ class MutableSettings(val errorFn: String => Unit)
value = default
Some(args)
}
-
+
override def tryToSetColon(args: List[String]) = args match {
case Nil => value = default; Some(Nil)
case x :: xs => value = ScalaVersion(x, errorFn) ; Some(xs)
}
-
+
override def tryToSetFromPropertyValue(s: String) = tryToSet(List(s))
-
+
def unparse: List[String] = if (value == NoScalaVersion) Nil else List(s"${name}:${value.unparse}")
withHelpSyntax(s"${name}:<${arg}>")
@@ -565,7 +554,7 @@ class MutableSettings(val errorFn: String => Unit)
Some(rest)
}
override def tryToSetColon(args: List[String]) = tryToSet(args)
- override def tryToSetFromPropertyValue(s: String) = tryToSet(s.trim.split(',').toList)
+ override def tryToSetFromPropertyValue(s: String) = tryToSet(s.trim.split(',').toList) // used from ide
def unparse: List[String] = value map (name + ":" + _)
withHelpSyntax(name + ":<" + arg + ">")
@@ -599,7 +588,7 @@ class MutableSettings(val errorFn: String => Unit)
}
def unparse: List[String] =
if (value == default) Nil else List(name + ":" + value)
- override def tryToSetFromPropertyValue(s: String) = tryToSetColon(s::Nil)
+ override def tryToSetFromPropertyValue(s: String) = tryToSetColon(s::Nil) // used from ide
withHelpSyntax(name + ":<" + helpArg + ">")
}
diff --git a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala
index dbfaa2c531..fe9165203f 100644
--- a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala
+++ b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala
@@ -4,7 +4,8 @@
*/
// $Id$
-package scala.tools
+package scala
+package tools
package nsc
package settings
@@ -38,11 +39,12 @@ trait ScalaSettings extends AbsScalaSettings
protected def futureSettings = List[BooleanSetting]()
/** Enabled under -optimise. */
- protected def optimiseSettings = List[BooleanSetting](inline, inlineHandlers, Xcloselim, Xdce)
+ protected def optimiseSettings = List[BooleanSetting](inline, inlineHandlers, Xcloselim, Xdce, YconstOptimization)
/** Internal use - syntax enhancements. */
private class EnableSettings[T <: BooleanSetting](val s: T) {
def enabling(toEnable: List[BooleanSetting]): s.type = s withPostSetHook (_ => toEnable foreach (_.value = s.value))
+ def disabling(toDisable: List[BooleanSetting]): s.type = s withPostSetHook (_ => toDisable foreach (_.value = !s.value))
def andThen(f: s.T => Unit): s.type = s withPostSetHook (setting => f(setting.value))
}
private implicit def installEnableSettings[T <: BooleanSetting](s: T) = new EnableSettings(s)
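
A self-contained sketch of the post-set-hook mechanism that enabling and the new disabling build on; the tiny BoolSetting class below is a stand-in for the real Setting machinery in MutableSettings:

object PostSetHookSketch {
  final class BoolSetting(val name: String) {
    var value = false
    private var hook: BoolSetting => Unit = _ => ()
    def withPostSetHook(f: BoolSetting => Unit): this.type = { hook = f; this }
    def set(b: Boolean): Unit = { value = b; hook(this) }   // the hook runs after every set
  }

  def enabling(s: BoolSetting, toEnable: List[BoolSetting]): BoolSetting =
    s withPostSetHook (_ => toEnable foreach (_.value = s.value))

  def main(args: Array[String]): Unit = {
    val inline   = new BoolSetting("-Yinline")
    val dce      = new BoolSetting("-Ydead-code")
    val optimise = enabling(new BoolSetting("-optimise"), List(inline, dce))

    optimise set true
    println((inline.value, dce.value))   // (true,true): setting -optimise flips its group
  }
}
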
@@ -52,14 +54,14 @@ trait ScalaSettings extends AbsScalaSettings
val jvmargs = PrefixSetting("-J<flag>", "-J", "Pass <flag> directly to the runtime system.")
val defines = PrefixSetting("-Dproperty=value", "-D", "Pass -Dproperty=value directly to the runtime system.")
- val toolcp = PathSetting("-toolcp", "Add to the runner classpath.", "")
+ /*val toolcp =*/ PathSetting("-toolcp", "Add to the runner classpath.", "")
val nobootcp = BooleanSetting("-nobootcp", "Do not use the boot classpath for the scala jars.")
/**
* Standard settings
*/
// argfiles is only for the help message
- val argfiles = BooleanSetting ("@<file>", "A text file containing compiler arguments (options and source files)")
+ /*val argfiles = */ BooleanSetting ("@<file>", "A text file containing compiler arguments (options and source files)")
val classpath = PathSetting ("-classpath", "Specify where to find user class files.", defaultClasspath) withAbbreviation "-cp"
val d = OutputSetting (outputDirs, ".")
val nospecialization = BooleanSetting ("-no-specialization", "Ignore @specialize annotations.")
@@ -69,11 +71,8 @@ trait ScalaSettings extends AbsScalaSettings
* -X "Advanced" settings
*/
val Xhelp = BooleanSetting ("-X", "Print a synopsis of advanced options.")
- val assemname = StringSetting ("-Xassem-name", "file", "(Requires -target:msil) Name of the output assembly.", "").dependsOn(target, "msil")
- val assemrefs = StringSetting ("-Xassem-path", "path", "(Requires -target:msil) List of assemblies referenced by the program.", ".").dependsOn(target, "msil")
- val assemextdirs = StringSetting ("-Xassem-extdirs", "dirs", "(Requires -target:msil) List of directories containing assemblies. default:lib", Defaults.scalaLibDir.path).dependsOn(target, "msil")
- val sourcedir = StringSetting ("-Xsourcedir", "directory", "(Requires -target:msil) Mirror source folder structure in output directory.", ".").dependsOn(target, "msil")
val checkInit = BooleanSetting ("-Xcheckinit", "Wrap field accessors to throw an exception on uninitialized access.")
+ val developer = BooleanSetting ("-Xdev", "Indicates user is a developer - issue warnings about anything which seems amiss")
val noassertions = BooleanSetting ("-Xdisable-assertions", "Generate no assertions or assumptions.")
val elidebelow = IntSetting ("-Xelide-below", "Calls to @elidable methods are omitted if method priority is lower than argument",
elidable.MINIMUM, None, elidable.byName get _)
@@ -107,15 +106,12 @@ trait ScalaSettings extends AbsScalaSettings
val showPhases = BooleanSetting ("-Xshow-phases", "Print a synopsis of compiler phases.")
val sourceReader = StringSetting ("-Xsource-reader", "classname", "Specify a custom method for reading source files.", "")
- val XoldPatmat = BooleanSetting ("-Xoldpatmat", "Use the pre-2.10 pattern matcher. Otherwise, the 'virtualizing' pattern matcher is used in 2.10.")
val XnoPatmatAnalysis = BooleanSetting ("-Xno-patmat-analysis", "Don't perform exhaustivity/unreachability analysis. Also, ignore @switch annotation.")
val XfullLubs = BooleanSetting ("-Xfull-lubs", "Retains pre 2.10 behavior of less aggressive truncation of least upper bounds.")
- val Xdivergence211 = BooleanSetting ("-Xdivergence211", "Turn on the 2.11 behavior of implicit divergence not terminating recursive implicit searches (SI-7291).")
/** Compatibility stubs for options whose value name did
* not previously match the option name.
*/
- def XO = optimise
def debuginfo = g
def dependenciesFile = dependencyfile
def nowarnings = nowarn
@@ -128,10 +124,12 @@ trait ScalaSettings extends AbsScalaSettings
val overrideObjects = BooleanSetting ("-Yoverride-objects", "Allow member objects to be overridden.")
val overrideVars = BooleanSetting ("-Yoverride-vars", "Allow vars to be overridden.")
val Yhelp = BooleanSetting ("-Y", "Print a synopsis of private options.")
+ val breakCycles = BooleanSetting ("-Ybreak-cycles", "Attempt to break cycles encountered during typing")
val browse = PhasesSetting ("-Ybrowse", "Browse the abstract syntax tree after")
val check = PhasesSetting ("-Ycheck", "Check the tree at the end of")
val Yshow = PhasesSetting ("-Yshow", "(Requires -Xshow-class or -Xshow-object) Show after")
val Xcloselim = BooleanSetting ("-Yclosure-elim", "Perform closure elimination.")
+ val YconstOptimization = BooleanSetting ("-Yconst-opt", "Perform optimization with constant values.")
val Ycompacttrees = BooleanSetting ("-Ycompact-trees", "Use compact tree printer when displaying trees.")
val noCompletion = BooleanSetting ("-Yno-completion", "Disable tab-completion in the REPL.")
val Xdce = BooleanSetting ("-Ydead-code", "Perform dead code elimination.")
@@ -159,33 +157,28 @@ trait ScalaSettings extends AbsScalaSettings
val Yshowsymkinds = BooleanSetting ("-Yshow-symkinds", "Print abbreviated symbol kinds next to symbol names.")
val skip = PhasesSetting ("-Yskip", "Skip")
val Ygenjavap = StringSetting ("-Ygen-javap", "dir", "Generate a parallel output directory of .javap files.", "")
+ val Ygenasmp = StringSetting ("-Ygen-asmp", "dir", "Generate a parallel output directory of .asmp files (ie ASM Textifier output).", "")
val Ydumpclasses = StringSetting ("-Ydump-classes", "dir", "Dump the generated bytecode to .class files (useful for reflective compilation that utilizes in-memory classloaders).", "")
val Ynosqueeze = BooleanSetting ("-Yno-squeeze", "Disable creation of compact code in matching.")
val Ystatistics = BooleanSetting ("-Ystatistics", "Print compiler statistics.") andThen (scala.reflect.internal.util.Statistics.enabled = _)
val stopAfter = PhasesSetting ("-Ystop-after", "Stop after") withAbbreviation ("-stop") // backward compat
val stopBefore = PhasesSetting ("-Ystop-before", "Stop before")
- val refinementMethodDispatch
- = ChoiceSetting ("-Ystruct-dispatch", "policy", "structural method dispatch policy", List("no-cache", "mono-cache", "poly-cache", "invoke-dynamic"), "poly-cache")
val Yrangepos = BooleanSetting ("-Yrangepos", "Use range positions for syntax trees.")
- val Ybuilderdebug = ChoiceSetting ("-Ybuilder-debug", "manager", "Compile using the specified build manager.", List("none", "refined", "simple"), "none")
+ val Ymemberpos = StringSetting ("-Yshow-member-pos", "output style", "Show start and end positions of members", "") withPostSetHook (_ => Yrangepos.value = true)
val Yreifycopypaste = BooleanSetting ("-Yreify-copypaste", "Dump the reified trees in copypasteable representation.")
- val Ymacronoexpand = BooleanSetting ("-Ymacro-no-expand", "Don't expand macros. Might be useful for scaladoc and presentation compiler, but will crash anything which uses macros and gets past typer.")
val Yreplsync = BooleanSetting ("-Yrepl-sync", "Do not use asynchronous code for repl startup")
- val Ynotnull = BooleanSetting ("-Ynotnull", "Enable (experimental and incomplete) scala.NotNull.")
+ val Yreploutdir = StringSetting ("-Yrepl-outdir", "path", "Write repl-generated classfiles to given output directory (use \"\" to generate a temporary dir)" , "")
val YmethodInfer = BooleanSetting ("-Yinfer-argument-types", "Infer types for arguments of overridden methods.")
- val etaExpandKeepsStar = BooleanSetting ("-Yeta-expand-keeps-star", "Eta-expand varargs methods to T* rather than Seq[T]. This is a temporary option to ease transition.")
+ val etaExpandKeepsStar = BooleanSetting ("-Yeta-expand-keeps-star", "Eta-expand varargs methods to T* rather than Seq[T]. This is a temporary option to ease transition.").
+ withDeprecationMessage("This flag is scheduled for removal in 2.12. If you have a case where you need this flag then please report a bug.")
val Yinvalidate = StringSetting ("-Yinvalidate", "classpath-entry", "Invalidate classpath entry before run", "")
- val noSelfCheck = BooleanSetting ("-Yno-self-type-checks", "Suppress check for self-type conformance among inherited members.")
val YvirtClasses = false // too embryonic to even expose as a -Y //BooleanSetting ("-Yvirtual-classes", "Support virtual classes")
+ val YdisableUnreachablePrevention = BooleanSetting("-Ydisable-unreachable-prevention", "Disable the prevention of unreachable blocks in code generation.")
val exposeEmptyPackage = BooleanSetting("-Yexpose-empty-package", "Internal only: expose the empty package.").internalOnly()
- def stop = stopAfter
-
/** Area-specific debug output.
*/
- val Ybuildmanagerdebug = BooleanSetting("-Ybuild-manager-debug", "Generate debug information for the Refined Build Manager compiler.")
- val Ycompletion = BooleanSetting("-Ycompletion-debug", "Trace all tab completion activity.")
val Ydocdebug = BooleanSetting("-Ydoc-debug", "Trace all scaladoc activity.")
val Yidedebug = BooleanSetting("-Yide-debug", "Generate, validate and output trees using the interactive compiler.")
val Yinferdebug = BooleanSetting("-Yinfer-debug", "Trace type inference and implicit search.")
@@ -195,7 +188,6 @@ trait ScalaSettings extends AbsScalaSettings
val Ypmatdebug = BooleanSetting("-Ypmat-debug", "Trace all pattern matcher activity.")
val Yposdebug = BooleanSetting("-Ypos-debug", "Trace position validation.")
val Yreifydebug = BooleanSetting("-Yreify-debug", "Trace reification.")
- val Yrepldebug = BooleanSetting("-Yrepl-debug", "Trace all repl activity.") andThen (interpreter.replProps.debug setValue _)
val Ytyperdebug = BooleanSetting("-Ytyper-debug", "Trace all type assignments.")
val Ypatmatdebug = BooleanSetting("-Ypatmat-debug", "Trace pattern matching translation.")
@@ -203,6 +195,7 @@ trait ScalaSettings extends AbsScalaSettings
*/
val future = BooleanSetting("-Xfuture", "Turn on future language features.") enabling futureSettings
val optimise = BooleanSetting("-optimise", "Generates faster bytecode by applying optimisations to the program") withAbbreviation "-optimize" enabling optimiseSettings
+ val nooptimise = BooleanSetting("-Ynooptimise", "Clears all the flags set by -optimise. Useful for testing optimizations in isolation.") withAbbreviation "-Ynooptimize" disabling optimise::optimiseSettings
val Xexperimental = BooleanSetting("-Xexperimental", "Enable experimental extensions.") enabling experimentalSettings
// Feature extensions
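The -optimise / -Ynooptimise pair above leans on the settings combinators: `enabling` switches a list of other flags on when the option is parsed, `disabling` switches them off, and `withPostSetHook` (plus the `andThen` helper) runs arbitrary code afterwards; the same mechanism drives -Xlint and -Ywarn-all in Warnings.scala further down. A minimal, self-contained sketch of that pattern, using a toy Flag class of our own rather than the compiler's MutableSettings API:

import scala.collection.mutable.ListBuffer

// Toy stand-in for a boolean setting with post-set hooks; not the real API.
final class Flag(val name: String) {
  var value = false
  private val hooks = ListBuffer[Flag => Unit]()
  def withPostSetHook(f: Flag => Unit): this.type = { hooks += f; this }
  def set(b: Boolean): Unit = { value = b; hooks.foreach(_(this)) }
  // When this flag is switched on, force the given flags on (resp. off).
  def enabling(fs: List[Flag]): this.type  = withPostSetHook(self => if (self.value) fs.foreach(_.set(true)))
  def disabling(fs: List[Flag]): this.type = withPostSetHook(self => if (self.value) fs.foreach(_.set(false)))
}

object SettingsSketch {
  def main(args: Array[String]): Unit = {
    val inline   = new Flag("-Yinline")
    val dce      = new Flag("-Ydead-code")
    val optimise = new Flag("-optimise") enabling List(inline, dce)
    val noopt    = new Flag("-Ynooptimise") disabling (optimise :: List(inline, dce))

    optimise.set(true)
    println(List(optimise, inline, dce).map(f => f.name -> f.value)) // all true
    noopt.set(true)
    println(List(optimise, inline, dce).map(f => f.name -> f.value)) // all false
  }
}

In this toy model, setting -optimise flips the component flags on, and setting -Ynooptimise afterwards clears the whole group, which is the use case the new option describes ("Useful for testing optimizations in isolation").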
diff --git a/src/compiler/scala/tools/nsc/settings/ScalaVersion.scala b/src/compiler/scala/tools/nsc/settings/ScalaVersion.scala
index d6a0149411..da1cc0c4cf 100644
--- a/src/compiler/scala/tools/nsc/settings/ScalaVersion.scala
+++ b/src/compiler/scala/tools/nsc/settings/ScalaVersion.scala
@@ -4,7 +4,8 @@
*/
// $Id$
-package scala.tools.nsc.settings
+package scala
+package tools.nsc.settings
/**
* Represents a single Scala version in a manner that
@@ -19,7 +20,7 @@ abstract class ScalaVersion extends Ordered[ScalaVersion] {
*/
case object NoScalaVersion extends ScalaVersion {
def unparse = "none"
-
+
def compare(that: ScalaVersion): Int = that match {
case NoScalaVersion => 0
case _ => 1
@@ -33,7 +34,7 @@ case object NoScalaVersion extends ScalaVersion {
* to segregate builds
*/
case class SpecificScalaVersion(major: Int, minor: Int, rev: Int, build: ScalaBuild) extends ScalaVersion {
- def unparse = s"${major}.${minor}.${rev}.${build.unparse}"
+ def unparse = s"${major}.${minor}.${rev}.${build.unparse}"
def compare(that: ScalaVersion): Int = that match {
case SpecificScalaVersion(thatMajor, thatMinor, thatRev, thatBuild) =>
@@ -48,7 +49,7 @@ case class SpecificScalaVersion(major: Int, minor: Int, rev: Int, build: ScalaBu
else build compare thatBuild
case AnyScalaVersion => 1
case NoScalaVersion => -1
- }
+ }
}
/**
@@ -56,7 +57,7 @@ case class SpecificScalaVersion(major: Int, minor: Int, rev: Int, build: ScalaBu
*/
case object AnyScalaVersion extends ScalaVersion {
def unparse = "any"
-
+
def compare(that: ScalaVersion): Int = that match {
case AnyScalaVersion => 0
case _ => -1
@@ -70,7 +71,7 @@ object ScalaVersion {
private val dot = "\\."
private val dash = "\\-"
private def not(s:String) = s"[^${s}]"
- private val R = s"((${not(dot)}*)(${dot}(${not(dot)}*)(${dot}(${not(dash)}*)(${dash}(.*))?)?)?)".r
+ private val R = s"((${not(dot)}*)(${dot}(${not(dot)}*)(${dot}(${not(dash)}*)(${dash}(.*))?)?)?)".r
def apply(versionString : String, errorHandler: String => Unit): ScalaVersion = {
def errorAndValue() = {
@@ -82,41 +83,41 @@ object ScalaVersion {
)
AnyScalaVersion
}
-
+
def toInt(s: String) = s match {
case null | "" => 0
case _ => s.toInt
}
-
+
def isInt(s: String) = util.Try(toInt(s)).isSuccess
-
+
def toBuild(s: String) = s match {
case null | "FINAL" => Final
case s if (s.toUpperCase.startsWith("RC") && isInt(s.substring(2))) => RC(toInt(s.substring(2)))
case s if (s.toUpperCase.startsWith("M") && isInt(s.substring(1))) => Milestone(toInt(s.substring(1)))
case _ => Development(s)
}
-
+
try versionString match {
case "none" => NoScalaVersion
case "any" => AnyScalaVersion
- case R(_, majorS, _, minorS, _, revS, _, buildS) =>
+ case R(_, majorS, _, minorS, _, revS, _, buildS) =>
SpecificScalaVersion(toInt(majorS), toInt(minorS), toInt(revS), toBuild(buildS))
- case _ =>
+ case _ =>
errorAndValue()
} catch {
case e: NumberFormatException => errorAndValue()
}
}
-
- def apply(versionString: String): ScalaVersion =
+
+ def apply(versionString: String): ScalaVersion =
apply(versionString, msg => throw new NumberFormatException(msg))
-
+
/**
* The version of the compiler running now
*/
val current = apply(util.Properties.versionNumberString)
-
+
/**
* The 2.8.0 version.
*/
@@ -126,7 +127,7 @@ object ScalaVersion {
/**
* Represents the data after the dash in major.minor.rev-build
*/
-abstract class ScalaBuild extends Ordered[ScalaBuild] {
+abstract class ScalaBuild extends Ordered[ScalaBuild] {
/**
* Return a version of this build information that can be parsed back into the
* same ScalaBuild
@@ -138,7 +139,7 @@ abstract class ScalaBuild extends Ordered[ScalaBuild] {
*/
case class Development(id: String) extends ScalaBuild {
def unparse = s"-${id}"
-
+
def compare(that: ScalaBuild) = that match {
// sorting two development builds based on id is reasonably valid for two versions created with the same schema
// otherwise it's not correct, but since it's impossible to put a total ordering on development build versions
@@ -154,7 +155,7 @@ case class Development(id: String) extends ScalaBuild {
*/
case object Final extends ScalaBuild {
def unparse = ""
-
+
def compare(that: ScalaBuild) = that match {
case Final => 0
// a final is newer than anything other than a development build or another final
@@ -168,14 +169,14 @@ case object Final extends ScalaBuild {
*/
case class RC(n: Int) extends ScalaBuild {
def unparse = s"-RC${n}"
-
+
def compare(that: ScalaBuild) = that match {
// compare two rcs based on their RC numbers
case RC(thatN) => n - thatN
// an rc is older than anything other than a milestone or another rc
case Milestone(_) => 1
- case _ => -1
- }
+ case _ => -1
+ }
}
/**
@@ -183,12 +184,12 @@ case class RC(n: Int) extends ScalaBuild {
*/
case class Milestone(n: Int) extends ScalaBuild {
def unparse = s"-M${n}"
-
+
def compare(that: ScalaBuild) = that match {
// compare two milestones based on their milestone numbers
case Milestone(thatN) => n - thatN
// a milestone is older than anything other than another milestone
case _ => -1
-
- }
+
+ }
}
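The regex R above is what apply() uses to split a version string into major, minor, revision and build groups before handing them to toInt and toBuild. A small standalone sketch, reusing the same regex shape (the demo object, sample string and printout are ours):

object VersionRegexSketch {
  // Same pattern construction as R in ScalaVersion.scala above.
  private val dot  = "\\."
  private val dash = "\\-"
  private def not(s: String) = s"[^${s}]"
  private val R = s"((${not(dot)}*)(${dot}(${not(dot)}*)(${dot}(${not(dash)}*)(${dash}(.*))?)?)?)".r

  def main(args: Array[String]): Unit = {
    "2.10.1-RC2" match {
      case R(_, major, _, minor, _, rev, _, build) =>
        println(s"major=$major minor=$minor rev=$rev build=$build")
        // prints: major=2 minor=10 rev=1 build=RC2
    }
  }
}

With the toBuild logic above, the "RC2" group becomes RC(2), while a missing build group (e.g. for "2.10.1") maps to Final.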
diff --git a/src/compiler/scala/tools/nsc/settings/StandardScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/StandardScalaSettings.scala
index e866ad6ae0..d173aaa848 100644
--- a/src/compiler/scala/tools/nsc/settings/StandardScalaSettings.scala
+++ b/src/compiler/scala/tools/nsc/settings/StandardScalaSettings.scala
@@ -41,16 +41,11 @@ trait StandardScalaSettings {
val optimise: BooleanSetting // depends on post hook which mutates other settings
val print = BooleanSetting ("-print", "Print program with Scala-specific features removed.")
val target = ChoiceSetting ("-target", "target", "Target platform for object files. All JVM 1.5 targets are deprecated.",
- List("jvm-1.5", "jvm-1.5-fjbg", "jvm-1.5-asm", "jvm-1.6", "jvm-1.7", "msil"),
- "jvm-1.6")
+ List("jvm-1.5", "jvm-1.6", "jvm-1.7"), "jvm-1.6")
val unchecked = BooleanSetting ("-unchecked", "Enable additional warnings where generated code depends on assumptions.")
val uniqid = BooleanSetting ("-uniqid", "Uniquely tag all identifiers in debugging output.")
val usejavacp = BooleanSetting ("-usejavacp", "Utilize the java.class.path in classpath resolution.")
+ val usemanifestcp = BooleanSetting ("-usemanifestcp", "Utilize the manifest in classpath resolution.")
val verbose = BooleanSetting ("-verbose", "Output messages about what the compiler is doing.")
val version = BooleanSetting ("-version", "Print product version and exit.")
-
- /** These are @<file> and -Dkey=val style settings, which don't
- * nicely map to identifiers.
- */
- val argfiles: BooleanSetting // exists only to echo help message, should be done differently
}
diff --git a/src/compiler/scala/tools/nsc/settings/Warnings.scala b/src/compiler/scala/tools/nsc/settings/Warnings.scala
index 9f9879210c..791d44153c 100644
--- a/src/compiler/scala/tools/nsc/settings/Warnings.scala
+++ b/src/compiler/scala/tools/nsc/settings/Warnings.scala
@@ -19,18 +19,17 @@ trait Warnings {
// present form, but have the potential to offer useful info.
protected def allWarnings = lintWarnings ++ List(
warnDeadCode,
- warnSelectNullable,
warnValueDiscard,
warnNumericWiden
)
// These warnings should be pretty quiet unless you're doing
// something inadvisable.
protected def lintWarnings = List(
- // warnDeadCode,
warnInaccessible,
warnNullaryOverride,
warnNullaryUnit,
- warnAdaptedArgs
+ warnAdaptedArgs,
+ warnInferAny
)
// Warning groups.
@@ -38,24 +37,28 @@ trait Warnings {
BooleanSetting("-Xlint", "Enable recommended additional warnings.")
withPostSetHook (_ => lintWarnings foreach (_.value = true))
)
- val warnEverything = (
+
+ /*val warnEverything = */ (
BooleanSetting("-Ywarn-all", "Enable all -Y warnings.")
- withPostSetHook (_ => lintWarnings foreach (_.value = true))
+ withPostSetHook { _ =>
+ lint.value = true
+ allWarnings foreach (_.value = true)
+ }
)
+ private lazy val warnSelectNullable = BooleanSetting("-Xcheck-null", "This option is obsolete and does nothing.")
// Individual warnings.
- val warnSelectNullable = BooleanSetting ("-Xcheck-null", "Warn upon selection of nullable reference.")
val warnAdaptedArgs = BooleanSetting ("-Ywarn-adapted-args", "Warn if an argument list is modified to match the receiver.")
val warnDeadCode = BooleanSetting ("-Ywarn-dead-code", "Warn when dead code is identified.")
val warnValueDiscard = BooleanSetting ("-Ywarn-value-discard", "Warn when non-Unit expression results are unused.")
val warnNumericWiden = BooleanSetting ("-Ywarn-numeric-widen", "Warn when numerics are widened.")
val warnNullaryUnit = BooleanSetting ("-Ywarn-nullary-unit", "Warn when nullary methods return Unit.")
val warnInaccessible = BooleanSetting ("-Ywarn-inaccessible", "Warn about inaccessible types in method signatures.")
- val warnNullaryOverride = BooleanSetting ("-Ywarn-nullary-override",
- "Warn when non-nullary overrides nullary, e.g. `def foo()` over `def foo`.")
+ val warnNullaryOverride = BooleanSetting ("-Ywarn-nullary-override", "Warn when non-nullary overrides nullary, e.g. `def foo()` over `def foo`.")
+ val warnInferAny = BooleanSetting ("-Ywarn-infer-any", "Warn when a type argument is inferred to be `Any`.")
// Backward compatibility.
- def Xwarnfatal = fatalWarnings
- def Xchecknull = warnSelectNullable
- def Ywarndeadcode = warnDeadCode
+ @deprecated("Use fatalWarnings", "2.11.0") def Xwarnfatal = fatalWarnings // used by sbt
+ @deprecated("This option is being removed", "2.11.0") def Xchecknull = warnSelectNullable // used by ide
+ @deprecated("Use warnDeadCode", "2.11.0") def Ywarndeadcode = warnDeadCode // used by ide
}
diff --git a/src/compiler/scala/tools/nsc/symtab/BrowsingLoaders.scala b/src/compiler/scala/tools/nsc/symtab/BrowsingLoaders.scala
index f2aab36b51..4e4efef607 100644
--- a/src/compiler/scala/tools/nsc/symtab/BrowsingLoaders.scala
+++ b/src/compiler/scala/tools/nsc/symtab/BrowsingLoaders.scala
@@ -6,7 +6,6 @@
package scala.tools.nsc
package symtab
-import scala.reflect.internal.util.BatchSourceFile
import scala.tools.nsc.io.AbstractFile
/** A subclass of SymbolLoaders that implements browsing behavior.
@@ -28,7 +27,7 @@ abstract class BrowsingLoaders extends SymbolLoaders {
override protected def enterIfNew(owner: Symbol, member: Symbol, completer: SymbolLoader): Symbol = {
completer.sourcefile match {
case Some(src) =>
- (if (member.isModule) member.moduleClass else member).sourceFile = src
+ (if (member.isModule) member.moduleClass else member).associatedFile = src
case _ =>
}
val decls = owner.info.decls
diff --git a/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala b/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala
index 9e5186b731..250feb69bf 100644
--- a/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala
+++ b/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala
@@ -10,10 +10,9 @@ import java.io.IOException
import scala.compat.Platform.currentTime
import scala.tools.nsc.util.{ ClassPath }
import classfile.ClassfileParser
-import scala.reflect.internal.Flags._
import scala.reflect.internal.MissingRequirementError
import scala.reflect.internal.util.Statistics
-import scala.tools.nsc.io.{ AbstractFile, MsilFile }
+import scala.reflect.io.{ AbstractFile, NoAbstractFile }
/** This class ...
*
@@ -31,6 +30,14 @@ abstract class SymbolLoaders {
member
}
+ protected def signalError(root: Symbol, ex: Throwable) {
+ if (settings.debug) ex.printStackTrace()
+ globalError(ex.getMessage() match {
+ case null => "i/o error while loading " + root.name
+ case msg => "error while loading " + root.name + ", " + msg
+ })
+ }
+
/** Enter class with given `name` into scope of `root`
* and give them `completer` as type.
*/
@@ -136,10 +143,10 @@ abstract class SymbolLoaders {
((classRep.binary, classRep.source) : @unchecked) match {
case (Some(bin), Some(src))
if platform.needCompile(bin, src) && !binaryOnly(owner, classRep.name) =>
- if (settings.verbose.value) inform("[symloader] picked up newer source file for " + src.path)
+ if (settings.verbose) inform("[symloader] picked up newer source file for " + src.path)
global.loaders.enterToplevelsFromSource(owner, classRep.name, src)
case (None, Some(src)) =>
- if (settings.verbose.value) inform("[symloader] no class, picked up source file for " + src.path)
+ if (settings.verbose) inform("[symloader] no class, picked up source file for " + src.path)
global.loaders.enterToplevelsFromSource(owner, classRep.name, src)
case (Some(bin), _) =>
global.loaders.enterClassAndModule(owner, classRep.name, platform.newClassLoader(bin))
@@ -160,7 +167,7 @@ abstract class SymbolLoaders {
def sourcefile: Option[AbstractFile] = None
/**
- * Description of the resource (ClassPath, AbstractFile, MsilFile)
+ * Description of the resource (ClassPath, AbstractFile)
* being processed by this loader
*/
protected def description: String
@@ -169,25 +176,13 @@ abstract class SymbolLoaders {
private def setSource(sym: Symbol) {
sourcefile foreach (sf => sym match {
- case cls: ClassSymbol => cls.sourceFile = sf
- case mod: ModuleSymbol => mod.moduleClass.sourceFile = sf
+ case cls: ClassSymbol => cls.associatedFile = sf
+ case mod: ModuleSymbol => mod.moduleClass.associatedFile = sf
case _ => ()
})
}
override def complete(root: Symbol) {
- def signalError(ex: Exception) {
- ok = false
- if (settings.debug.value) ex.printStackTrace()
- val msg = ex.getMessage()
- // SI-5593 Scaladoc's current strategy is to visit all packages in search of user code that can be documented
- // therefore, it will rummage through the classpath triggering errors whenever it encounters package objects
- // that are not in their correct place (see bug for details)
- if (!settings.isScaladoc)
- globalError(
- if (msg eq null) "i/o error while loading " + root.name
- else "error while loading " + root.name + ", " + msg);
- }
try {
val start = currentTime
val currentphase = phase
@@ -197,11 +192,11 @@ abstract class SymbolLoaders {
ok = true
setSource(root)
setSource(root.companionSymbol) // module -> class, class -> module
- } catch {
- case ex: IOException =>
- signalError(ex)
- case ex: MissingRequirementError =>
- signalError(ex)
+ }
+ catch {
+ case ex @ (_: IOException | _: MissingRequirementError) =>
+ ok = false
+ signalError(root, ex)
}
initRoot(root)
if (!root.isPackageClass) initRoot(root.companionSymbol)
@@ -233,7 +228,6 @@ abstract class SymbolLoaders {
assert(root.isPackageClass, root)
root.setInfo(new PackageClassInfoType(newScope, root))
- val sourcepaths = classpath.sourcepaths
if (!root.isRoot) {
for (classRep <- classpath.classes if platform.doLoad(classRep)) {
initializeFromClassPath(root, classRep)
@@ -259,7 +253,7 @@ abstract class SymbolLoaders {
protected def doComplete(root: Symbol) {
val start = if (Statistics.canEnable) Statistics.startTimer(classReadNanos) else null
classfileParser.parse(classfile, root)
- if (root.associatedFile eq null) {
+ if (root.associatedFile eq NoAbstractFile) {
root match {
// In fact, the ModuleSymbol forwards its setter to the module class
case _: ClassSymbol | _: ModuleSymbol =>
@@ -274,16 +268,6 @@ abstract class SymbolLoaders {
override def sourcefile: Option[AbstractFile] = classfileParser.srcfile
}
- class MsilFileLoader(msilFile: MsilFile) extends SymbolLoader with FlagAssigningCompleter {
- private def typ = msilFile.msilType
- private object typeParser extends clr.TypeParser {
- val global: SymbolLoaders.this.global.type = SymbolLoaders.this.global
- }
-
- protected def description = "MsilFile "+ typ.FullName + ", assembly "+ typ.Assembly.FullName
- protected def doComplete(root: Symbol) { typeParser.parse(typ, root) }
- }
-
class SourcefileLoader(val srcfile: AbstractFile) extends SymbolLoader with FlagAssigningCompleter {
protected def description = "source file "+ srcfile.toString
override def fromSource = true
@@ -296,11 +280,6 @@ abstract class SymbolLoaders {
protected def doComplete(root: Symbol) { root.sourceModule.initialize }
}
- object clrTypes extends clr.CLRTypes {
- val global: SymbolLoaders.this.global.type = SymbolLoaders.this.global
- if (global.forMSIL) init()
- }
-
/** used from classfile parser to avoid cycles */
var parentsLevel = 0
var pendingLoadActions: List[() => Unit] = Nil
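The reworked complete() above folds the two identical catch clauses into a single alternative pattern, `case ex @ (_: IOException | _: MissingRequirementError)`, and reports through the new signalError(root, ex). A tiny standalone illustration of that binding pattern; IllegalArgumentException stands in here for the compiler-internal MissingRequirementError, and the object and simplified signalError are invented for the demo:

import java.io.IOException

object CatchAlternativesSketch {
  def signalError(what: String, ex: Throwable): Unit =
    println(s"error while loading $what, ${ex.getMessage}")

  def main(args: Array[String]): Unit = {
    val failures = List[Throwable](new IOException("disk gone"), new IllegalArgumentException("bad requirement"))
    for (t <- failures)
      try throw t
      catch {
        // One handler for several exception types; `ex` is bound to the thrown value.
        case ex @ (_: IOException | _: IllegalArgumentException) => signalError("Foo", ex)
      }
  }
}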
diff --git a/src/compiler/scala/tools/nsc/symtab/SymbolTrackers.scala b/src/compiler/scala/tools/nsc/symtab/SymbolTrackers.scala
index 7a84441e09..daaa625164 100644
--- a/src/compiler/scala/tools/nsc/symtab/SymbolTrackers.scala
+++ b/src/compiler/scala/tools/nsc/symtab/SymbolTrackers.scala
@@ -6,7 +6,6 @@
package scala.tools.nsc
package symtab
-import scala.collection.{ mutable, immutable }
import scala.language.implicitConversions
import scala.language.postfixOps
@@ -17,9 +16,6 @@ trait SymbolTrackers {
val global: Global
import global._
- private implicit lazy val TreeOrdering: Ordering[Tree] =
- Ordering by (x => (x.shortClass, x.symbol))
-
private implicit lazy val SymbolOrdering: Ordering[Symbol] =
Ordering by (x => (x.kindString, x.name.toString))
@@ -76,7 +72,6 @@ trait SymbolTrackers {
private def isFlagsChange(sym: Symbol) = changed.flags contains sym
private implicit def NodeOrdering: Ordering[Node] = Ordering by (_.root)
- private def ownersString(sym: Symbol, num: Int) = sym.ownerChain drop 1 take num mkString " -> "
object Node {
def nodes(syms: Set[Symbol]): List[Node] = {
@@ -114,7 +109,6 @@ trait SymbolTrackers {
case Some(oldFlags) =>
val added = masked & ~oldFlags
val removed = oldFlags & ~masked
- val steady = masked & ~(added | removed)
val all = masked | oldFlags
val strs = 0 to 63 map { bit =>
val flag = 1L << bit
@@ -133,7 +127,7 @@ trait SymbolTrackers {
else " (" + Flags.flagsToString(masked) + ")"
}
def symString(sym: Symbol) = (
- if (settings.debug.value && sym.hasCompleteInfo) {
+ if (settings.debug && sym.hasCompleteInfo) {
val s = sym.defString take 240
if (s.length == 240) s + "..." else s
}
@@ -181,7 +175,7 @@ trait SymbolTrackers {
}
def show(label: String): String = {
val hierarchy = Node(current)
- val Change(added, removed, symMap, owners, flags) = history.head
+ val Change(_, removed, symMap, _, _) = history.head
def detailString(sym: Symbol) = {
val ownerString = sym.ownerChain splitAt 3 match {
case (front, back) =>
diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/AbstractFileReader.scala b/src/compiler/scala/tools/nsc/symtab/classfile/AbstractFileReader.scala
index 427b5bf887..17e3b08ec2 100644
--- a/src/compiler/scala/tools/nsc/symtab/classfile/AbstractFileReader.scala
+++ b/src/compiler/scala/tools/nsc/symtab/classfile/AbstractFileReader.scala
@@ -29,11 +29,6 @@ class AbstractFileReader(val file: AbstractFile) {
*/
var bp: Int = 0
- /** return byte at offset 'pos'
- */
- @throws(classOf[IndexOutOfBoundsException])
- def byteAt(pos: Int): Byte = buf(pos)
-
/** read a byte
*/
@throws(classOf[IndexOutOfBoundsException])
@@ -45,7 +40,7 @@ class AbstractFileReader(val file: AbstractFile) {
/** read some bytes
*/
- def nextBytes(len: Int): Array[Byte] = {
+ def nextBytes(len: Int): Array[Byte] = { // used in ide
bp += len
buf.slice(bp - len, bp)
}
diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
index 47a8b1f947..fb927d15d3 100644
--- a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
+++ b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
@@ -3,7 +3,8 @@
* @author Martin Odersky
*/
-package scala.tools.nsc
+package scala
+package tools.nsc
package symtab
package classfile
@@ -12,9 +13,11 @@ import java.lang.Integer.toHexString
import scala.collection.{ mutable, immutable }
import scala.collection.mutable.{ ListBuffer, ArrayBuffer }
import scala.annotation.switch
+import scala.reflect.internal.{ JavaAccFlags }
import scala.reflect.internal.pickling.{PickleBuffer, ByteCodecs}
import scala.tools.nsc.io.AbstractFile
+
/** This abstract class implements a class file parser.
*
* @author Martin Odersky
@@ -23,15 +26,15 @@ import scala.tools.nsc.io.AbstractFile
abstract class ClassfileParser {
val global: Global
import global._
- import definitions.{ AnnotationClass, ClassfileAnnotationClass }
+ import definitions._
import scala.reflect.internal.ClassfileConstants._
import Flags._
protected var in: AbstractFileReader = _ // the class file reader
protected var clazz: Symbol = _ // the class symbol containing dynamic members
protected var staticModule: Symbol = _ // the module symbol containing static members
- protected var instanceScope: Scope = _ // the scope of all instance definitions
- protected var staticScope: Scope = _ // the scope of all static definitions
+ protected var instanceScope: Scope = _ // the scope of all instance definitions
+ protected var staticScope: Scope = _ // the scope of all static definitions
protected var pool: ConstantPool = _ // the classfile's constant pool
protected var isScala: Boolean = _ // does class file describe a scala class?
protected var isScalaAnnot: Boolean = _ // does class file describe a scala class with its pickled info in an annotation?
@@ -41,19 +44,41 @@ abstract class ClassfileParser {
protected var classTParams = Map[Name,Symbol]()
protected var srcfile0 : Option[AbstractFile] = None
protected def moduleClass: Symbol = staticModule.moduleClass
+ private var sawPrivateConstructor = false
+
+ private def ownerForFlags(jflags: JavaAccFlags) = if (jflags.isStatic) moduleClass else clazz
def srcfile = srcfile0
+ private def optimized = global.settings.optimise.value
+ private def currentIsTopLevel = !(currentClass.decodedName containsChar '$')
+
+ // u1, u2, and u4 are what these data types are called in the JVM spec.
+ // They are an unsigned byte, unsigned char, and unsigned int respectively.
+ // We bitmask u1 into an Int to make sure it's 0-255 (and u1 isn't used
+ // for much beyond tags) but leave u2 alone as it's already unsigned.
+ protected final def u1(): Int = in.nextByte & 0xFF
+ protected final def u2(): Int = in.nextChar.toInt
+ protected final def u4(): Int = in.nextInt
+
+ private def readInnerClassFlags() = readClassFlags()
+ private def readClassFlags() = JavaAccFlags classFlags u2
+ private def readMethodFlags() = JavaAccFlags methodFlags u2
+ private def readFieldFlags() = JavaAccFlags fieldFlags u2
+ private def readTypeName() = readName().toTypeName
+ private def readName() = pool getName u2
+ private def readType() = pool getType u2
+
private object unpickler extends scala.reflect.internal.pickling.UnPickler {
val global: ClassfileParser.this.global.type = ClassfileParser.this.global
}
private def handleMissing(e: MissingRequirementError) = {
- if (settings.debug.value) e.printStackTrace
+ if (settings.debug) e.printStackTrace
throw new IOException(s"Missing dependency '${e.req}', required by ${in.file}")
}
private def handleError(e: Exception) = {
- if (settings.debug.value) e.printStackTrace()
+ if (settings.debug) e.printStackTrace()
throw new IOException(s"class file '${in.file}' is broken\n(${e.getClass}/${e.getMessage})")
}
private def mismatchError(c: Symbol) = {
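The u1/u2/u4 helpers added above are thin readers for the class-file format: an unsigned byte, an unsigned 16-bit value and a 32-bit value, read big-endian, which is what parseHeader below consumes for the magic number and the minor/major version pair. A self-contained sketch of the same idea over a plain byte array (the names and sample buffer are ours, not AbstractFileReader):

object UnsignedReadSketch {
  // Plain-array versions of the u1/u2/u4 readers described above.
  def u1(buf: Array[Byte], p: Int): Int = buf(p) & 0xFF
  def u2(buf: Array[Byte], p: Int): Int = (u1(buf, p) << 8) | u1(buf, p + 1)
  def u4(buf: Array[Byte], p: Int): Int = (u2(buf, p) << 16) | u2(buf, p + 2)

  def main(args: Array[String]): Unit = {
    // First eight bytes of a class file: magic, minor_version, major_version.
    val header = Array(0xCA, 0xFE, 0xBA, 0xBE, 0x00, 0x00, 0x00, 0x32).map(_.toByte)
    println(f"magic=0x${u4(header, 0)}%08X minor=${u2(header, 4)} major=${u2(header, 6)}")
    // prints: magic=0xCAFEBABE minor=0 major=50
  }
}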
@@ -94,75 +119,67 @@ abstract class ClassfileParser {
this.staticModule = if (root.isModule) root else root.companionModule
this.isScala = false
- parseHeader
+ parseHeader()
this.pool = new ConstantPool
parseClass()
}
}
private def parseHeader() {
- val magic = in.nextInt
+ val magic = u4
if (magic != JAVA_MAGIC)
- throw new IOException("class file '" + in.file + "' "
- + "has wrong magic number 0x" + toHexString(magic)
- + ", should be 0x" + toHexString(JAVA_MAGIC))
- val minorVersion = in.nextChar.toInt
- val majorVersion = in.nextChar.toInt
- if ((majorVersion < JAVA_MAJOR_VERSION) ||
- ((majorVersion == JAVA_MAJOR_VERSION) &&
- (minorVersion < JAVA_MINOR_VERSION)))
- throw new IOException("class file '" + in.file + "' "
- + "has unknown version "
- + majorVersion + "." + minorVersion
- + ", should be at least "
- + JAVA_MAJOR_VERSION + "." + JAVA_MINOR_VERSION)
+ abort(s"class file ${in.file} has wrong magic number 0x${toHexString(magic)}")
+
+ val minor, major = u2
+ if (major < JAVA_MAJOR_VERSION || major == JAVA_MAJOR_VERSION && minor < JAVA_MINOR_VERSION)
+ abort(s"class file ${in.file} has unknown version $major.$minor, should be at least $JAVA_MAJOR_VERSION.$JAVA_MINOR_VERSION")
}
class ConstantPool {
- private val len = in.nextChar
- private val starts = new Array[Int](len)
- private val values = new Array[AnyRef](len)
+ private val len = u2
+ private val starts = new Array[Int](len)
+ private val values = new Array[AnyRef](len)
private val internalized = new Array[Name](len)
{ var i = 1
while (i < starts.length) {
starts(i) = in.bp
i += 1
- (in.nextByte.toInt: @switch) match {
- case CONSTANT_UTF8 | CONSTANT_UNICODE =>
- in.skip(in.nextChar)
- case CONSTANT_CLASS | CONSTANT_STRING | CONSTANT_METHODTYPE=>
- in.skip(2)
- case CONSTANT_METHODHANDLE =>
- in.skip(3)
- case CONSTANT_FIELDREF | CONSTANT_METHODREF | CONSTANT_INTFMETHODREF
- | CONSTANT_NAMEANDTYPE | CONSTANT_INTEGER | CONSTANT_FLOAT
- | CONSTANT_INVOKEDYNAMIC =>
- in.skip(4)
- case CONSTANT_LONG | CONSTANT_DOUBLE =>
- in.skip(8)
- i += 1
- case _ =>
- errorBadTag(in.bp - 1)
+ (u1: @switch) match {
+ case CONSTANT_UTF8 | CONSTANT_UNICODE => in skip u2
+ case CONSTANT_CLASS | CONSTANT_STRING | CONSTANT_METHODTYPE => in skip 2
+ case CONSTANT_METHODHANDLE => in skip 3
+ case CONSTANT_FIELDREF | CONSTANT_METHODREF | CONSTANT_INTFMETHODREF => in skip 4
+ case CONSTANT_NAMEANDTYPE | CONSTANT_INTEGER | CONSTANT_FLOAT => in skip 4
+ case CONSTANT_INVOKEDYNAMIC => in skip 4
+ case CONSTANT_LONG | CONSTANT_DOUBLE => in skip 8 ; i += 1
+ case _ => errorBadTag(in.bp - 1)
}
}
}
- /** Return the name found at given index. */
- def getName(index: Int): Name = {
- if (index <= 0 || len <= index)
- errorBadIndex(index)
+ def recordAtIndex[T <: AnyRef](value: T, idx: Int): T = {
+ values(idx) = value
+ value
+ }
- values(index) match {
+ def firstExpecting(index: Int, expected: Int): Int = {
+ val start = starts(index)
+ val first = in.buf(start).toInt
+ if (first == expected) start + 1
+ else this errorBadTag start
+ }
+
+ /** Return the name found at given index. */
+ def getName(index: Int): Name = (
+ if (index <= 0 || len <= index) errorBadIndex(index)
+ else values(index) match {
case name: Name => name
- case null =>
- val start = starts(index)
- if (in.buf(start).toInt != CONSTANT_UTF8) errorBadTag(start)
- val name = newTermName(in.buf, start + 3, in.getChar(start + 1))
- values(index) = name
- name
+ case _ =>
+ val start = firstExpecting(index, CONSTANT_UTF8)
+ recordAtIndex(newTermName(in.buf, start + 2, in.getChar(start).toInt), index)
}
- }
+ )
/** Return the name found at given index in the constant pool, with '/' replaced by '.'. */
def getExternalName(index: Int): Name = {
@@ -177,28 +194,23 @@ abstract class ClassfileParser {
def getClassSymbol(index: Int): Symbol = {
if (index <= 0 || len <= index) errorBadIndex(index)
- var c = values(index).asInstanceOf[Symbol]
- if (c eq null) {
- val start = starts(index)
- if (in.buf(start).toInt != CONSTANT_CLASS) errorBadTag(start)
- val name = getExternalName(in.getChar(start + 1))
- if (nme.isModuleName(name))
- c = rootMirror.getModule(nme.stripModuleSuffix(name))
- else
- c = classNameToSymbol(name)
-
- values(index) = c
+ values(index) match {
+ case sym: Symbol => sym
+ case _ =>
+ val result = getClassName(index) match {
+ case name if nme.isModuleName(name) => rootMirror getModuleByName name.dropModule
+ case name => classNameToSymbol(name)
+ }
+ recordAtIndex(result, index)
}
- c
}
/** Return the external name of the class info structure found at 'index'.
* Use 'getClassSymbol' if the class is sure to be a top-level class.
*/
def getClassName(index: Int): Name = {
- val start = starts(index)
- if (in.buf(start).toInt != CONSTANT_CLASS) errorBadTag(start)
- getExternalName(in.getChar(start + 1))
+ val start = firstExpecting(index, CONSTANT_CLASS)
+ getExternalName((in getChar start).toInt)
}
/** Return the symbol of the class member at `index`.
@@ -219,35 +231,35 @@ abstract class ClassfileParser {
if (first != CONSTANT_FIELDREF &&
first != CONSTANT_METHODREF &&
first != CONSTANT_INTFMETHODREF) errorBadTag(start)
- val ownerTpe = getClassOrArrayType(in.getChar(start + 1))
+ val ownerTpe = getClassOrArrayType(in.getChar(start + 1).toInt)
debuglog("getMemberSymbol(static: " + static + "): owner type: " + ownerTpe + " " + ownerTpe.typeSymbol.originalName)
- val (name0, tpe0) = getNameAndType(in.getChar(start + 3), ownerTpe)
+ val (name0, tpe0) = getNameAndType(in.getChar(start + 3).toInt, ownerTpe)
debuglog("getMemberSymbol: name and tpe: " + name0 + ": " + tpe0)
- forceMangledName(tpe0.typeSymbol.name, false)
- val (name, tpe) = getNameAndType(in.getChar(start + 3), ownerTpe)
+ forceMangledName(tpe0.typeSymbol.name, module = false)
+ val (name, tpe) = getNameAndType(in.getChar(start + 3).toInt, ownerTpe)
if (name == nme.MODULE_INSTANCE_FIELD) {
- val index = in.getChar(start + 1)
- val name = getExternalName(in.getChar(starts(index) + 1))
+ val index = in.getChar(start + 1).toInt
+ val name = getExternalName(in.getChar(starts(index).toInt + 1).toInt)
//assert(name.endsWith("$"), "Not a module class: " + name)
- f = forceMangledName(name dropRight 1, true)
+ f = forceMangledName(name dropRight 1, module = true)
if (f == NoSymbol)
- f = rootMirror.getModule(name dropRight 1)
+ f = rootMirror.getModuleByName(name dropRight 1)
} else {
- val origName = nme.originalName(name)
+ val origName = nme.unexpandedName(name)
val owner = if (static) ownerTpe.typeSymbol.linkedClassOfClass else ownerTpe.typeSymbol
- f = owner.info.findMember(origName, 0, 0, false).suchThat(_.tpe.widen =:= tpe)
+ f = owner.info.findMember(origName, 0, 0, stableOnly = false).suchThat(_.tpe.widen =:= tpe)
if (f == NoSymbol)
- f = owner.info.findMember(newTermName(origName + nme.LOCAL_SUFFIX_STRING), 0, 0, false).suchThat(_.tpe =:= tpe)
+ f = owner.info.findMember(newTermName(origName + nme.LOCAL_SUFFIX_STRING), 0, 0, stableOnly = false).suchThat(_.tpe =:= tpe)
if (f == NoSymbol) {
// if it's an impl class, try to find its static member inside the class
if (ownerTpe.typeSymbol.isImplClass) {
- f = ownerTpe.findMember(origName, 0, 0, false).suchThat(_.tpe =:= tpe)
+ f = ownerTpe.findMember(origName, 0, 0, stableOnly = false).suchThat(_.tpe =:= tpe)
} else {
log("Couldn't find " + name + ": " + tpe + " inside: \n" + ownerTpe)
f = tpe match {
- case MethodType(_, _) => owner.newMethod(name, owner.pos)
- case _ => owner.newVariable(name, owner.pos)
+ case MethodType(_, _) => owner.newMethod(name.toTermName, owner.pos)
+ case _ => owner.newVariable(name.toTermName, owner.pos)
}
f setInfo tpe
log("created fake member " + f.fullName)
@@ -272,94 +284,67 @@ abstract class ClassfileParser {
*/
private def getNameAndType(index: Int, ownerTpe: Type): (Name, Type) = {
if (index <= 0 || len <= index) errorBadIndex(index)
- var p = values(index).asInstanceOf[(Name, Type)]
- if (p eq null) {
- val start = starts(index)
- if (in.buf(start).toInt != CONSTANT_NAMEANDTYPE) errorBadTag(start)
- val name = getName(in.getChar(start + 1).toInt)
- // create a dummy symbol for method types
- val dummySym = ownerTpe.typeSymbol.newMethod(name, ownerTpe.typeSymbol.pos)
- var tpe = getType(dummySym, in.getChar(start + 3).toInt)
-
- // fix the return type, which is blindly set to the class currently parsed
- if (name == nme.CONSTRUCTOR)
- tpe match {
- case MethodType(formals, restpe) =>
- tpe = MethodType(formals, ownerTpe)
+ values(index) match {
+ case p: ((Name @unchecked, Type @unchecked)) => p
+ case _ =>
+ val start = firstExpecting(index, CONSTANT_NAMEANDTYPE)
+ val name = getName(in.getChar(start).toInt)
+ // create a dummy symbol for method types
+ val dummy = ownerTpe.typeSymbol.newMethod(name.toTermName, ownerTpe.typeSymbol.pos)
+ val tpe = getType(dummy, in.getChar(start + 2).toInt)
+ // fix the return type, which is blindly set to the class currently parsed
+ val restpe = tpe match {
+ case MethodType(formals, _) if name == nme.CONSTRUCTOR => MethodType(formals, ownerTpe)
+ case _ => tpe
}
-
- p = (name, tpe)
+ ((name, restpe))
}
- p
}
/** Return the type of a class constant entry. Since
* arrays are considered to be class types, they might
* appear as entries in 'newarray' or 'cast' opcodes.
*/
- def getClassOrArrayType(index: Int): Type = {
+ def getClassOrArrayType(index: Int): Type = (
if (index <= 0 || len <= index) errorBadIndex(index)
- val value = values(index)
- var c: Type = null
- if (value eq null) {
- val start = starts(index)
- if (in.buf(start).toInt != CONSTANT_CLASS) errorBadTag(start)
- val name = getExternalName(in.getChar(start + 1))
- if (name.charAt(0) == ARRAY_TAG) {
- c = sigToType(null, name)
- values(index) = c
- } else {
- val sym = classNameToSymbol(name)
- /*if (name.endsWith("$")) definitions.getModule(name.subName(0, name.length - 1))
- else if (name.endsWith("$class")) definitions.getModule(name)
- else definitions.getClass(name)*/
- values(index) = sym
- c = sym.tpe
- }
- } else c = value match {
- case tp: Type => tp
- case cls: Symbol => cls.tpe
+ else values(index) match {
+ case tp: Type => tp
+ case cls: Symbol => cls.tpe_*
+ case _ =>
+ val name = getClassName(index)
+ name charAt 0 match {
+ case ARRAY_TAG => recordAtIndex(sigToType(null, name), index)
+ case _ => recordAtIndex(classNameToSymbol(name), index).tpe_*
+ }
}
- c
- }
-
- def getType(index: Int): Type = getType(null, index)
-
- def getType(sym: Symbol, index: Int): Type =
- sigToType(sym, getExternalName(index))
+ )
- def getSuperClass(index: Int): Symbol =
- if (index == 0) definitions.AnyClass else getClassSymbol(index)
+ def getType(index: Int): Type = getType(null, index)
+ def getType(sym: Symbol, index: Int): Type = sigToType(sym, getExternalName(index))
+ def getSuperClass(index: Int): Symbol = if (index == 0) AnyClass else getClassSymbol(index)
- def getConstant(index: Int): Constant = {
+ private def createConstant(index: Int): Constant = {
+ val start = starts(index)
+ Constant((in.buf(start).toInt: @switch) match {
+ case CONSTANT_STRING => getName(in.getChar(start + 1).toInt).toString
+ case CONSTANT_INTEGER => in.getInt(start + 1)
+ case CONSTANT_FLOAT => in.getFloat(start + 1)
+ case CONSTANT_LONG => in.getLong(start + 1)
+ case CONSTANT_DOUBLE => in.getDouble(start + 1)
+ case CONSTANT_CLASS => getClassOrArrayType(index).typeSymbol.tpe_* // !!! Is this necessary or desirable?
+ case _ => errorBadTag(start)
+ })
+ }
+ def getConstant(index: Char): Constant = getConstant(index.toInt)
+ def getConstant(index: Int): Constant = (
if (index <= 0 || len <= index) errorBadIndex(index)
- var value = values(index)
- if (value eq null) {
- val start = starts(index)
- value = (in.buf(start).toInt: @switch) match {
- case CONSTANT_STRING =>
- Constant(getName(in.getChar(start + 1).toInt).toString)
- case CONSTANT_INTEGER =>
- Constant(in.getInt(start + 1))
- case CONSTANT_FLOAT =>
- Constant(in.getFloat(start + 1))
- case CONSTANT_LONG =>
- Constant(in.getLong(start + 1))
- case CONSTANT_DOUBLE =>
- Constant(in.getDouble(start + 1))
- case CONSTANT_CLASS =>
- getClassOrArrayType(index).typeSymbol
- case _ =>
- errorBadTag(start)
- }
- values(index) = value
- }
- value match {
- case ct: Constant => ct
- case cls: Symbol => Constant(cls.tpe)
- case arr: Type => Constant(arr)
+ else values(index) match {
+ case const: Constant => const
+ case sym: Symbol => Constant(sym.tpe_*)
+ case tpe: Type => Constant(tpe)
+ case _ => recordAtIndex(createConstant(index), index)
}
- }
+ )
private def getSubArray(bytes: Array[Byte]): Array[Byte] = {
val decodedLength = ByteCodecs.decode(bytes)
@@ -368,46 +353,41 @@ abstract class ClassfileParser {
arr
}
- def getBytes(index: Int): Array[Byte] = {
+ def getBytes(index: Int): Array[Byte] = (
if (index <= 0 || len <= index) errorBadIndex(index)
- var value = values(index).asInstanceOf[Array[Byte]]
- if (value eq null) {
- val start = starts(index)
- if (in.buf(start).toInt != CONSTANT_UTF8) errorBadTag(start)
- val len = in.getChar(start + 1)
- val bytes = new Array[Byte](len)
- System.arraycopy(in.buf, start + 3, bytes, 0, len)
- value = getSubArray(bytes)
- values(index) = value
+ else values(index) match {
+ case xs: Array[Byte] => xs
+ case _ =>
+ val start = firstExpecting(index, CONSTANT_UTF8)
+ val len = (in getChar start).toInt
+ val bytes = new Array[Byte](len)
+ System.arraycopy(in.buf, start + 2, bytes, 0, len)
+ recordAtIndex(getSubArray(bytes), index)
}
- value
- }
+ )
def getBytes(indices: List[Int]): Array[Byte] = {
- assert(!indices.isEmpty, indices)
- var value = values(indices.head).asInstanceOf[Array[Byte]]
- if (value eq null) {
- val bytesBuffer = ArrayBuffer.empty[Byte]
- for (index <- indices) {
- if (index <= 0 || ConstantPool.this.len <= index) errorBadIndex(index)
- val start = starts(index)
- if (in.buf(start).toInt != CONSTANT_UTF8) errorBadTag(start)
- val len = in.getChar(start + 1)
- bytesBuffer ++= in.buf.view(start + 3, start + 3 + len)
- }
- value = getSubArray(bytesBuffer.toArray)
- values(indices.head) = value
+ val head = indices.head
+ values(head) match {
+ case xs: Array[Byte] => xs
+ case _ =>
+ val arr: Array[Byte] = indices.toArray flatMap { index =>
+ if (index <= 0 || ConstantPool.this.len <= index) errorBadIndex(index)
+ val start = firstExpecting(index, CONSTANT_UTF8)
+ val len = (in getChar start).toInt
+ in.buf drop start + 2 take len
+ }
+ recordAtIndex(getSubArray(arr), head)
}
- value
}
/** Throws an exception signaling a bad constant index. */
private def errorBadIndex(index: Int) =
- throw new RuntimeException("bad constant pool index: " + index + " at pos: " + in.bp)
+ abort(s"bad constant pool index: $index at pos: ${in.bp}")
/** Throws an exception signaling a bad tag at given address. */
private def errorBadTag(start: Int) =
- throw new RuntimeException("bad constant pool tag " + in.buf(start) + " at byte " + start)
+      abort(s"bad constant pool tag ${in.buf(start)} at byte $start")
}
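Throughout the new ConstantPool code above, firstExpecting verifies the tag byte of an entry and recordAtIndex memoizes the decoded value, so each slot is parsed at most once and later lookups are served straight from the values array. A toy sketch of that caching shape (raw, values and getName here are invented for illustration, not the compiler's pool):

object PoolCacheSketch {
  // Decode a slot at most once, then serve the cached value on later lookups.
  private val raw    = Array[String](null, "alpha", "beta", "gamma")
  private val values = new Array[AnyRef](raw.length)

  private def recordAtIndex[T <: AnyRef](value: T, idx: Int): T = {
    values(idx) = value
    value
  }

  def getName(index: Int): String = values(index) match {
    case s: String => s                        // cache hit, nothing decoded
    case _ =>
      println(s"decoding slot $index")         // happens only on the first lookup
      recordAtIndex(raw(index).toUpperCase, index)
  }

  def main(args: Array[String]): Unit = {
    println(getName(1))  // decoding slot 1, then ALPHA
    println(getName(1))  // ALPHA (served from the cache)
  }
}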
/** Try to force the chain of enclosing classes for the given name. Otherwise
@@ -418,9 +398,9 @@ abstract class ClassfileParser {
var sym: Symbol = rootMirror.RootClass
// was "at flatten.prev"
- beforeFlatten {
+ enteringFlatten {
for (part0 <- parts; if !(part0 == ""); part = newTermName(part0)) {
- val sym1 = beforeIcode {
+ val sym1 = enteringIcode {
sym.linkedClassOfClass.info
sym.info.decl(part.encode)
}//.suchThat(module == _.isModule)
@@ -451,7 +431,7 @@ abstract class ClassfileParser {
ss = name.subName(start, end)
sym = owner.info.decls lookup ss
if (sym == NoSymbol) {
- sym = owner.newPackage(ss) setInfo completer
+ sym = owner.newPackage(ss.toTermName) setInfo completer
sym.moduleClass setInfo completer
owner.info.decls enter sym
}
@@ -466,6 +446,7 @@ abstract class ClassfileParser {
sym
}
}
+
/** FIXME - we shouldn't be doing ad hoc lookups in the empty package.
* The method called "getClassByName" should either return the class or not.
*/
@@ -485,30 +466,27 @@ abstract class ClassfileParser {
catch { case _: FatalError => loadClassSymbol(name) }
}
- var sawPrivateConstructor = false
-
def parseClass() {
- val jflags = in.nextChar
- val isAnnotation = hasAnnotation(jflags)
- var sflags = toScalaClassFlags(jflags)
- var nameIdx = in.nextChar
- currentClass = pool.getClassName(nameIdx)
-
- /** Parse parents for Java classes. For Scala, return AnyRef, since the real type will be unpickled.
- * Updates the read pointer of 'in'. */
+ val jflags = readClassFlags()
+ val sflags = jflags.toScalaFlags
+ val nameIdx = u2
+ currentClass = pool.getClassName(nameIdx)
+
+ /* Parse parents for Java classes. For Scala, return AnyRef, since the real type will be unpickled.
+ * Updates the read pointer of 'in'. */
def parseParents: List[Type] = {
if (isScala) {
- in.nextChar // skip superclass
- val ifaces = in.nextChar
- in.bp += ifaces * 2 // .. and iface count interfaces
- List(definitions.AnyRefClass.tpe) // dummy superclass, will be replaced by pickled information
+ u2 // skip superclass
+ val ifaces = u2
+ in.bp += ifaces * 2 // .. and iface count interfaces
+ List(AnyRefTpe) // dummy superclass, will be replaced by pickled information
}
else raiseLoaderLevel {
- val superType = if (isAnnotation) { in.nextChar; definitions.AnnotationClass.tpe }
- else pool.getSuperClass(in.nextChar).tpe
- val ifaceCount = in.nextChar
- var ifaces = for (i <- List.range(0, ifaceCount)) yield pool.getSuperClass(in.nextChar).tpe
- if (isAnnotation) ifaces = definitions.ClassfileAnnotationClass.tpe :: ifaces
+ val superType = if (jflags.isAnnotation) { u2; AnnotationClass.tpe }
+ else pool.getSuperClass(u2).tpe_*
+ val ifaceCount = u2
+ var ifaces = for (i <- List.range(0, ifaceCount)) yield pool.getSuperClass(u2).tpe_*
+ if (jflags.isAnnotation) ifaces ::= ClassfileAnnotationClass.tpe
superType :: ifaces
}
}
@@ -533,31 +511,30 @@ abstract class ClassfileParser {
val staticInfo = ClassInfoType(List(), staticScope, moduleClass)
if (!isScala && !isScalaRaw)
- enterOwnInnerClasses
+ enterOwnInnerClasses()
val curbp = in.bp
skipMembers() // fields
skipMembers() // methods
if (!isScala) {
clazz setFlag sflags
- importPrivateWithinFromJavaFlags(clazz, jflags)
- importPrivateWithinFromJavaFlags(staticModule, jflags)
- clazz.setInfo(classInfo)
+ propagatePackageBoundary(jflags, clazz, staticModule)
+ clazz setInfo classInfo
moduleClass setInfo staticInfo
- staticModule.setInfo(moduleClass.tpe)
- staticModule.setFlag(JAVA)
- staticModule.moduleClass.setFlag(JAVA)
+ staticModule setInfo moduleClass.tpe
+ staticModule setFlag JAVA
+ staticModule.moduleClass setFlag JAVA
// attributes now depend on having infos set already
parseAttributes(clazz, classInfo)
def queueLoad() {
in.bp = curbp
- 0 until in.nextChar foreach (_ => parseField())
+ 0 until u2 foreach (_ => parseField())
sawPrivateConstructor = false
- 0 until in.nextChar foreach (_ => parseMethod())
+ 0 until u2 foreach (_ => parseMethod())
val needsConstructor = (
!sawPrivateConstructor
- && instanceScope.lookup(nme.CONSTRUCTOR) == NoSymbol
+ && !(instanceScope containsName nme.CONSTRUCTOR)
&& (sflags & INTERFACE) == 0
)
if (needsConstructor)
@@ -588,26 +565,28 @@ abstract class ClassfileParser {
}
def parseField() {
- val jflags = in.nextChar
- var sflags = toScalaFieldFlags(jflags)
- if ((sflags & PRIVATE) != 0L && !global.settings.optimise.value) {
+ val jflags = readFieldFlags()
+ val sflags = jflags.toScalaFlags
+
+ if ((sflags & PRIVATE) != 0L && !optimized) {
in.skip(4); skipAttributes()
} else {
- val name = pool.getName(in.nextChar)
- val info = pool.getType(in.nextChar)
- val sym = getOwner(jflags).newValue(name, NoPosition, sflags)
- val isEnum = (jflags & JAVA_ACC_ENUM) != 0
+ val name = readName()
+ val info = readType()
+ val sym = ownerForFlags(jflags).newValue(name.toTermName, NoPosition, sflags)
+      // Note: the info may be overwritten later with a generic signature
+ // parsed from SignatureATTR
sym setInfo {
- if (isEnum) ConstantType(Constant(sym))
+ if (jflags.isEnum) ConstantType(Constant(sym))
else info
}
- importPrivateWithinFromJavaFlags(sym, jflags)
+ propagatePackageBoundary(jflags, sym)
parseAttributes(sym, info)
- getScope(jflags).enter(sym)
+ getScope(jflags) enter sym
// sealed java enums
- if (isEnum) {
+ if (jflags.isEnum) {
val enumClass = sym.owner.linkedClassOfClass
if (!enumClass.isSealed)
enumClass setFlag (SEALED | ABSTRACT)
@@ -618,26 +597,27 @@ abstract class ClassfileParser {
}
def parseMethod() {
- val jflags = in.nextChar.toInt
- var sflags = toScalaMethodFlags(jflags)
- if (isPrivate(jflags) && !global.settings.optimise.value) {
- val name = pool.getName(in.nextChar)
+ val jflags = readMethodFlags()
+ val sflags = jflags.toScalaFlags
+ if (jflags.isPrivate && !optimized) {
+ val name = readName()
if (name == nme.CONSTRUCTOR)
sawPrivateConstructor = true
in.skip(2); skipAttributes()
- } else {
- if ((sflags & PRIVATE) != 0L && global.settings.optimise.value) {
+ }
+ else {
+ if ((sflags & PRIVATE) != 0L && optimized) {
in.skip(4); skipAttributes()
} else {
- val name = pool.getName(in.nextChar)
- val sym = getOwner(jflags).newMethod(name, NoPosition, sflags)
- var info = pool.getType(sym, (in.nextChar))
+ val name = readName()
+ val sym = ownerForFlags(jflags).newMethod(name.toTermName, NoPosition, sflags)
+ var info = pool.getType(sym, u2)
if (name == nme.CONSTRUCTOR)
info match {
case MethodType(params, restpe) =>
// if this is a non-static inner class, remove the explicit outer parameter
val newParams = innerClasses getEntry currentClass match {
- case Some(entry) if !isScalaRaw && !isStatic(entry.jflags) =>
+ case Some(entry) if !isScalaRaw && !entry.jflags.isStatic =>
/* About `clazz.owner.isPackage` below: SI-5957
* For every nested java class A$B, there are two symbols in the scala compiler.
* 1. created by SymbolLoader, because of the existence of the A$B.class file, owner: package
@@ -652,13 +632,15 @@ abstract class ClassfileParser {
}
info = MethodType(newParams, clazz.tpe)
}
- sym.setInfo(info)
- importPrivateWithinFromJavaFlags(sym, jflags)
+      // Note: the info may be overwritten later with a generic signature
+ // parsed from SignatureATTR
+ sym setInfo info
+ propagatePackageBoundary(jflags, sym)
parseAttributes(sym, info)
- if ((jflags & JAVA_ACC_VARARGS) != 0) {
- sym.setInfo(arrayToRepeated(sym.info))
- }
- getScope(jflags).enter(sym)
+ if (jflags.isVarargs)
+ sym modifyInfo arrayToRepeated
+
+ getScope(jflags) enter sym
}
}
}
@@ -678,15 +660,15 @@ abstract class ClassfileParser {
def sig2type(tparams: immutable.Map[Name,Symbol], skiptvs: Boolean): Type = {
val tag = sig.charAt(index); index += 1
tag match {
- case BYTE_TAG => definitions.ByteClass.tpe
- case CHAR_TAG => definitions.CharClass.tpe
- case DOUBLE_TAG => definitions.DoubleClass.tpe
- case FLOAT_TAG => definitions.FloatClass.tpe
- case INT_TAG => definitions.IntClass.tpe
- case LONG_TAG => definitions.LongClass.tpe
- case SHORT_TAG => definitions.ShortClass.tpe
- case VOID_TAG => definitions.UnitClass.tpe
- case BOOL_TAG => definitions.BooleanClass.tpe
+ case BYTE_TAG => ByteTpe
+ case CHAR_TAG => CharTpe
+ case DOUBLE_TAG => DoubleTpe
+ case FLOAT_TAG => FloatTpe
+ case INT_TAG => IntTpe
+ case LONG_TAG => LongTpe
+ case SHORT_TAG => ShortTpe
+ case VOID_TAG => UnitTpe
+ case BOOL_TAG => BooleanTpe
case 'L' =>
def processInner(tp: Type): Type = tp match {
case TypeRef(pre, sym, args) if (!sym.isStatic) =>
@@ -711,7 +693,7 @@ abstract class ClassfileParser {
val tp = sig2type(tparams, skiptvs)
// sig2type seems to return AnyClass regardless of the situation:
// we don't want Any as a LOWER bound.
- if (tp.typeSymbol == definitions.AnyClass) TypeBounds.empty
+ if (tp.typeSymbol == AnyClass) TypeBounds.empty
else TypeBounds.lower(tp)
case '*' => TypeBounds.empty
}
@@ -725,15 +707,14 @@ abstract class ClassfileParser {
}
accept('>')
assert(xs.length > 0, tp)
- newExistentialType(existentials.toList, typeRef(pre, classSym, xs.toList))
- } else if (classSym.isMonomorphicType) {
- tp
- } else {
- // raw type - existentially quantify all type parameters
- val eparams = typeParamsToExistentials(classSym, classSym.unsafeTypeParams)
- val t = typeRef(pre, classSym, eparams.map(_.tpeHK))
- newExistentialType(eparams, t)
+ logResult("new existential")(newExistentialType(existentials.toList, typeRef(pre, classSym, xs.toList)))
}
+ // isMonomorphicType is false if the info is incomplete, as it usually is here
+ // so have to check unsafeTypeParams.isEmpty before worrying about raw type case below,
+ // or we'll create a boatload of needless existentials.
+ else if (classSym.isMonomorphicType || classSym.unsafeTypeParams.isEmpty) tp
+ // raw type - existentially quantify all type parameters
+ else logResult(s"raw type from $classSym")(unsafeClassExistentialType(classSym))
case tp =>
assert(sig.charAt(index) != '<', s"sig=$sig, index=$index, tp=$tp")
tp
@@ -741,12 +722,14 @@ abstract class ClassfileParser {
val classSym = classNameToSymbol(subName(c => c == ';' || c == '<'))
assert(!classSym.isOverloaded, classSym.alternatives)
- var tpe = processClassType(processInner(classSym.tpe))
+ var tpe = processClassType(processInner(classSym.tpe_*))
while (sig.charAt(index) == '.') {
accept('.')
val name = subName(c => c == ';' || c == '<' || c == '.').toTypeName
val clazz = tpe.member(name)
- tpe = processClassType(processInner(clazz.tpe))
+ val dummyArgs = Nil // the actual arguments are added in processClassType
+ val inner = typeRef(pre = tpe, sym = clazz, args = dummyArgs)
+ tpe = processClassType(inner)
}
accept(';')
tpe
@@ -759,11 +742,11 @@ abstract class ClassfileParser {
// NOTE that the comparison to Object only works for abstract types bounded by classes that are strict subclasses of Object
// if the bound is exactly Object, it will have been converted to Any, and the comparison will fail
// see also RestrictJavaArraysMap (when compiling java sources directly)
- if (elemtp.typeSymbol.isAbstractType && !(elemtp <:< definitions.ObjectClass.tpe)) {
- elemtp = intersectionType(List(elemtp, definitions.ObjectClass.tpe))
+ if (elemtp.typeSymbol.isAbstractType && !(elemtp <:< ObjectTpe)) {
+ elemtp = intersectionType(List(elemtp, ObjectTpe))
}
- definitions.arrayType(elemtp)
+ arrayType(elemtp)
case '(' =>
// we need a method symbol. given in line 486 by calling getType(methodSym, ..)
assert(sym ne null, sig)
@@ -774,14 +757,14 @@ abstract class ClassfileParser {
index += 1
val restype = if (sym != null && sym.isClassConstructor) {
accept('V')
- clazz.tpe
+ clazz.tpe_*
} else
sig2type(tparams, skiptvs)
JavaMethodType(sym.newSyntheticValueParams(paramtypes.toList), restype)
case 'T' =>
val n = subName(';'.==).toTypeName
index += 1
- if (skiptvs) definitions.AnyClass.tpe
+ if (skiptvs) AnyTpe
else tparams(n).typeConstructor
}
} // sig2type(tparams, skiptvs)
@@ -806,14 +789,14 @@ abstract class ClassfileParser {
val tpname = subName(':'.==).toTypeName
val s = sym.newTypeParameter(tpname)
tparams = tparams + (tpname -> s)
- sig2typeBounds(tparams, true)
+ sig2typeBounds(tparams, skiptvs = true)
newTParams += s
}
index = start
while (sig.charAt(index) != '>') {
val tpname = subName(':'.==).toTypeName
val s = tparams(tpname)
- s.setInfo(sig2typeBounds(tparams, false))
+ s.setInfo(sig2typeBounds(tparams, skiptvs = false))
}
accept('>')
}
@@ -822,36 +805,32 @@ abstract class ClassfileParser {
sym.setInfo(new TypeParamsType(ownTypeParams))
val tpe =
if ((sym eq null) || !sym.isClass)
- sig2type(tparams, false)
+ sig2type(tparams, skiptvs = false)
else {
classTParams = tparams
val parents = new ListBuffer[Type]()
while (index < end) {
- parents += sig2type(tparams, false) // here the variance doesnt'matter
+ parents += sig2type(tparams, skiptvs = false) // here the variance doesn't matter
}
ClassInfoType(parents.toList, instanceScope, sym)
}
GenPolyType(ownTypeParams, tpe)
} // sigToType
- class TypeParamsType(override val typeParams: List[Symbol]) extends LazyType with FlagAgnosticCompleter {
- override def complete(sym: Symbol) { throw new AssertionError("cyclic type dereferencing") }
- }
-
def parseAttributes(sym: Symbol, symtype: Type) {
def convertTo(c: Constant, pt: Type): Constant = {
- if (pt.typeSymbol == definitions.BooleanClass && c.tag == IntTag)
+ if (pt.typeSymbol == BooleanClass && c.tag == IntTag)
Constant(c.value != 0)
else
c convertTo pt
}
def parseAttribute() {
- val attrName = pool.getName(in.nextChar).toTypeName
- val attrLen = in.nextInt
+ val attrName = readTypeName()
+ val attrLen = u4
attrName match {
case tpnme.SignatureATTR =>
if (!isScala && !isScalaRaw) {
- val sig = pool.getExternalName(in.nextChar)
+ val sig = pool.getExternalName(u2)
val newType = sigToType(sym, sig)
sym.setInfo(newType)
}
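
Starting with this hunk the patch swaps raw in.nextChar / in.nextInt / in.nextByte calls for the shorthand readers u1, u2 and u4 (plus helpers such as readName / readTypeName). Their definitions are not part of this diff; as a minimal sketch under that assumption, they are unsigned big-endian reads over the classfile buffer:

// Sketch only: the compiler's actual reader differs in detail.
final class DataReader(buf: Array[Byte]) {
  var bp = 0                                // current buffer position
  def u1: Int = { val b = buf(bp) & 0xff; bp += 1; b }
  def u2: Int = (u1 << 8) | u1              // left operand reads the high byte first
  def u4: Int = (u2 << 16) | u2
}
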
@@ -860,14 +839,14 @@ abstract class ClassfileParser {
sym.setFlag(SYNTHETIC | ARTIFACT)
in.skip(attrLen)
case tpnme.BridgeATTR =>
- sym.setFlag(BRIDGE)
+ sym.setFlag(BRIDGE | ARTIFACT)
in.skip(attrLen)
case tpnme.DeprecatedATTR =>
val arg = Literal(Constant("see corresponding Javadoc for more information."))
- sym.addAnnotation(definitions.DeprecatedAttr, arg, Literal(Constant("")))
+ sym.addAnnotation(DeprecatedAttr, arg, Literal(Constant("")))
in.skip(attrLen)
case tpnme.ConstantValueATTR =>
- val c = pool.getConstant(in.nextChar)
+ val c = pool.getConstant(u2)
val c1 = convertTo(c, symtype)
if (c1 ne null) sym.setInfo(ConstantType(c1))
else debugwarn(s"failure to convert $c to $symtype")
@@ -881,7 +860,7 @@ abstract class ClassfileParser {
isScalaRaw = true
// Attribute on methods of java annotation classes when that method has a default
case tpnme.AnnotationDefaultATTR =>
- sym.addAnnotation(definitions.AnnotationDefaultAttr)
+ sym.addAnnotation(AnnotationDefaultAttr)
in.skip(attrLen)
// Java annotations on classes / methods / fields with RetentionPolicy.RUNTIME
case tpnme.RuntimeAnnotationATTR =>
@@ -911,7 +890,7 @@ abstract class ClassfileParser {
parseExceptions(attrLen)
case tpnme.SourceFileATTR =>
- val srcfileLeaf = pool.getName(in.nextChar).toString.trim
+ val srcfileLeaf = readName().toString.trim
val srcpath = sym.enclosingPackage match {
case NoSymbol => srcfileLeaf
case rootMirror.EmptyPackage => srcfileLeaf
@@ -930,8 +909,8 @@ abstract class ClassfileParser {
}
def parseAnnotArg: Option[ClassfileAnnotArg] = {
- val tag = in.nextByte.toChar
- val index = in.nextChar
+ val tag = u1
+ val index = u2
tag match {
case STRING_TAG =>
Some(LiteralAnnotArg(Constant(pool.getName(index).toString)))
@@ -942,7 +921,7 @@ abstract class ClassfileParser {
Some(LiteralAnnotArg(Constant(pool.getType(index))))
case ENUM_TAG =>
val t = pool.getType(index)
- val n = pool.getName(in.nextChar)
+ val n = readName()
val s = t.typeSymbol.companionModule.info.decls.lookup(n)
assert(s != NoSymbol, t)
Some(LiteralAnnotArg(Constant(s)))
@@ -962,43 +941,43 @@ abstract class ClassfileParser {
}
def parseScalaSigBytes: Option[ScalaSigBytes] = {
- val tag = in.nextByte.toChar
+ val tag = u1
assert(tag == STRING_TAG, tag)
- Some(ScalaSigBytes(pool getBytes in.nextChar))
+ Some(ScalaSigBytes(pool getBytes u2))
}
def parseScalaLongSigBytes: Option[ScalaSigBytes] = {
- val tag = in.nextByte.toChar
+ val tag = u1
assert(tag == ARRAY_TAG, tag)
- val stringCount = in.nextChar
+ val stringCount = u2
val entries =
for (i <- 0 until stringCount) yield {
- val stag = in.nextByte.toChar
+ val stag = u1
assert(stag == STRING_TAG, stag)
- in.nextChar.toInt
+ u2
}
Some(ScalaSigBytes(pool.getBytes(entries.toList)))
}
- /** Parse and return a single annotation. If it is malformed,
- * return None.
+ /* Parse and return a single annotation. If it is malformed,
+ * return None.
*/
- def parseAnnotation(attrNameIndex: Char): Option[AnnotationInfo] = try {
+ def parseAnnotation(attrNameIndex: Int): Option[AnnotationInfo] = try {
val attrType = pool.getType(attrNameIndex)
- val nargs = in.nextChar
+ val nargs = u2
val nvpairs = new ListBuffer[(Name, ClassfileAnnotArg)]
var hasError = false
for (i <- 0 until nargs) {
- val name = pool.getName(in.nextChar)
+ val name = readName()
// The "bytes: String" argument of the ScalaSignature attribute is parsed specially so that it is
// available as an array of bytes (the pickled Scala signature) instead of as a string. The pickled signature
// is encoded as a string because of limitations in the Java class file format.
- if ((attrType == definitions.ScalaSignatureAnnotation.tpe) && (name == nme.bytes))
+ if ((attrType == ScalaSignatureAnnotation.tpe) && (name == nme.bytes))
parseScalaSigBytes match {
case Some(c) => nvpairs += ((name, c))
case None => hasError = true
}
- else if ((attrType == definitions.ScalaLongSignatureAnnotation.tpe) && (name == nme.bytes))
+ else if ((attrType == ScalaLongSignatureAnnotation.tpe) && (name == nme.bytes))
parseScalaLongSigBytes match {
case Some(c) => nvpairs += ((name, c))
case None => hasError = true
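
The comment above notes that the pickled Scala signature travels as the bytes: String argument of an annotation, because the class file format has no better slot for it. A hedged demo of what the parser is recovering, assuming the inspected class carries @ScalaSignature (large pickles use @ScalaLongSignature instead):

object ShowScalaSig {
  def main(args: Array[String]): Unit = {
    // The annotation is runtime-retained, so plain Java reflection can see it.
    val ann = classOf[scala.Option[_]].getAnnotation(classOf[scala.reflect.ScalaSignature])
    if (ann == null) println("no ScalaSignature here (perhaps ScalaLongSignature)")
    else println(s"pickled signature: ${ann.bytes.length} chars")
  }
}
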
@@ -1023,20 +1002,20 @@ abstract class ClassfileParser {
// with a `FatalError` exception, handled above. Here you'd end up after a NPE (for example),
// and that should never be swallowed silently.
warning(s"Caught: $ex while parsing annotations in ${in.file}")
- if (settings.debug.value) ex.printStackTrace()
+ if (settings.debug) ex.printStackTrace()
None // ignore malformed annotations
}
- /**
+ /*
* Parse the "Exceptions" attribute which denotes the exceptions
* thrown by a method.
*/
def parseExceptions(len: Int) {
- val nClasses = in.nextChar
+ val nClasses = u2
for (n <- 0 until nClasses) {
// FIXME: this performs an equivalent of getExceptionTypes instead of getGenericExceptionTypes (SI-7065)
- val cls = pool.getClassSymbol(in.nextChar.toInt)
+ val cls = pool.getClassSymbol(u2)
// we call initialize due to the fact that we call Symbol.isMonomorphicType in addThrowsAnnotation
// and that method requires Symbol to be forced to give the right answers, see SI-7107 for details
cls.initialize
@@ -1044,16 +1023,16 @@ abstract class ClassfileParser {
}
}
- /** Parse a sequence of annotations and attaches them to the
- * current symbol sym, except for the ScalaSignature annotation that it returns, if it is available. */
+ /* Parse a sequence of annotations and attach them to the
+ * current symbol sym, except for the ScalaSignature annotation, which is returned if available. */
def parseAnnotations(len: Int): Option[AnnotationInfo] = {
- val nAttr = in.nextChar
+ val nAttr = u2
var scalaSigAnnot: Option[AnnotationInfo] = None
for (n <- 0 until nAttr)
- parseAnnotation(in.nextChar) match {
- case Some(scalaSig) if (scalaSig.atp == definitions.ScalaSignatureAnnotation.tpe) =>
+ parseAnnotation(u2) match {
+ case Some(scalaSig) if (scalaSig.atp == ScalaSignatureAnnotation.tpe) =>
scalaSigAnnot = Some(scalaSig)
- case Some(scalaSig) if (scalaSig.atp == definitions.ScalaLongSignatureAnnotation.tpe) =>
+ case Some(scalaSig) if (scalaSig.atp == ScalaLongSignatureAnnotation.tpe) =>
scalaSigAnnot = Some(scalaSig)
case Some(annot) =>
sym.addAnnotation(annot)
@@ -1063,7 +1042,7 @@ abstract class ClassfileParser {
}
// begin parseAttributes
- for (i <- 0 until in.nextChar) parseAttribute()
+ for (i <- 0 until u2) parseAttribute()
}
/** Enter own inner classes in the right scope. It needs the scopes to be set up,
@@ -1073,11 +1052,12 @@ abstract class ClassfileParser {
def className(name: Name): Name =
name.subName(name.lastPos('.') + 1, name.length)
- def enterClassAndModule(entry: InnerClassEntry, file: AbstractFile, jflags: Int) {
+ def enterClassAndModule(entry: InnerClassEntry, file: AbstractFile) {
+ def jflags = entry.jflags
val completer = new global.loaders.ClassfileLoader(file)
val name = entry.originalName
- var sflags = toScalaClassFlags(jflags)
- val owner = getOwner(jflags)
+ val sflags = jflags.toScalaFlags
+ val owner = ownerForFlags(jflags)
val scope = getScope(jflags)
val innerClass = owner.newClass(name.toTypeName, NoPosition, sflags) setInfo completer
val innerModule = owner.newModule(name.toTermName, NoPosition, sflags) setInfo completer
@@ -1106,7 +1086,7 @@ abstract class ClassfileParser {
val file = global.classPath.findSourceFile(entry.externalName.toString) getOrElse {
throw new AssertionError(entry.externalName)
}
- enterClassAndModule(entry, file, entry.jflags)
+ enterClassAndModule(entry, file)
}
}
}
@@ -1119,26 +1099,27 @@ abstract class ClassfileParser {
skipSuperclasses()
skipMembers() // fields
skipMembers() // methods
- val attrs = in.nextChar
+ val attrs = u2
for (i <- 0 until attrs) {
- val attrName = pool.getName(in.nextChar).toTypeName
- val attrLen = in.nextInt
+ val attrName = readTypeName()
+ val attrLen = u4
attrName match {
case tpnme.SignatureATTR =>
in.skip(attrLen)
case tpnme.ScalaSignatureATTR =>
isScala = true
val pbuf = new PickleBuffer(in.buf, in.bp, in.bp + attrLen)
- pbuf.readNat; pbuf.readNat;
+ pbuf.readNat(); pbuf.readNat()
if (pbuf.readNat == 0) // a scala signature attribute with no entries means that the actual scala signature
isScalaAnnot = true // is in a ScalaSignature annotation.
in.skip(attrLen)
case tpnme.ScalaATTR =>
isScalaRaw = true
case tpnme.InnerClassesATTR if !isScala =>
- val entries = in.nextChar.toInt
+ val entries = u2
for (i <- 0 until entries) {
- val innerIndex, outerIndex, nameIndex, jflags = in.nextChar.toInt
+ val innerIndex, outerIndex, nameIndex = u2
+ val jflags = readInnerClassFlags()
if (innerIndex != 0 && outerIndex != 0 && nameIndex != 0)
innerClasses add InnerClassEntry(innerIndex, outerIndex, nameIndex, jflags)
}
@@ -1150,31 +1131,19 @@ abstract class ClassfileParser {
}
/** An entry in the InnerClasses attribute of this class file. */
- case class InnerClassEntry(external: Int, outer: Int, name: Int, jflags: Int) {
+ case class InnerClassEntry(external: Int, outer: Int, name: Int, jflags: JavaAccFlags) {
def externalName = pool getClassName external
def outerName = pool getClassName outer
def originalName = pool getName name
- def isStatic = ClassfileParser.this.isStatic(jflags)
def isModule = originalName.isTermName
- def scope = if (isStatic) staticScope else instanceScope
- def enclosing = if (isStatic) enclModule else enclClass
+ def scope = if (jflags.isStatic) staticScope else instanceScope
+ def enclosing = if (jflags.isStatic) enclModule else enclClass
// The name of the outer class, without its trailing $ if it has one.
private def strippedOuter = nme stripModuleSuffix outerName
private def isInner = innerClasses contains strippedOuter
private def enclClass = if (isInner) innerClasses innerSymbol strippedOuter else classNameToSymbol(strippedOuter)
private def enclModule = enclClass.companionModule
-
- private def staticWord = if (isStatic) "static " else ""
- override def toString = s"$staticWord$originalName in $outerName ($externalName)"
- }
-
- /** Return the Symbol of the top level class enclosing `name`,
- * or the symbol of `name` itself if no enclosing classes are found.
- */
- def topLevelClass(name: Name): Symbol = innerClasses getEntry name match {
- case Some(entry) => topLevelClass(entry.outerName)
- case _ => classNameToSymbol(name)
}
/** Return the class symbol for the given name. It looks it up in its outer class.
@@ -1202,7 +1171,7 @@ abstract class ClassfileParser {
// if loading during initialization of `definitions` typerPhase is not yet set.
// in that case we simply load the member at the current phase
@inline private def enteringTyperIfPossible(body: => Symbol): Symbol =
- if (currentRun.typerPhase eq null) body else beforeTyper(body)
+ if (currentRun.typerPhase eq null) body else enteringTyper(body)
private def innerSymbol(entry: InnerClassEntry): Symbol = {
val name = entry.originalName.toTypeName
@@ -1212,7 +1181,7 @@ abstract class ClassfileParser {
else enclosing.info member name
)
enteringTyperIfPossible(getMember)
- /** There used to be an assertion that this result is not NoSymbol; changing it to an error
+ /* There used to be an assertion that this result is not NoSymbol; changing it to an error
* revealed it had been going off all the time, but has been swallowed by a catch t: Throwable
* in Repository.scala. Since it has been accomplishing nothing except misleading anyone who
* thought it wasn't triggering, I removed it entirely.
@@ -1220,6 +1189,9 @@ abstract class ClassfileParser {
}
}
+ class TypeParamsType(override val typeParams: List[Symbol]) extends LazyType with FlagAgnosticCompleter {
+ override def complete(sym: Symbol) { throw new AssertionError("cyclic type dereferencing") }
+ }
class LazyAliasType(alias: Symbol) extends LazyType with FlagAgnosticCompleter {
override def complete(sym: Symbol) {
sym setInfo createFromClonedSymbols(alias.initialize.typeParams, alias.tpe)(typeFun)
@@ -1227,32 +1199,29 @@ abstract class ClassfileParser {
}
def skipAttributes() {
- val attrCount = in.nextChar
- for (i <- 0 until attrCount) {
- in.skip(2); in.skip(in.nextInt)
+ var attrCount: Int = u2
+ while (attrCount > 0) {
+ in skip 2
+ in skip u4
+ attrCount -= 1
}
}
def skipMembers() {
- val memberCount = in.nextChar
- for (i <- 0 until memberCount) {
- in.skip(6); skipAttributes()
+ var memberCount: Int = u2
+ while (memberCount > 0) {
+ in skip 6
+ skipAttributes()
+ memberCount -= 1
}
}
def skipSuperclasses() {
in.skip(2) // superclass
- val ifaces = in.nextChar
+ val ifaces = u2
in.skip(2 * ifaces)
}
- protected def getOwner(flags: Int): Symbol =
- if (isStatic(flags)) moduleClass else clazz
-
- protected def getScope(flags: Int): Scope =
- if (isStatic(flags)) staticScope else instanceScope
-
- private def isPrivate(flags: Int) = (flags & JAVA_ACC_PRIVATE) != 0
- private def isStatic(flags: Int) = (flags & JAVA_ACC_STATIC) != 0
- private def hasAnnotation(flags: Int) = (flags & JAVA_ACC_ANNOTATION) != 0
+ protected def getScope(flags: JavaAccFlags): Scope =
+ if (flags.isStatic) staticScope else instanceScope
}
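
Across this file the raw Int flag word and the ad-hoc isStatic/getOwner/isPrivate helpers give way to a JavaAccFlags value (imported from scala.reflect.internal in the next file). Its definition is outside this diff; a minimal sketch of the idea, using the standard JVM access_flags masks:

// Sketch of a JavaAccFlags-style wrapper: keep the raw access_flags word,
// answer queries by name instead of scattering bit masks through the parser.
final class AccFlags(val raw: Int) extends AnyVal {
  def isStatic: Boolean     = (raw & 0x0008) != 0  // ACC_STATIC
  def isNative: Boolean     = (raw & 0x0100) != 0  // ACC_NATIVE
  def isAnnotation: Boolean = (raw & 0x2000) != 0  // ACC_ANNOTATION
}
object AccFlags {
  def apply(flags: Int): AccFlags = new AccFlags(flags)
}
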
diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala b/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala
index c304c18c4f..01a117895f 100644
--- a/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala
+++ b/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala
@@ -3,15 +3,15 @@
* @author Iulian Dragos
*/
-package scala.tools.nsc
+package scala
+package tools.nsc
package symtab
package classfile
import scala.collection.{ mutable, immutable }
import mutable.ListBuffer
-import backend.icode._
import ClassfileConstants._
-import scala.reflect.internal.Flags._
+import scala.reflect.internal.JavaAccFlags
/** ICode reader from Java bytecode.
*
@@ -33,7 +33,6 @@ abstract class ICodeReader extends ClassfileParser {
* for non-static members.
*/
def readClass(cls: Symbol): (IClass, IClass) = {
- var classFile: io.AbstractFile = null;
cls.info // ensure accurate type information
isScalaModule = cls.isModule && !cls.isJavaDefined
@@ -48,58 +47,55 @@ abstract class ICodeReader extends ClassfileParser {
(staticCode, instanceCode)
}
- /** If we're parsing a scala module, the owner of members is always
- * the module symbol.
- */
- override def getOwner(jflags: Int): Symbol =
- if (isScalaModule) this.staticModule
- else super.getOwner(jflags)
-
override def parseClass() {
this.instanceCode = new IClass(clazz)
this.staticCode = new IClass(staticModule)
- val jflags = in.nextChar
- val isAttribute = (jflags & JAVA_ACC_ANNOTATION) != 0
- val sflags = toScalaClassFlags(jflags) // what, this is never used??
- val c = pool getClassSymbol in.nextChar
+ u2
+ pool getClassSymbol u2
parseInnerClasses()
in.skip(2) // super class
- in.skip(2 * in.nextChar) // interfaces
- val fieldCount = in.nextChar
+ in.skip(2 * u2) // interfaces
+ val fieldCount = u2
for (i <- 0 until fieldCount) parseField()
- val methodCount = in.nextChar
- for (i <- 0 until methodCount) parseMethod();
+ val methodCount = u2
+ for (i <- 0 until methodCount) parseMethod()
instanceCode.methods = instanceCode.methods.reverse
staticCode.methods = staticCode.methods.reverse
}
override def parseField() {
- val (jflags, sym) = parseMember(true)
+ val (jflags, sym) = parseMember(field = true)
getCode(jflags) addField new IField(sym)
skipAttributes()
}
- private def parseMember(field: Boolean): (Int, Symbol) = {
- val jflags = in.nextChar
- val name = pool getName in.nextChar
- val owner = getOwner(jflags)
- val dummySym = owner.newMethod(name, owner.pos, toScalaMethodFlags(jflags))
+ private def parseMember(field: Boolean): (JavaAccFlags, Symbol) = {
+ val jflags = JavaAccFlags(u2)
+ val name = pool getName u2
+ /* If we're parsing a scala module, the owner of members is always
+ * the module symbol.
+ */
+ val owner = (
+ if (isScalaModule) staticModule
+ else if (jflags.isStatic) moduleClass
+ else clazz
+ )
+ val dummySym = owner.newMethod(name.toTermName, owner.pos, jflags.toScalaFlags)
try {
- val ch = in.nextChar
+ val ch = u2
val tpe = pool.getType(dummySym, ch)
if ("<clinit>" == name.toString)
(jflags, NoSymbol)
else {
- val owner = getOwner(jflags)
- var sym = owner.info.findMember(name, 0, 0, false).suchThat(old => sameType(old.tpe, tpe))
+ var sym = owner.info.findMember(name, 0, 0, stableOnly = false).suchThat(old => sameType(old.tpe, tpe))
if (sym == NoSymbol)
- sym = owner.info.findMember(newTermName(name + nme.LOCAL_SUFFIX_STRING), 0, 0, false).suchThat(_.tpe =:= tpe)
+ sym = owner.info.findMember(newTermName(name + nme.LOCAL_SUFFIX_STRING), 0, 0, stableOnly = false).suchThat(_.tpe =:= tpe)
if (sym == NoSymbol) {
- sym = if (field) owner.newValue(name, owner.pos, toScalaFieldFlags(jflags)) else dummySym
+ sym = if (field) owner.newValue(name.toTermName, owner.pos, jflags.toScalaFlags) else dummySym
sym setInfoAndEnter tpe
log(s"ICodeReader could not locate ${name.decode} in $owner. Created ${sym.defString}.")
}
@@ -124,31 +120,31 @@ abstract class ICodeReader extends ClassfileParser {
}
override def parseMethod() {
- val (jflags, sym) = parseMember(false)
- var beginning = in.bp
+ val (jflags, sym) = parseMember(field = false)
+ val beginning = in.bp
try {
if (sym != NoSymbol) {
this.method = new IMethod(sym)
this.method.returnType = toTypeKind(sym.tpe.resultType)
getCode(jflags).addMethod(this.method)
- if ((jflags & JAVA_ACC_NATIVE) != 0)
+ if (jflags.isNative)
this.method.native = true
- val attributeCount = in.nextChar
+ val attributeCount = u2
for (i <- 0 until attributeCount) parseAttribute()
} else {
- debuglog("Skipping non-existent method.");
- skipAttributes();
+ debuglog("Skipping non-existent method.")
+ skipAttributes()
}
} catch {
case e: MissingRequirementError =>
- in.bp = beginning; skipAttributes
- debuglog("Skipping non-existent method. " + e.msg);
+ in.bp = beginning; skipAttributes()
+ debuglog("Skipping non-existent method. " + e.msg)
}
}
def parseAttribute() {
- val attrName = pool.getName(in.nextChar).toTypeName
- val attrLen = in.nextInt
+ val attrName = pool.getName(u2).toTypeName
+ val attrLen = u4
attrName match {
case tpnme.CodeATTR =>
parseByteCode()
@@ -169,12 +165,12 @@ abstract class ICodeReader extends ClassfileParser {
rootMirror.getClassByName(name)
}
else if (nme.isModuleName(name)) {
- val strippedName = nme.stripModuleSuffix(name)
- forceMangledName(newTermName(strippedName.decode), true) orElse rootMirror.getModule(strippedName)
+ val strippedName = name.dropModule
+ forceMangledName(newTermName(strippedName.decode), module = true) orElse rootMirror.getModuleByName(strippedName)
}
else {
- forceMangledName(name, false)
- afterFlatten(rootMirror.getClassByName(name.toTypeName))
+ forceMangledName(name, module = false)
+ exitingFlatten(rootMirror.getClassByName(name.toTypeName))
}
if (sym.isModule)
sym.moduleClass
@@ -192,9 +188,9 @@ abstract class ICodeReader extends ClassfileParser {
/** Parse java bytecode into ICode */
def parseByteCode() {
- maxStack = in.nextChar
- maxLocals = in.nextChar
- val codeLength = in.nextInt
+ maxStack = u2
+ maxLocals = u2
+ val codeLength = u4
val code = new LinearCode
def parseInstruction() {
@@ -202,27 +198,26 @@ abstract class ICodeReader extends ClassfileParser {
import code._
var size = 1 // instruction size
- /** Parse 16 bit jump target. */
+ /* Parse 16 bit jump target. */
def parseJumpTarget = {
size += 2
- val offset = in.nextChar.toShort
+ val offset = u2.toShort
val target = pc + offset
assert(target >= 0 && target < codeLength, "Illegal jump target: " + target)
target
}
- /** Parse 32 bit jump target. */
+ /* Parse 32 bit jump target. */
def parseJumpTargetW: Int = {
size += 4
- val offset = in.nextInt
+ val offset = u4
val target = pc + offset
assert(target >= 0 && target < codeLength, "Illegal jump target: " + target + "pc: " + pc + " offset: " + offset)
target
}
- val instr = toUnsignedByte(in.nextByte)
- instr match {
- case JVM.nop => parseInstruction
+ u1 match {
+ case JVM.nop => parseInstruction()
case JVM.aconst_null => code emit CONSTANT(Constant(null))
case JVM.iconst_m1 => code emit CONSTANT(Constant(-1))
case JVM.iconst_0 => code emit CONSTANT(Constant(0))
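
parseJumpTarget above reads the branch offset with u2 and then narrows it with .toShort: u2 yields an unsigned value in 0..65535, while JVM branch offsets are signed 16-bit quantities, so the narrowing restores the sign. A two-line demonstration:

object SignedOffsetDemo {
  def main(args: Array[String]): Unit = {
    val raw = 0xFFFE             // what an unsigned 16-bit read returns: 65534
    val offset = raw.toShort     // reinterpreted as signed 16-bit: -2
    println(s"unsigned=$raw signed=$offset")
  }
}
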
@@ -240,21 +235,21 @@ abstract class ICodeReader extends ClassfileParser {
case JVM.dconst_0 => code emit CONSTANT(Constant(0.0))
case JVM.dconst_1 => code emit CONSTANT(Constant(1.0))
- case JVM.bipush => code.emit(CONSTANT(Constant(in.nextByte))); size += 1
- case JVM.sipush => code.emit(CONSTANT(Constant(in.nextChar))); size += 2
- case JVM.ldc => code.emit(CONSTANT(pool.getConstant(toUnsignedByte(in.nextByte)))); size += 1
- case JVM.ldc_w => code.emit(CONSTANT(pool.getConstant(in.nextChar))); size += 2
- case JVM.ldc2_w => code.emit(CONSTANT(pool.getConstant(in.nextChar))); size += 2
- case JVM.iload => code.emit(LOAD_LOCAL(code.getLocal(in.nextByte, INT))); size += 1
- case JVM.lload => code.emit(LOAD_LOCAL(code.getLocal(in.nextByte, LONG))); size += 1
- case JVM.fload => code.emit(LOAD_LOCAL(code.getLocal(in.nextByte, FLOAT))); size += 1
- case JVM.dload => code.emit(LOAD_LOCAL(code.getLocal(in.nextByte, DOUBLE))); size += 1
+ case JVM.bipush => code.emit(CONSTANT(Constant(u1))); size += 1
+ case JVM.sipush => code.emit(CONSTANT(Constant(u2))); size += 2
+ case JVM.ldc => code.emit(CONSTANT(pool.getConstant(u1))); size += 1
+ case JVM.ldc_w => code.emit(CONSTANT(pool.getConstant(u2))); size += 2
+ case JVM.ldc2_w => code.emit(CONSTANT(pool.getConstant(u2))); size += 2
+ case JVM.iload => code.emit(LOAD_LOCAL(code.getLocal(u1, INT))); size += 1
+ case JVM.lload => code.emit(LOAD_LOCAL(code.getLocal(u1, LONG))); size += 1
+ case JVM.fload => code.emit(LOAD_LOCAL(code.getLocal(u1, FLOAT))); size += 1
+ case JVM.dload => code.emit(LOAD_LOCAL(code.getLocal(u1, DOUBLE))); size += 1
case JVM.aload =>
- val local = in.nextByte.toInt; size += 1
+ val local = u1.toInt; size += 1
if (local == 0 && !method.isStatic)
- code.emit(THIS(method.symbol.owner));
+ code.emit(THIS(method.symbol.owner))
else
- code.emit(LOAD_LOCAL(code.getLocal(local, ObjectReference)));
+ code.emit(LOAD_LOCAL(code.getLocal(local, ObjectReference)))
case JVM.iload_0 => code.emit(LOAD_LOCAL(code.getLocal(0, INT)))
case JVM.iload_1 => code.emit(LOAD_LOCAL(code.getLocal(1, INT)))
@@ -274,9 +269,9 @@ abstract class ICodeReader extends ClassfileParser {
case JVM.dload_3 => code.emit(LOAD_LOCAL(code.getLocal(3, DOUBLE)))
case JVM.aload_0 =>
if (!method.isStatic)
- code.emit(THIS(method.symbol.owner));
+ code.emit(THIS(method.symbol.owner))
else
- code.emit(LOAD_LOCAL(code.getLocal(0, ObjectReference)));
+ code.emit(LOAD_LOCAL(code.getLocal(0, ObjectReference)))
case JVM.aload_1 => code.emit(LOAD_LOCAL(code.getLocal(1, ObjectReference)))
case JVM.aload_2 => code.emit(LOAD_LOCAL(code.getLocal(2, ObjectReference)))
case JVM.aload_3 => code.emit(LOAD_LOCAL(code.getLocal(3, ObjectReference)))
@@ -290,11 +285,11 @@ abstract class ICodeReader extends ClassfileParser {
case JVM.caload => code.emit(LOAD_ARRAY_ITEM(CHAR))
case JVM.saload => code.emit(LOAD_ARRAY_ITEM(SHORT))
- case JVM.istore => code.emit(STORE_LOCAL(code.getLocal(in.nextByte, INT))); size += 1
- case JVM.lstore => code.emit(STORE_LOCAL(code.getLocal(in.nextByte, LONG))); size += 1
- case JVM.fstore => code.emit(STORE_LOCAL(code.getLocal(in.nextByte, FLOAT))); size += 1
- case JVM.dstore => code.emit(STORE_LOCAL(code.getLocal(in.nextByte, DOUBLE))); size += 1
- case JVM.astore => code.emit(STORE_LOCAL(code.getLocal(in.nextByte, ObjectReference))); size += 1
+ case JVM.istore => code.emit(STORE_LOCAL(code.getLocal(u1, INT))); size += 1
+ case JVM.lstore => code.emit(STORE_LOCAL(code.getLocal(u1, LONG))); size += 1
+ case JVM.fstore => code.emit(STORE_LOCAL(code.getLocal(u1, FLOAT))); size += 1
+ case JVM.dstore => code.emit(STORE_LOCAL(code.getLocal(u1, DOUBLE))); size += 1
+ case JVM.astore => code.emit(STORE_LOCAL(code.getLocal(u1, ObjectReference))); size += 1
case JVM.istore_0 => code.emit(STORE_LOCAL(code.getLocal(0, INT)))
case JVM.istore_1 => code.emit(STORE_LOCAL(code.getLocal(1, INT)))
case JVM.istore_2 => code.emit(STORE_LOCAL(code.getLocal(2, INT)))
@@ -378,9 +373,9 @@ abstract class ICodeReader extends ClassfileParser {
case JVM.lxor => code.emit(CALL_PRIMITIVE(Logical(XOR, LONG)))
case JVM.iinc =>
size += 2
- val local = code.getLocal(in.nextByte, INT)
+ val local = code.getLocal(u1, INT)
code.emit(LOAD_LOCAL(local))
- code.emit(CONSTANT(Constant(in.nextByte)))
+ code.emit(CONSTANT(Constant(u1)))
code.emit(CALL_PRIMITIVE(Arithmetic(ADD, INT)))
code.emit(STORE_LOCAL(local))
@@ -430,14 +425,14 @@ abstract class ICodeReader extends ClassfileParser {
size += padding
in.bp += padding
assert((pc + size % 4) != 0, pc)
-/* var byte1 = in.nextByte; size += 1;
- while (byte1 == 0) { byte1 = in.nextByte; size += 1; }
- val default = byte1 << 24 | in.nextByte << 16 | in.nextByte << 8 | in.nextByte;
+/* var byte1 = u1; size += 1;
+ while (byte1 == 0) { byte1 = u1; size += 1; }
+ val default = byte1 << 24 | u1 << 16 | u1 << 8 | u1;
size = size + 3
*/
- val default = pc + in.nextInt; size += 4
- val low = in.nextInt
- val high = in.nextInt
+ val default = pc + u4; size += 4
+ val low = u4
+ val high = u4
size += 8
assert(low <= high, "Value low not <= high for tableswitch.")
@@ -450,13 +445,13 @@ abstract class ICodeReader extends ClassfileParser {
size += padding
in.bp += padding
assert((pc + size % 4) != 0, pc)
- val default = pc + in.nextInt; size += 4
- val npairs = in.nextInt; size += 4
+ val default = pc + u4; size += 4
+ val npairs = u4; size += 4
var tags: List[List[Int]] = Nil
var targets: List[Int] = Nil
var i = 0
while (i < npairs) {
- tags = List(in.nextInt) :: tags; size += 4
+ tags = List(u4) :: tags; size += 4
targets = parseJumpTargetW :: targets; // parseJumpTargetW updates 'size' itself
i += 1
}
@@ -471,54 +466,54 @@ abstract class ICodeReader extends ClassfileParser {
case JVM.return_ => code.emit(RETURN(UNIT))
case JVM.getstatic =>
- val field = pool.getMemberSymbol(in.nextChar, true); size += 2
+ val field = pool.getMemberSymbol(u2, static = true); size += 2
if (field.hasModuleFlag)
code emit LOAD_MODULE(field)
else
- code emit LOAD_FIELD(field, true)
+ code emit LOAD_FIELD(field, isStatic = true)
case JVM.putstatic =>
- val field = pool.getMemberSymbol(in.nextChar, true); size += 2
- code.emit(STORE_FIELD(field, true))
+ val field = pool.getMemberSymbol(u2, static = true); size += 2
+ code.emit(STORE_FIELD(field, isStatic = true))
case JVM.getfield =>
- val field = pool.getMemberSymbol(in.nextChar, false); size += 2
- code.emit(LOAD_FIELD(field, false))
+ val field = pool.getMemberSymbol(u2, static = false); size += 2
+ code.emit(LOAD_FIELD(field, isStatic = false))
case JVM.putfield =>
- val field = pool.getMemberSymbol(in.nextChar, false); size += 2
- code.emit(STORE_FIELD(field, false))
+ val field = pool.getMemberSymbol(u2, static = false); size += 2
+ code.emit(STORE_FIELD(field, isStatic = false))
case JVM.invokevirtual =>
- val m = pool.getMemberSymbol(in.nextChar, false); size += 2
+ val m = pool.getMemberSymbol(u2, static = false); size += 2
code.emit(CALL_METHOD(m, Dynamic))
case JVM.invokeinterface =>
- val m = pool.getMemberSymbol(in.nextChar, false); size += 4
+ val m = pool.getMemberSymbol(u2, static = false); size += 4
in.skip(2)
code.emit(CALL_METHOD(m, Dynamic))
case JVM.invokespecial =>
- val m = pool.getMemberSymbol(in.nextChar, false); size += 2
- val style = if (m.name == nme.CONSTRUCTOR || m.isPrivate) Static(true)
- else SuperCall(m.owner.name);
+ val m = pool.getMemberSymbol(u2, static = false); size += 2
+ val style = if (m.name == nme.CONSTRUCTOR || m.isPrivate) Static(onInstance = true)
+ else SuperCall(m.owner.name)
code.emit(CALL_METHOD(m, style))
case JVM.invokestatic =>
- val m = pool.getMemberSymbol(in.nextChar, true); size += 2
+ val m = pool.getMemberSymbol(u2, static = true); size += 2
if (isBox(m))
code.emit(BOX(toTypeKind(m.info.paramTypes.head)))
else if (isUnbox(m))
code.emit(UNBOX(toTypeKind(m.info.resultType)))
else
- code.emit(CALL_METHOD(m, Static(false)))
+ code.emit(CALL_METHOD(m, Static(onInstance = false)))
case JVM.invokedynamic =>
// TODO, this is just a place holder. A real implementation must parse the class constant entry
debuglog("Found JVM invokedynamic instructionm, inserting place holder ICode INVOKE_DYNAMIC.")
containsInvokeDynamic = true
- val poolEntry = in.nextChar
+ val poolEntry = in.nextChar.toInt
in.skip(2)
code.emit(INVOKE_DYNAMIC(poolEntry))
case JVM.new_ =>
- code.emit(NEW(REFERENCE(pool.getClassSymbol(in.nextChar))))
+ code.emit(NEW(REFERENCE(pool.getClassSymbol(u2))))
size += 2
case JVM.newarray =>
- val kind = in.nextByte match {
+ val kind = u1 match {
case T_BOOLEAN => BOOL
case T_CHAR => CHAR
case T_FLOAT => FLOAT
@@ -532,35 +527,35 @@ abstract class ICodeReader extends ClassfileParser {
code.emit(CREATE_ARRAY(kind, 1))
case JVM.anewarray =>
- val tpe = pool.getClassOrArrayType(in.nextChar); size += 2
+ val tpe = pool.getClassOrArrayType(u2); size += 2
code.emit(CREATE_ARRAY(toTypeKind(tpe), 1))
case JVM.arraylength => code.emit(CALL_PRIMITIVE(ArrayLength(ObjectReference))); // the kind does not matter
case JVM.athrow => code.emit(THROW(definitions.ThrowableClass))
case JVM.checkcast =>
- code.emit(CHECK_CAST(toTypeKind(pool.getClassOrArrayType(in.nextChar)))); size += 2
+ code.emit(CHECK_CAST(toTypeKind(pool.getClassOrArrayType(u2)))); size += 2
case JVM.instanceof =>
- code.emit(IS_INSTANCE(toTypeKind(pool.getClassOrArrayType(in.nextChar)))); size += 2
+ code.emit(IS_INSTANCE(toTypeKind(pool.getClassOrArrayType(u2)))); size += 2
case JVM.monitorenter => code.emit(MONITOR_ENTER())
case JVM.monitorexit => code.emit(MONITOR_EXIT())
case JVM.wide =>
size += 1
- toUnsignedByte(in.nextByte) match {
- case JVM.iload => code.emit(LOAD_LOCAL(code.getLocal(in.nextChar, INT))); size += 2
- case JVM.lload => code.emit(LOAD_LOCAL(code.getLocal(in.nextChar, LONG))); size += 2
- case JVM.fload => code.emit(LOAD_LOCAL(code.getLocal(in.nextChar, FLOAT))); size += 2
- case JVM.dload => code.emit(LOAD_LOCAL(code.getLocal(in.nextChar, DOUBLE))); size += 2
- case JVM.aload => code.emit(LOAD_LOCAL(code.getLocal(in.nextChar, ObjectReference))); size += 2
- case JVM.istore => code.emit(STORE_LOCAL(code.getLocal(in.nextChar, INT))); size += 2
- case JVM.lstore => code.emit(STORE_LOCAL(code.getLocal(in.nextChar, LONG))); size += 2
- case JVM.fstore => code.emit(STORE_LOCAL(code.getLocal(in.nextChar, FLOAT))); size += 2
- case JVM.dstore => code.emit(STORE_LOCAL(code.getLocal(in.nextChar, DOUBLE))); size += 2
- case JVM.astore => code.emit(STORE_LOCAL(code.getLocal(in.nextChar, ObjectReference))); size += 2
+ u1 match {
+ case JVM.iload => code.emit(LOAD_LOCAL(code.getLocal(u2, INT))); size += 2
+ case JVM.lload => code.emit(LOAD_LOCAL(code.getLocal(u2, LONG))); size += 2
+ case JVM.fload => code.emit(LOAD_LOCAL(code.getLocal(u2, FLOAT))); size += 2
+ case JVM.dload => code.emit(LOAD_LOCAL(code.getLocal(u2, DOUBLE))); size += 2
+ case JVM.aload => code.emit(LOAD_LOCAL(code.getLocal(u2, ObjectReference))); size += 2
+ case JVM.istore => code.emit(STORE_LOCAL(code.getLocal(u2, INT))); size += 2
+ case JVM.lstore => code.emit(STORE_LOCAL(code.getLocal(u2, LONG))); size += 2
+ case JVM.fstore => code.emit(STORE_LOCAL(code.getLocal(u2, FLOAT))); size += 2
+ case JVM.dstore => code.emit(STORE_LOCAL(code.getLocal(u2, DOUBLE))); size += 2
+ case JVM.astore => code.emit(STORE_LOCAL(code.getLocal(u2, ObjectReference))); size += 2
case JVM.ret => sys.error("Cannot handle jsr/ret")
case JVM.iinc =>
size += 4
- val local = code.getLocal(in.nextChar, INT)
- code.emit(CONSTANT(Constant(in.nextChar)))
+ val local = code.getLocal(u2, INT)
+ code.emit(CONSTANT(Constant(u2)))
code.emit(CALL_PRIMITIVE(Arithmetic(ADD, INT)))
code.emit(STORE_LOCAL(local))
case _ => sys.error("Invalid 'wide' operand")
@@ -568,8 +563,8 @@ abstract class ICodeReader extends ClassfileParser {
case JVM.multianewarray =>
size += 3
- val tpe = toTypeKind(pool getClassOrArrayType in.nextChar)
- val dim = in.nextByte
+ val tpe = toTypeKind(pool getClassOrArrayType u2)
+ val dim = u1
// assert(dim == 1, "Cannot handle multidimensional arrays yet.")
code emit CREATE_ARRAY(tpe, dim)
@@ -593,16 +588,16 @@ abstract class ICodeReader extends ClassfileParser {
}
pc = 0
- while (pc < codeLength) parseInstruction
+ while (pc < codeLength) parseInstruction()
- val exceptionEntries = in.nextChar.toInt
+ val exceptionEntries = u2.toInt
code.containsEHs = (exceptionEntries != 0)
var i = 0
while (i < exceptionEntries) {
// skip start end PC
in.skip(4)
// read the handler PC
- code.jmpTargets += in.nextChar
+ code.jmpTargets += u2
// skip the exception type
in.skip(2)
i += 1
@@ -638,15 +633,13 @@ abstract class ICodeReader extends ClassfileParser {
/** Return the icode class that should include members with the given flags.
* There are two possible classes, the static part and the instance part.
*/
- def getCode(flags: Int): IClass =
- if (isScalaModule) staticCode
- else if ((flags & JAVA_ACC_STATIC) != 0) staticCode
- else instanceCode
+ def getCode(flags: JavaAccFlags): IClass =
+ if (isScalaModule || flags.isStatic) staticCode else instanceCode
class LinearCode {
- var instrs: ListBuffer[(Int, Instruction)] = new ListBuffer
- var jmpTargets: mutable.Set[Int] = perRunCaches.newSet[Int]()
- var locals: mutable.Map[Int, List[(Local, TypeKind)]] = perRunCaches.newMap()
+ val instrs: ListBuffer[(Int, Instruction)] = new ListBuffer
+ val jmpTargets: mutable.Set[Int] = perRunCaches.newSet[Int]()
+ val locals: mutable.Map[Int, List[(Local, TypeKind)]] = perRunCaches.newMap()
var containsDUPX = false
var containsNEW = false
@@ -678,7 +671,6 @@ abstract class ICodeReader extends ClassfileParser {
val blocks = makeBasicBlocks
var otherBlock: BasicBlock = NoBasicBlock
- var disableJmpTarget = false
for ((pc, instr) <- instrs.iterator) {
// Console.println("> " + pc + ": " + instr);
@@ -686,7 +678,7 @@ abstract class ICodeReader extends ClassfileParser {
otherBlock = blocks(pc)
if (!bb.closed && otherBlock != bb) {
bb.emit(JUMP(otherBlock))
- bb.close
+ bb.close()
// Console.println("\t> closing bb: " + bb)
}
bb = otherBlock
@@ -729,46 +721,44 @@ abstract class ICodeReader extends ClassfileParser {
val tfa = new analysis.MethodTFA() {
import analysis._
- import analysis.typeFlowLattice.IState
/** Abstract interpretation for one instruction. */
override def mutatingInterpret(out: typeFlowLattice.Elem, i: Instruction): typeFlowLattice.Elem = {
- val bindings = out.vars
val stack = out.stack
import stack.push
i match {
case DUP_X1 =>
val (one, two) = stack.pop2
- push(one); push(two); push(one);
+ push(one); push(two); push(one)
case DUP_X2 =>
val (one, two, three) = stack.pop3
- push(one); push(three); push(two); push(one);
+ push(one); push(three); push(two); push(one)
case DUP2_X1 =>
val (one, two) = stack.pop2
if (one.isWideType) {
- push(one); push(two); push(one);
+ push(one); push(two); push(one)
} else {
val three = stack.pop
- push(two); push(one); push(three); push(two); push(one);
+ push(two); push(one); push(three); push(two); push(one)
}
case DUP2_X2 =>
val (one, two) = stack.pop2
if (one.isWideType && two.isWideType) {
- push(one); push(two); push(one);
+ push(one); push(two); push(one)
} else if (one.isWideType) {
val three = stack.pop
assert(!three.isWideType, "Impossible")
- push(one); push(three); push(two); push(one);
+ push(one); push(three); push(two); push(one)
} else {
val three = stack.pop
if (three.isWideType) {
- push(two); push(one); push(one); push(three); push(two); push(one);
+ push(two); push(one); push(one); push(three); push(two); push(one)
} else {
val four = stack.pop
- push(two); push(one); push(four); push(one); push(three); push(two); push(one);
+ push(two); push(one); push(four); push(one); push(three); push(two); push(one)
}
}
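
The DUP_X* cases above mirror the JVM's stack-shuffling instructions inside the abstract interpreter. As a reading aid for the push sequences (illustrative code, with the list head standing for the stack top), dup_x1 duplicates the top value beneath the value below it:

// ..., value2, value1  ->  ..., value1, value2, value1
def dupX1[A](stack: List[A]): List[A] = stack match {
  case one :: two :: rest => one :: two :: one :: rest
  case _                  => sys.error("dup_x1 needs at least two stack slots")
}
// dupX1(List("v1", "v2", "rest")) == List("v1", "v2", "v1", "rest")
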
@@ -781,7 +771,7 @@ abstract class ICodeReader extends ClassfileParser {
// method.dump
tfa.init(method)
- tfa.run
+ tfa.run()
for (bb <- linearizer.linearize(method)) {
var info = tfa.in(bb)
for (i <- bb.toList) {
@@ -796,7 +786,7 @@ abstract class ICodeReader extends ClassfileParser {
STORE_LOCAL(tmp2),
LOAD_LOCAL(tmp1),
LOAD_LOCAL(tmp2),
- LOAD_LOCAL(tmp1)));
+ LOAD_LOCAL(tmp1)))
case DUP_X2 =>
val one = info.stack.types(0)
@@ -809,30 +799,30 @@ abstract class ICodeReader extends ClassfileParser {
STORE_LOCAL(tmp2),
LOAD_LOCAL(tmp1),
LOAD_LOCAL(tmp2),
- LOAD_LOCAL(tmp1)));
+ LOAD_LOCAL(tmp1)))
else {
- val tmp3 = freshLocal(info.stack.types(2));
+ val tmp3 = freshLocal(info.stack.types(2))
bb.replaceInstruction(i, List(STORE_LOCAL(tmp1),
STORE_LOCAL(tmp2),
STORE_LOCAL(tmp3),
LOAD_LOCAL(tmp1),
LOAD_LOCAL(tmp3),
LOAD_LOCAL(tmp2),
- LOAD_LOCAL(tmp1)));
+ LOAD_LOCAL(tmp1)))
}
case DUP2_X1 =>
val one = info.stack.types(0)
val two = info.stack.types(1)
- val tmp1 = freshLocal(one);
- val tmp2 = freshLocal(two);
+ val tmp1 = freshLocal(one)
+ val tmp2 = freshLocal(two)
if (one.isWideType) {
assert(!two.isWideType, "Impossible")
bb.replaceInstruction(i, List(STORE_LOCAL(tmp1),
STORE_LOCAL(tmp2),
LOAD_LOCAL(tmp1),
LOAD_LOCAL(tmp2),
- LOAD_LOCAL(tmp1)));
+ LOAD_LOCAL(tmp1)))
} else {
val tmp3 = freshLocal(info.stack.types(2))
bb.replaceInstruction(i, List(STORE_LOCAL(tmp1),
@@ -841,7 +831,7 @@ abstract class ICodeReader extends ClassfileParser {
LOAD_LOCAL(tmp1),
LOAD_LOCAL(tmp3),
LOAD_LOCAL(tmp2),
- LOAD_LOCAL(tmp1)));
+ LOAD_LOCAL(tmp1)))
}
case DUP2_X2 =>
@@ -854,21 +844,21 @@ abstract class ICodeReader extends ClassfileParser {
STORE_LOCAL(tmp2),
LOAD_LOCAL(tmp1),
LOAD_LOCAL(tmp2),
- LOAD_LOCAL(tmp1)));
+ LOAD_LOCAL(tmp1)))
} else if (one.isWideType) {
val three = info.stack.types(2)
assert(!two.isWideType && !three.isWideType, "Impossible")
- val tmp3 = freshLocal(three);
+ val tmp3 = freshLocal(three)
bb.replaceInstruction(i, List(STORE_LOCAL(tmp1),
STORE_LOCAL(tmp2),
STORE_LOCAL(tmp3),
LOAD_LOCAL(tmp1),
LOAD_LOCAL(tmp3),
LOAD_LOCAL(tmp2),
- LOAD_LOCAL(tmp1)));
+ LOAD_LOCAL(tmp1)))
} else {
val three = info.stack.types(2)
- val tmp3 = freshLocal(three);
+ val tmp3 = freshLocal(three)
if (three.isWideType) {
bb.replaceInstruction(i, List(STORE_LOCAL(tmp1),
STORE_LOCAL(tmp2),
@@ -877,10 +867,10 @@ abstract class ICodeReader extends ClassfileParser {
LOAD_LOCAL(tmp1),
LOAD_LOCAL(tmp3),
LOAD_LOCAL(tmp2),
- LOAD_LOCAL(tmp1)));
+ LOAD_LOCAL(tmp1)))
} else {
val four = info.stack.types(3)
- val tmp4 = freshLocal(three);
+ val tmp4 = freshLocal(three)
assert(!four.isWideType, "Impossible")
bb.replaceInstruction(i, List(STORE_LOCAL(tmp1),
STORE_LOCAL(tmp2),
@@ -891,7 +881,7 @@ abstract class ICodeReader extends ClassfileParser {
LOAD_LOCAL(tmp4),
LOAD_LOCAL(tmp3),
LOAD_LOCAL(tmp2),
- LOAD_LOCAL(tmp1)));
+ LOAD_LOCAL(tmp1)))
}
}
case _ =>
@@ -906,11 +896,11 @@ abstract class ICodeReader extends ClassfileParser {
import opcodes._
val rdef = new reachingDefinitions.ReachingDefinitionsAnalysis
rdef.init(method)
- rdef.run
+ rdef.run()
for (bb <- method.code.blocks ; (i, idx) <- bb.toList.zipWithIndex) i match {
case cm @ CALL_METHOD(m, Static(true)) if m.isClassConstructor =>
- def loop(bb0: BasicBlock, idx0: Int, depth: Int = 0): Unit = {
+ def loop(bb0: BasicBlock, idx0: Int, depth: Int): Unit = {
rdef.findDefs(bb0, idx0, 1, depth) match {
case ((bb1, idx1)) :: _ =>
bb1(idx1) match {
@@ -929,6 +919,7 @@ abstract class ICodeReader extends ClassfileParser {
}
/** Return the local at given index, with the given type. */
+ def getLocal(idx: Char, kind: TypeKind): Local = getLocal(idx.toInt, kind)
def getLocal(idx: Int, kind: TypeKind): Local = {
assert(idx < maxLocals, "Index too large for local variable.")
@@ -947,7 +938,7 @@ abstract class ICodeReader extends ClassfileParser {
locals.get(idx) match {
case Some(ls) =>
- val l = ls find { loc => loc._2 <:< kind }
+ val l = ls find { loc => loc._2 isAssignabledTo kind }
l match {
case Some((loc, _)) => loc
case None =>
@@ -958,8 +949,8 @@ abstract class ICodeReader extends ClassfileParser {
l
}
case None =>
- checkValidIndex
- val l = freshLocal(idx, kind, false)
+ checkValidIndex()
+ val l = freshLocal(idx, kind, isArg = false)
debuglog("Added new local for idx " + idx + ": " + kind)
locals += (idx -> List((l, kind)))
l
@@ -971,7 +962,7 @@ abstract class ICodeReader extends ClassfileParser {
/** Return a fresh Local variable for the given index.
*/
private def freshLocal(idx: Int, kind: TypeKind, isArg: Boolean) = {
- val sym = method.symbol.newVariable(newTermName("loc" + idx)).setInfo(kind.toType);
+ val sym = method.symbol.newVariable(newTermName("loc" + idx)).setInfo(kind.toType)
val l = new Local(sym, kind, isArg)
method.addLocal(l)
l
@@ -983,7 +974,7 @@ abstract class ICodeReader extends ClassfileParser {
* the original method. */
def freshLocal(kind: TypeKind): Local = {
count += 1
- freshLocal(maxLocals + count, kind, false)
+ freshLocal(maxLocals + count, kind, isArg = false)
}
/** add a method param with the given index. */
@@ -1001,7 +992,8 @@ abstract class ICodeReader extends ClassfileParser {
jmpTargets += pc
}
- case class LJUMP(pc: Int) extends LazyJump(pc);
+ case class LJUMP(pc: Int) extends LazyJump(pc)
+
case class LCJUMP(success: Int, failure: Int, cond: TestOp, kind: TypeKind)
extends LazyJump(success) {
override def toString(): String = "LCJUMP (" + kind + ") " + success + " : " + failure
diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala b/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala
index ed7eb6d307..3265af9f5b 100644
--- a/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala
+++ b/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala
@@ -26,12 +26,8 @@ import Flags._
abstract class Pickler extends SubComponent {
import global._
- private final val showSig = false
-
val phaseName = "pickler"
- currentRun
-
def newPhase(prev: Phase): StdPhase = new PicklePhase(prev)
class PicklePhase(prev: Phase) extends StdPhase(prev) {
@@ -68,7 +64,7 @@ abstract class Pickler extends SubComponent {
return
}
- if (!t.isDef && t.hasSymbol && t.symbol.isTermMacro) {
+ if (!t.isDef && t.hasSymbolField && t.symbol.isTermMacro) {
unit.error(t.pos, "macro has not been expanded")
return
}
@@ -134,11 +130,34 @@ abstract class Pickler extends SubComponent {
true
}
+ /** If the symbol is a type skolem, deskolemize and log it.
+ * If we fail to deskolemize, in a method like
+ * trait Trait[+A] { def f[CC[X]] : CC[A] }
+ * the applied type CC[A] will hold a different CC symbol
+ * than the type-constructor type-parameter CC.
+ */
+ private def deskolemize(sym: Symbol) = {
+ if (sym.isTypeSkolem) {
+ val sym1 = sym.deSkolemize
+ log({
+ val what0 = sym.defString
+ val what = sym1.defString match {
+ case `what0` => what0
+ case other => what0 + "->" + other
+ }
+ val where = sym.enclMethod.fullLocationString
+ s"deskolemizing $what in $where"
+ })
+ sym1
+ }
+ else sym
+ }
+
/** Store symbol in index. If symbol is local, also store everything it references.
- *
- * @param sym ...
*/
- def putSymbol(sym: Symbol) {
+ def putSymbol(sym0: Symbol) {
+ val sym = deskolemize(sym0)
+
if (putEntry(sym)) {
if (isLocal(sym)) {
putEntry(sym.name)
@@ -146,7 +165,7 @@ abstract class Pickler extends SubComponent {
putSymbol(sym.privateWithin)
putType(sym.info)
if (sym.thisSym.tpeHK != sym.tpeHK)
- putType(sym.typeOfThis);
+ putType(sym.typeOfThis)
putSymbol(sym.alias)
if (!sym.children.isEmpty) {
val (locals, globals) = sym.children partition (_.isLocalClass)
@@ -173,7 +192,7 @@ abstract class Pickler extends SubComponent {
*/
private def putType(tp: Type): Unit = if (putEntry(tp)) {
tp match {
- case NoType | NoPrefix /*| DeBruijnIndex(_, _) */ =>
+ case NoType | NoPrefix =>
;
case ThisType(sym) =>
putSymbol(sym)
@@ -203,7 +222,7 @@ abstract class Pickler extends SubComponent {
case NullaryMethodType(restpe) =>
putType(restpe)
case PolyType(tparams, restpe) =>
- /** no longer needed since all params are now local
+ /* no longer needed since all params are now local
tparams foreach { tparam =>
if (!isLocal(tparam)) locals += tparam // similar to existential types, these tparams are local
}
@@ -213,14 +232,14 @@ abstract class Pickler extends SubComponent {
// val savedBoundSyms = boundSyms // boundSyms are known to be local based on the EXISTENTIAL flag (see isLocal)
// boundSyms = tparams ::: boundSyms
// try {
- putType(restpe);
-// } finally {
+ putType(restpe)
+ // } finally {
// boundSyms = savedBoundSyms
// }
putSymbols(tparams)
case AnnotatedType(annotations, underlying, selfsym) =>
putType(underlying)
- if (settings.selfInAnnots.value) putSymbol(selfsym)
+ if (settings.selfInAnnots) putSymbol(selfsym)
putAnnotations(annotations filter (_.isStatic))
case _ =>
throw new FatalError("bad type: " + tp + "(" + tp.getClass + ")")
@@ -231,7 +250,7 @@ abstract class Pickler extends SubComponent {
private def putTree(tree: Tree): Unit = if (putEntry(tree)) {
if (tree != EmptyTree)
putType(tree.tpe)
- if (tree.hasSymbol)
+ if (tree.hasSymbolField)
putSymbol(tree.symbol)
tree match {
@@ -422,7 +441,7 @@ abstract class Pickler extends SubComponent {
* argument of some Annotation */
private def putMods(mods: Modifiers) = if (putEntry(mods)) {
// annotations in Modifiers are removed by the typechecker
- val Modifiers(flags, privateWithin, Nil) = mods
+ val Modifiers(_, privateWithin, Nil) = mods
putEntry(privateWithin)
}
@@ -490,7 +509,13 @@ abstract class Pickler extends SubComponent {
/** Write a reference to object, i.e., the object's number in the map index.
*/
- private def writeRef(ref: AnyRef) { writeNat(index(ref)) }
+ private def writeRef(ref0: AnyRef) {
+ val ref = ref0 match {
+ case sym: Symbol => deskolemize(sym)
+ case _ => ref0
+ }
+ writeNat(index(ref))
+ }
private def writeRefs(refs: List[AnyRef]) { refs foreach writeRef }
private def writeRefsWithLength(refs: List[AnyRef]) {
writeNat(refs.length)
@@ -502,7 +527,7 @@ abstract class Pickler extends SubComponent {
private def writeSymInfo(sym: Symbol) {
writeRef(sym.name)
writeRef(localizedOwner(sym))
- writeLongNat((rawToPickledFlags(sym.flags & PickledFlags)))
+ writeLongNat((rawToPickledFlags(sym.rawflags & PickledFlags)))
if (sym.hasAccessBoundary) writeRef(sym.privateWithin)
writeRef(sym.info)
}
@@ -563,7 +588,7 @@ abstract class Pickler extends SubComponent {
tag
case sym: ClassSymbol =>
writeSymInfo(sym)
- if (sym.thisSym.tpe != sym.tpe) writeRef(sym.typeOfThis)
+ if (sym.thisSym.tpe_* != sym.tpe_*) writeRef(sym.typeOfThis)
CLASSsym
case sym: TypeSymbol =>
writeSymInfo(sym)
@@ -604,12 +629,10 @@ abstract class Pickler extends SubComponent {
writeRef(restpe); writeRefs(tparams); POLYtpe
case ExistentialType(tparams, restpe) =>
writeRef(restpe); writeRefs(tparams); EXISTENTIALtpe
- // case DeBruijnIndex(l, i) =>
- // writeNat(l); writeNat(i); DEBRUIJNINDEXtpe
case c @ Constant(_) =>
if (c.tag == BooleanTag) writeLong(if (c.booleanValue) 1 else 0)
else if (ByteTag <= c.tag && c.tag <= LongTag) writeLong(c.longValue)
- else if (c.tag == FloatTag) writeLong(floatToIntBits(c.floatValue))
+ else if (c.tag == FloatTag) writeLong(floatToIntBits(c.floatValue).toLong)
else if (c.tag == DoubleTag) writeLong(doubleToLongBits(c.doubleValue))
else if (c.tag == StringTag) writeRef(newTermName(c.stringValue))
else if (c.tag == ClazzTag) writeRef(c.typeValue)
@@ -619,7 +642,7 @@ abstract class Pickler extends SubComponent {
annotations filter (_.isStatic) match {
case Nil => writeBody(tp) // write the underlying type if there are no annotations
case staticAnnots =>
- if (settings.selfInAnnots.value && selfsym != NoSymbol)
+ if (settings.selfInAnnots && selfsym != NoSymbol)
writeRef(selfsym)
writeRef(tp)
writeRefs(staticAnnots)
@@ -988,115 +1011,6 @@ abstract class Pickler extends SubComponent {
patchNat(startpos + 1, writeIndex - (startpos + 2))
}
- /** Print entry for diagnostics */
- def printEntryAtIndex(idx: Int) = printEntry(entries(idx))
- def printEntry(entry: AnyRef) {
- def printRef(ref: AnyRef) {
- print(index(ref)+
- (if (ref.isInstanceOf[Name]) "("+ref+") " else " "))
- }
- def printRefs(refs: List[AnyRef]) { refs foreach printRef }
- def printSymInfo(sym: Symbol) {
- var posOffset = 0
- printRef(sym.name)
- printRef(localizedOwner(sym))
- print(flagsToString(sym.flags & PickledFlags)+" ")
- if (sym.hasAccessBoundary) printRef(sym.privateWithin)
- printRef(sym.info)
- }
- def printBody(entry: AnyRef) = entry match {
- case name: Name =>
- print((if (name.isTermName) "TERMname " else "TYPEname ")+name)
- case NoSymbol =>
- print("NONEsym")
- case sym: Symbol if !isLocal(sym) =>
- if (sym.isModuleClass) {
- print("EXTMODCLASSref "); printRef(sym.name.toTermName)
- } else {
- print("EXTref "); printRef(sym.name)
- }
- if (!sym.owner.isRoot) printRef(sym.owner)
- case sym: ClassSymbol =>
- print("CLASSsym ")
- printSymInfo(sym)
- if (sym.thisSym.tpe != sym.tpe) printRef(sym.typeOfThis)
- case sym: TypeSymbol =>
- print(if (sym.isAbstractType) "TYPEsym " else "ALIASsym ")
- printSymInfo(sym)
- case sym: TermSymbol =>
- print(if (sym.isModule) "MODULEsym " else "VALsym ")
- printSymInfo(sym)
- if (sym.alias != NoSymbol) printRef(sym.alias)
- case NoType =>
- print("NOtpe")
- case NoPrefix =>
- print("NOPREFIXtpe")
- case ThisType(sym) =>
- print("THIStpe "); printRef(sym)
- case SingleType(pre, sym) =>
- print("SINGLEtpe "); printRef(pre); printRef(sym);
- case ConstantType(value) =>
- print("CONSTANTtpe "); printRef(value);
- case TypeRef(pre, sym, args) =>
- print("TYPEREFtpe "); printRef(pre); printRef(sym); printRefs(args);
- case TypeBounds(lo, hi) =>
- print("TYPEBOUNDStpe "); printRef(lo); printRef(hi);
- case tp @ RefinedType(parents, decls) =>
- print("REFINEDtpe "); printRef(tp.typeSymbol); printRefs(parents);
- case ClassInfoType(parents, decls, clazz) =>
- print("CLASSINFOtpe "); printRef(clazz); printRefs(parents);
- case mt @ MethodType(formals, restpe) =>
- print("METHODtpe"); printRef(restpe); printRefs(formals)
- case PolyType(tparams, restpe) =>
- print("POLYtpe "); printRef(restpe); printRefs(tparams);
- case ExistentialType(tparams, restpe) =>
- print("EXISTENTIALtpe "); printRef(restpe); printRefs(tparams);
- print("||| "+entry)
- // case DeBruijnIndex(l, i) =>
- // print("DEBRUIJNINDEXtpe "); print(l+" "+i)
- case c @ Constant(_) =>
- print("LITERAL ")
- if (c.tag == BooleanTag) print("Boolean "+(if (c.booleanValue) 1 else 0))
- else if (c.tag == ByteTag) print("Byte "+c.longValue)
- else if (c.tag == ShortTag) print("Short "+c.longValue)
- else if (c.tag == CharTag) print("Char "+c.longValue)
- else if (c.tag == IntTag) print("Int "+c.longValue)
- else if (c.tag == LongTag) print("Long "+c.longValue)
- else if (c.tag == FloatTag) print("Float "+c.floatValue)
- else if (c.tag == DoubleTag) print("Double "+c.doubleValue)
- else if (c.tag == StringTag) { print("String "); printRef(newTermName(c.stringValue)) }
- else if (c.tag == ClazzTag) { print("Class "); printRef(c.typeValue) }
- else if (c.tag == EnumTag) { print("Enum "); printRef(c.symbolValue) }
- case AnnotatedType(annots, tp, selfsym) =>
- if (settings.selfInAnnots.value) {
- print("ANNOTATEDWSELFtpe ")
- printRef(tp)
- printRef(selfsym)
- printRefs(annots)
- } else {
- print("ANNOTATEDtpe ")
- printRef(tp)
- printRefs(annots)
- }
- case (target: Symbol, AnnotationInfo(atp, args, Nil)) =>
- print("SYMANNOT ")
- printRef(target)
- printRef(atp)
- for (c <- args) printRef(c)
- case (target: Symbol, children: List[_]) =>
- print("CHILDREN ")
- printRef(target)
- for (c <- children) printRef(c.asInstanceOf[Symbol])
- case AnnotationInfo(atp, args, Nil) =>
- print("ANNOTINFO")
- printRef(atp)
- for (c <- args) printRef(c)
- case _ =>
- throw new FatalError("bad entry: " + entry + " " + entry.getClass)
- }
- printBody(entry); println()
- }
-
/** Write byte array */
def writeArray() {
assert(writeIndex == 0)
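
The Pickler changes above hinge on its two-phase structure: putSymbol/putType first assign every entry a slot in an index, then writeRef serializes each reference as that slot number (which is why deskolemize is applied in both putSymbol and writeRef, so the two phases agree on the key). A minimal sketch of the put-then-write pattern, with illustrative names only:

import scala.collection.mutable

final class TinyIndex {
  private val slots = mutable.LinkedHashMap[AnyRef, Int]()
  def putEntry(e: AnyRef): Boolean =
    if (slots.contains(e)) false
    else { slots(e) = slots.size; true }   // first visit claims the next slot
  def refOf(e: AnyRef): Int = slots(e)     // what writeRef emits via writeNat
}
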
diff --git a/src/compiler/scala/tools/nsc/symtab/clr/CLRTypes.scala b/src/compiler/scala/tools/nsc/symtab/clr/CLRTypes.scala
deleted file mode 100644
index 40189b9444..0000000000
--- a/src/compiler/scala/tools/nsc/symtab/clr/CLRTypes.scala
+++ /dev/null
@@ -1,137 +0,0 @@
-/* NSC -- new scala compiler
- * Copyright 2004-2013 LAMP/EPFL
- */
-
-
-package scala.tools.nsc
-package symtab
-package clr
-
-import java.io.File
-import java.util.{Comparator, StringTokenizer}
-import scala.util.Sorting
-import ch.epfl.lamp.compiler.msil._
-import scala.collection.{ mutable, immutable }
-import scala.reflect.internal.util.{Position, NoPosition}
-
-/**
- * Collects all types from all reference assemblies.
- */
-abstract class CLRTypes {
-
- val global: Global
- import global.Symbol
- import global.definitions
-
- //##########################################################################
-
- var BYTE: Type = _
- var UBYTE: Type = _
- var SHORT: Type = _
- var USHORT: Type = _
- var CHAR: Type = _
- var INT: Type = _
- var UINT: Type = _
- var LONG: Type = _
- var ULONG: Type = _
- var FLOAT: Type = _
- var DOUBLE: Type = _
- var BOOLEAN: Type = _
- var VOID: Type = _
- var ENUM: Type = _
- var DELEGATE: Type = _
-
- var OBJECT: Type = _
- var STRING: Type = _
- var STRING_ARRAY: Type = _
-
- var VALUE_TYPE: Type = _
-
- var SCALA_SYMTAB_ATTR: Type = _
- var SYMTAB_CONSTR: ConstructorInfo = _
- var SYMTAB_DEFAULT_CONSTR: ConstructorInfo = _
-
- var DELEGATE_COMBINE: MethodInfo = _
- var DELEGATE_REMOVE: MethodInfo = _
-
- val types: mutable.Map[Symbol,Type] = new mutable.HashMap
- val constructors: mutable.Map[Symbol,ConstructorInfo] = new mutable.HashMap
- val methods: mutable.Map[Symbol,MethodInfo] = new mutable.HashMap
- val fields: mutable.Map[Symbol, FieldInfo] = new mutable.HashMap
- val sym2type: mutable.Map[Type,Symbol] = new mutable.HashMap
- val addressOfViews = new mutable.HashSet[Symbol]
- val mdgptrcls4clssym: mutable.Map[ /*cls*/ Symbol, /*cls*/ Symbol] = new mutable.HashMap
-
- def isAddressOf(msym : Symbol) = addressOfViews.contains(msym)
-
- def isNonEnumValuetype(cls: Symbol) = {
- val msilTOpt = types.get(cls)
- val res = msilTOpt.isDefined && {
- val msilT = msilTOpt.get
- msilT.IsValueType && !msilT.IsEnum
- }
- res
- }
-
- def isValueType(cls: Symbol): Boolean = {
- val opt = types.get(cls)
- opt.isDefined && opt.get.IsValueType
- }
-
- def init() = try { // initialize
- // the MsilClasspath (nsc/util/Classpath.scala) initializes the msil-library by calling
- // Assembly.LoadFrom("mscorlib.dll"), so this type should be found
- Type.initMSCORLIB(getTypeSafe("System.String").Assembly)
-
- BYTE = getTypeSafe("System.SByte")
- UBYTE = getTypeSafe("System.Byte")
- CHAR = getTypeSafe("System.Char")
- SHORT = getTypeSafe("System.Int16")
- USHORT = getTypeSafe("System.UInt16")
- INT = getTypeSafe("System.Int32")
- UINT = getTypeSafe("System.UInt32")
- LONG = getTypeSafe("System.Int64")
- ULONG = getTypeSafe("System.UInt64")
- FLOAT = getTypeSafe("System.Single")
- DOUBLE = getTypeSafe("System.Double")
- BOOLEAN = getTypeSafe("System.Boolean")
- VOID = getTypeSafe("System.Void")
- ENUM = getTypeSafe("System.Enum")
- DELEGATE = getTypeSafe("System.MulticastDelegate")
-
- OBJECT = getTypeSafe("System.Object")
- STRING = getTypeSafe("System.String")
- STRING_ARRAY = getTypeSafe("System.String[]")
- VALUE_TYPE = getTypeSafe("System.ValueType")
-
- SCALA_SYMTAB_ATTR = getTypeSafe("scala.runtime.SymtabAttribute")
- val bytearray: Array[Type] = Array(Type.GetType("System.Byte[]"))
- SYMTAB_CONSTR = SCALA_SYMTAB_ATTR.GetConstructor(bytearray)
- SYMTAB_DEFAULT_CONSTR = SCALA_SYMTAB_ATTR.GetConstructor(Type.EmptyTypes)
-
- val delegate: Type = getTypeSafe("System.Delegate")
- val dargs: Array[Type] = Array(delegate, delegate)
- DELEGATE_COMBINE = delegate.GetMethod("Combine", dargs)
- DELEGATE_REMOVE = delegate.GetMethod("Remove", dargs)
- }
- catch {
- case e: RuntimeException =>
- Console.println(e.getMessage)
- throw e
- }
-
- //##########################################################################
- // type mapping and lookup
-
- def getType(name: String): Type = Type.GetType(name)
-
- def getTypeSafe(name: String): Type = {
- val t = Type.GetType(name)
- assert(t != null, name)
- t
- }
-
- def mkArrayType(elemType: Type): Type = getType(elemType.FullName + "[]")
-
- def isDelegateType(t: Type): Boolean = { t.BaseType() == DELEGATE }
-} // CLRTypes
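// A minimal, self-contained sketch (not the compiler's own code) of the lookup pattern CLRTypes
// uses above: `types` maps compiler symbols to MSIL metadata, and the value-type predicates only
// answer positively when a mapping exists. `MsilTypeStub` is a hypothetical stand-in for
// ch.epfl.lamp.compiler.msil.Type, and plain String keys stand in for compiler Symbols.
import scala.collection.mutable

object ClrLookupSketch {
  final case class MsilTypeStub(fullName: String, isValueType: Boolean, isEnum: Boolean)

  val types = mutable.Map.empty[String, MsilTypeStub]

  def isValueType(sym: String): Boolean =
    types.get(sym).exists(_.isValueType)

  def isNonEnumValuetype(sym: String): Boolean =
    types.get(sym).exists(t => t.isValueType && !t.isEnum)
}
// After types("System.Int32") = MsilTypeStub("System.Int32", isValueType = true, isEnum = false),
// isNonEnumValuetype("System.Int32") is true, while any unmapped symbol yields false.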
diff --git a/src/compiler/scala/tools/nsc/symtab/clr/TypeParser.scala b/src/compiler/scala/tools/nsc/symtab/clr/TypeParser.scala
deleted file mode 100644
index 5a0253c18b..0000000000
--- a/src/compiler/scala/tools/nsc/symtab/clr/TypeParser.scala
+++ /dev/null
@@ -1,850 +0,0 @@
-/* NSC -- new scala compiler
- * Copyright 2004-2013 LAMP/EPFL
- */
-
-package scala.tools.nsc
-package symtab
-package clr
-
-import java.io.IOException
-import io.MsilFile
-import ch.epfl.lamp.compiler.msil.{Type => MSILType, Attribute => MSILAttribute, _}
-import scala.collection.{ mutable, immutable }
-import scala.reflect.internal.pickling.UnPickler
-import ch.epfl.lamp.compiler.msil.Type.TMVarUsage
-import scala.language.implicitConversions
-
-/**
- * @author Nikolay Mihaylov
- */
-abstract class TypeParser {
-
- val global: Global
-
- import global._
- import loaders.clrTypes
-
- //##########################################################################
-
- private var clazz: Symbol = _
- private var instanceDefs: Scope = _ // was members
- private var staticModule: Symbol = _ // was staticsClass
- private var staticDefs: Scope = _ // was statics
-
- protected def statics: Symbol = staticModule.moduleClass
-
- protected var busy: Boolean = false // lock to detect recursive reads
-
- private object unpickler extends UnPickler {
- val global: TypeParser.this.global.type = TypeParser.this.global
- }
-
- def parse(typ: MSILType, root: Symbol) {
-
- def handleError(e: Throwable) = {
- if (settings.debug.value) e.printStackTrace() //debug
- throw new IOException("type '" + typ.FullName + "' is broken\n(" + e.getMessage() + ")")
- }
- assert(!busy)
- busy = true
-
- if (root.isModule) {
- this.clazz = root.companionClass
- this.staticModule = root
- } else {
- this.clazz = root
- this.staticModule = root.companionModule
- }
- try {
- parseClass(typ)
- } catch {
- case e: FatalError => handleError(e)
- case e: RuntimeException => handleError(e)
- }
- busy = false
- }
-
- class TypeParamsType(override val typeParams: List[Symbol]) extends LazyType with FlagAgnosticCompleter {
- override def complete(sym: Symbol) { throw new AssertionError("cyclic type dereferencing") }
- }
-
- /* the names `classTParams` and `newTParams` stem from the forJVM version (ClassfileParser.sigToType())
- * but there are differences that should be kept in mind.
- * forMSIL, a nested class knows nothing about any type-params in the nesting class;
- * therefore newTParams is redundant (other than for recording lexical order):
- * it always contains the same elements as the values of classTParams. */
- val classTParams = scala.collection.mutable.Map[Int,Symbol]() // TODO should this be a stack? (i.e., can more than one invocation of getCLRType be active on the same TypeParser instance?)
- val newTParams = new scala.collection.mutable.ListBuffer[Symbol]()
- val methodTParams = scala.collection.mutable.Map[Int,Symbol]()
-
- private def sig2typeBounds(tvarCILDef: GenericParamAndConstraints): Type = {
- val ts = new scala.collection.mutable.ListBuffer[Type]
- for (cnstrnt <- tvarCILDef.Constraints) {
- ts += getCLRType(cnstrnt) // TODO we're definitely not at or after erasure, no need to call objToAny, right?
- }
- TypeBounds.upper(intersectionType(ts.toList, clazz))
- // TODO variance???
- }
-
- private def createViewFromTo(viewSuffix : String, fromTpe : Type, toTpe : Type,
- addToboxMethodMap : Boolean, isAddressOf : Boolean) : Symbol = {
- val flags = Flags.JAVA | Flags.STATIC | Flags.IMPLICIT; // todo: static? shouldn't be final instead?
- val viewMethodType = (msym: Symbol) => JavaMethodType(msym.newSyntheticValueParams(List(fromTpe)), toTpe)
- val vmsym = createMethod(nme.view_ + viewSuffix, flags, viewMethodType, null, true);
- // !!! this used to mutate a mutable map in definitions, but that map became
- // immutable and this kept "working" with a no-op. So now it's commented out
- // since I retired the deprecated code which allowed for that bug.
- //
- // if (addToboxMethodMap) definitions.boxMethod(clazz) = vmsym
-
- if (isAddressOf) clrTypes.addressOfViews += vmsym
- vmsym
- }
-
- private def createDefaultConstructor(typ: MSILType) {
- val attrs = MethodAttributes.Public | MethodAttributes.RTSpecialName | MethodAttributes.SpecialName // TODO instance
- val declType= typ
- val method = new ConstructorInfo(declType, attrs, Array[MSILType]())
- val flags = Flags.JAVA
- val owner = clazz
- val methodSym = owner.newMethod(nme.CONSTRUCTOR, NoPosition, flags)
- val rettype = clazz.tpe
- val mtype = methodType(Array[MSILType](), rettype);
- val mInfo = mtype(methodSym)
- methodSym.setInfo(mInfo)
- instanceDefs.enter(methodSym);
- clrTypes.constructors(methodSym) = method
- }
-
- private def parseClass(typ: MSILType) {
-
- {
- val t4c = clrTypes.types.get(clazz)
- assert(t4c == None || t4c == Some(typ))
- }
- clrTypes.types(clazz) = typ
-
- {
- val c4t = clrTypes.sym2type.get(typ)
- assert(c4t == None || c4t == Some(clazz))
- }
- clrTypes.sym2type(typ) = clazz
-
- if (typ.IsDefined(clrTypes.SCALA_SYMTAB_ATTR, false)) {
- val attrs = typ.GetCustomAttributes(clrTypes.SCALA_SYMTAB_ATTR, false);
- assert (attrs.length == 1, attrs.length);
- val a = attrs(0).asInstanceOf[MSILAttribute];
- assert (a.getConstructor() == clrTypes.SYMTAB_CONSTR);
- val symtab = a.getConstructorArguments()(0).asInstanceOf[Array[Byte]]
- unpickler.unpickle(symtab, 0, clazz, staticModule, typ.FullName);
- val mClass = clrTypes.getType(typ.FullName + "$");
- if (mClass != null) {
- clrTypes.types(statics) = mClass;
- val moduleInstance = mClass.GetField("MODULE$");
- assert (moduleInstance != null, mClass);
- clrTypes.fields(statics) = moduleInstance;
- }
- return
- }
- val flags = translateAttributes(typ)
-
- var clazzBoxed : Symbol = NoSymbol
- var clazzMgdPtr : Symbol = NoSymbol
-
- val canBeTakenAddressOf = (typ.IsValueType || typ.IsEnum) && (typ.FullName != "System.Enum")
-
- if(canBeTakenAddressOf) {
- clazzBoxed = clazz.owner.newClass(clazz.name.toTypeName append newTypeName("Boxed"))
- clazzMgdPtr = clazz.owner.newClass(clazz.name.toTypeName append newTypeName("MgdPtr"))
- clrTypes.mdgptrcls4clssym(clazz) = clazzMgdPtr
- /* adding typMgdPtr to clrTypes.sym2type should happen early (before metadata for supertypes is parsed,
- before metadata for members is parsed) so that clazzMgdPtr can be found by getCLRType. */
- val typMgdPtr = MSILType.mkByRef(typ)
- clrTypes.types(clazzMgdPtr) = typMgdPtr
- clrTypes.sym2type(typMgdPtr) = clazzMgdPtr
- /* clazzMgdPtr but not clazzBoxed is mapped by clrTypes.types into an msil.Type instance,
- because there's no metadata-level representation for a "boxed valuetype" */
- val instanceDefsMgdPtr = newScope
- val classInfoMgdPtr = ClassInfoType(definitions.anyvalparam, instanceDefsMgdPtr, clazzMgdPtr)
- clazzMgdPtr.setFlag(flags)
- clazzMgdPtr.setInfo(classInfoMgdPtr)
- }
-
-/* START CLR generics (snippet 1) */
- // first pass
- for (tvarCILDef <- typ.getSortedTVars() ) {
- val tpname = newTypeName(tvarCILDef.Name.replaceAll("!", "")) // TODO are all type-params really named in all assemblies out there? (NO)
- val tpsym = clazz.newTypeParameter(tpname)
- classTParams.put(tvarCILDef.Number, tpsym)
- newTParams += tpsym
- // TODO wouldn't the following also be needed later, i.e. during getCLRType
- tpsym.setInfo(definitions.AnyClass.tpe)
- }
- // second pass
- for (tvarCILDef <- typ.getSortedTVars() ) {
- val tpsym = classTParams(tvarCILDef.Number)
- tpsym.setInfo(sig2typeBounds(tvarCILDef)) // we never skip bounds unlike in forJVM
- }
-/* END CLR generics (snippet 1) */
- val ownTypeParams = newTParams.toList
-/* START CLR generics (snippet 2) */
- if (!ownTypeParams.isEmpty) {
- clazz.setInfo(new TypeParamsType(ownTypeParams))
- if(typ.IsValueType && !typ.IsEnum) {
- clazzBoxed.setInfo(new TypeParamsType(ownTypeParams))
- }
- }
-/* END CLR generics (snippet 2) */
- instanceDefs = newScope
- staticDefs = newScope
-
- val classInfoAsInMetadata = {
- val ifaces: Array[MSILType] = typ.getInterfaces()
- val superType = if (typ.BaseType() != null) getCLRType(typ.BaseType())
- else if (typ.IsInterface()) definitions.ObjectClass.tpe
- else definitions.AnyClass.tpe; // this branch activates for System.Object only.
- // parents (i.e., base type and interfaces)
- val parents = new scala.collection.mutable.ListBuffer[Type]()
- parents += superType
- for (iface <- ifaces) {
- parents += getCLRType(iface) // here the variance doesn't matter
- }
- // methods, properties, events, fields are entered in a moment
- if (canBeTakenAddressOf) {
- val instanceDefsBoxed = newScope
- ClassInfoType(parents.toList, instanceDefsBoxed, clazzBoxed)
- } else
- ClassInfoType(parents.toList, instanceDefs, clazz)
- }
-
- val staticInfo = ClassInfoType(List(), staticDefs, statics)
-
- clazz.setFlag(flags)
-
- if (canBeTakenAddressOf) {
- clazzBoxed.setInfo( if (ownTypeParams.isEmpty) classInfoAsInMetadata
- else genPolyType(ownTypeParams, classInfoAsInMetadata) )
- clazzBoxed.setFlag(flags)
- val rawValueInfoType = ClassInfoType(definitions.anyvalparam, instanceDefs, clazz)
- clazz.setInfo( if (ownTypeParams.isEmpty) rawValueInfoType
- else genPolyType(ownTypeParams, rawValueInfoType) )
- } else {
- clazz.setInfo( if (ownTypeParams.isEmpty) classInfoAsInMetadata
- else genPolyType(ownTypeParams, classInfoAsInMetadata) )
- }
-
- // TODO I don't remember if statics.setInfo and staticModule.setInfo should also know about type params
- statics.setFlag(Flags.JAVA)
- statics.setInfo(staticInfo)
- staticModule.setFlag(Flags.JAVA)
- staticModule.setInfo(statics.tpe)
-
-
- if (canBeTakenAddressOf) {
- // implicit conversions are owned by staticModule.moduleClass
- createViewFromTo("2Boxed", clazz.tpe, clazzBoxed.tpe, addToboxMethodMap = true, isAddressOf = false)
- // createViewFromTo("2Object", clazz.tpe, definitions.ObjectClass.tpe, addToboxMethodMap = true, isAddressOf = false)
- createViewFromTo("2MgdPtr", clazz.tpe, clazzMgdPtr.tpe, addToboxMethodMap = false, isAddressOf = true)
- // a return can't have type managed-pointer, thus a dereference-conversion is not needed
- // similarly, a method can't declare as return type "boxed valuetype"
- if (!typ.IsEnum) {
- // a synthetic default constructor for raw-type allows `new X' syntax
- createDefaultConstructor(typ)
- }
- }
-
- // import nested types
- for (ntype <- typ.getNestedTypes() if !(ntype.IsNestedPrivate || ntype.IsNestedAssembly || ntype.IsNestedFamANDAssem)
- || ntype.IsInterface /* TODO why shouldn't nested ifaces be type-parsed too? */ )
- {
- val loader = new loaders.MsilFileLoader(new MsilFile(ntype))
- val nclazz = statics.newClass(ntype.Name)
- val nmodule = statics.newModule(ntype.Name)
- nclazz.setInfo(loader)
- nmodule.setInfo(loader)
- staticDefs.enter(nclazz)
- staticDefs.enter(nmodule)
-
- assert(nclazz.companionModule == nmodule, nmodule)
- assert(nmodule.companionClass == nclazz, nclazz)
- }
-
- val fields = typ.getFields()
- for (field <- fields
- if !(field.IsPrivate() || field.IsAssembly() || field.IsFamilyAndAssembly)
- if (getCLRType(field.FieldType) != null)
- ) {
- assert (!field.FieldType.IsPointer && !field.FieldType.IsByRef, "CLR requirement")
- val flags = translateAttributes(field);
- val name = newTermName(field.Name);
- val fieldType =
- if (field.IsLiteral && !field.FieldType.IsEnum && isDefinedAtgetConstant(getCLRType(field.FieldType)))
- ConstantType(getConstant(getCLRType(field.FieldType), field.getValue))
- else
- getCLRType(field.FieldType)
- val owner = if (field.IsStatic()) statics else clazz;
- val sym = owner.newValue(name, NoPosition, flags).setInfo(fieldType);
- // TODO: set private within!!! -> look at typechecker/Namers.scala
- (if (field.IsStatic()) staticDefs else instanceDefs).enter(sym);
- clrTypes.fields(sym) = field;
- }
-
- for (constr <- typ.getConstructors() if !constr.IsStatic() && !constr.IsPrivate() &&
- !constr.IsAssembly() && !constr.IsFamilyAndAssembly() && !constr.HasPtrParamOrRetType())
- createMethod(constr);
-
- // initially also contains getters and setters of properties.
- val methodsSet = new mutable.HashSet[MethodInfo]();
- methodsSet ++= typ.getMethods();
-
- for (prop <- typ.getProperties) {
- val propType: Type = getCLSType(prop.PropertyType);
- if (propType != null) {
- val getter: MethodInfo = prop.GetGetMethod(true);
- val setter: MethodInfo = prop.GetSetMethod(true);
- var gparamsLength: Int = -1;
- if (!(getter == null || getter.IsPrivate || getter.IsAssembly
- || getter.IsFamilyAndAssembly || getter.HasPtrParamOrRetType))
- {
- assert(prop.PropertyType == getter.ReturnType);
- val gparams: Array[ParameterInfo] = getter.GetParameters();
- gparamsLength = gparams.length;
- val name: TermName = if (gparamsLength == 0) prop.Name else nme.apply;
- val flags = translateAttributes(getter);
- val owner: Symbol = if (getter.IsStatic) statics else clazz;
- val methodSym = owner.newMethod(name, NoPosition, flags)
- val mtype: Type = if (gparamsLength == 0) NullaryMethodType(propType) // .NET properties can't be polymorphic
- else methodType(getter, getter.ReturnType)(methodSym)
- methodSym.setInfo(mtype);
- methodSym.setFlag(Flags.ACCESSOR);
- (if (getter.IsStatic) staticDefs else instanceDefs).enter(methodSym)
- clrTypes.methods(methodSym) = getter;
- methodsSet -= getter;
- }
- if (!(setter == null || setter.IsPrivate || setter.IsAssembly
- || setter.IsFamilyAndAssembly || setter.HasPtrParamOrRetType))
- {
- val sparams: Array[ParameterInfo] = setter.GetParameters()
- if(getter != null)
- assert(getter.IsStatic == setter.IsStatic);
- assert(setter.ReturnType == clrTypes.VOID);
- if(getter != null)
- assert(sparams.length == gparamsLength + 1, "" + getter + "; " + setter);
-
- val name: TermName = if (gparamsLength == 0) nme.getterToSetter(prop.Name)
- else nme.update;
- val flags = translateAttributes(setter);
- val mtype = methodType(setter, definitions.UnitClass.tpe);
- val owner: Symbol = if (setter.IsStatic) statics else clazz;
- val methodSym = owner.newMethod(name, NoPosition, flags)
- methodSym.setInfo(mtype(methodSym))
- methodSym.setFlag(Flags.ACCESSOR);
- (if (setter.IsStatic) staticDefs else instanceDefs).enter(methodSym);
- clrTypes.methods(methodSym) = setter;
- methodsSet -= setter;
- }
- }
- }
-
-/* for (event <- typ.GetEvents) {
- // adding += and -= methods to add delegates to an event.
- // raising the event is not possible from outside the class (this is
- // generally the case in the .NET world)
- val adder: MethodInfo = event.GetAddMethod();
- val remover: MethodInfo = event.GetRemoveMethod();
- if (!(adder == null || adder.IsPrivate || adder.IsAssembly
- || adder.IsFamilyAndAssembly))
- {
- assert(adder.ReturnType == clrTypes.VOID);
- assert(adder.GetParameters().map(_.ParameterType).toList == List(event.EventHandlerType));
- val name = encode("+=");
- val flags = translateAttributes(adder);
- val mtype: Type = methodType(adder, adder.ReturnType);
- createMethod(name, flags, mtype, adder, adder.IsStatic)
- methodsSet -= adder;
- }
- if (!(remover == null || remover.IsPrivate || remover.IsAssembly
- || remover.IsFamilyAndAssembly))
- {
- assert(remover.ReturnType == clrTypes.VOID);
- assert(remover.GetParameters().map(_.ParameterType).toList == List(event.EventHandlerType));
- val name = encode("-=");
- val flags = translateAttributes(remover);
- val mtype: Type = methodType(remover, remover.ReturnType);
- createMethod(name, flags, mtype, remover, remover.IsStatic)
- methodsSet -= remover;
- }
- } */
-
-/* Adds a view amounting to syntax sugar for a CLR implicit overload.
- The long-form syntax can also be supported if "methodsSet -= method" (last statement) is removed.
-
- /* remember, there's typ.getMethods and type.GetMethods */
- for (method <- typ.getMethods)
- if(!method.HasPtrParamOrRetType &&
- method.IsPublic && method.IsStatic && method.IsSpecialName &&
- method.Name == "op_Implicit") {
- // create a view: typ => method's return type
- val viewRetType: Type = getCLRType(method.ReturnType)
- val viewParamTypes: List[Type] = method.GetParameters().map(_.ParameterType).map(getCLSType).toList;
- /* The spec says "The operator method shall be defined as a static method on either the operand or return type."
- * We don't consider the declaring type for the purposes of definitions.functionType,
- * instead we regard op_Implicit's argument type and return type as defining the view's signature.
- */
- if (viewRetType != null && !viewParamTypes.contains(null)) {
- /* The check above applies e.g. to System.Decimal that has a conversion from UInt16, a non-CLS type, whose CLS-mapping returns null */
- val funType: Type = definitions.functionType(viewParamTypes, viewRetType);
- val flags = Flags.JAVA | Flags.STATIC | Flags.IMPLICIT; // todo: static? shouldn't be final instead?
- val viewMethodType = (msym: Symbol) => JavaMethodType(msym.newSyntheticValueParams(viewParamTypes), funType)
- val vmsym = createMethod(nme.view_, flags, viewMethodType, method, true);
- methodsSet -= method;
- }
- }
-*/
-
- for (method <- methodsSet.iterator)
- if (!method.IsPrivate() && !method.IsAssembly() && !method.IsFamilyAndAssembly()
- && !method.HasPtrParamOrRetType)
- createMethod(method);
-
- // Create methods and views for delegate support
- if (clrTypes.isDelegateType(typ)) {
- createDelegateView(typ)
- createDelegateChainers(typ)
- }
-
- // for enumerations introduce comparison and bitwise logical operations;
- // the backend will recognize them and replace them with comparison or
- // bitwise logical operations on the primitive underlying type
-
- if (typ.IsEnum) {
- val ENUM_CMP_NAMES = List(nme.EQ, nme.NE, nme.LT, nme.LE, nme.GT, nme.GE);
- val ENUM_BIT_LOG_NAMES = List(nme.OR, nme.AND, nme.XOR);
-
- val flags = Flags.JAVA | Flags.FINAL
- for (cmpName <- ENUM_CMP_NAMES) {
- val enumCmp = clazz.newMethod(cmpName)
- val enumCmpType = JavaMethodType(enumCmp.newSyntheticValueParams(List(clazz.tpe)), definitions.BooleanClass.tpe)
- enumCmp.setFlag(flags).setInfo(enumCmpType)
- instanceDefs.enter(enumCmp)
- }
-
- for (bitLogName <- ENUM_BIT_LOG_NAMES) {
- val enumBitLog = clazz.newMethod(bitLogName)
- val enumBitLogType = JavaMethodType(enumBitLog.newSyntheticValueParams(List(clazz.tpe)), clazz.tpe /* was classInfo, infinite typer */)
- enumBitLog.setFlag(flags).setInfo(enumBitLogType)
- instanceDefs.enter(enumBitLog)
- }
- }
-
- } // parseClass
-
- private def populateMethodTParams(method: MethodBase, methodSym: MethodSymbol) : List[Symbol] = {
- if(!method.IsGeneric) Nil
- else {
- methodTParams.clear
- val newMethodTParams = new scala.collection.mutable.ListBuffer[Symbol]()
-
- // first pass
- for (mvarCILDef <- method.getSortedMVars() ) {
- val mtpname = newTypeName(mvarCILDef.Name.replaceAll("!", "")) // TODO are all method-level type-params really named in all assemblies out there? (NO)
- val mtpsym = methodSym.newTypeParameter(mtpname)
- methodTParams.put(mvarCILDef.Number, mtpsym)
- newMethodTParams += mtpsym
- // TODO wouldn't the following also be needed later, i.e. during getCLRType
- mtpsym.setInfo(definitions.AnyClass.tpe)
- }
- // second pass
- for (mvarCILDef <- method.getSortedMVars() ) {
- val mtpsym = methodTParams(mvarCILDef.Number)
- mtpsym.setInfo(sig2typeBounds(mvarCILDef)) // we never skip bounds unlike in forJVM
- }
-
- newMethodTParams.toList
- }
- }
-
- private def createMethod(method: MethodBase) {
-
- val flags = translateAttributes(method);
- val owner = if (method.IsStatic()) statics else clazz;
- val methodSym = owner.newMethod(getName(method), NoPosition, flags)
- /* START CLR generics (snippet 3) */
- val newMethodTParams = populateMethodTParams(method, methodSym)
- /* END CLR generics (snippet 3) */
-
- val rettype = if (method.IsConstructor()) clazz.tpe
- else getCLSType(method.asInstanceOf[MethodInfo].ReturnType);
- if (rettype == null) return;
- val mtype = methodType(method, rettype);
- if (mtype == null) return;
-/* START CLR generics (snippet 4) */
- val mInfo = if (method.IsGeneric) genPolyType(newMethodTParams, mtype(methodSym))
- else mtype(methodSym)
-/* END CLR generics (snippet 4) */
-/* START CLR non-generics (snippet 4)
- val mInfo = mtype(methodSym)
- END CLR non-generics (snippet 4) */
- methodSym.setInfo(mInfo)
- (if (method.IsStatic()) staticDefs else instanceDefs).enter(methodSym);
- if (method.IsConstructor())
- clrTypes.constructors(methodSym) = method.asInstanceOf[ConstructorInfo]
- else clrTypes.methods(methodSym) = method.asInstanceOf[MethodInfo];
- }
-
- private def createMethod(name: TermName, flags: Long, args: Array[MSILType], retType: MSILType, method: MethodInfo, statik: Boolean): Symbol = {
- val mtype = methodType(args, getCLSType(retType))
- assert(mtype != null)
- createMethod(name, flags, mtype, method, statik)
- }
-
- private def createMethod(name: TermName, flags: Long, mtype: Symbol => Type, method: MethodInfo, statik: Boolean): Symbol = {
- val methodSym: Symbol = (if (statik) statics else clazz).newMethod(name)
- methodSym.setFlag(flags).setInfo(mtype(methodSym))
- (if (statik) staticDefs else instanceDefs).enter(methodSym)
- if (method != null)
- clrTypes.methods(methodSym) = method
- methodSym
- }
-
- private def createDelegateView(typ: MSILType) = {
- val invoke: MethodInfo = typ.GetMember("Invoke")(0).asInstanceOf[MethodInfo];
- val invokeRetType: Type = getCLRType(invoke.ReturnType);
- val invokeParamTypes: List[Type] =invoke.GetParameters().map(_.ParameterType).map(getCLSType).toList;
- val funType: Type = definitions.functionType(invokeParamTypes, invokeRetType);
-
- val typClrType: Type = getCLRType(typ);
- val flags = Flags.JAVA | Flags.STATIC | Flags.IMPLICIT; // todo: static? think not needed
-
- // create the forward view: delegate => function
- val delegateParamTypes: List[Type] = List(typClrType);
- // not ImplicitMethodType, this is for methods with implicit parameters (not implicit methods)
- val forwardViewMethodType = (msym: Symbol) => JavaMethodType(msym.newSyntheticValueParams(delegateParamTypes), funType)
- val fmsym = createMethod(nme.view_, flags, forwardViewMethodType, null, true);
-
- // create the backward view: function => delegate
- val functionParamTypes: List[Type] = List(funType);
- val backwardViewMethodType = (msym: Symbol) => JavaMethodType(msym.newSyntheticValueParams(functionParamTypes), typClrType)
- val bmsym = createMethod(nme.view_, flags, backwardViewMethodType, null, true);
- }
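// A sketch of the two implicit views createDelegateView builds above (delegate => function and
// function => delegate), written as ordinary Scala implicits over a hypothetical `Handler`
// delegate type whose Invoke member takes a String and returns an Int; the real code emits
// compiler symbols rather than source-level implicits.
import scala.language.implicitConversions

trait Handler { def Invoke(s: String): Int }

object DelegateViewSketch {
  implicit def handler2function(h: Handler): String => Int = s => h.Invoke(s)
  implicit def function2handler(f: String => Int): Handler =
    new Handler { def Invoke(s: String): Int = f(s) }
}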
-
- private def createDelegateChainers(typ: MSILType) = {
- val flags: Long = Flags.JAVA | Flags.FINAL
- val args: Array[MSILType] = Array(typ)
-
- var s = createMethod(encode("+="), flags, args, clrTypes.VOID, clrTypes.DELEGATE_COMBINE, false);
- s = createMethod(encode("-="), flags, args, clrTypes.VOID, clrTypes.DELEGATE_REMOVE, false);
-
- s = createMethod(nme.PLUS, flags, args, typ, clrTypes.DELEGATE_COMBINE, false);
- s = createMethod(nme.MINUS, flags, args, typ, clrTypes.DELEGATE_REMOVE, false);
- }
-
- private def getName(method: MethodBase): TermName = {
-
- def operatorOverload(name : String, paramsArity : Int) : Option[Name] = paramsArity match {
- case 1 => name match {
- // PartitionI.10.3.1
- case "op_Decrement" => Some(encode("--"))
- case "op_Increment" => Some(encode("++"))
- case "op_UnaryNegation" => Some(nme.UNARY_-)
- case "op_UnaryPlus" => Some(nme.UNARY_+)
- case "op_LogicalNot" => Some(nme.UNARY_!)
- case "op_OnesComplement" => Some(nme.UNARY_~)
- /* op_True and op_False have no operator symbol assigned,
- Other methods that will have to be written in full are:
- op_AddressOf & (unary)
- op_PointerDereference * (unary) */
- case _ => None
- }
- case 2 => name match {
- // PartitionI.10.3.2
- case "op_Addition" => Some(nme.ADD)
- case "op_Subtraction" => Some(nme.SUB)
- case "op_Multiply" => Some(nme.MUL)
- case "op_Division" => Some(nme.DIV)
- case "op_Modulus" => Some(nme.MOD)
- case "op_ExclusiveOr" => Some(nme.XOR)
- case "op_BitwiseAnd" => Some(nme.AND)
- case "op_BitwiseOr" => Some(nme.OR)
- case "op_LogicalAnd" => Some(nme.ZAND)
- case "op_LogicalOr" => Some(nme.ZOR)
- case "op_LeftShift" => Some(nme.LSL)
- case "op_RightShift" => Some(nme.ASR)
- case "op_Equality" => Some(nme.EQ)
- case "op_GreaterThan" => Some(nme.GT)
- case "op_LessThan" => Some(nme.LT)
- case "op_Inequality" => Some(nme.NE)
- case "op_GreaterThanOrEqual" => Some(nme.GE)
- case "op_LessThanOrEqual" => Some(nme.LE)
-
- /* op_MemberSelection is reserved in Scala */
-
- /* The standard does not assign operator symbols to op_Assign , op_SignedRightShift , op_UnsignedRightShift ,
- * and op_UnsignedRightShiftAssignment so those names will be used instead to invoke those methods. */
-
- /*
- The remaining binary operators are not overloaded in C# and are therefore not in widespread use. They have to be written in full.
-
- op_RightShiftAssignment >>=
- op_MultiplicationAssignment *=
- op_PointerToMemberSelection ->*
- op_SubtractionAssignment -=
- op_ExclusiveOrAssignment ^=
- op_LeftShiftAssignment <<=
- op_ModulusAssignment %=
- op_AdditionAssignment +=
- op_BitwiseAndAssignment &=
- op_BitwiseOrAssignment |=
- op_Comma ,
- op_DivisionAssignment /=
- */
- case _ => None
- }
- case _ => None
- }
-
- if (method.IsConstructor()) return nme.CONSTRUCTOR;
- val name = method.Name;
- if (method.IsStatic()) {
- if(method.IsSpecialName) {
- val paramsArity = method.GetParameters().size
- // handle operator overload, otherwise handle as any static method
- val operName = operatorOverload(name, paramsArity)
- if (operName.isDefined) { return operName.get; }
- }
- return newTermName(name);
- }
- val params = method.GetParameters();
- name match {
- case "GetHashCode" if (params.length == 0) => nme.hashCode_;
- case "ToString" if (params.length == 0) => nme.toString_;
- case "Finalize" if (params.length == 0) => nme.finalize_;
- case "Equals" if (params.length == 1 && params(0).ParameterType == clrTypes.OBJECT) =>
- nme.equals_;
- case "Invoke" if (clrTypes.isDelegateType(method.DeclaringType)) => nme.apply;
- case _ => newTermName(name);
- }
- }
-
- //##########################################################################
-
- private def methodType(method: MethodBase, rettype: MSILType): Symbol => Type = {
- val rtype = getCLSType(rettype);
- if (rtype == null) null else methodType(method, rtype);
- }
-
- /** Return a method type for the given method. */
- private def methodType(method: MethodBase, rettype: Type): Symbol => Type =
- methodType(method.GetParameters().map(_.ParameterType), rettype);
-
- /** Return a method type for the provided argument types and return type. */
- private def methodType(argtypes: Array[MSILType], rettype: Type): Symbol => Type = {
- def paramType(typ: MSILType): Type =
- if (typ eq clrTypes.OBJECT) definitions.AnyClass.tpe // TODO a hack to compile scalalib, should be definitions.AnyRefClass.tpe
- else getCLSType(typ);
- val ptypes = argtypes.map(paramType).toList;
- if (ptypes.contains(null)) null
- else method => JavaMethodType(method.newSyntheticValueParams(ptypes), rettype);
- }
-
- //##########################################################################
-
- private def getClassType(typ: MSILType): Type = {
- assert(typ != null);
- val res = rootMirror.getClassByName(typ.FullName.replace('+', '.') : TypeName).tpe;
- //if (res.isError())
- // global.reporter.error("unknown class reference " + type.FullName);
- res
- }
-
- private def getCLSType(typ: MSILType): Type = { // getCLS returns non-null for types GenMSIL can handle, be they CLS-compliant or not
- if (typ.IsTMVarUsage())
- /* START CLR generics (snippet 5) */
- getCLRType(typ)
- /* END CLR generics (snippet 5) */
- /* START CLR non-generics (snippet 5)
- null
- END CLR non-generics (snippet 5) */
- else if ( /* TODO hack: if UBYTE is uncommented, "ambiguous reference to overloaded definition" ensues, for example for System.Math.Max(x, y) */
- typ == clrTypes.USHORT || typ == clrTypes.UINT || typ == clrTypes.ULONG
- /* || typ == clrTypes.UBYTE */
- || typ.IsNotPublic() || typ.IsNestedPrivate()
- || typ.IsNestedAssembly() || typ.IsNestedFamANDAssem()
- || typ.IsPointer()
- || (typ.IsArray() && getCLRType(typ.GetElementType()) == null) /* TODO hack: getCLR instead of getCLS */
- || (typ.IsByRef() && !typ.GetElementType().CanBeTakenAddressOf()))
- null
- else
- getCLRType(typ)
- }
-
- private def getCLRTypeIfPrimitiveNullOtherwise(typ: MSILType): Type =
- if (typ == clrTypes.OBJECT)
- definitions.ObjectClass.tpe;
- else if (typ == clrTypes.VALUE_TYPE)
- definitions.AnyValClass.tpe
- else if (typ == clrTypes.STRING)
- definitions.StringClass.tpe;
- else if (typ == clrTypes.VOID)
- definitions.UnitClass.tpe
- else if (typ == clrTypes.BOOLEAN)
- definitions.BooleanClass.tpe
- else if (typ == clrTypes.CHAR)
- definitions.CharClass.tpe
- else if ((typ == clrTypes.BYTE) || (typ == clrTypes.UBYTE)) // TODO U... is a hack to compile scalalib
- definitions.ByteClass.tpe
- else if ((typ == clrTypes.SHORT) || (typ == clrTypes.USHORT)) // TODO U... is a hack to compile scalalib
- definitions.ShortClass.tpe
- else if ((typ == clrTypes.INT) || (typ == clrTypes.UINT)) // TODO U... is a hack to compile scalalib
- definitions.IntClass.tpe
- else if ((typ == clrTypes.LONG) || (typ == clrTypes.ULONG)) // TODO U... is a hack to compile scalalib
- definitions.LongClass.tpe
- else if (typ == clrTypes.FLOAT)
- definitions.FloatClass.tpe
- else if (typ == clrTypes.DOUBLE)
- definitions.DoubleClass.tpe
- else null
-
-
- private def getCLRType(tMSIL: MSILType): Type = {
- var res = getCLRTypeIfPrimitiveNullOtherwise(tMSIL)
- if (res != null) res
- else if (tMSIL.isInstanceOf[ConstructedType]) {
- val ct = tMSIL.asInstanceOf[ConstructedType]
- /* START CLR generics (snippet 6) */
- val cttpArgs = ct.typeArgs.map(tmsil => getCLRType(tmsil)).toList
- appliedType(getCLRType(ct.instantiatedType), cttpArgs)
- /* END CLR generics (snippet 6) */
- /* START CLR non-generics (snippet 6)
- getCLRType(ct.instantiatedType)
- END CLR non-generics (snippet 6) */
- } else if (tMSIL.isInstanceOf[TMVarUsage]) {
- /* START CLR generics (snippet 7) */
- val tVarUsage = tMSIL.asInstanceOf[TMVarUsage]
- val tVarNumber = tVarUsage.Number
- if (tVarUsage.isTVar) classTParams(tVarNumber).typeConstructor // shouldn't fail, just return definitions.AnyClass.tpe at worst
- else methodTParams(tVarNumber).typeConstructor // shouldn't fail, just return definitions.AnyClass.tpe at worst
- /* END CLR generics (snippet 7) */
- /* START CLR non-generics (snippet 7)
- null // definitions.ObjectClass.tpe
- END CLR non-generics (snippet 7) */
- } else if (tMSIL.IsArray()) {
- var elemtp = getCLRType(tMSIL.GetElementType())
- // cut&pasted from ClassfileParser
- // make unbounded Array[T] where T is a type variable into Array[T with Object]
- // (this is necessary because such arrays have a representation which is incompatible
- // with arrays of primitive types).
- // TODO does that incompatibility also apply to .NET?
- if (elemtp.typeSymbol.isAbstractType && !(elemtp <:< definitions.ObjectClass.tpe))
- elemtp = intersectionType(List(elemtp, definitions.ObjectClass.tpe))
- appliedType(definitions.ArrayClass.tpe, List(elemtp))
- } else {
- res = clrTypes.sym2type.get(tMSIL) match {
- case Some(sym) => sym.tpe
- case None => if (tMSIL.IsByRef && tMSIL.GetElementType.IsValueType) {
- val addressed = getCLRType(tMSIL.GetElementType)
- val clasym = addressed.typeSymbolDirect // TODO should be .typeSymbol?
- clasym.info.load(clasym)
- val secondAttempt = clrTypes.sym2type.get(tMSIL)
- secondAttempt match { case Some(sym) => sym.tpe
- case None => null
- }
- } else getClassType(tMSIL)
- }
- if (res == null)
- null // TODO new RuntimeException()
- else res
- }
- }
-
- // the constant values arrive as Java box classes (e.g. Integer, Boolean, Character);
- // java.lang.Number is used to extract numeric values (not applicable to Boolean or Character)
- // see ch.epfl.lamp.compiler.msil.util.PEStream.java
- def getConstant(constType: Type, value: Object): Constant = {
- val typeClass = constType.typeSymbol
- if (typeClass == definitions.BooleanClass)
- Constant(value.asInstanceOf[java.lang.Boolean].booleanValue)
- else if (typeClass == definitions.ByteClass)
- Constant(value.asInstanceOf[java.lang.Number].byteValue)
- else if (typeClass == definitions.ShortClass)
- Constant(value.asInstanceOf[java.lang.Number].shortValue)
- else if (typeClass == definitions.CharClass)
- Constant(value.asInstanceOf[java.lang.Character].charValue)
- else if (typeClass == definitions.IntClass)
- Constant(value.asInstanceOf[java.lang.Number].intValue)
- else if (typeClass == definitions.LongClass)
- Constant(value.asInstanceOf[java.lang.Number].longValue)
- else if (typeClass == definitions.FloatClass)
- Constant(value.asInstanceOf[java.lang.Number].floatValue)
- else if (typeClass == definitions.DoubleClass)
- Constant(value.asInstanceOf[java.lang.Number].doubleValue)
- else if (typeClass == definitions.StringClass)
- Constant(value.asInstanceOf[java.lang.String])
- else
- abort("illegal value: " + value + ", class-symbol: " + typeClass)
- }
-
- def isDefinedAtgetConstant(constType: Type): Boolean = {
- val typeClass = constType.typeSymbol
- if ( (typeClass == definitions.BooleanClass)
- || (typeClass == definitions.ByteClass)
- || (typeClass == definitions.ShortClass)
- || (typeClass == definitions.CharClass)
- || (typeClass == definitions.IntClass)
- || (typeClass == definitions.LongClass)
- || (typeClass == definitions.FloatClass)
- || (typeClass == definitions.DoubleClass)
- || (typeClass == definitions.StringClass)
- )
- true
- else
- false
- }
-
- private def translateAttributes(typ: MSILType): Long = {
- var flags: Long = Flags.JAVA;
- if (typ.IsNotPublic() || typ.IsNestedPrivate()
- || typ.IsNestedAssembly() || typ.IsNestedFamANDAssem())
- flags = flags | Flags.PRIVATE;
- else if (typ.IsNestedFamily() || typ.IsNestedFamORAssem())
- flags = flags | Flags.PROTECTED;
- if (typ.IsAbstract())
- flags = flags | Flags.ABSTRACT;
- if (typ.IsSealed())
- flags = flags | Flags.FINAL;
- if (typ.IsInterface())
- flags = flags | Flags.INTERFACE | Flags.TRAIT | Flags.ABSTRACT;
-
- flags
- }
-
- private def translateAttributes(field: FieldInfo): Long = {
- var flags: Long = Flags.JAVA;
- if (field.IsPrivate() || field.IsAssembly() || field.IsFamilyAndAssembly())
- flags = flags | Flags.PRIVATE;
- else if (field.IsFamily() || field.IsFamilyOrAssembly())
- flags = flags | Flags.PROTECTED;
- if (field.IsInitOnly() || field.IsLiteral())
- flags = flags | Flags.FINAL;
- else
- flags = flags | Flags.MUTABLE;
- if (field.IsStatic)
- flags = flags | Flags.STATIC
-
- flags
- }
-
- private def translateAttributes(method: MethodBase): Long = {
- var flags: Long = Flags.JAVA;
- if (method.IsPrivate() || method.IsAssembly() || method.IsFamilyAndAssembly())
- flags = flags | Flags.PRIVATE;
- else if (method.IsFamily() || method.IsFamilyOrAssembly())
- flags = flags | Flags.PROTECTED;
- if (method.IsAbstract())
- flags = flags | Flags.DEFERRED;
- if (method.IsStatic)
- flags = flags | Flags.STATIC
-
- flags
- }
-}
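// A reduced, runnable sketch of the arity-keyed mapping getName implements above from CLR
// "op_*" special names to Scala operator names, using plain strings where the compiler uses
// nme.* name constants; only a subset of the table is reproduced.
object ClrOperatorNameSketch {
  private val unary = Map(
    "op_UnaryNegation"  -> "unary_-", "op_UnaryPlus"      -> "unary_+",
    "op_LogicalNot"     -> "unary_!", "op_OnesComplement" -> "unary_~")
  private val binary = Map(
    "op_Addition"   -> "+",  "op_Subtraction" -> "-", "op_Multiply"    -> "*",
    "op_Division"   -> "/",  "op_Modulus"     -> "%", "op_Equality"    -> "==",
    "op_Inequality" -> "!=", "op_LessThan"    -> "<", "op_GreaterThan" -> ">")

  def scalaName(clrName: String, paramsArity: Int): Option[String] = paramsArity match {
    case 1 => unary.get(clrName)
    case 2 => binary.get(clrName)
    case _ => None
  }
}
// ClrOperatorNameSketch.scalaName("op_Addition", 2) == Some("+")
// ClrOperatorNameSketch.scalaName("op_Assign", 2)   == None   (no operator symbol assigned, kept as a plain name)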
diff --git a/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala b/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala
index bacd8c39e1..0dcf4d00b7 100644
--- a/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala
+++ b/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala
@@ -8,8 +8,6 @@ package transform
import symtab._
import Flags._
-import scala.collection.{ mutable, immutable }
-import scala.collection.mutable.ListBuffer
abstract class AddInterfaces extends InfoTransform { self: Erasure =>
import global._ // the global environment
@@ -94,7 +92,7 @@ abstract class AddInterfaces extends InfoTransform { self: Erasure =>
impl.typeOfThis = iface.typeOfThis
impl.thisSym setName iface.thisSym.name
}
- impl.sourceFile = iface.sourceFile
+ impl.associatedFile = iface.sourceFile
if (inClass)
iface.owner.info.decls enter impl
@@ -111,7 +109,7 @@ abstract class AddInterfaces extends InfoTransform { self: Erasure =>
def implClass(iface: Symbol): Symbol = {
iface.info
- implClassMap.getOrElse(iface, atPhase(implClassPhase) {
+ implClassMap.getOrElse(iface, enteringPhase(implClassPhase) {
if (iface.implClass eq NoSymbol)
debuglog(s"${iface.fullLocationString} has no implClass yet, creating it now.")
else
@@ -145,7 +143,7 @@ abstract class AddInterfaces extends InfoTransform { self: Erasure =>
decls enter (
implClass.newMethod(nme.MIXIN_CONSTRUCTOR, implClass.pos)
- setInfo MethodType(Nil, UnitClass.tpe)
+ setInfo MethodType(Nil, UnitTpe)
)
}
@@ -176,8 +174,8 @@ abstract class AddInterfaces extends InfoTransform { self: Erasure =>
override def complete(implSym: Symbol) {
debuglog("LazyImplClassType completing " + implSym)
- /** If `tp` refers to a non-interface trait, return a
- * reference to its implementation class. Otherwise return `tp`.
+ /* If `tp` refers to a non-interface trait, return a
+ * reference to its implementation class. Otherwise return `tp`.
*/
def mixinToImplClass(tp: Type): Type = AddInterfaces.this.erasure(implSym) {
tp match { //@MATN: no normalize needed (comes after erasure)
@@ -191,12 +189,12 @@ abstract class AddInterfaces extends InfoTransform { self: Erasure =>
case ClassInfoType(parents, decls, _) =>
assert(phase == implClassPhase, tp)
// Impl class parents: Object first, matching interface last.
- val implParents = ObjectClass.tpe +: (parents.tail map mixinToImplClass filter (_.typeSymbol != ObjectClass)) :+ iface.tpe
+ val implParents = ObjectTpe +: (parents.tail map mixinToImplClass filter (_.typeSymbol != ObjectClass)) :+ iface.tpe
ClassInfoType(implParents, implDecls(implSym, decls), implSym)
case PolyType(_, restpe) =>
implType(restpe)
}
- implSym setInfo implType(beforeErasure(iface.info))
+ implSym setInfo implType(enteringErasure(iface.info))
}
override def load(clazz: Symbol) { complete(clazz) }
@@ -211,7 +209,7 @@ abstract class AddInterfaces extends InfoTransform { self: Erasure =>
case Nil => Nil
case hd :: tl =>
assert(!hd.typeSymbol.isTrait, clazz)
- if (clazz.isTrait) erasedTypeRef(ObjectClass) :: tl
+ if (clazz.isTrait) ObjectTpe :: tl
else parents
}
val decls1 = scopeTransform(clazz)(
@@ -278,7 +276,7 @@ abstract class AddInterfaces extends InfoTransform { self: Erasure =>
*/
private def addMixinConstructorDef(clazz: Symbol, stats: List[Tree]): List[Tree] =
if (treeInfo.firstConstructor(stats) != EmptyTree) stats
- else DefDef(clazz.primaryConstructor, Block(List(), Literal(Constant()))) :: stats
+ else DefDef(clazz.primaryConstructor, Block(List(), Literal(Constant(())))) :: stats
private def implTemplate(clazz: Symbol, templ: Template): Template = atPos(templ.pos) {
val templ1 = (
@@ -317,10 +315,10 @@ abstract class AddInterfaces extends InfoTransform { self: Erasure =>
// body until now, because the typer knows that Any has no
// constructor and won't accept a call to super.init.
assert((clazz isSubClass AnyValClass) || clazz.info.parents.isEmpty, clazz)
- Block(List(Apply(gen.mkSuperSelect, Nil)), expr)
+ Block(List(Apply(gen.mkSuperInitCall, Nil)), expr)
case Block(stats, expr) =>
- // needs `hasSymbol` check because `supercall` could be a block (named / default args)
+ // needs `hasSymbolField` check because `supercall` could be a block (named / default args)
val (presuper, supercall :: rest) = stats span (t => t.hasSymbolWhich(_ hasFlag PRESUPER))
treeCopy.Block(tree, presuper ::: (supercall :: mixinConstructorCalls ::: rest), expr)
}
@@ -352,7 +350,7 @@ abstract class AddInterfaces extends InfoTransform { self: Erasure =>
val mix1 = mix
if (mix == tpnme.EMPTY) mix
else {
- val ps = beforeErasure {
+ val ps = enteringErasure {
sym.info.parents dropWhile (p => p.symbol.name != mix)
}
assert(!ps.isEmpty, tree);
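// The `implParents` hunk above orders the impl-class parents as Object first, the mapped mixin
// parents in between, and the interface itself last. A tiny sketch of that `+:` / `:+` ordering
// with plain strings; the "$class" suffix only illustrates mixinToImplClass and is not the real
// transformation.
object ImplParentOrderSketch extends App {
  val parents = List("AnyRef", "Foo", "Bar")                  // erased parents of the trait
  val iface   = "Iface"
  val implParents = "Object" +: parents.tail.map(_ + "$class") :+ iface
  println(implParents)                                        // List(Object, Foo$class, Bar$class, Iface)
}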
diff --git a/src/compiler/scala/tools/nsc/transform/CleanUp.scala b/src/compiler/scala/tools/nsc/transform/CleanUp.scala
index 7a0b034fd0..a37ef29355 100644
--- a/src/compiler/scala/tools/nsc/transform/CleanUp.scala
+++ b/src/compiler/scala/tools/nsc/transform/CleanUp.scala
@@ -32,21 +32,8 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
newStaticInits.clear()
symbolsStoredAsStatic.clear()
}
- private def savingStatics[T](body: => T): T = {
- val savedNewStaticMembers : mutable.Buffer[Tree] = newStaticMembers.clone()
- val savedNewStaticInits : mutable.Buffer[Tree] = newStaticInits.clone()
- val savedSymbolsStoredAsStatic : mutable.Map[String, Symbol] = symbolsStoredAsStatic.clone()
- val result = body
-
- clearStatics()
- newStaticMembers ++= savedNewStaticMembers
- newStaticInits ++= savedNewStaticInits
- symbolsStoredAsStatic ++= savedSymbolsStoredAsStatic
-
- result
- }
private def transformTemplate(tree: Tree) = {
- val Template(parents, self, body) = tree
+ val Template(_, _, body) = tree
clearStatics()
val newBody = transformTrees(body)
val templ = deriveTemplate(tree)(_ => transformTrees(newStaticMembers.toList) ::: newBody)
@@ -60,21 +47,6 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
private var localTyper: analyzer.Typer = null
- private object MethodDispatchType extends scala.Enumeration {
- val NO_CACHE, MONO_CACHE, POLY_CACHE = Value
- }
- import MethodDispatchType.{ NO_CACHE, MONO_CACHE, POLY_CACHE }
- private def dispatchType() = settings.refinementMethodDispatch.value match {
- case "no-cache" => NO_CACHE
- case "mono-cache" => MONO_CACHE
- case "poly-cache" => POLY_CACHE
- }
-
- def shouldRewriteTry(tree: Try) = {
- val sym = tree.tpe.typeSymbol
- forMSIL && (sym != UnitClass) && (sym != NothingClass)
- }
-
private def typedWithPos(pos: Position)(tree: Tree) =
localTyper.typedPos(pos)(tree)
@@ -91,7 +63,7 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
def transformApplyDynamic(ad: ApplyDynamic) = {
val qual0 = ad.qual
val params = ad.args
- if (settings.logReflectiveCalls.value)
+ if (settings.logReflectiveCalls)
unit.echo(ad.pos, "method invocation uses reflection")
val typedPos = typedWithPos(ad.pos) _
@@ -106,7 +78,7 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
if (isFinal) FINAL else 0
)
- val varSym = currentClass.newVariable(mkTerm("" + forName), ad.pos, flags) setInfoAndEnter forType
+ val varSym = currentClass.newVariable(mkTerm("" + forName), ad.pos, flags.toLong) setInfoAndEnter forType
if (!isFinal)
varSym.addAnnotation(VolatileAttr)
@@ -120,7 +92,7 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
}
def addStaticMethodToClass(forBody: (Symbol, Symbol) => Tree): Symbol = {
- val methSym = currentClass.newMethod(mkTerm(nme.reflMethodName), ad.pos, STATIC | SYNTHETIC)
+ val methSym = currentClass.newMethod(mkTerm(nme.reflMethodName.toString), ad.pos, STATIC | SYNTHETIC)
val params = methSym.newSyntheticValueParams(List(ClassClass.tpe))
methSym setInfoAndEnter MethodType(params, MethodClass.tpe)
@@ -132,147 +104,79 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
def fromTypesToClassArrayLiteral(paramTypes: List[Type]): Tree =
ArrayValue(TypeTree(ClassClass.tpe), paramTypes map LIT)
- /* ... */
- def reflectiveMethodCache(method: String, paramTypes: List[Type]): Symbol = dispatchType match {
- case NO_CACHE =>
-
- /* Implementation of the cache is as follows for method "def xyz(a: A, b: B)":
-
- var reflParams$Cache: Array[Class[_]] = Array[JClass](classOf[A], classOf[B])
-
- def reflMethod$Method(forReceiver: JClass[_]): JMethod =
- forReceiver.getMethod("xyz", reflParams$Cache)
-
- */
-
- val reflParamsCacheSym: Symbol =
- addStaticVariableToClass(nme.reflParamsCacheName, arrayType(ClassClass.tpe), fromTypesToClassArrayLiteral(paramTypes), true)
-
- addStaticMethodToClass((_, forReceiverSym) =>
- gen.mkMethodCall(REF(forReceiverSym), Class_getMethod, Nil, List(LIT(method), REF(reflParamsCacheSym)))
- )
-
- case MONO_CACHE =>
-
- /* Implementation of the cache is as follows for method "def xyz(a: A, b: B)"
- (but with a SoftReference wrapping reflClass$Cache, similarly in the poly Cache) :
-
- var reflParams$Cache: Array[Class[_]] = Array[JClass](classOf[A], classOf[B])
-
- var reflMethod$Cache: JMethod = null
-
- var reflClass$Cache: JClass[_] = null
-
- def reflMethod$Method(forReceiver: JClass[_]): JMethod = {
- if (reflClass$Cache != forReceiver) {
- reflMethod$Cache = forReceiver.getMethod("xyz", reflParams$Cache)
- reflClass$Cache = forReceiver
- }
- reflMethod$Cache
- }
-
- */
-
- val reflParamsCacheSym: Symbol =
- addStaticVariableToClass(nme.reflParamsCacheName, arrayType(ClassClass.tpe), fromTypesToClassArrayLiteral(paramTypes), true)
-
- val reflMethodCacheSym: Symbol =
- addStaticVariableToClass(nme.reflMethodCacheName, MethodClass.tpe, NULL, false)
+ def reflectiveMethodCache(method: String, paramTypes: List[Type]): Symbol = {
+ /* Implementation of the cache is as follows for method "def xyz(a: A, b: B)"
+ (SoftReference so that it does not interfere with classloader garbage collection,
+ see ticket #2365 for details):
- val reflClassCacheSym: Symbol =
- addStaticVariableToClass(nme.reflClassCacheName, SoftReferenceClass.tpe, NULL, false)
+ var reflParams$Cache: Array[Class[_]] = Array[JClass](classOf[A], classOf[B])
- def isCacheEmpty(receiver: Symbol): Tree =
- reflClassCacheSym.IS_NULL() OR (reflClassCacheSym.GET() OBJ_NE REF(receiver))
+ var reflPoly$Cache: SoftReference[scala.runtime.MethodCache] = new SoftReference(new EmptyMethodCache())
- addStaticMethodToClass((_, forReceiverSym) =>
- BLOCK(
- IF (isCacheEmpty(forReceiverSym)) THEN BLOCK(
- REF(reflMethodCacheSym) === ((REF(forReceiverSym) DOT Class_getMethod)(LIT(method), REF(reflParamsCacheSym))) ,
- REF(reflClassCacheSym) === gen.mkSoftRef(REF(forReceiverSym)),
- UNIT
- ) ENDIF,
- REF(reflMethodCacheSym)
- )
- )
-
- case POLY_CACHE =>
-
- /* Implementation of the cache is as follows for method "def xyz(a: A, b: B)"
- (SoftReference so that it does not interfere with classloader garbage collection, see ticket
- #2365 for details):
-
- var reflParams$Cache: Array[Class[_]] = Array[JClass](classOf[A], classOf[B])
-
- var reflPoly$Cache: SoftReference[scala.runtime.MethodCache] = new SoftReference(new EmptyMethodCache())
+ def reflMethod$Method(forReceiver: JClass[_]): JMethod = {
+ var methodCache: MethodCache = reflPoly$Cache.find(forReceiver)
+ if (methodCache eq null) {
+ methodCache = new EmptyMethodCache
+ reflPoly$Cache = new SoftReference(methodCache)
+ }
+ var method: JMethod = methodCache.find(forReceiver)
+ if (method ne null)
+ return method
+ else {
+ method = ScalaRunTime.ensureAccessible(forReceiver.getMethod("xyz", reflParams$Cache))
+ reflPoly$Cache = new SoftReference(methodCache.add(forReceiver, method))
+ return method
+ }
+ }
+ */
- def reflMethod$Method(forReceiver: JClass[_]): JMethod = {
- var methodCache: MethodCache = reflPoly$Cache.find(forReceiver)
- if (methodCache eq null) {
- methodCache = new EmptyMethodCache
- reflPoly$Cache = new SoftReference(methodCache)
- }
- var method: JMethod = methodCache.find(forReceiver)
- if (method ne null)
- return method
- else {
- method = ScalaRunTime.ensureAccessible(forReceiver.getMethod("xyz", reflParams$Cache))
- reflPoly$Cache = new SoftReference(methodCache.add(forReceiver, method))
- return method
- }
- }
+ val reflParamsCacheSym: Symbol =
+ addStaticVariableToClass(nme.reflParamsCacheName, arrayType(ClassClass.tpe), fromTypesToClassArrayLiteral(paramTypes), true)
- */
+ def mkNewPolyCache = gen.mkSoftRef(NEW(TypeTree(EmptyMethodCacheClass.tpe)))
+ val reflPolyCacheSym: Symbol = addStaticVariableToClass(nme.reflPolyCacheName, SoftReferenceClass.tpe, mkNewPolyCache, false)
- val reflParamsCacheSym: Symbol =
- addStaticVariableToClass(nme.reflParamsCacheName, arrayType(ClassClass.tpe), fromTypesToClassArrayLiteral(paramTypes), true)
+ def getPolyCache = gen.mkCast(fn(REF(reflPolyCacheSym), nme.get), MethodCacheClass.tpe)
- def mkNewPolyCache = gen.mkSoftRef(NEW(TypeTree(EmptyMethodCacheClass.tpe)))
- val reflPolyCacheSym: Symbol = (
- addStaticVariableToClass(nme.reflPolyCacheName, SoftReferenceClass.tpe, mkNewPolyCache, false)
- )
- def getPolyCache = gen.mkCast(fn(REF(reflPolyCacheSym), nme.get), MethodCacheClass.tpe)
+ addStaticMethodToClass((reflMethodSym, forReceiverSym) => {
+ val methodCache = reflMethodSym.newVariable(mkTerm("methodCache"), ad.pos) setInfo MethodCacheClass.tpe
+ val methodSym = reflMethodSym.newVariable(mkTerm("method"), ad.pos) setInfo MethodClass.tpe
- addStaticMethodToClass((reflMethodSym, forReceiverSym) => {
- val methodCache = reflMethodSym.newVariable(mkTerm("methodCache"), ad.pos) setInfo MethodCacheClass.tpe
- val methodSym = reflMethodSym.newVariable(mkTerm("method"), ad.pos) setInfo MethodClass.tpe
+ BLOCK(
+ VAL(methodCache) === getPolyCache,
+ IF (REF(methodCache) OBJ_EQ NULL) THEN BLOCK(
+ REF(methodCache) === NEW(TypeTree(EmptyMethodCacheClass.tpe)),
+ REF(reflPolyCacheSym) === gen.mkSoftRef(REF(methodCache))
+ ) ENDIF,
+ VAL(methodSym) === (REF(methodCache) DOT methodCache_find)(REF(forReceiverSym)),
+ IF (REF(methodSym) OBJ_NE NULL) .
+ THEN (Return(REF(methodSym)))
+ ELSE {
+ def methodSymRHS = ((REF(forReceiverSym) DOT Class_getMethod)(LIT(method), REF(reflParamsCacheSym)))
+ def cacheRHS = ((REF(methodCache) DOT methodCache_add)(REF(forReceiverSym), REF(methodSym)))
BLOCK(
- VAR(methodCache) === getPolyCache,
- IF (REF(methodCache) OBJ_EQ NULL) THEN BLOCK(
- REF(methodCache) === NEW(TypeTree(EmptyMethodCacheClass.tpe)),
- REF(reflPolyCacheSym) === gen.mkSoftRef(REF(methodCache))
- ) ENDIF,
-
- VAR(methodSym) === (REF(methodCache) DOT methodCache_find)(REF(forReceiverSym)),
- IF (REF(methodSym) OBJ_NE NULL) .
- THEN (Return(REF(methodSym)))
- ELSE {
- def methodSymRHS = ((REF(forReceiverSym) DOT Class_getMethod)(LIT(method), REF(reflParamsCacheSym)))
- def cacheRHS = ((REF(methodCache) DOT methodCache_add)(REF(forReceiverSym), REF(methodSym)))
- BLOCK(
- REF(methodSym) === (REF(ensureAccessibleMethod) APPLY (methodSymRHS)),
- REF(reflPolyCacheSym) === gen.mkSoftRef(cacheRHS),
- Return(REF(methodSym))
- )
- }
+ REF(methodSym) === (REF(ensureAccessibleMethod) APPLY (methodSymRHS)),
+ REF(reflPolyCacheSym) === gen.mkSoftRef(cacheRHS),
+ Return(REF(methodSym))
)
- })
-
+ }
+ )
+ })
}
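// A self-contained sketch of the SoftReference-backed poly cache described in the comment kept
// above, written against plain java.lang.reflect; a mutable Map stands in for
// scala.runtime.MethodCache, and the "equals"/Object signature is only an example.
import java.lang.ref.SoftReference
import java.lang.reflect.Method
import scala.collection.mutable

object PolyCacheSketch {
  private val reflParamsCache = Array[Class[_]](classOf[Object])
  private var reflPolyCache: SoftReference[mutable.Map[Class[_], Method]] =
    new SoftReference(mutable.Map.empty)

  def reflMethod(forReceiver: Class[_]): Method = {
    var cache = reflPolyCache.get
    if (cache eq null) {                                      // the soft reference was cleared: rebuild the cache
      cache = mutable.Map.empty
      reflPolyCache = new SoftReference(cache)
    }
    cache.getOrElseUpdate(forReceiver, forReceiver.getMethod("equals", reflParamsCache: _*))
  }
}
// PolyCacheSketch.reflMethod(classOf[String]) resolves String.equals(Object) once, then serves it from the cache.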
/* ### HANDLING METHODS NORMALLY COMPILED TO OPERATORS ### */
def testForName(name: Name): Tree => Tree = t => (
if (nme.CommonOpNames(name))
- gen.mkMethodCall(definitions.Boxes_isNumberOrBool, t :: Nil)
+ gen.mkMethodCall(Boxes_isNumberOrBool, t :: Nil)
else if (nme.BooleanOpNames(name))
t IS_OBJ BoxedBooleanClass.tpe
else
- gen.mkMethodCall(definitions.Boxes_isNumber, t :: Nil)
+ gen.mkMethodCall(Boxes_isNumber, t :: Nil)
)
- /** The Tree => Tree function in the return is necessary to prevent the original qual
+ /* The Tree => Tree function in the return is necessary to prevent the original qual
* from being duplicated in the resulting code. It may be a side-effecting expression,
* so all the test logic is routed through gen.evalOnce, which creates a block like
* { val x$1 = qual; if (x$1.foo || x$1.bar) f1(x$1) else f2(x$1) }
@@ -284,7 +188,7 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
else if (params.tail.isEmpty) nme.primitiveInfixMethodName(name)
else nme.NO_NAME
)
- definitions.getDeclIfDefined(BoxesRunTimeClass, methodName) match {
+ getDeclIfDefined(BoxesRunTimeClass, methodName) match {
case NoSymbol => None
case sym => assert(!sym.isOverloaded, sym) ; Some((sym, testForName(name)))
}
@@ -322,11 +226,11 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
// If there's any chance this signature could be met by an Array.
val isArrayMethodSignature = {
def typesMatchApply = paramTypes match {
- case List(tp) => tp <:< IntClass.tpe
+ case List(tp) => tp <:< IntTpe
case _ => false
}
def typesMatchUpdate = paramTypes match {
- case List(tp1, tp2) => (tp1 <:< IntClass.tpe) && isMaybeUnit
+ case List(tp1, tp2) => (tp1 <:< IntTpe) && isMaybeUnit
case _ => false
}
@@ -357,13 +261,13 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
else if (resultSym == ObjectClass) tree // no cast necessary
else gen.mkCast(tree, boxedResType) // cast to expected type
- /** Normal non-Array call */
+ /* Normal non-Array call */
def genDefaultCall = {
// reflective method call machinery
val invokeName = MethodClass.tpe member nme.invoke_ // scala.reflect.Method.invoke(...)
def cache = REF(reflectiveMethodCache(ad.symbol.name.toString, paramTypes)) // cache Symbol
- def lookup = Apply(cache, List(qual1() GETCLASS)) // get Method object from cache
- def invokeArgs = ArrayValue(TypeTree(ObjectClass.tpe), params) // args for invocation
+ def lookup = Apply(cache, List(qual1() GETCLASS())) // get Method object from cache
+ def invokeArgs = ArrayValue(TypeTree(ObjectTpe), params) // args for invocation
def invocation = (lookup DOT invokeName)(qual1(), invokeArgs) // .invoke(qual1, ...)
// exception catching machinery
@@ -375,7 +279,7 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
fixResult(TRY (invocation) CATCH { CASE (catchVar) ==> catchBody } ENDTRY)
}
- /** A possible primitive method call, represented by methods in BoxesRunTime. */
+ /* A possible primitive method call, represented by methods in BoxesRunTime. */
def genValueCall(operator: Symbol) = fixResult(REF(operator) APPLY args)
def genValueCallWithTest = {
getPrimitiveReplacementForStructuralCall(methSym.name) match {
@@ -386,7 +290,7 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
}
}
- /** A native Array call. */
+ /* A native Array call. */
def genArrayCall = fixResult(
methSym.name match {
case nme.length => REF(boxMethod(IntClass)) APPLY (REF(arrayLengthMethod) APPLY args)
@@ -397,9 +301,9 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
mustBeUnit = methSym.name == nme.update
)
- /** A conditional Array call, when we can't determine statically if the argument is
- * an Array, but the structural type method signature is consistent with an Array method
- * so we have to generate both kinds of code.
+ /* A conditional Array call, when we can't determine statically if the argument is
+ * an Array, but the structural type method signature is consistent with an Array method
+ * so we have to generate both kinds of code.
*/
def genArrayCallWithTest =
IF ((qual1() GETCLASS()) DOT nme.isArray) THEN genArrayCall ELSE genDefaultCall
@@ -413,99 +317,75 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
}
}
- if (settings.refinementMethodDispatch.value == "invoke-dynamic") {
-/* val guardCallSite: Tree = {
- val cachedClass = addStaticVariableToClass("cachedClass", definitions.ClassClass.tpe, EmptyTree)
- val tmpVar = currentOwner.newVariable(ad.pos, unit.freshTermName(ad.pos, "x")).setInfo(definitions.AnyRefClass.tpe)
- atPos(ad.pos)(Block(List(
- ValDef(tmpVar, transform(qual))),
- If(Apply(Select(gen.mkAttributedRef(cachedClass), nme.EQ), List(getClass(Ident(tmpVar)))),
- Block(List(Assign(gen.mkAttributedRef(cachedClass), getClass(Ident(tmpVar)))),
- treeCopy.ApplyDynamic(ad, Ident(tmpVar), transformTrees(params))),
- EmptyTree)))
- }
- //println(guardCallSite)
-*/
- localTyper.typed(treeCopy.ApplyDynamic(ad, transform(qual), transformTrees(params)))
- }
- else {
-
- /* ### BODY OF THE TRANSFORMATION -> remember we're in case ad@ApplyDynamic(qual, params) ### */
-
- /* This creates the tree that does the reflective call (see general comment
- * on the apply-dynamic tree for its format). This tree is simply composed
- * of three successive calls, first to getClass on the callee, then to
- * getMethod on the class, then to invoke on the method.
- * - getMethod needs an array of classes for choosing one amongst many
- * overloaded versions of the method. This is provided by paramTypeClasses
- * and must be done on the static type as Scala's dispatching is static on
- * the parameters.
- * - invoke needs an array of AnyRefs that are the method's arguments. The
- * erasure phase guarantees that any parameter passed to a dynamic apply
- * is compatible (through boxing). Boxed ints et al. is what invoke expects
- * when the applied method expects ints, hence no change needed there.
- * - in the end, the result of invoke must be fixed, again to deal with arrays.
- * This is provided by fixResult. fixResult will cast the invocation's result
- * to the method's return type, which is generally ok, except when this type
- * is a value type (int et al.) in which case it must cast to the boxed version
- * because invoke only returns object and erasure made sure the result is
- * expected to be an AnyRef. */
- val t: Tree = {
- val (mparams, resType) = ad.symbol.tpe match {
- case MethodType(mparams, resType) =>
- assert(params.length == mparams.length, ((params, mparams)))
- (mparams, resType)
- case tpe @ OverloadedType(pre, alts) =>
- unit.warning(ad.pos, s"Overloaded type reached the backend! This is a bug in scalac.\n Symbol: ${ad.symbol}\n Overloads: $tpe\n Arguments: " + ad.args.map(_.tpe))
- alts filter (_.paramss.flatten.size == params.length) map (_.tpe) match {
- case mt @ MethodType(mparams, resType) :: Nil =>
- unit.warning(NoPosition, "Only one overload has the right arity, proceeding with overload " + mt)
- (mparams, resType)
- case _ =>
- unit.error(ad.pos, "Cannot resolve overload.")
- (Nil, NoType)
- }
- }
- typedPos {
- val sym = currentOwner.newValue(mkTerm("qual"), ad.pos) setInfo qual0.tpe
- qual = REF(sym)
-
- BLOCK(
- VAL(sym) === qual0,
- callAsReflective(mparams map (_.tpe), resType)
- )
- }
+ {
+
+ /* ### BODY OF THE TRANSFORMATION -> remember we're in case ad@ApplyDynamic(qual, params) ### */
+
+ /* This creates the tree that does the reflective call (see general comment
+ * on the apply-dynamic tree for its format). This tree is simply composed
+ * of three successive calls, first to getClass on the callee, then to
+ * getMethod on the class, then to invoke on the method.
+ * - getMethod needs an array of classes for choosing one amongst many
+ * overloaded versions of the method. This is provided by paramTypeClasses
+ * and must be done on the static type as Scala's dispatching is static on
+ * the parameters.
+ * - invoke needs an array of AnyRefs that are the method's arguments. The
+ * erasure phase guarantees that any parameter passed to a dynamic apply
+ * is compatible (through boxing). Boxed ints et al. is what invoke expects
+ * when the applied method expects ints, hence no change needed there.
+ * - in the end, the result of invoke must be fixed, again to deal with arrays.
+ * This is provided by fixResult. fixResult will cast the invocation's result
+ * to the method's return type, which is generally ok, except when this type
+ * is a value type (int et al.) in which case it must cast to the boxed version
+ * because invoke only returns object and erasure made sure the result is
+ * expected to be an AnyRef. */
+ val t: Tree = {
+ val (mparams, resType) = ad.symbol.tpe match {
+ case MethodType(mparams, resType) =>
+ assert(params.length == mparams.length, ((params, mparams)))
+ (mparams, resType)
+ case tpe @ OverloadedType(pre, alts) =>
+ unit.warning(ad.pos, s"Overloaded type reached the backend! This is a bug in scalac.\n Symbol: ${ad.symbol}\n Overloads: $tpe\n Arguments: " + ad.args.map(_.tpe))
+ alts filter (_.paramss.flatten.size == params.length) map (_.tpe) match {
+ case mt @ MethodType(mparams, resType) :: Nil =>
+ unit.warning(NoPosition, "Only one overload has the right arity, proceeding with overload " + mt)
+ (mparams, resType)
+ case _ =>
+ unit.error(ad.pos, "Cannot resolve overload.")
+ (Nil, NoType)
+ }
}
+ typedPos {
+ val sym = currentOwner.newValue(mkTerm("qual"), ad.pos) setInfo qual0.tpe
+ qual = REF(sym)
- /* For testing purposes, the dynamic application's condition
- * can be printed-out in great detail. Remove? */
- if (settings.debug.value) {
- def paramsToString(xs: Any*) = xs map (_.toString) mkString ", "
- val mstr = ad.symbol.tpe match {
- case MethodType(mparams, resType) =>
- """| with
- | - declared parameter types: '%s'
- | - passed argument types: '%s'
- | - result type: '%s'""" .
- stripMargin.format(
- paramsToString(mparams),
- paramsToString(params),
- resType.toString
- )
- case _ => ""
- }
- log(
- """Dynamically application '%s.%s(%s)' %s - resulting code: '%s'""".format(
- qual, ad.symbol.name, paramsToString(params), mstr, t
- )
+ BLOCK(
+ VAL(sym) === qual0,
+ callAsReflective(mparams map (_.tpe), resType)
)
}
+ }
- /* We return the dynamic call tree, after making sure no other
- * clean-up transformation are to be applied on it. */
- transform(t)
+      /* For testing purposes, the dynamic application's condition
+       * can be printed out in great detail. Remove? */
+ if (settings.debug) {
+ def paramsToString(xs: Any*) = xs map (_.toString) mkString ", "
+ val mstr = ad.symbol.tpe match {
+ case MethodType(mparams, resType) =>
+ sm"""| with
+ | - declared parameter types: '${paramsToString(mparams)}'
+ | - passed argument types: '${paramsToString(params)}'
+ | - result type: '${resType.toString}'"""
+ case _ => ""
+ }
+        log(s"""Dynamic application '$qual.${ad.symbol.name}(${paramsToString(params)})' $mstr - resulting code: '$t'""")
}
- /* ### END OF DYNAMIC APPLY TRANSFORM ### */
+
+      /* We return the dynamic call tree, after making sure no other
+       * clean-up transformations are to be applied to it. */
+ transform(t)
+ /* ### END OF DYNAMIC APPLY TRANSFORM ### */
+ }
}
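To make the comment above concrete, here is a hand-written runtime analogue of the generated tree: getClass, then getMethod with the static parameter types, then invoke with boxed arguments, then the fixResult cast. `Receiver` and `foo` are made-up stand-ins, not compiler names:

    object ReflectiveCallSketch {
      class Receiver { def foo(i: Int): Int = i + 1 }

      def main(args: Array[String]): Unit = {
        val qual: AnyRef = new Receiver
        // getMethod chooses among overloads using the *static* parameter types
        val m = qual.getClass.getMethod("foo", classOf[Int])
        // invoke takes boxed arguments and always returns an AnyRef
        val raw = m.invoke(qual, Int.box(42))
        // fixResult: cast the AnyRef result to the boxed form of the return type
        val fixed = raw.asInstanceOf[java.lang.Integer]
        println(fixed)  // 43
      }
    }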
override def transform(tree: Tree): Tree = tree match {
@@ -555,10 +435,9 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
* constructor. */
case Template(parents, self, body) =>
localTyper = typer.atOwner(tree, currentClass)
- if (forMSIL) savingStatics( transformTemplate(tree) )
- else transformTemplate(tree)
+ transformTemplate(tree)
- case Literal(c) if (c.tag == ClazzTag) && !forMSIL=>
+ case Literal(c) if c.tag == ClazzTag =>
val tpe = c.typeValue
typedWithPos(tree.pos) {
if (isPrimitiveValueClass(tpe.typeSymbol)) {
@@ -571,24 +450,6 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
else tree
}
- /* MSIL requires that the stack is empty at the end of a try-block.
- * Hence, we here rewrite all try blocks with a result != {Unit, All} such that they
- * store their result in a local variable. The catch blocks are adjusted as well.
- * The try tree is subsituted by a block whose result expression is read of that variable. */
- case theTry @ Try(block, catches, finalizer) if shouldRewriteTry(theTry) =>
- def transformTry = {
- val tpe = theTry.tpe.widen
- val tempVar = currentOwner.newVariable(mkTerm(nme.EXCEPTION_RESULT_PREFIX), theTry.pos).setInfo(tpe)
- def assignBlock(rhs: Tree) = super.transform(BLOCK(Ident(tempVar) === transform(rhs)))
-
- val newBlock = assignBlock(block)
- val newCatches = for (CaseDef(pattern, guard, body) <- catches) yield
- (CASE(super.transform(pattern)) IF (super.transform(guard))) ==> assignBlock(body)
- val newTry = Try(newBlock, newCatches, super.transform(finalizer))
-
- typedWithPos(theTry.pos)(BLOCK(VAL(tempVar) === EmptyTree, newTry, Ident(tempVar)))
- }
- transformTry
/*
* This transformation should identify Scala symbol invocations in the tree and replace them
* with references to a static member. Also, whenever a class has at least a single symbol invocation
@@ -657,9 +518,8 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
// create a symbol for the static field
val stfieldSym = (
currentClass.newVariable(mkTerm("symbol$"), pos, PRIVATE | STATIC | SYNTHETIC | FINAL)
- setInfo SymbolClass.tpe
+ setInfoAndEnter SymbolClass.tpe
)
- currentClass.info.decls enter stfieldSym
// create field definition and initialization
val stfieldDef = theTyper.typedPos(pos)(VAL(stfieldSym) === rhs)
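The effect described in the comment above, one synthetic static field per distinct symbol literal, initialized once and reused, can be pictured with this hand-written analogue; `cachedLabel` merely stands in for the synthetic `symbol$N` field:

    object SymbolCacheSketch {
      // What CleanUp arranges automatically for a literal such as 'label:
      // allocate the Symbol once and have every use site read the cached field.
      private val cachedLabel: Symbol = Symbol("label")

      def tag: Symbol = cachedLabel
    }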
diff --git a/src/compiler/scala/tools/nsc/transform/Constructors.scala b/src/compiler/scala/tools/nsc/transform/Constructors.scala
index 4891ef2fd1..385bf2dade 100644
--- a/src/compiler/scala/tools/nsc/transform/Constructors.scala
+++ b/src/compiler/scala/tools/nsc/transform/Constructors.scala
@@ -24,8 +24,8 @@ abstract class Constructors extends Transform with ast.TreeDSL {
protected def newTransformer(unit: CompilationUnit): Transformer =
new ConstructorTransformer(unit)
- private val guardedCtorStats: mutable.Map[Symbol, List[Tree]] = perRunCaches.newMap[Symbol, List[Tree]]
- private val ctorParams: mutable.Map[Symbol, List[Symbol]] = perRunCaches.newMap[Symbol, List[Symbol]]
+ private val guardedCtorStats: mutable.Map[Symbol, List[Tree]] = perRunCaches.newMap[Symbol, List[Tree]]()
+ private val ctorParams: mutable.Map[Symbol, List[Symbol]] = perRunCaches.newMap[Symbol, List[Symbol]]()
class ConstructorTransformer(unit: CompilationUnit) extends Transformer {
@@ -34,6 +34,41 @@ abstract class Constructors extends Transform with ast.TreeDSL {
val stats = impl.body // the transformed template body
val localTyper = typer.atOwner(impl, clazz)
+ // Inspect for obvious out-of-order initialization; concrete, eager vals or vars,
+ // declared in this class, for which a reference to the member precedes its definition.
+ def checkableForInit(sym: Symbol) = (
+ (sym ne null)
+ && (sym.isVal || sym.isVar)
+ && !(sym hasFlag LAZY | DEFERRED | SYNTHETIC)
+ )
+ val uninitializedVals = mutable.Set[Symbol](
+ stats collect { case vd: ValDef if checkableForInit(vd.symbol) => vd.symbol.accessedOrSelf }: _*
+ )
+ if (uninitializedVals.nonEmpty)
+ log("Checking constructor for init order issues among: " + uninitializedVals.map(_.name).mkString(", "))
+
+ for (stat <- stats) {
+ // Checking the qualifier symbol is necessary to prevent a selection on
+ // another instance of the same class from potentially appearing to be a forward
+ // reference on the member in the current class.
+ def check(tree: Tree) = {
+ for (t <- tree) t match {
+ case t: RefTree if uninitializedVals(t.symbol.accessedOrSelf) && t.qualifier.symbol == clazz =>
+ unit.warning(t.pos, s"Reference to uninitialized ${t.symbol.accessedOrSelf}")
+ case _ =>
+ }
+ }
+ stat match {
+ case vd: ValDef =>
+            // removing the symbol before checking the rhs allows self-referential vals;
+            // as a conservative warner we permit them, since they are possible, if rarely useful.
+ uninitializedVals -= vd.symbol.accessedOrSelf
+ if (!vd.symbol.isLazy)
+ check(vd.rhs)
+ case _: MemberDef => // skip other member defs
+ case t => check(t) // constructor body statement
+ }
+ }
val specializedFlag: Symbol = clazz.info.decl(nme.SPECIALIZED_INSTANCE)
val shouldGuard = (specializedFlag != NoSymbol) && !clazz.hasFlag(SPECIALIZED)
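The forward-reference check added in this hunk warns on initialization orders like the following (hypothetical class; `x` ends up as 1 because `y` is still zero when `x` is initialized):

    class InitOrder {
      val x = y + 1   // warning: Reference to uninitialized value y
      val y = 41
    }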
@@ -46,7 +81,7 @@ abstract class Constructors extends Transform with ast.TreeDSL {
val constrInfo: ConstrInfo = {
stats find (_.symbol.isPrimaryConstructor) match {
case Some(ddef @ DefDef(_, _, _, List(vparams), _, rhs @ Block(_, _))) =>
- ConstrInfo(ddef, vparams map (_.symbol), rhs)
+ ConstrInfo(ddef, vparams map (_.symbol), rhs)
case x =>
// AnyVal constructor is OK
assert(clazz eq AnyValClass, "no constructor in template: impl = " + impl)
@@ -59,8 +94,7 @@ abstract class Constructors extends Transform with ast.TreeDSL {
val paramAccessors = clazz.constrParamAccessors
// The constructor parameter corresponding to an accessor
- def parameter(acc: Symbol): Symbol =
- parameterNamed(nme.getterName(acc.originalName))
+ def parameter(acc: Symbol): Symbol = parameterNamed(acc.unexpandedName.getterName)
// The constructor parameter with given name. This means the parameter
// has given name, or starts with given name, and continues with a `$` afterwards.
@@ -130,7 +164,8 @@ abstract class Constructors extends Transform with ast.TreeDSL {
if (from.name != nme.OUTER ||
from.tpe.typeSymbol.isPrimitiveValueClass) result
else localTyper.typedPos(to.pos) {
- IF (from OBJ_EQ NULL) THEN Throw(NullPointerExceptionClass.tpe) ELSE result
+ // `throw null` has the same effect as `throw new NullPointerException`, see JVM spec on instruction `athrow`
+ IF (from OBJ_EQ NULL) THEN Throw(gen.mkZero(ThrowableTpe)) ELSE result
}
}
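The replacement above leans on a JVM guarantee: executing `athrow` on a null reference raises a NullPointerException by itself, so `throw null` behaves like `throw new NullPointerException()` without the allocation. A stand-alone illustration (not compiler code):

    object ThrowNullSketch {
      def requireOuter(outer: AnyRef): Unit =
        if (outer eq null) throw null   // athrow on null => NullPointerException

      def main(args: Array[String]): Unit =
        try requireOuter(null)
        catch { case _: NullPointerException => println("NPE, as expected") }
    }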
@@ -156,8 +191,7 @@ abstract class Constructors extends Transform with ast.TreeDSL {
stat match {
case ValDef(mods, name, _, _) if (mods hasFlag PRESUPER) =>
// stat is the constructor-local definition of the field value
- val fields = presupers filter (
- vdef => nme.localToGetter(vdef.name) == name)
+ val fields = presupers filter (_.getterName == name)
assert(fields.length == 1)
val to = fields.head.symbol
if (!to.tpe.isInstanceOf[ConstantType])
@@ -188,7 +222,7 @@ abstract class Constructors extends Transform with ast.TreeDSL {
// Lazy vals don't get the assignment in the constructor.
if (!stat.symbol.tpe.isInstanceOf[ConstantType]) {
if (rhs != EmptyTree && !stat.symbol.isLazy) {
- val rhs1 = intoConstructor(stat.symbol, rhs);
+ val rhs1 = intoConstructor(stat.symbol, rhs)
(if (canBeMoved(stat)) constrPrefixBuf else constrStatBuf) += mkAssign(
stat.symbol, rhs1)
}
@@ -202,7 +236,7 @@ abstract class Constructors extends Transform with ast.TreeDSL {
constrStatBuf += intoConstructor(impl.symbol, stat)
}
- // ----------- avoid making fields for symbols that are not accessed --------------
+ // ----------- avoid making parameter-accessor fields for symbols accessed only within the primary constructor --------------
// A sorted set of symbols that are known to be accessed outside the primary constructor.
val accessedSyms = new TreeSet[Symbol]((x, y) => x isLess y)
@@ -210,6 +244,8 @@ abstract class Constructors extends Transform with ast.TreeDSL {
// a list of outer accessor symbols and their bodies
var outerAccessors: List[(Symbol, Tree)] = List()
+ val isDelayedInitSubclass = (clazz isSubClass DelayedInitClass)
+
// Could symbol's definition be omitted, provided it is not accessed?
// This is the case if the symbol is defined in the current class, and
// ( the symbol is an object private parameter accessor field, or
@@ -217,12 +253,12 @@ abstract class Constructors extends Transform with ast.TreeDSL {
def maybeOmittable(sym: Symbol) = sym.owner == clazz && (
sym.isParamAccessor && sym.isPrivateLocal ||
sym.isOuterAccessor && sym.owner.isEffectivelyFinal && !sym.isOverridingSymbol &&
- !(clazz isSubClass DelayedInitClass)
+ !isDelayedInitSubclass
)
// Is symbol known to be accessed outside of the primary constructor,
// or is it a symbol whose definition cannot be omitted anyway?
- def mustbeKept(sym: Symbol) = !maybeOmittable(sym) || (accessedSyms contains sym)
+ def mustbeKept(sym: Symbol) = isDelayedInitSubclass || !maybeOmittable(sym) || (accessedSyms contains sym)
// A traverser to set accessedSyms and outerAccessors
val accessTraverser = new Traverser {
@@ -268,10 +304,10 @@ abstract class Constructors extends Transform with ast.TreeDSL {
copyParam(acc, parameter(acc))
}
- /** Return a single list of statements, merging the generic class constructor with the
- * specialized stats. The original statements are retyped in the current class, and
- * assignments to generic fields that have a corresponding specialized assignment in
- * `specializedStats` are replaced by the specialized assignment.
+ /* Return a single list of statements, merging the generic class constructor with the
+ * specialized stats. The original statements are retyped in the current class, and
+ * assignments to generic fields that have a corresponding specialized assignment in
+ * `specializedStats` are replaced by the specialized assignment.
*/
def mergeConstructors(genericClazz: Symbol, originalStats: List[Tree], specializedStats: List[Tree]): List[Tree] = {
val specBuf = new ListBuffer[Tree]
@@ -279,17 +315,15 @@ abstract class Constructors extends Transform with ast.TreeDSL {
def specializedAssignFor(sym: Symbol): Option[Tree] =
specializedStats find {
- case Assign(sel @ Select(This(_), _), rhs) =>
- ( (sel.symbol hasFlag SPECIALIZED)
- && (nme.unspecializedName(nme.localToGetter(sel.symbol.name)) == nme.localToGetter(sym.name))
- )
+ case Assign(sel @ Select(This(_), _), _) =>
+ sel.symbol.isSpecialized && (nme.unspecializedName(sel.symbol.getterName) == sym.getterName)
case _ => false
}
- /** Rewrite calls to ScalaRunTime.array_update to the proper apply method in scala.Array.
- * Erasure transforms Array.update to ScalaRunTime.update when the element type is a type
- * variable, but after specialization this is a concrete primitive type, so it would
- * be an error to pass it to array_update(.., .., Object).
+ /* Rewrite calls to ScalaRunTime.array_update to the proper apply method in scala.Array.
+ * Erasure transforms Array.update to ScalaRunTime.update when the element type is a type
+ * variable, but after specialization this is a concrete primitive type, so it would
+ * be an error to pass it to array_update(.., .., Object).
*/
def rewriteArrayUpdate(tree: Tree): Tree = {
val adapter = new Transformer {
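To make the comment above concrete, here is a hypothetical specialized class in which the same constructor statement must take two different shapes: the boxed `ScalaRunTime.array_update` call in the generic class, and a direct primitive `Array.update` in the `Int` specialization:

    class Cell[@specialized(Int) T](xs: Array[T], init: T) {
      // In the generic Cell[T] this statement erases to a call of
      // ScalaRunTime.array_update(xs, 0, init) taking a boxed Object; when the
      // statement is copied into the Int specialization, xs is an Array[Int],
      // so it must become a direct xs.update(0, init) - the rewrite performed here.
      xs(0) = init
    }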
@@ -338,22 +372,22 @@ abstract class Constructors extends Transform with ast.TreeDSL {
res
}
- /** Add an 'if' around the statements coming after the super constructor. This
- * guard is necessary if the code uses specialized fields. A specialized field is
- * initialized in the subclass constructor, but the accessors are (already) overridden
- * and pointing to the (empty) fields. To fix this, a class with specialized fields
- * will not run its constructor statements if the instance is specialized. The specialized
- * subclass includes a copy of those constructor statements, and runs them. To flag that a class
- * has specialized fields, and their initialization should be deferred to the subclass, method
- * 'specInstance$' is added in phase specialize.
+ /* Add an 'if' around the statements coming after the super constructor. This
+ * guard is necessary if the code uses specialized fields. A specialized field is
+ * initialized in the subclass constructor, but the accessors are (already) overridden
+ * and pointing to the (empty) fields. To fix this, a class with specialized fields
+ * will not run its constructor statements if the instance is specialized. The specialized
+ * subclass includes a copy of those constructor statements, and runs them. To flag that a class
+ * has specialized fields, and their initialization should be deferred to the subclass, method
+ * 'specInstance$' is added in phase specialize.
*/
def guardSpecializedInitializer(stats: List[Tree]): List[Tree] = if (settings.nospecialization.value) stats else {
- // split the statements in presuper and postsuper
- // var (prefix, postfix) = stats0.span(tree => !((tree.symbol ne null) && tree.symbol.isConstructor))
- // if (postfix.nonEmpty) {
- // prefix = prefix :+ postfix.head
- //postfix = postfix.tail
- //}
+ // // split the statements in presuper and postsuper
+ // var (prefix, postfix) = stats0.span(tree => !((tree.symbol ne null) && tree.symbol.isConstructor))
+ // if (postfix.nonEmpty) {
+ // prefix = prefix :+ postfix.head
+ // postfix = postfix.tail
+ // }
if (usesSpecializedField && shouldGuard && stats.nonEmpty) {
// save them for duplication in the specialized subclass
@@ -366,7 +400,7 @@ abstract class Constructors extends Transform with ast.TreeDSL {
CODE.NOT (
Apply(gen.mkAttributedRef(specializedFlag), List())),
List()),
- Block(stats, Literal(Constant())),
+ Block(stats, Literal(Constant(()))),
EmptyTree)
List(localTyper.typed(tree))
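A hand-written analogue of the guard described in the comment above; `isSpecializedInstance` is only a stand-in for the synthetic `specInstance$` method added in phase specialize:

    abstract class GenericParent(init: Int) {
      protected def isSpecializedInstance: Boolean = false
      var field = 0
      // Statements after the super constructor run only when the instance is not
      // specialized; a specialized subclass carries and runs its own copy of them.
      if (!isSpecializedInstance) {
        field = init
      }
    }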
@@ -383,145 +417,121 @@ abstract class Constructors extends Transform with ast.TreeDSL {
}
} else stats
}
-/*
- def isInitDef(stat: Tree) = stat match {
- case dd: DefDef => dd.symbol == delayedInitMethod
- case _ => false
- }
-*/
- /** Create a getter or a setter and enter into `clazz` scope
- */
- def addAccessor(sym: Symbol, name: TermName, flags: Long) = {
- val m = clazz.newMethod(name, sym.pos, flags & ~(LOCAL | PRIVATE)) setPrivateWithin clazz
- clazz.info.decls enter m
- }
-
- def addGetter(sym: Symbol): Symbol = {
- val getr = addAccessor(
- sym, nme.getterName(sym.name), getterFlags(sym.flags))
- getr setInfo MethodType(List(), sym.tpe)
- defBuf += localTyper.typedPos(sym.pos)(DefDef(getr, Select(This(clazz), sym)))
- getr
- }
-
- def addSetter(sym: Symbol): Symbol = {
- sym setFlag MUTABLE
- val setr = addAccessor(
- sym, nme.getterToSetter(nme.getterName(sym.name)), setterFlags(sym.flags))
- setr setInfo MethodType(setr.newSyntheticValueParams(List(sym.tpe)), UnitClass.tpe)
- defBuf += localTyper.typed {
- //util.trace("adding setter def for "+setr) {
- atPos(sym.pos) {
- DefDef(setr, paramss =>
- Assign(Select(This(clazz), sym), Ident(paramss.head.head)))
- }//}
- }
- setr
- }
+ /*
+ * Translation scheme for DelayedInit
+ * ----------------------------------
+ *
+ * Before returning, transformClassTemplate() rewrites DelayedInit subclasses.
+ * The list of statements that will end up in the primary constructor can be split into:
+ *
+ * (a) up to and including the super-constructor call.
+ * These statements can occur only in the (bytecode-level) primary constructor.
+ *
+ * (b) remaining statements
+ *
+     * The purpose of DelayedInit is leaving (b) out of the primary constructor and having their execution "delayed".
+ *
+ * The rewriting to achieve "delayed initialization" involves:
+ * (c) an additional, synthetic, public method encapsulating (b)
+ * (d) an additional, synthetic closure whose argless apply() just invokes (c)
+ * (e) after executing the statements in (a),
+ * the primary constructor instantiates (d) and passes it as argument
+ * to a `delayedInit()` invocation on the current instance.
+ * In turn, `delayedInit()` is a method defined as abstract in the `DelayedInit` trait
+ * so that it can be overridden (for an example see `scala.App`)
+ *
+ * The following helper methods prepare Trees as part of this rewriting:
+ *
+ * (f) `delayedEndpointDef()` prepares (c).
+ * A transformer, `constrStatTransformer`, is used to re-locate statements (b) from template-level
+ * to become statements in method (c). The main task here is re-formulating accesses to params
+     *       of the primary constructor (to recap, (c) has zero params) in terms of param-accessor fields.
+     *       In a DelayedInit subclass, each constructor parameter gets a param-accessor field, because `mustbeKept()` forces it.
+ *
+ * (g) `delayedInitClosure()` prepares (d)
+ *
+ * (h) `delayedInitCall()` prepares the `delayedInit()` invocation referred to in (e)
+ *
+ * Both (c) and (d) are added to the Template returned by `transformClassTemplate()`
+ *
+ * A note of historic interest: Previously the rewriting for DelayedInit would include in the closure body
+ * all of the delayed initialization sequence, which in turn required:
+ * - reformulating "accesses-on-this" into "accesses-on-outer", and
+ * - adding public getters and setters.
+ *
+ * @param stats the statements in (b) above
+ *
+ * @return the DefDef for (c) above
+ *
+ * */
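A hand-written analogue of shapes (c), (d) and (e) above, deliberately not extending DelayedInit (all names are illustrative, and the real `delayedInit` is declared by the trait with a by-name parameter):

    class EchoLike(msg: String) {
      // (c) the statements after the super call, packaged into a zero-argument method
      final def delayedEndpoint(): Unit = println(msg)

      // an implementor decides when to run `body`; scala.App, for instance, stores it
      // and runs it from main(). This sketch simply runs it immediately.
      protected def delayedInit(body: () => Unit): Unit = body()

      // (d) + (e) the primary constructor wraps (c) in a closure and passes it to delayedInit
      delayedInit(() => delayedEndpoint())
    }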
+ def delayedEndpointDef(stats: List[Tree]): DefDef = {
- def ensureAccessor(sym: Symbol)(acc: => Symbol) =
- if (sym.owner == clazz && !sym.isMethod && sym.isPrivate) { // there's an access to a naked field of the enclosing class
- var getr = acc
- getr makeNotPrivate clazz
- getr
- } else {
- if (sym.owner == clazz) sym makeNotPrivate clazz
- NoSymbol
- }
+ val methodName = currentUnit.freshTermName("delayedEndpoint$" + clazz.fullNameAsName('$').toString + "$")
+ val methodSym = clazz.newMethod(methodName, impl.pos, SYNTHETIC | FINAL)
+ methodSym setInfoAndEnter MethodType(Nil, UnitTpe)
- def ensureGetter(sym: Symbol): Symbol = ensureAccessor(sym) {
- val getr = sym.getter(clazz)
- if (getr != NoSymbol) getr else addGetter(sym)
- }
+ // changeOwner needed because the `stats` contained in the DefDef were owned by the template, not long ago.
+ val blk = Block(stats, gen.mkZero(UnitTpe)).changeOwner(impl.symbol -> methodSym)
+ val delayedDD = localTyper typed { DefDef(methodSym, Nil, blk) }
- def ensureSetter(sym: Symbol): Symbol = ensureAccessor(sym) {
- var setr = sym.setter(clazz, hasExpandedName = false)
- if (setr == NoSymbol) setr = sym.setter(clazz, hasExpandedName = true)
- if (setr == NoSymbol) setr = addSetter(sym)
- setr
+ delayedDD.asInstanceOf[DefDef]
}
- def delayedInitClosure(stats: List[Tree]) =
- localTyper.typed {
+ /* @see overview at `delayedEndpointDef()` of the translation scheme for DelayedInit */
+ def delayedInitClosure(delayedEndPointSym: MethodSymbol): ClassDef = {
+ val satelliteClass = localTyper.typed {
atPos(impl.pos) {
val closureClass = clazz.newClass(nme.delayedInitArg.toTypeName, impl.pos, SYNTHETIC | FINAL)
val closureParents = List(AbstractFunctionClass(0).tpe)
closureClass setInfoAndEnter new ClassInfoType(closureParents, newScope, closureClass)
- val outerField = (
+ val outerField: TermSymbol = (
closureClass
newValue(nme.OUTER, impl.pos, PrivateLocal | PARAMACCESSOR)
setInfoAndEnter clazz.tpe
)
- val applyMethod = (
+ val applyMethod: MethodSymbol = (
closureClass
newMethod(nme.apply, impl.pos, FINAL)
- setInfoAndEnter MethodType(Nil, ObjectClass.tpe)
+ setInfoAndEnter MethodType(Nil, ObjectTpe)
)
val outerFieldDef = ValDef(outerField)
val closureClassTyper = localTyper.atOwner(closureClass)
val applyMethodTyper = closureClassTyper.atOwner(applyMethod)
- val constrStatTransformer = new Transformer {
- override def transform(tree: Tree): Tree = tree match {
- case This(_) if tree.symbol == clazz =>
- applyMethodTyper.typed {
- atPos(tree.pos) {
- Select(This(closureClass), outerField)
- }
- }
- case _ =>
- super.transform {
- tree match {
- case Select(qual, _) =>
- val getter = ensureGetter(tree.symbol)
- if (getter != NoSymbol)
- applyMethodTyper.typed {
- atPos(tree.pos) {
- Apply(Select(qual, getter), List())
- }
- }
- else tree
- case Assign(lhs @ Select(qual, _), rhs) =>
- val setter = ensureSetter(lhs.symbol)
- if (setter != NoSymbol)
- applyMethodTyper.typed {
- atPos(tree.pos) {
- Apply(Select(qual, setter), List(rhs))
- }
- }
- else tree
- case _ =>
- tree.changeOwner(impl.symbol -> applyMethod)
- }
- }
+ def applyMethodStat =
+ applyMethodTyper.typed {
+ atPos(impl.pos) {
+ val receiver = Select(This(closureClass), outerField)
+ Apply(Select(receiver, delayedEndPointSym), Nil)
+ }
}
- }
-
- def applyMethodStats = constrStatTransformer.transformTrees(stats)
val applyMethodDef = DefDef(
sym = applyMethod,
vparamss = ListOfNil,
- rhs = Block(applyMethodStats, gen.mkAttributedRef(BoxedUnit_UNIT)))
+ rhs = Block(applyMethodStat, gen.mkAttributedRef(BoxedUnit_UNIT)))
ClassDef(
sym = closureClass,
constrMods = Modifiers(0),
vparamss = List(List(outerFieldDef)),
- argss = ListOfNil,
- body = List(applyMethodDef),
+ body = applyMethodDef :: Nil,
superPos = impl.pos)
}
}
+ satelliteClass.asInstanceOf[ClassDef]
+ }
+
+ /* @see overview at `delayedEndpointDef()` of the translation scheme for DelayedInit */
def delayedInitCall(closure: Tree) = localTyper.typedPos(impl.pos) {
gen.mkMethodCall(This(clazz), delayedInitMethod, Nil, List(New(closure.symbol.tpe, This(clazz))))
}
- /** Return a pair consisting of (all statements up to and including superclass and trait constr calls, rest) */
+ /* Return a pair consisting of (all statements up to and including superclass and trait constr calls, rest) */
def splitAtSuper(stats: List[Tree]) = {
def isConstr(tree: Tree) = (tree.symbol ne null) && tree.symbol.isConstructor
val (pre, rest0) = stats span (!isConstr(_))
@@ -529,22 +539,28 @@ abstract class Constructors extends Transform with ast.TreeDSL {
(pre ::: supercalls, rest)
}
- var (uptoSuperStats, remainingConstrStats) = splitAtSuper(constrStatBuf.toList)
+ val (uptoSuperStats, remainingConstrStats0) = splitAtSuper(constrStatBuf.toList)
+ var remainingConstrStats = remainingConstrStats0
- /** XXX This is not corect: remainingConstrStats.nonEmpty excludes too much,
- * but excluding it includes too much. The constructor sequence being mimicked
- * needs to be reproduced with total fidelity.
+      /* XXX This is not correct: remainingConstrStats.nonEmpty excludes too much,
+ * but excluding it includes too much. The constructor sequence being mimicked
+ * needs to be reproduced with total fidelity.
*
- * See test case files/run/bug4680.scala, the output of which is wrong in many
- * particulars.
+ * See test case files/run/bug4680.scala, the output of which is wrong in many
+ * particulars.
*/
- val needsDelayedInit =
- (clazz isSubClass DelayedInitClass) /*&& !(defBuf exists isInitDef)*/ && remainingConstrStats.nonEmpty
+ val needsDelayedInit = (isDelayedInitSubclass && remainingConstrStats.nonEmpty)
if (needsDelayedInit) {
- val dicl = new ConstructorTransformer(unit) transform delayedInitClosure(remainingConstrStats)
- defBuf += dicl
- remainingConstrStats = List(delayedInitCall(dicl))
+ val delayedHook: DefDef = delayedEndpointDef(remainingConstrStats)
+ defBuf += delayedHook
+ val hookCallerClass = {
+        // transform to make the closure class's default constructor assign the outer instance to its param-accessor field.
+ val drillDown = new ConstructorTransformer(unit)
+ drillDown transform delayedInitClosure(delayedHook.symbol.asInstanceOf[MethodSymbol])
+ }
+ defBuf += hookCallerClass
+ remainingConstrStats = delayedInitCall(hookCallerClass) :: Nil
}
// Assemble final constructor
@@ -566,12 +582,14 @@ abstract class Constructors extends Transform with ast.TreeDSL {
deriveTemplate(impl)(_ => defBuf.toList filter (stat => mustbeKept(stat.symbol)))
} // transformClassTemplate
- override def transform(tree: Tree): Tree =
+ override def transform(tree: Tree): Tree = {
tree match {
case ClassDef(_,_,_,_) if !tree.symbol.isInterface && !isPrimitiveValueClass(tree.symbol) =>
deriveClassDef(tree)(transformClassTemplate)
case _ =>
super.transform(tree)
}
+ }
+
} // ConstructorTransformer
}
diff --git a/src/compiler/scala/tools/nsc/transform/Erasure.scala b/src/compiler/scala/tools/nsc/transform/Erasure.scala
index 76249974ac..0f65b11e9b 100644
--- a/src/compiler/scala/tools/nsc/transform/Erasure.scala
+++ b/src/compiler/scala/tools/nsc/transform/Erasure.scala
@@ -10,6 +10,7 @@ import scala.reflect.internal.ClassfileConstants._
import scala.collection.{ mutable, immutable }
import symtab._
import Flags._
+import scala.reflect.internal.Mode._
abstract class Erasure extends AddInterfaces
with scala.reflect.internal.transform.Erasure
@@ -49,7 +50,7 @@ abstract class Erasure extends AddInterfaces
if (sym == ArrayClass) args foreach traverse
else if (sym.isTypeParameterOrSkolem || sym.isExistentiallyBound || !args.isEmpty) result = true
else if (sym.isClass) traverse(rebindInnerClass(pre, sym)) // #2585
- else if (!sym.owner.isPackageClass) traverse(pre)
+ else if (!sym.isTopLevel) traverse(pre)
case PolyType(_, _) | ExistentialType(_, _) =>
result = true
case RefinedType(parents, _) =>
@@ -65,8 +66,8 @@ abstract class Erasure extends AddInterfaces
}
}
- override protected def verifyJavaErasure = settings.Xverify.value || settings.debug.value
- def needsJavaSig(tp: Type) = !settings.Ynogenericsig.value && NeedsSigCollector.collect(tp)
+ override protected def verifyJavaErasure = settings.Xverify || settings.debug
+ def needsJavaSig(tp: Type) = !settings.Ynogenericsig && NeedsSigCollector.collect(tp)
// only refer to type params that will actually make it into the sig, this excludes:
// * higher-order type parameters
@@ -100,7 +101,7 @@ abstract class Erasure extends AddInterfaces
* unboxing some primitive types and further simplifications as they are done in jsig.
*/
val prepareSigMap = new TypeMap {
- def squashBoxed(tp: Type): Type = tp.normalize match {
+ def squashBoxed(tp: Type): Type = tp.dealiasWiden match {
case t @ RefinedType(parents, decls) =>
val parents1 = parents mapConserve squashBoxed
if (parents1 eq parents) tp
@@ -110,10 +111,10 @@ abstract class Erasure extends AddInterfaces
if (tpe1 eq tpe) t
else ExistentialType(tparams, tpe1)
case t =>
- if (boxedClass contains t.typeSymbol) ObjectClass.tpe
+ if (boxedClass contains t.typeSymbol) ObjectTpe
else tp
}
- def apply(tp: Type): Type = tp.normalize match {
+ def apply(tp: Type): Type = tp.dealiasWiden match {
case tp1 @ TypeBounds(lo, hi) =>
val lo1 = squashBoxed(apply(lo))
val hi1 = squashBoxed(apply(hi))
@@ -122,16 +123,16 @@ abstract class Erasure extends AddInterfaces
case tp1 @ TypeRef(pre, sym, args) =>
def argApply(tp: Type) = {
val tp1 = apply(tp)
- if (tp1.typeSymbol == UnitClass) ObjectClass.tpe
+ if (tp1.typeSymbol == UnitClass) ObjectTpe
else squashBoxed(tp1)
}
if (sym == ArrayClass && args.nonEmpty)
- if (unboundedGenericArrayLevel(tp1) == 1) ObjectClass.tpe
+ if (unboundedGenericArrayLevel(tp1) == 1) ObjectTpe
else mapOver(tp1)
else if (sym == AnyClass || sym == AnyValClass || sym == SingletonClass)
- ObjectClass.tpe
+ ObjectTpe
else if (sym == UnitClass)
- BoxedUnitClass.tpe
+ BoxedUnitTpe
else if (sym == NothingClass)
RuntimeNothingClass.tpe
else if (sym == NullClass)
@@ -144,7 +145,7 @@ abstract class Erasure extends AddInterfaces
}
case tp1 @ MethodType(params, restpe) =>
val params1 = mapOver(params)
- val restpe1 = if (restpe.normalize.typeSymbol == UnitClass) UnitClass.tpe else apply(restpe)
+ val restpe1 = if (restpe.typeSymbol == UnitClass) UnitTpe else apply(restpe)
if ((params1 eq params) && (restpe1 eq restpe)) tp1
else MethodType(params1, restpe1)
case tp1 @ RefinedType(parents, decls) =>
@@ -162,8 +163,8 @@ abstract class Erasure extends AddInterfaces
}
}
- private def hiBounds(bounds: TypeBounds): List[Type] = bounds.hi.normalize match {
- case RefinedType(parents, _) => parents map (_.normalize)
+ private def hiBounds(bounds: TypeBounds): List[Type] = bounds.hi.dealiasWiden match {
+ case RefinedType(parents, _) => parents map (_.dealiasWiden)
case tp => tp :: Nil
}
@@ -172,7 +173,7 @@ abstract class Erasure extends AddInterfaces
/** The Java signature of type 'info', for symbol sym. The symbol is used to give the right return
* type for constructors.
*/
- def javaSig(sym0: Symbol, info: Type): Option[String] = beforeErasure {
+ def javaSig(sym0: Symbol, info: Type): Option[String] = enteringErasure {
val isTraitSignature = sym0.enclClass.isTrait
def superSig(parents: List[Type]) = {
@@ -181,7 +182,7 @@ abstract class Erasure extends AddInterfaces
// java is unthrilled about seeing interfaces inherit from classes
val ok = parents filter (p => p.typeSymbol.isTrait || p.typeSymbol.isInterface)
// traits should always list Object.
- if (ok.isEmpty || ok.head.typeSymbol != ObjectClass) ObjectClass.tpe :: ok
+ if (ok.isEmpty || ok.head.typeSymbol != ObjectClass) ObjectTpe :: ok
else ok
}
else parents
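One way to observe what `javaSig` ultimately writes into the classfile's Signature attribute is to compile a small generic method and render it back through Java reflection (class and method names here are made up):

    class Sig { def id[T](x: T): T = x }

    object SigCheck {
      def main(args: Array[String]): Unit = {
        val m = classOf[Sig].getDeclaredMethods.find(_.getName == "id").get
        // toGenericString renders the generic signature, e.g. "public <T> T Sig.id(T)",
        // whereas the erased descriptor is just (Ljava/lang/Object;)Ljava/lang/Object;
        println(m.toGenericString)
      }
    }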
@@ -192,7 +193,7 @@ abstract class Erasure extends AddInterfaces
def boundsSig(bounds: List[Type]) = {
val (isTrait, isClass) = bounds partition (_.typeSymbol.isTrait)
val classPart = isClass match {
- case Nil => ":" // + boxedSig(ObjectClass.tpe)
+ case Nil => ":" // + boxedSig(ObjectTpe)
case x :: _ => ":" + boxedSig(x)
}
classPart :: (isTrait map boxedSig) mkString ":"
@@ -206,7 +207,7 @@ abstract class Erasure extends AddInterfaces
// Anything which could conceivably be a module (i.e. isn't known to be
// a type parameter or similar) must go through here or the signature is
// likely to end up with Foo<T>.Empty where it needs Foo<T>.Empty$.
- def fullNameInSig(sym: Symbol) = "L" + beforeIcode(sym.javaBinaryName)
+ def fullNameInSig(sym: Symbol) = "L" + enteringIcode(sym.javaBinaryName)
def jsig(tp0: Type, existentiallyBound: List[Symbol] = Nil, toplevel: Boolean = false, primitiveOK: Boolean = true): String = {
val tp = tp0.dealias
@@ -219,8 +220,8 @@ abstract class Erasure extends AddInterfaces
def argSig(tp: Type) =
if (existentiallyBound contains tp.typeSymbol) {
val bounds = tp.typeSymbol.info.bounds
- if (!(AnyRefClass.tpe <:< bounds.hi)) "+" + boxedSig(bounds.hi)
- else if (!(bounds.lo <:< NullClass.tpe)) "-" + boxedSig(bounds.lo)
+ if (!(AnyRefTpe <:< bounds.hi)) "+" + boxedSig(bounds.hi)
+ else if (!(bounds.lo <:< NullTpe)) "-" + boxedSig(bounds.lo)
else "*"
} else {
boxedSig(tp)
@@ -246,7 +247,7 @@ abstract class Erasure extends AddInterfaces
// If args isEmpty, Array is being used as a type constructor
if (sym == ArrayClass && args.nonEmpty) {
- if (unboundedGenericArrayLevel(tp) == 1) jsig(ObjectClass.tpe)
+ if (unboundedGenericArrayLevel(tp) == 1) jsig(ObjectTpe)
else ARRAY_TAG.toString+(args map (jsig(_))).mkString
}
else if (isTypeParameterInSig(sym, sym0)) {
@@ -254,16 +255,16 @@ abstract class Erasure extends AddInterfaces
"" + TVAR_TAG + sym.name + ";"
}
else if (sym == AnyClass || sym == AnyValClass || sym == SingletonClass)
- jsig(ObjectClass.tpe)
+ jsig(ObjectTpe)
else if (sym == UnitClass)
- jsig(BoxedUnitClass.tpe)
+ jsig(BoxedUnitTpe)
else if (sym == NothingClass)
jsig(RuntimeNothingClass.tpe)
else if (sym == NullClass)
jsig(RuntimeNullClass.tpe)
else if (isPrimitiveValueClass(sym)) {
- if (!primitiveOK) jsig(ObjectClass.tpe)
- else if (sym == UnitClass) jsig(BoxedUnitClass.tpe)
+ if (!primitiveOK) jsig(ObjectTpe)
+ else if (sym == UnitClass) jsig(BoxedUnitTpe)
else abbrvTag(sym).toString
}
else if (sym.isDerivedValueClass) {
@@ -340,10 +341,9 @@ abstract class Erasure extends AddInterfaces
case _ => tp.deconst
}
}
-
+
// ## requires a little translation
private lazy val poundPoundMethods = Set[Symbol](Any_##, Object_##)
-
// Methods on Any/Object which we rewrite here while we still know what
// is a primitive and what arrived boxed.
private lazy val interceptedMethods = poundPoundMethods ++ primitiveGetClassMethods
@@ -352,41 +352,10 @@ abstract class Erasure extends AddInterfaces
override def newTyper(context: Context) = new Eraser(context)
- private def safeToRemoveUnbox(cls: Symbol): Boolean =
- (cls == definitions.NullClass) || isBoxedValueClass(cls)
-
- /** An extractor object for unboxed expressions (maybe subsumed by posterasure?) */
- object Unboxed {
- def unapply(tree: Tree): Option[Tree] = tree match {
- case Apply(fn, List(arg)) if isUnbox(fn.symbol) && safeToRemoveUnbox(arg.tpe.typeSymbol) =>
- Some(arg)
- case Apply(
- TypeApply(
- cast @ Select(
- Apply(
- sel @ Select(arg, acc),
- List()),
- asinstanceof),
- List(tpt)),
- List())
- if cast.symbol == Object_asInstanceOf &&
- tpt.tpe.typeSymbol.isDerivedValueClass &&
- sel.symbol == tpt.tpe.typeSymbol.derivedValueClassUnbox =>
- Some(arg)
- case _ =>
- None
- }
- }
-
- /** An extractor object for boxed expressions (maybe subsumed by posterasure?) */
- object Boxed {
- def unapply(tree: Tree): Option[Tree] = tree match {
- case Apply(Select(New(tpt), nme.CONSTRUCTOR), List(arg)) if (tpt.tpe.typeSymbol.isDerivedValueClass) =>
- Some(arg)
- case LabelDef(name, params, Boxed(rhs)) =>
- Some(treeCopy.LabelDef(tree, name, params, rhs) setType rhs.tpe)
- case _ =>
- None
+ private def isSafelyRemovableUnbox(fn: Tree, arg: Tree): Boolean = {
+ isUnbox(fn.symbol) && {
+ val cls = arg.tpe.typeSymbol
+ (cls == definitions.NullClass) || isBoxedValueClass(cls)
}
}
@@ -399,7 +368,7 @@ abstract class Erasure extends AddInterfaces
val bridgeTarget = mutable.HashMap[Symbol, Symbol]()
var bridges = List[Tree]()
- val opc = beforeExplicitOuter {
+ val opc = enteringExplicitOuter {
new overridingPairs.Cursor(root) {
override def parents = List(root.info.firstParent)
override def exclude(sym: Symbol) = !sym.isMethod || sym.isPrivate || super.exclude(sym)
@@ -411,10 +380,10 @@ abstract class Erasure extends AddInterfaces
val member = opc.overriding
val other = opc.overridden
//println("bridge? " + member + ":" + member.tpe + member.locationString + " to " + other + ":" + other.tpe + other.locationString)//DEBUG
- if (beforeExplicitOuter(!member.isDeferred))
+ if (enteringExplicitOuter(!member.isDeferred))
checkPair(member, other)
- opc.next
+ opc.next()
}
(bridges, toBeRemoved)
}
@@ -441,11 +410,11 @@ abstract class Erasure extends AddInterfaces
sm"""bridge generated for member ${fulldef(member)}
|which overrides ${fulldef(other)}
|clashes with definition of $what;
- |both have erased type ${afterPostErasure(bridge.tpe)}""")
+ |both have erased type ${exitingPostErasure(bridge.tpe)}""")
}
for (bc <- root.baseClasses) {
- if (settings.debug.value)
- afterPostErasure(println(
+ if (settings.debug)
+ exitingPostErasure(println(
sm"""check bridge overrides in $bc
|${bc.info.nonPrivateDecl(bridge.name)}
|${site.memberType(bridge)}
@@ -454,13 +423,13 @@ abstract class Erasure extends AddInterfaces
def overriddenBy(sym: Symbol) =
sym.matchingSymbol(bc, site).alternatives filter (sym => !sym.isBridge)
- for (overBridge <- afterPostErasure(overriddenBy(bridge))) {
+ for (overBridge <- exitingPostErasure(overriddenBy(bridge))) {
if (overBridge == member) {
clashError("the member itself")
} else {
val overMembers = overriddenBy(member)
if (!overMembers.exists(overMember =>
- afterPostErasure(overMember.tpe =:= overBridge.tpe))) {
+ exitingPostErasure(overMember.tpe =:= overBridge.tpe))) {
clashError(fulldef(overBridge))
}
}
@@ -471,7 +440,7 @@ abstract class Erasure extends AddInterfaces
def checkPair(member: Symbol, other: Symbol) {
val otpe = specialErasure(root)(other.tpe)
- val bridgeNeeded = afterErasure (
+ val bridgeNeeded = exitingErasure (
!member.isMacro &&
!(other.tpe =:= member.tpe) &&
!(deconstMap(other.tpe) =:= deconstMap(member.tpe)) &&
@@ -484,7 +453,7 @@ abstract class Erasure extends AddInterfaces
if (!bridgeNeeded)
return
- val newFlags = (member.flags | BRIDGE) & ~(ACCESSOR | DEFERRED | LAZY | lateDEFERRED)
+ val newFlags = (member.flags | BRIDGE | ARTIFACT) & ~(ACCESSOR | DEFERRED | LAZY | lateDEFERRED)
val bridge = other.cloneSymbolImpl(root, newFlags) setPos root.pos
debuglog("generating bridge from %s (%s): %s to %s: %s".format(
@@ -499,9 +468,9 @@ abstract class Erasure extends AddInterfaces
if (!(member.tpe exists (_.typeSymbol.isDerivedValueClass)) ||
checkBridgeOverrides(member, other, bridge)) {
- afterErasure(root.info.decls enter bridge)
+ exitingErasure(root.info.decls enter bridge)
if (other.owner == root) {
- afterErasure(root.info.decls.unlink(other))
+ exitingErasure(root.info.decls.unlink(other))
toBeRemoved += other
}
@@ -510,7 +479,7 @@ abstract class Erasure extends AddInterfaces
}
}
- def makeBridgeDefDef(bridge: Symbol, member: Symbol, other: Symbol) = afterErasure {
+ def makeBridgeDefDef(bridge: Symbol, member: Symbol, other: Symbol) = exitingErasure {
// type checking ensures we can safely call `other`, but unless `member.tpe <:< other.tpe`,
// calling `member` is not guaranteed to succeed in general, there's
// nothing we can do about this, except for an unapply: when this subtype test fails,
@@ -521,10 +490,10 @@ abstract class Erasure extends AddInterfaces
def maybeWrap(bridgingCall: Tree): Tree = {
val guardExtractor = ( // can't statically know which member is going to be selected, so don't let this depend on member.isSynthetic
(member.name == nme.unapply || member.name == nme.unapplySeq)
- && !afterErasure((member.tpe <:< other.tpe))) // no static guarantees (TODO: is the subtype test ever true?)
+ && !exitingErasure((member.tpe <:< other.tpe))) // no static guarantees (TODO: is the subtype test ever true?)
import CODE._
- val _false = FALSE_typed
+ val _false = FALSE
val pt = member.tpe.resultType
lazy val zero =
if (_false.tpe <:< pt) _false
@@ -574,12 +543,7 @@ abstract class Erasure extends AddInterfaces
val tree1 = tree.tpe match {
case ErasedValueType(tref) =>
val clazz = tref.sym
- tree match {
- case Unboxed(arg) if arg.tpe.typeSymbol == clazz =>
- log("shortcircuiting unbox -> box "+arg); arg
- case _ =>
- New(clazz, cast(tree, underlyingOfValueClass(clazz)))
- }
+ New(clazz, cast(tree, underlyingOfValueClass(clazz)))
case _ =>
tree.tpe.typeSymbol match {
case UnitClass =>
@@ -589,17 +553,17 @@ abstract class Erasure extends AddInterfaces
case x =>
assert(x != ArrayClass)
tree match {
- /** Can't always remove a Box(Unbox(x)) combination because the process of boxing x
- * may lead to throwing an exception.
+ /* Can't always remove a Box(Unbox(x)) combination because the process of boxing x
+ * may lead to throwing an exception.
*
- * This is important for specialization: calls to the super constructor should not box/unbox specialized
- * fields (see TupleX). (ID)
+ * This is important for specialization: calls to the super constructor should not box/unbox specialized
+ * fields (see TupleX). (ID)
*/
- case Apply(boxFun, List(arg)) if isUnbox(tree.symbol) && safeToRemoveUnbox(arg.tpe.typeSymbol) =>
+ case Apply(boxFun, List(arg)) if isSafelyRemovableUnbox(tree, arg) =>
log(s"boxing an unbox: ${tree.symbol} -> ${arg.tpe}")
arg
case _ =>
- (REF(boxMethod(x)) APPLY tree) setPos (tree.pos) setType ObjectClass.tpe
+ (REF(boxMethod(x)) APPLY tree) setPos (tree.pos) setType ObjectTpe
}
}
}
@@ -630,24 +594,18 @@ abstract class Erasure extends AddInterfaces
case _ =>
val tree1 = pt match {
case ErasedValueType(tref) =>
- tree match {
- case Boxed(arg) if arg.tpe.isInstanceOf[ErasedValueType] =>
- log("shortcircuiting box -> unbox "+arg)
- arg
- case _ =>
- val clazz = tref.sym
- log("not boxed: "+tree)
- lazy val underlying = underlyingOfValueClass(clazz)
- val tree0 =
- if (tree.tpe.typeSymbol == NullClass &&
- isPrimitiveValueClass(underlying.typeSymbol)) {
- // convert `null` directly to underlying type, as going
- // via the unboxed type would yield a NPE (see SI-5866)
- unbox1(tree, underlying)
- } else
- Apply(Select(adaptToType(tree, clazz.tpe), clazz.derivedValueClassUnbox), List())
- cast(tree0, pt)
- }
+ val clazz = tref.sym
+ log("not boxed: "+tree)
+ lazy val underlying = underlyingOfValueClass(clazz)
+ val tree0 =
+ if (tree.tpe.typeSymbol == NullClass &&
+ isPrimitiveValueClass(underlying.typeSymbol)) {
+ // convert `null` directly to underlying type, as going
+ // via the unboxed type would yield a NPE (see SI-5866)
+ unbox1(tree, underlying)
+ } else
+ Apply(Select(adaptToType(tree, clazz.tpe), clazz.derivedValueClassUnbox), List())
+ cast(tree0, pt)
case _ =>
pt.typeSymbol match {
case UnitClass =>
@@ -685,7 +643,7 @@ abstract class Erasure extends AddInterfaces
* @return the adapted tree
*/
private def adaptToType(tree: Tree, pt: Type): Tree = {
- if (settings.debug.value && pt != WildcardType)
+ if (settings.debug && pt != WildcardType)
log("adapting " + tree + ":" + tree.tpe + " : " + tree.tpe.parents + " to " + pt)//debug
if (tree.tpe <:< pt)
tree
@@ -725,20 +683,12 @@ abstract class Erasure extends AddInterfaces
tree match {
case Apply(TypeApply(sel @ Select(qual, name), List(targ)), List())
if tree.symbol == Any_asInstanceOf =>
- val qual1 = typedQualifier(qual, NOmode, ObjectClass.tpe) // need to have an expected type, see #3037
- val qualClass = qual1.tpe.typeSymbol
-/*
- val targClass = targ.tpe.typeSymbol
+ val qual1 = typedQualifier(qual, NOmode, ObjectTpe) // need to have an expected type, see #3037
- if (isNumericValueClass(qualClass) && isNumericValueClass(targClass))
- // convert numeric type casts
- atPos(tree.pos)(Apply(Select(qual1, "to" + targClass.name), List()))
- else
-*/
if (isPrimitiveValueType(targ.tpe) || isErasedValueType(targ.tpe)) {
val noNullCheckNeeded = targ.tpe match {
case ErasedValueType(tref) =>
- atPhase(currentRun.erasurePhase) {
+ enteringPhase(currentRun.erasurePhase) {
isPrimitiveValueClass(erasedValueClassArg(tref).typeSymbol)
}
case _ =>
@@ -746,11 +696,10 @@ abstract class Erasure extends AddInterfaces
}
if (noNullCheckNeeded) unbox(qual1, targ.tpe)
else {
- def nullConst = Literal(Constant(null)) setType NullClass.tpe
val untyped =
// util.trace("new asinstanceof test") {
gen.evalOnce(qual1, context.owner, context.unit) { qual =>
- If(Apply(Select(qual(), nme.eq), List(Literal(Constant(null)) setType NullClass.tpe)),
+ If(Apply(Select(qual(), nme.eq), List(Literal(Constant(null)) setType NullTpe)),
Literal(Constant(null)) setType targ.tpe,
unbox(qual(), targ.tpe))
}
@@ -791,7 +740,7 @@ abstract class Erasure extends AddInterfaces
tree.symbol = NoSymbol
selectFrom(qual1)
} else if (isMethodTypeWithEmptyParams(qual1.tpe)) {
- assert(qual1.symbol.isStable, qual1.symbol);
+ assert(qual1.symbol.isStable, qual1.symbol)
val applied = Apply(qual1, List()) setPos qual1.pos setType qual1.tpe.resultType
adaptMember(selectFrom(applied))
} else if (!(qual1.isInstanceOf[Super] || (qual1.tpe.typeSymbol isSubClass tree.symbol.owner))) {
@@ -812,19 +761,19 @@ abstract class Erasure extends AddInterfaces
/** A replacement for the standard typer's adapt method.
*/
- override protected def adapt(tree: Tree, mode: Int, pt: Type, original: Tree = EmptyTree): Tree =
+ override protected def adapt(tree: Tree, mode: Mode, pt: Type, original: Tree = EmptyTree): Tree =
adaptToType(tree, pt)
/** A replacement for the standard typer's `typed1` method.
*/
- override def typed1(tree: Tree, mode: Int, pt: Type): Tree = {
+ override def typed1(tree: Tree, mode: Mode, pt: Type): Tree = {
val tree1 = try {
tree match {
case InjectDerivedValue(arg) =>
(tree.attachments.get[TypeRefAttachment]: @unchecked) match {
case Some(itype) =>
val tref = itype.tpe
- val argPt = atPhase(currentRun.erasurePhase)(erasedValueClassArg(tref))
+ val argPt = enteringPhase(currentRun.erasurePhase)(erasedValueClassArg(tref))
log(s"transforming inject $arg -> $tref/$argPt")
val result = typed(arg, mode, argPt)
log(s"transformed inject $arg -> $tref/$argPt = $result:${result.tpe}")
@@ -852,7 +801,7 @@ abstract class Erasure extends AddInterfaces
newCdef setType newCdef.body.tpe
}
def adaptBranch(branch: Tree): Tree =
- if (branch == EmptyTree) branch else adaptToType(branch, tree1.tpe);
+ if (branch == EmptyTree) branch else adaptToType(branch, tree1.tpe)
tree1 match {
case If(cond, thenp, elsep) =>
@@ -868,8 +817,7 @@ abstract class Erasure extends AddInterfaces
alt => alt == first || !(first.tpe looselyMatches alt.tpe)
}
if (tree.symbol ne sym1) {
- tree1.symbol = sym1
- tree1.tpe = sym1.tpe
+ tree1 setSymbol sym1 setType sym1.tpe
}
}
tree1
@@ -897,20 +845,20 @@ abstract class Erasure extends AddInterfaces
private def checkNoDoubleDefs(root: Symbol) {
def doubleDefError(sym1: Symbol, sym2: Symbol) {
// the .toString must also be computed at the earlier phase
- val tpe1 = afterRefchecks(root.thisType.memberType(sym1))
- val tpe2 = afterRefchecks(root.thisType.memberType(sym2))
+ val tpe1 = exitingRefchecks(root.thisType.memberType(sym1))
+ val tpe2 = exitingRefchecks(root.thisType.memberType(sym2))
if (!tpe1.isErroneous && !tpe2.isErroneous)
unit.error(
if (sym1.owner == root) sym1.pos else root.pos,
(if (sym1.owner == sym2.owner) "double definition:\n"
else if (sym1.owner == root) "name clash between defined and inherited member:\n"
else "name clash between inherited members:\n") +
- sym1 + ":" + afterRefchecks(tpe1.toString) +
+ sym1 + ":" + exitingRefchecks(tpe1.toString) +
(if (sym1.owner == root) "" else sym1.locationString) + " and\n" +
- sym2 + ":" + afterRefchecks(tpe2.toString) +
+ sym2 + ":" + exitingRefchecks(tpe2.toString) +
(if (sym2.owner == root) " at line " + (sym2.pos).line else sym2.locationString) +
"\nhave same type" +
- (if (afterRefchecks(tpe1 =:= tpe2)) "" else " after erasure: " + afterPostErasure(sym1.tpe)))
+ (if (exitingRefchecks(tpe1 =:= tpe2)) "" else " after erasure: " + exitingPostErasure(sym1.tpe)))
sym1.setInfo(ErrorType)
}
@@ -920,7 +868,7 @@ abstract class Erasure extends AddInterfaces
if (e.sym.isTerm) {
var e1 = decls.lookupNextEntry(e)
while (e1 ne null) {
- if (afterPostErasure(e1.sym.info =:= e.sym.info)) doubleDefError(e.sym, e1.sym)
+ if (exitingPostErasure(e1.sym.info =:= e.sym.info)) doubleDefError(e.sym, e1.sym)
e1 = decls.lookupNextEntry(e1)
}
}
@@ -928,16 +876,17 @@ abstract class Erasure extends AddInterfaces
}
val opc = new overridingPairs.Cursor(root) {
- override def exclude(sym: Symbol): Boolean =
- (!sym.isTerm || sym.isPrivate || super.exclude(sym)
- // specialized members have no type history before 'specialize', causing double def errors for curried defs
- || !sym.hasTypeAt(currentRun.refchecksPhase.id))
+ override def exclude(sym: Symbol): Boolean = (
+ !sym.isTerm || sym.isPrivate || super.exclude(sym)
+ // specialized members have no type history before 'specialize', causing double def errors for curried defs
+ || !sym.hasTypeAt(currentRun.refchecksPhase.id)
+ )
override def matches(sym1: Symbol, sym2: Symbol): Boolean =
- afterPostErasure(sym1.tpe =:= sym2.tpe)
+ exitingPostErasure(sym1.tpe =:= sym2.tpe)
}
while (opc.hasNext) {
- if (!afterRefchecks(
+ if (!exitingRefchecks(
root.thisType.memberType(opc.overriding) matches
root.thisType.memberType(opc.overridden))) {
debuglog("" + opc.overriding.locationString + " " +
@@ -946,7 +895,7 @@ abstract class Erasure extends AddInterfaces
opc.overridden.infosString)
doubleDefError(opc.overriding, opc.overridden)
}
- opc.next
+ opc.next()
}
}
@@ -956,8 +905,8 @@ abstract class Erasure extends AddInterfaces
for (member <- root.info.nonPrivateMember(other.name).alternatives) {
if (member != other &&
!(member hasFlag BRIDGE) &&
- afterErasure(member.tpe =:= other.tpe) &&
- !afterRefchecks(
+ exitingErasure(member.tpe =:= other.tpe) &&
+ !exitingRefchecks(
root.thisType.memberType(member) matches root.thisType.memberType(other))) {
debuglog("" + member.locationString + " " + member.infosString + other.locationString + " " + other.infosString);
doubleDefError(member, other)
@@ -1020,7 +969,6 @@ abstract class Erasure extends AddInterfaces
case Select(qual, _) => qual
case TypeApply(Select(qual, _), _) => qual
}
-
def preEraseAsInstanceOf = {
(fn: @unchecked) match {
case TypeApply(Select(qual, _), List(targ)) =>
@@ -1037,7 +985,7 @@ abstract class Erasure extends AddInterfaces
def preEraseIsInstanceOf = {
fn match {
case TypeApply(sel @ Select(qual, name), List(targ)) =>
- if (qual.tpe != null && isPrimitiveValueClass(qual.tpe.typeSymbol) && targ.tpe != null && targ.tpe <:< AnyRefClass.tpe)
+ if (qual.tpe != null && isPrimitiveValueClass(qual.tpe.typeSymbol) && targ.tpe != null && targ.tpe <:< AnyRefTpe)
unit.error(sel.pos, "isInstanceOf cannot test if value types are references.")
def mkIsInstanceOf(q: () => Tree)(tp: Type): Tree =
@@ -1048,7 +996,7 @@ abstract class Erasure extends AddInterfaces
List()) setPos tree.pos
targ.tpe match {
case SingleType(_, _) | ThisType(_) | SuperType(_, _) =>
- val cmpOp = if (targ.tpe <:< AnyValClass.tpe) Any_equals else Object_eq
+ val cmpOp = if (targ.tpe <:< AnyValTpe) Any_equals else Object_eq
atPos(tree.pos) {
Apply(Select(qual, cmpOp), List(gen.mkAttributedQualifier(targ.tpe)))
}
@@ -1078,7 +1026,9 @@ abstract class Erasure extends AddInterfaces
preEraseAsInstanceOf
} else if (fn.symbol == Any_isInstanceOf) {
preEraseIsInstanceOf
- } else if (fn.symbol.owner.isRefinementClass && !fn.symbol.isOverridingSymbol) {
+ } else if (fn.symbol.isOnlyRefinementMember) {
+ // !!! Another spot where we produce overloaded types (see test pos/t6301)
+ log(s"${fn.symbol.fullLocationString} originates in refinement class - call will be implemented via reflection.")
ApplyDynamic(qualifier, args) setSymbol fn.symbol setPos tree.pos
} else if (fn.symbol.isMethodWithExtension && !fn.symbol.tpe.isErroneous) {
Apply(gen.mkAttributedRef(extensionMethods.extensionMethod(fn.symbol)), qualifier :: args)
@@ -1137,7 +1087,8 @@ abstract class Erasure extends AddInterfaces
SelectFromArray(qual, name, erasure(tree.symbol)(qual.tpe)).copyAttrs(fn),
args)
}
- } else if (args.isEmpty && interceptedMethods(fn.symbol)) {
+ }
+ else if (args.isEmpty && interceptedMethods(fn.symbol)) {
if (poundPoundMethods.contains(fn.symbol)) {
// This is unattractive, but without it we crash here on ().## because after
// erasure the ScalaRunTime.hash overload goes from Unit => Int to BoxedUnit => Int.
@@ -1149,13 +1100,24 @@ abstract class Erasure extends AddInterfaces
case s @ (ShortClass | ByteClass | CharClass) => numericConversion(qual, s)
case BooleanClass => If(qual, LIT(true.##), LIT(false.##))
case _ =>
- global.typer.typed(gen.mkRuntimeCall(nme.hash_, List(qual)))
+ // Since we are past typer, we need to avoid creating trees carrying
+ // overloaded types. This logic is custom (and technically incomplete,
+ // although serviceable) for def hash. What is really needed is for
+ // the overloading logic presently hidden away in a few different
+ // places to be properly exposed so we can just call "resolveOverload"
+ // after typer. Until then:
+ val alts = ScalaRunTimeModule.info.member(nme.hash_).alternatives
+ def alt1 = alts find (_.info.paramTypes.head =:= qual.tpe)
+ def alt2 = ScalaRunTimeModule.info.member(nme.hash_) suchThat (_.info.paramTypes.head.typeSymbol == AnyClass)
+ val newTree = gen.mkRuntimeCall(nme.hash_, qual :: Nil) setSymbol (alt1 getOrElse alt2)
+
+ global.typer.typed(newTree)
}
} else if (isPrimitiveValueClass(qual.tpe.typeSymbol)) {
// Rewrite 5.getClass to ScalaRunTime.anyValClass(5)
global.typer.typed(gen.mkRuntimeCall(nme.anyValClass, List(qual, typer.resolveClassTag(tree.pos, qual.tpe.widen))))
} else if (primitiveGetClassMethods.contains(fn.symbol)) {
- // if we got here then we're trying to send a primitive getClass method to either
+ // if we got here then we're trying to send a primitive getClass method to either
      // a) an Any, in which case Object_getClass works because Any erases to Object. Or
//
// b) a non-primitive, e.g. because the qualifier's type is a refinement type where one parent
@@ -1194,12 +1156,19 @@ abstract class Erasure extends AddInterfaces
preErase(fun)
case Select(qual, name) =>
- val owner = tree.symbol.owner
- // println("preXform: "+ (tree, tree.symbol, tree.symbol.owner, tree.symbol.owner.isRefinementClass))
+ val sym = tree.symbol
+ val owner = sym.owner
if (owner.isRefinementClass) {
- val overridden = tree.symbol.nextOverriddenSymbol
- assert(overridden != NoSymbol, tree.symbol)
- tree.symbol = overridden
+ sym.allOverriddenSymbols filterNot (_.owner.isRefinementClass) match {
+ case overridden :: _ =>
+ log(s"${sym.fullLocationString} originates in refinement class - replacing with ${overridden.fullLocationString}.")
+ tree.symbol = overridden
+ case Nil =>
+ // Ideally this should not be reached or reachable; anything which would
+ // get here should have been caught in the surrounding Apply.
+ devWarning(s"Failed to rewrite reflective apply - now don't know what to do with " + tree)
+ return treeCopy.Select(tree, gen.mkAttributedCast(qual, qual.tpe.widen), name)
+ }
}
def isAccessible(sym: Symbol) = localTyper.context.isAccessible(sym, sym.owner.thisType)
@@ -1266,13 +1235,12 @@ abstract class Erasure extends AddInterfaces
tree1 setType specialScalaErasure(tree1.tpe)
case ArrayValue(elemtpt, trees) =>
treeCopy.ArrayValue(
- tree1, elemtpt setType specialScalaErasure.applyInArray(elemtpt.tpe), trees map transform) setType null
+ tree1, elemtpt setType specialScalaErasure.applyInArray(elemtpt.tpe), trees map transform).clearType()
case DefDef(_, _, _, _, tpt, _) =>
- val result = super.transform(tree1) setType null
- tpt.tpe = specialErasure(tree1.symbol)(tree1.symbol.tpe).resultType
- result
+ try super.transform(tree1).clearType()
+ finally tpt setType specialErasure(tree1.symbol)(tree1.symbol.tpe).resultType
case _ =>
- super.transform(tree1) setType null
+ super.transform(tree1).clearType()
}
}
}
@@ -1284,11 +1252,11 @@ abstract class Erasure extends AddInterfaces
override def transform(tree: Tree): Tree = {
val tree1 = preTransformer.transform(tree)
// log("tree after pretransform: "+tree1)
- afterErasure {
+ exitingErasure {
val tree2 = mixinTransformer.transform(tree1)
// debuglog("tree after addinterfaces: \n" + tree2)
- newTyper(rootContext(unit, tree, true)).typed(tree2)
+ newTyper(rootContext(unit, tree, erasedTypes = true)).typed(tree2)
}
}
}
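A user-level illustration of the refinement-class branch handled above: a call whose only symbol lives in a refinement (structural) type cannot go through an ordinary erased interface, so preErase turns it into an ApplyDynamic that is executed via reflection. Minimal sketch, assuming hypothetical names (StructuralCallDemo, Named, greet) not taken from this commit:

    import scala.language.reflectiveCalls

    object StructuralCallDemo {
      // The parameter type is a refinement; after erasure it is just Object,
      // so x.name below is compiled to a reflective lookup-and-invoke.
      def greet(x: { def name: String }): String = "hello, " + x.name

      class Named { def name: String = "world" }

      def main(args: Array[String]): Unit =
        println(greet(new Named))   // prints: hello, world
    }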
diff --git a/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala b/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala
index 970519ab7c..48263a496e 100644
--- a/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala
+++ b/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala
@@ -3,14 +3,14 @@
* @author Martin Odersky
*/
-package scala.tools.nsc
+package scala
+package tools.nsc
package transform
import symtab._
import Flags.{ CASE => _, _ }
import scala.collection.mutable
import scala.collection.mutable.ListBuffer
-import matching.{ Patterns, ParallelMatching }
import scala.tools.nsc.settings.ScalaVersion
/** This class ...
@@ -19,15 +19,12 @@ import scala.tools.nsc.settings.ScalaVersion
* @version 1.0
*/
abstract class ExplicitOuter extends InfoTransform
- with Patterns
- with ParallelMatching
with TypingTransformers
with ast.TreeDSL
{
import global._
import definitions._
import CODE._
- import Debug.TRACE
/** The following flags may be set by this phase: */
override def phaseNewFlags: Long = notPROTECTED
@@ -76,28 +73,20 @@ abstract class ExplicitOuter extends InfoTransform
class RemoveBindingsTransformer(toRemove: Set[Symbol]) extends Transformer {
override def transform(tree: Tree) = tree match {
- case Bind(_, body) if toRemove(tree.symbol) =>
- TRACE("Dropping unused binding: " + tree.symbol)
- super.transform(body)
+ case Bind(_, body) if toRemove(tree.symbol) => super.transform(body)
case _ => super.transform(tree)
}
}
- /** Issue a migration warning for instance checks which might be on an Array and
- * for which the type parameter conforms to Seq, because these answers changed in 2.8.
- */
- def isArraySeqTest(lhs: Type, rhs: Type) =
- (ArrayClass.tpe <:< lhs.widen) && (rhs.widen matchesPattern SeqClass.tpe)
-
def outerAccessor(clazz: Symbol): Symbol = {
val firstTry = clazz.info.decl(nme.expandedName(nme.OUTER, clazz))
if (firstTry != NoSymbol && firstTry.outerSource == clazz) firstTry
else findOrElse(clazz.info.decls)(_.outerSource == clazz)(NoSymbol)
}
def newOuterAccessor(clazz: Symbol) = {
- val accFlags = SYNTHETIC | ARTIFACT | METHOD | STABLE | ( if (clazz.isTrait) DEFERRED else 0 )
+ val accFlags = SYNTHETIC | ARTIFACT | STABLE | ( if (clazz.isTrait) DEFERRED else 0 )
val sym = clazz.newMethod(nme.OUTER, clazz.pos, accFlags)
- val restpe = if (clazz.isTrait) clazz.outerClass.tpe else clazz.outerClass.thisType
+ val restpe = if (clazz.isTrait) clazz.outerClass.tpe_* else clazz.outerClass.thisType
sym expandName clazz
sym.referenced = clazz
@@ -139,7 +128,7 @@ abstract class ExplicitOuter extends InfoTransform
* <ol>
* <li>
* Add an outer parameter to the formal parameters of a constructor
- * in a inner non-trait class;
+ * in an inner non-trait class;
* </li>
* <li>
* Add a protected $outer field to an inner class which is
@@ -187,16 +176,13 @@ abstract class ExplicitOuter extends InfoTransform
var decls1 = decls
if (isInner(clazz) && !clazz.isInterface) {
decls1 = decls.cloneScope
- val outerAcc = clazz.newMethod(nme.OUTER, clazz.pos) // 3
- outerAcc expandName clazz
-
- decls1 enter newOuterAccessor(clazz)
+ decls1 enter newOuterAccessor(clazz) // 3
if (hasOuterField(clazz)) //2
decls1 enter newOuterField(clazz)
}
if (!clazz.isTrait && !parents.isEmpty) {
for (mc <- clazz.mixinClasses) {
- val mixinOuterAcc: Symbol = afterExplicitOuter(outerAccessor(mc))
+ val mixinOuterAcc: Symbol = exitingExplicitOuter(outerAccessor(mc))
if (mixinOuterAcc != NoSymbol) {
if (skipMixinOuterAccessor(clazz, mc))
debuglog(s"Reusing outer accessor symbol of $clazz for the mixin outer accessor of $mc")
@@ -281,11 +267,6 @@ abstract class ExplicitOuter extends InfoTransform
* <blockquote><pre>`base'.$outer$$C1 ... .$outer$$Cn</pre></blockquote>
* which refers to the outer instance of class to of
* value base. The result is typed but not positioned.
- *
- * @param base ...
- * @param from ...
- * @param to ...
- * @return ...
*/
protected def outerPath(base: Tree, from: Symbol, to: Symbol): Tree = {
//Console.println("outerPath from "+from+" to "+to+" at "+base+":"+base.tpe)
@@ -394,11 +375,11 @@ abstract class ExplicitOuter extends InfoTransform
*/
def outerAccessorDef: Tree = {
val outerAcc = outerAccessor(currentClass)
- var rhs: Tree =
+ val rhs: Tree =
if (outerAcc.isDeferred) EmptyTree
else This(currentClass) DOT outerField(currentClass)
- /** If we don't re-type the tree, we see self-type related crashes like #266.
+ /* If we don't re-type the tree, we see self-type related crashes like #266.
*/
localTyper typed {
(DEF(outerAcc) withPos currentClass.pos withType null) === rhs
@@ -431,74 +412,6 @@ abstract class ExplicitOuter extends InfoTransform
}
}
- // requires settings.XoldPatmat.value
- def matchTranslation(tree: Match) = {
- val Match(selector, cases) = tree
- var nselector = transform(selector)
-
- def makeGuardDef(vs: List[Symbol], guard: Tree) = {
- val gdname = unit.freshTermName("gd")
- val method = currentOwner.newMethod(gdname, tree.pos, SYNTHETIC)
- val params = method newSyntheticValueParams vs.map(_.tpe)
- method setInfo new MethodType(params, BooleanClass.tpe)
-
- localTyper typed {
- DEF(method) === guard.changeOwner(currentOwner -> method).substituteSymbols(vs, params)
- }
- }
-
- val nguard = new ListBuffer[Tree]
- val ncases =
- for (CaseDef(pat, guard, body) <- cases) yield {
- // Strip out any unused pattern bindings up front
- val patternIdents = for (b @ Bind(_, _) <- pat) yield b.symbol
- val references: Set[Symbol] = Set(guard, body) flatMap { t => for (id @ Ident(name) <- t) yield id.symbol }
- val (used, unused) = patternIdents partition references
- val strippedPat = if (unused.isEmpty) pat else new RemoveBindingsTransformer(unused.toSet) transform pat
-
- val gdcall =
- if (guard == EmptyTree) EmptyTree
- else {
- val guardDef = makeGuardDef(used, guard)
- nguard += transform(guardDef) // building up list of guards
-
- localTyper typed (Ident(guardDef.symbol) APPLY (used map Ident))
- }
-
- (CASE(transform(strippedPat)) IF gdcall) ==> transform(body)
- }
-
- val (checkExhaustive, requireSwitch) = nselector match {
- case Typed(nselector1, tpt) =>
- val unchecked = tpt.tpe hasAnnotation UncheckedClass
- if (unchecked)
- nselector = nselector1
-
- // Don't require a tableswitch if there are 1-2 casedefs
- // since the matcher intentionally emits an if-then-else.
- (!unchecked, treeInfo.isSwitchAnnotation(tpt.tpe) && ncases.size > 2)
- case _ =>
- (true, false)
- }
-
- val t = atPos(tree.pos) {
- val context = MatrixContext(currentUnit, transform, localTyper, currentOwner, tree.tpe)
- val t_untyped = handlePattern(nselector, ncases, checkExhaustive, context)
-
- /* if @switch annotation is present, verify the resulting tree is a Match */
- if (requireSwitch) t_untyped match {
- case Block(_, Match(_, _)) => // ok
- case _ =>
- unit.error(tree.pos, "could not emit switch for @switch annotated match")
- }
-
- localTyper.typed(t_untyped, context.matchResultType)
- }
-
- if (nguard.isEmpty) t
- else Block(nguard.toList, t) setType t.tpe
- }
-
/** The main transformation method */
override def transform(tree: Tree): Tree = {
val sym = tree.symbol
@@ -583,14 +496,10 @@ abstract class ExplicitOuter extends InfoTransform
})
super.transform(treeCopy.Apply(tree, sel, outerVal :: args))
- // entry point for pattern matcher translation
- case m: Match if settings.XoldPatmat.value => // the new pattern matcher runs in its own phase right after typer
- matchTranslation(m)
-
// for the new pattern matcher
// base.<outer>.eq(o) --> base.$outer().eq(o) if there's an accessor, else the whole tree becomes TRUE
// TODO remove the synthetic `<outer>` method from outerFor??
- case Apply(eqsel@Select(eqapp@Apply(sel@Select(base, nme.OUTER_SYNTH), Nil), eq), args) if !settings.XoldPatmat.value =>
+ case Apply(eqsel@Select(eqapp@Apply(sel@Select(base, nme.OUTER_SYNTH), Nil), eq), args) =>
val outerFor = sel.symbol.owner.toInterface // TODO: toInterface necessary?
val acc = outerAccessor(outerFor)
@@ -599,24 +508,17 @@ abstract class ExplicitOuter extends InfoTransform
// at least don't crash... this duplicates maybeOmittable from constructors
(acc.owner.isEffectivelyFinal && !acc.isOverridingSymbol)) {
unit.uncheckedWarning(tree.pos, "The outer reference in this type test cannot be checked at run time.")
- return transform(TRUE) // urgh... drop condition if there's no accessor (or if it may disappear after constructors)
+ transform(TRUE) // urgh... drop condition if there's no accessor (or if it may disappear after constructors)
} else {
// println("(base, acc)= "+(base, acc))
val outerSelect = localTyper typed Apply(Select(base, acc), Nil)
// achieves the same as: localTyper typed atPos(tree.pos)(outerPath(base, base.tpe.typeSymbol, outerFor.outerClass))
// println("(b, tpsym, outerForI, outerFor, outerClass)= "+ (base, base.tpe.typeSymbol, outerFor, sel.symbol.owner, outerFor.outerClass))
// println("outerSelect = "+ outerSelect)
- return transform(treeCopy.Apply(tree, treeCopy.Select(eqsel, outerSelect, eq), args))
+ transform(treeCopy.Apply(tree, treeCopy.Select(eqsel, outerSelect, eq), args))
}
case _ =>
- if (settings.Xmigration.value < ScalaVersion.twoDotEight) tree match {
- case TypeApply(fn @ Select(qual, _), args) if fn.symbol == Object_isInstanceOf || fn.symbol == Any_isInstanceOf =>
- if (isArraySeqTest(qual.tpe, args.head.tpe))
- unit.warning(tree.pos, "An Array will no longer match as Seq[_].")
- case _ => ()
- }
-
val x = super.transform(tree)
if (x.tpe eq null) x
else x setType transformInfo(currentOwner, x.tpe)
@@ -625,7 +527,7 @@ abstract class ExplicitOuter extends InfoTransform
/** The transformation method for whole compilation units */
override def transformUnit(unit: CompilationUnit) {
- afterExplicitOuter(super.transformUnit(unit))
+ exitingExplicitOuter(super.transformUnit(unit))
}
}
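Background for the base.$outer().eq(o) rewrite above: when a type test targets an inner class of a specific outer instance, the new pattern matcher emits an outer-pointer comparison, and ExplicitOuter lowers it to a call of the generated $outer accessor. A hand-written sketch of the shape involved, with hypothetical names (Outer, Inner, isMine):

    class Outer {
      class Inner
      def isMine(x: Any): Boolean = x match {
        // Inner here means Outer.this.Inner, so the emitted test is roughly
        // x.isInstanceOf[Inner] && (x.$outer eq Outer.this)
        case _: Inner => true
        case _        => false
      }
    }

    object OuterCheckDemo {
      def main(args: Array[String]): Unit = {
        val a = new Outer
        val b = new Outer
        val i = new a.Inner
        println(a.isMine(i))   // true
        println(b.isMine(i))   // expected false: i's outer instance is a, not b
      }
    }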
diff --git a/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala b/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala
index bc54054028..672d9d232a 100644
--- a/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala
+++ b/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala
@@ -8,9 +8,6 @@ package transform
import symtab._
import Flags._
import scala.collection.{ mutable, immutable }
-import scala.collection.mutable
-import scala.tools.nsc.util.FreshNameCreator
-import scala.runtime.ScalaRunTime.{ isAnyVal, isTuple }
/**
* Perform Step 1 in the inline classes SIP: Creates extension methods for all
@@ -23,7 +20,6 @@ abstract class ExtensionMethods extends Transform with TypingTransformers {
import global._ // the global environment
import definitions._ // standard classes and methods
- import typer.{ typed, atOwner } // methods to type trees
/** the following two members override abstract members in Transform */
val phaseName: String = "extmethods"
@@ -70,7 +66,7 @@ abstract class ExtensionMethods extends Transform with TypingTransformers {
}
/** Return the extension method that corresponds to given instance method `meth`. */
- def extensionMethod(imeth: Symbol): Symbol = atPhase(currentRun.refchecksPhase) {
+ def extensionMethod(imeth: Symbol): Symbol = enteringPhase(currentRun.refchecksPhase) {
val companionInfo = companionModuleForce(imeth.owner).info
val candidates = extensionNames(imeth) map (companionInfo.decl(_)) filter (_.exists)
val matching = candidates filter (alt => normalize(alt.tpe, imeth.owner) matches imeth.tpe)
@@ -87,7 +83,7 @@ abstract class ExtensionMethods extends Transform with TypingTransformers {
|
| ${candidates.map(c => c.name+":"+normalize(c.tpe, imeth.owner)).mkString("\n")}
|
- | Eligible Names: ${extensionNames(imeth).mkString(",")}"""")
+ | Eligible Names: ${extensionNames(imeth).mkString(",")}" """)
matching.head
}
@@ -185,6 +181,7 @@ abstract class ExtensionMethods extends Transform with TypingTransformers {
// bad: [B#16154 >: A#16149, A#16155 <: AnyRef#2189]($this#16156: Foo#6965[A#16155])(x#16157: B#16154)List#2457[B#16154]
// good: [B#16151 >: A#16149, A#16149 <: AnyRef#2189]($this#16150: Foo#6965[A#16149])(x#16153: B#16151)List#2457[B#16151]
}
+
override def transform(tree: Tree): Tree = {
tree match {
case Template(_, _, _) =>
@@ -206,7 +203,7 @@ abstract class ExtensionMethods extends Transform with TypingTransformers {
val companion = origThis.companionModule
def makeExtensionMethodSymbol = {
- val extensionName = extensionNames(origMeth).head
+ val extensionName = extensionNames(origMeth).head.toTermName
val extensionMeth = (
companion.moduleClass.newMethod(extensionName, origMeth.pos, origMeth.flags & ~OVERRIDE & ~PROTECTED | FINAL)
setAnnotations origMeth.annotations
@@ -256,12 +253,13 @@ abstract class ExtensionMethods extends Transform with TypingTransformers {
override def transformStats(stats: List[Tree], exprOwner: Symbol): List[Tree] =
super.transformStats(stats, exprOwner) map {
- case md @ ModuleDef(_, _, _) if extensionDefs contains md.symbol =>
- val defns = extensionDefs(md.symbol).toList map (member =>
- atOwner(md.symbol)(localTyper.typedPos(md.pos.focus)(member))
- )
- extensionDefs -= md.symbol
- deriveModuleDef(md)(tmpl => deriveTemplate(tmpl)(_ ++ defns))
+ case md @ ModuleDef(_, _, _) =>
+ val extraStats = extensionDefs remove md.symbol match {
+ case Some(defns) => defns.toList map (defn => atOwner(md.symbol)(localTyper.typedPos(md.pos.focus)(defn.duplicate)))
+ case _ => Nil
+ }
+ if (extraStats.isEmpty) md
+ else deriveModuleDef(md)(tmpl => deriveTemplate(tmpl)(_ ++ extraStats))
case stat =>
stat
}
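For orientation on the extmethods changes above: a method defined in a value class is re-created as an extension method on the companion object (named via extensionNames, conventionally ending in $extension), and the instance method becomes a forwarder to it. Rough user-level sketch with a hypothetical value class Meters:

    final class Meters(val value: Double) extends AnyVal {
      def +(that: Meters): Meters = new Meters(value + that.value)
    }

    object ExtensionMethodDemo {
      def main(args: Array[String]): Unit = {
        // The + call below is compiled to the synthesized extension method on
        // the companion, so normally no Meters box is allocated for the receiver.
        val total = new Meters(1.5) + new Meters(2.5)
        println(total.value)   // 4.0
      }
    }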
diff --git a/src/compiler/scala/tools/nsc/transform/Flatten.scala b/src/compiler/scala/tools/nsc/transform/Flatten.scala
index cd26f95958..44d39de205 100644
--- a/src/compiler/scala/tools/nsc/transform/Flatten.scala
+++ b/src/compiler/scala/tools/nsc/transform/Flatten.scala
@@ -8,28 +8,23 @@ package transform
import symtab._
import Flags._
-import scala.collection.{ mutable, immutable }
import scala.collection.mutable.ListBuffer
abstract class Flatten extends InfoTransform {
import global._
- import definitions._
+ import treeInfo.isQualifierSafeToElide
/** the following two members override abstract members in Transform */
val phaseName: String = "flatten"
- /** Updates the owning scope with the given symbol; returns the old symbol.
+ /** Updates the owning scope with the given symbol, unlinking any others.
*/
- private def replaceSymbolInCurrentScope(sym: Symbol): Symbol = afterFlatten {
+ private def replaceSymbolInCurrentScope(sym: Symbol): Unit = exitingFlatten {
val scope = sym.owner.info.decls
- val old = scope lookup sym.name andAlso scope.unlink
+ val old = (scope lookupUnshadowedEntries sym.name).toList
+ old foreach (scope unlink _)
scope enter sym
-
- if (old eq NoSymbol)
- log(s"lifted ${sym.fullLocationString}")
- else
- log(s"lifted ${sym.fullLocationString} after unlinking existing $old from scope.")
-
+ log(s"lifted ${sym.fullLocationString}" + ( if (old.isEmpty) "" else s" after unlinking $old from scope." ))
old
}
@@ -53,7 +48,7 @@ abstract class Flatten extends InfoTransform {
clazz.isClass && !clazz.isPackageClass && {
// Cannot flatten here: class A[T] { object B }
// was "at erasurePhase.prev"
- beforeErasure(clazz.typeParams.isEmpty)
+ enteringErasure(clazz.typeParams.isEmpty)
}
}
@@ -67,11 +62,11 @@ abstract class Flatten extends InfoTransform {
val decls1 = scopeTransform(clazz) {
val decls1 = newScope
if (clazz.isPackageClass) {
- afterFlatten { decls foreach (decls1 enter _) }
+ exitingFlatten { decls foreach (decls1 enter _) }
}
else {
val oldowner = clazz.owner
- afterFlatten { oldowner.info }
+ exitingFlatten { oldowner.info }
parents1 = parents mapConserve (this)
for (sym <- decls) {
@@ -90,7 +85,7 @@ abstract class Flatten extends InfoTransform {
val restp1 = apply(restp)
if (restp1 eq restp) tp else copyMethodType(tp, params, restp1)
case PolyType(tparams, restp) =>
- val restp1 = apply(restp);
+ val restp1 = apply(restp)
if (restp1 eq restp) tp else PolyType(tparams, restp1)
case _ =>
mapOver(tp)
@@ -122,8 +117,14 @@ abstract class Flatten extends InfoTransform {
case ClassDef(_, _, _, _) if sym.isNestedClass =>
liftedDefs(sym.enclosingTopLevelClass.owner) += tree
EmptyTree
- case Select(qual, name) if (sym.isStaticModule && !sym.owner.isPackageClass) =>
- afterFlatten(atPos(tree.pos)(gen.mkAttributedRef(sym)))
+ case Select(qual, name) if sym.isStaticModule && !sym.isTopLevel =>
+ exitingFlatten {
+ atPos(tree.pos) {
+ val ref = gen.mkAttributedRef(sym)
+ if (isQualifierSafeToElide(qual)) ref
+ else Block(List(qual), ref).setType(tree.tpe) // need to execute the qualifier but refer directly to the lifted module.
+ }
+ }
case _ =>
tree
}
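The Block(List(qual), ref) rewrite above keeps a side-effecting qualifier alive when a non-top-level static module is lifted by flatten. Illustrative sketch of the shape it protects, using hypothetical objects A/B and a side-effecting pick():

    object A {
      object B { val name = "b" }
    }

    object FlattenQualifierDemo {
      var calls = 0
      def pick(): A.type = { calls += 1; A }

      def main(args: Array[String]): Unit = {
        // After flatten, B is referenced directly, but pick() is not a
        // qualifier that is safe to elide, so its evaluation must be preserved.
        val n = pick().B.name
        println((n, calls))   // (b,1)
      }
    }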
diff --git a/src/compiler/scala/tools/nsc/transform/InfoTransform.scala b/src/compiler/scala/tools/nsc/transform/InfoTransform.scala
index b6dbacaa29..dc321e26ca 100644
--- a/src/compiler/scala/tools/nsc/transform/InfoTransform.scala
+++ b/src/compiler/scala/tools/nsc/transform/InfoTransform.scala
@@ -10,11 +10,11 @@ package transform
* An InfoTransform contains a compiler phase that transforms trees and symbol infos -- making sure they stay consistent.
* The symbol info is transformed assuming it is consistent right before this phase.
* The info transformation is triggered by Symbol::rawInfo, which caches the results in the symbol's type history.
- * This way sym.info (during an atPhase(p)) can look up what the symbol's info should look like at the beginning of phase p.
+ * This way sym.info (during an enteringPhase(p)) can look up what the symbol's info should look like at the beginning of phase p.
* (If the transformed info had not been stored yet, rawInfo will compute the info by composing the info-transformers
* of the most recent phase before p, up to the transformer of the phase right before p.)
*
- * Concretely, atPhase(p) { sym.info } yields the info *before* phase p has transformed it. Imagine you're a phase and it all makes sense.
+ * Concretely, enteringPhase(p) { sym.info } yields the info *before* phase p has transformed it. Imagine you're a phase and it all makes sense.
*/
trait InfoTransform extends Transform {
import global.{Symbol, Type, InfoTransformer, infoTransformers}
diff --git a/src/compiler/scala/tools/nsc/transform/InlineErasure.scala b/src/compiler/scala/tools/nsc/transform/InlineErasure.scala
index 0af3cf732f..1bbe1b8410 100644
--- a/src/compiler/scala/tools/nsc/transform/InlineErasure.scala
+++ b/src/compiler/scala/tools/nsc/transform/InlineErasure.scala
@@ -1,9 +1,11 @@
package scala.tools.nsc
package transform
-trait InlineErasure { self: Erasure =>
-
+trait InlineErasure {
+ self: Erasure =>
+
+/*
import global._
import definitions._
-
-} \ No newline at end of file
+ */
+}
diff --git a/src/compiler/scala/tools/nsc/transform/LambdaLift.scala b/src/compiler/scala/tools/nsc/transform/LambdaLift.scala
index 631468dd0c..7888198531 100644
--- a/src/compiler/scala/tools/nsc/transform/LambdaLift.scala
+++ b/src/compiler/scala/tools/nsc/transform/LambdaLift.scala
@@ -32,6 +32,21 @@ abstract class LambdaLift extends InfoTransform {
}
}
+ /** scala.runtime.*Ref classes */
+ private lazy val allRefClasses: Set[Symbol] = {
+ refClass.values.toSet ++ volatileRefClass.values.toSet ++ Set(VolatileObjectRefClass, ObjectRefClass)
+ }
+
+ /** Each scala.runtime.*Ref class has a static method `create(value)` that simply instantiates the Ref to carry that value. */
+ private lazy val refCreateMethod: Map[Symbol, Symbol] = {
+ mapFrom(allRefClasses.toList)(x => getMemberMethod(x.companionModule, nme.create))
+ }
+
+ /** Quite frequently a *Ref is initialized with its zero (e.g., null, 0.toByte, etc.). Method `zero()` of the *Ref class encapsulates that pattern. */
+ private lazy val refZeroMethod: Map[Symbol, Symbol] = {
+ mapFrom(allRefClasses.toList)(x => getMemberMethod(x.companionModule, nme.zero))
+ }
+
def transformInfo(sym: Symbol, tp: Type): Type =
if (sym.isCapturedVariable) capturedVariableType(sym, tpe = lifted(tp), erasedTypes = true)
else lifted(tp)
@@ -143,7 +158,7 @@ abstract class LambdaLift extends InfoTransform {
ss addEntry sym
renamable addEntry sym
changedFreeVars = true
- debuglog("" + sym + " is free in " + enclosure);
+ debuglog("" + sym + " is free in " + enclosure)
if (sym.isVariable) sym setFlag CAPTURED
}
!enclosure.isClass
@@ -161,7 +176,7 @@ abstract class LambdaLift extends InfoTransform {
private val freeVarTraverser = new Traverser {
override def traverse(tree: Tree) {
try { //debug
- val sym = tree.symbol;
+ val sym = tree.symbol
tree match {
case ClassDef(_, _, _, _) =>
liftedDefs(tree.symbol) = Nil
@@ -245,16 +260,15 @@ abstract class LambdaLift extends InfoTransform {
freshen(sym.name + nme.NAME_JOIN_STRING + sym.owner.name + nme.NAME_JOIN_STRING)
} else {
// SI-5652 If the lifted symbol is accessed from an inner class, it will be made public. (where?)
- // Generating a a unique name, mangled with the enclosing class name, avoids a VerifyError
+ // Generating a unique name, mangled with the enclosing class name, avoids a VerifyError
// in the case that a sub-class happens to lift out a method with the *same* name.
- val name = freshen(sym.name + nme.NAME_JOIN_STRING)
- if (originalName.isTermName && !sym.enclClass.isImplClass && calledFromInner(sym)) nme.expandedName(name, sym.enclClass)
+ val name = freshen("" + sym.name + nme.NAME_JOIN_STRING)
+ if (originalName.isTermName && !sym.enclClass.isImplClass && calledFromInner(sym)) nme.expandedName(name.toTermName, sym.enclClass)
else name
}
}
- /** Rename a trait's interface and implementation class in coordinated fashion.
- */
+ /* Rename a trait's interface and implementation class in coordinated fashion. */
def renameTrait(traitSym: Symbol, implSym: Symbol) {
val originalImplName = implSym.name
renameSym(traitSym)
@@ -290,7 +304,7 @@ abstract class LambdaLift extends InfoTransform {
proxies(owner) =
for (fv <- freeValues.toList) yield {
val proxyName = proxyNames.getOrElse(fv, fv.name)
- val proxy = owner.newValue(proxyName, owner.pos, newFlags) setInfo fv.info
+ val proxy = owner.newValue(proxyName.toTermName, owner.pos, newFlags.toLong) setInfo fv.info
if (owner.isClass) owner.info.decls enter proxy
proxy
}
@@ -445,56 +459,21 @@ abstract class LambdaLift extends InfoTransform {
case ValDef(mods, name, tpt, rhs) =>
if (sym.isCapturedVariable) {
val tpt1 = TypeTree(sym.tpe) setPos tpt.pos
- /* Creating a constructor argument if one isn't present. */
- val constructorArg = rhs match {
- case EmptyTree =>
- sym.tpe.typeSymbol.primaryConstructor.info.paramTypes match {
- case List(tp) => gen.mkZero(tp)
- case _ =>
- debugwarn("Couldn't determine how to properly construct " + sym)
- rhs
- }
- case arg => arg
+
+ val refTypeSym = sym.tpe.typeSymbol
+
+ val factoryCall = typer.typedPos(rhs.pos) {
+ rhs match {
+ case EmptyTree =>
+ val zeroMSym = refZeroMethod(refTypeSym)
+ gen.mkMethodCall(zeroMSym, Nil)
+ case arg =>
+ val createMSym = refCreateMethod(refTypeSym)
+ gen.mkMethodCall(createMSym, arg :: Nil)
+ }
}
-
- /** Wrap expr argument in new *Ref(..) constructor. But try/catch
- * is a problem because a throw will clear the stack and post catch
- * we would expect the partially-constructed object to be on the stack
- * for the call to init. So we recursively
- * search for "leaf" result expressions where we know its safe
- * to put the new *Ref(..) constructor or, if all else fails, transform
- * an expr to { val temp=expr; new *Ref(temp) }.
- * The reason we narrowly look for try/catch in captured var definitions
- * is because other try/catch expression have already been lifted
- * see SI-6863
- */
- def refConstr(expr: Tree): Tree = typer.typedPos(expr.pos) {expr match {
- // very simple expressions can be wrapped in a new *Ref(expr) because they can't have
- // a try/catch in final expression position.
- case Ident(_) | Apply(_, _) | Literal(_) | New(_) | Select(_, _) | Throw(_) | Assign(_, _) | ValDef(_, _, _, _) | Return(_) | EmptyTree =>
- New(sym.tpe, expr)
- case Try(block, catches, finalizer) =>
- Try(refConstr(block), catches map refConstrCase, finalizer)
- case Block(stats, expr) =>
- Block(stats, refConstr(expr))
- case If(cond, trueBranch, falseBranch) =>
- If(cond, refConstr(trueBranch), refConstr(falseBranch))
- case Match(selector, cases) =>
- Match(selector, cases map refConstrCase)
- // if we can't figure out what else to do, turn expr into {val temp1 = expr; new *Ref(temp1)} to avoid
- // any possibility of try/catch in the *Ref constructor. This should be a safe tranformation as a default
- // though it potentially wastes a variable slot. In particular this case handles LabelDefs.
- case _ =>
- debuglog("assigning expr to temp: " + (expr.pos))
- val tempSym = currentOwner.newValue(unit.freshTermName("temp"), expr.pos) setInfo expr.tpe
- val tempDef = ValDef(tempSym, expr) setPos expr.pos
- val tempRef = Ident(tempSym) setPos expr.pos
- Block(tempDef, New(sym.tpe, tempRef))
- }}
- def refConstrCase(cdef: CaseDef): CaseDef =
- CaseDef(cdef.pat, cdef.guard, refConstr(cdef.body))
-
- treeCopy.ValDef(tree, mods, name, tpt1, refConstr(constructorArg))
+
+ treeCopy.ValDef(tree, mods, name, tpt1, factoryCall)
} else tree
case Return(Block(stats, value)) =>
Block(stats, treeCopy.Return(tree, value)) setType tree.tpe setPos tree.pos
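The refCreateMethod/refZeroMethod maps introduced above let lambdalift build captured-variable boxes through the static factories on the scala.runtime.*Ref companions (create and zero) rather than through constructor calls. A hand-expanded sketch of what capturing a local var amounts to; RefDemo is a hypothetical name, while IntRef.create/zero and the public elem field are the runtime API the commit refers to:

    import scala.runtime.IntRef

    object RefDemo {
      def main(args: Array[String]): Unit = {
        // Source level: a var captured by a closure.
        var count = 0
        val inc = () => count += 1
        inc(); inc()
        println(count)           // 2

        // Roughly the shape lambdalift produces for the captured var:
        val countRef = IntRef.create(0)   // with this commit: a factory call instead of new IntRef(0)
        val incRef = () => countRef.elem += 1
        incRef(); incRef()
        println(countRef.elem)   // 2
      }
    }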
diff --git a/src/compiler/scala/tools/nsc/transform/LazyVals.scala b/src/compiler/scala/tools/nsc/transform/LazyVals.scala
index 21213cf9d9..15ca916ac1 100644
--- a/src/compiler/scala/tools/nsc/transform/LazyVals.scala
+++ b/src/compiler/scala/tools/nsc/transform/LazyVals.scala
@@ -68,7 +68,7 @@ abstract class LazyVals extends Transform with TypingTransformers with ast.TreeD
curTree = tree
tree match {
-
+
case Block(_, _) =>
val block1 = super.transform(tree)
val Block(stats, expr) = block1
@@ -79,7 +79,7 @@ abstract class LazyVals extends Transform with TypingTransformers with ast.TreeD
List(stat)
})
treeCopy.Block(block1, stats1, expr)
-
+
case DefDef(_, _, _, _, _, rhs) => atOwner(tree.symbol) {
val (res, slowPathDef) = if (!sym.owner.isClass && sym.isLazy) {
val enclosingClassOrDummyOrMethod = {
@@ -100,9 +100,9 @@ abstract class LazyVals extends Transform with TypingTransformers with ast.TreeD
val (rhs1, sDef) = mkLazyDef(enclosingClassOrDummyOrMethod, transform(rhs), idx, sym)
sym.resetFlag((if (lazyUnit(sym)) 0 else LAZY) | ACCESSOR)
(rhs1, sDef)
- } else
+ } else
(transform(rhs), EmptyTree)
-
+
val ddef1 = deriveDefDef(tree)(_ => if (LocalLazyValFinder.find(res)) typed(addBitmapDefs(sym, res)) else res)
if (slowPathDef != EmptyTree) Block(slowPathDef, ddef1) else ddef1
}
@@ -183,16 +183,16 @@ abstract class LazyVals extends Transform with TypingTransformers with ast.TreeD
if (bmps.isEmpty) rhs else rhs match {
case Block(assign, l @ LabelDef(name, params, _))
- if name.toString == ("_" + methSym.name) && isMatch(params) =>
+ if (name string_== "_" + methSym.name) && isMatch(params) =>
Block(assign, deriveLabelDef(l)(rhs => typed(prependStats(bmps, rhs))))
case _ => prependStats(bmps, rhs)
}
}
-
+
def mkSlowPathDef(clazz: Symbol, lzyVal: Symbol, cond: Tree, syncBody: List[Tree],
stats: List[Tree], retVal: Tree): Tree = {
- val defSym = clazz.newMethod(nme.newLazyValSlowComputeName(lzyVal.name), lzyVal.pos, STABLE | PRIVATE)
+ val defSym = clazz.newMethod(nme.newLazyValSlowComputeName(lzyVal.name.toTermName), lzyVal.pos, STABLE | PRIVATE)
defSym setInfo MethodType(List(), lzyVal.tpe.resultType)
defSym.owner = lzyVal.owner
debuglog(s"crete slow compute path $defSym with owner ${defSym.owner} for lazy val $lzyVal")
@@ -201,8 +201,8 @@ abstract class LazyVals extends Transform with TypingTransformers with ast.TreeD
val rhs: Tree = (gen.mkSynchronizedCheck(clazz, cond, syncBody, stats)).changeOwner(currentOwner -> defSym)
DEF(defSym).mkTree(addBitmapDefs(lzyVal, BLOCK(rhs, retVal))) setSymbol defSym
}
-
-
+
+
def mkFastPathBody(clazz: Symbol, lzyVal: Symbol, cond: Tree, syncBody: List[Tree],
stats: List[Tree], retVal: Tree): (Tree, Tree) = {
val slowPathDef: Tree = mkSlowPathDef(clazz, lzyVal, cond, syncBody, stats, retVal)
@@ -221,7 +221,7 @@ abstract class LazyVals extends Transform with TypingTransformers with ast.TreeD
* Similarly as for normal lazy val members (see Mixin), the result will be a tree of the form
* { if ((bitmap&n & MASK) == 0) this.l$compute()
* else l$
- *
+ *
* def l$compute() = { synchronized(enclosing_class_or_dummy) {
* if ((bitmap$n & MASK) == 0) {
* l$ = <rhs>
@@ -277,8 +277,8 @@ abstract class LazyVals extends Transform with TypingTransformers with ast.TreeD
if (bmps.length > n)
bmps(n)
else {
- val sym = meth.newVariable(nme.newBitmapName(nme.BITMAP_NORMAL, n), meth.pos).setInfo(ByteClass.tpe)
- beforeTyper {
+ val sym = meth.newVariable(nme.newBitmapName(nme.BITMAP_NORMAL, n), meth.pos).setInfo(ByteTpe)
+ enteringTyper {
sym addAnnotation VolatileAttr
}
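The mkSlowPathDef/mkFastPathBody machinery above generates the bitmap-guarded double-checked locking described in the comment: an unsynchronized fast path reads a volatile bitmap, and only the compute method synchronizes and re-checks before evaluating the right-hand side once. A hand-written analogue for a single lazy field; LazyCell and its member names are hypothetical, the real compiler emits an l$compute method per lazy val:

    class LazyCell(init: => String) {
      @volatile private var bitmap0: Byte = 0
      private var value0: String = _

      private def valueCompute(): String = synchronized {
        // Re-check under the lock so the initializer runs at most once.
        if ((bitmap0 & 1) == 0) {
          value0 = init
          bitmap0 = (bitmap0 | 1).toByte
        }
        value0
      }

      // Fast path: no locking unless the initialized bit is still clear.
      def value: String = if ((bitmap0 & 1) == 0) valueCompute() else value0
    }

    object LazyCellDemo {
      def main(args: Array[String]): Unit = {
        val c = new LazyCell({ println("computing"); "done" })
        println(c.value)   // computing, then done
        println(c.value)   // done (not recomputed)
      }
    }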
diff --git a/src/compiler/scala/tools/nsc/transform/Mixin.scala b/src/compiler/scala/tools/nsc/transform/Mixin.scala
index c9c68d080d..eca958fd8d 100644
--- a/src/compiler/scala/tools/nsc/transform/Mixin.scala
+++ b/src/compiler/scala/tools/nsc/transform/Mixin.scala
@@ -27,14 +27,14 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
private val treatedClassInfos = perRunCaches.newMap[Symbol, Type]() withDefaultValue NoType
/** Map a lazy, mixedin field accessor to its trait member accessor */
- private val initializer = perRunCaches.newMap[Symbol, Symbol]
+ private val initializer = perRunCaches.newMap[Symbol, Symbol]()
// --------- helper functions -----------------------------------------------
/** A member of a trait is implemented statically if its implementation after the
* mixin transform is in the static implementation module. To be statically
* implemented, a member must be a method that belonged to the trait's implementation class
- * before (e.g. it is not abstract). Not statically implemented are
+ * before (i.e. it is not abstract). Not statically implemented are
* - non-private modules: these are implemented directly in the mixin composition class
* (private modules, on the other hand, are implemented statically, but their
* module variable is not. all such private modules are lifted, because
@@ -68,7 +68,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
* maps all other types to themselves.
*/
private def toInterface(tp: Type): Type =
- beforeMixin(tp.typeSymbol.toInterface).tpe
+ enteringMixin(tp.typeSymbol.toInterface).tpe
private def isFieldWithBitmap(field: Symbol) = {
field.info // ensure that nested objects are transformed
@@ -86,7 +86,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
* Note: The `checkinit` option does not check if transient fields are initialized.
*/
private def needsInitFlag(sym: Symbol) = (
- settings.checkInit.value
+ settings.checkInit
&& sym.isGetter
&& !sym.isInitializedToDefault
&& !sym.hasFlag(PARAMACCESSOR | SPECIALIZED | LAZY)
@@ -102,7 +102,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
private val toInterfaceMap = new TypeMap {
def apply(tp: Type): Type = mapOver( tp match {
case TypeRef(pre, sym, args) if sym.isImplClass =>
- typeRef(pre, beforeMixin(sym.toInterface), args)
+ typeRef(pre, enteringMixin(sym.toInterface), args)
case _ => tp
})
}
@@ -119,14 +119,14 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
* @param mixinClass The mixin class that produced the superaccessor
*/
private def rebindSuper(base: Symbol, member: Symbol, mixinClass: Symbol): Symbol =
- afterPickler {
+ exitingPickler {
var bcs = base.info.baseClasses.dropWhile(mixinClass != _).tail
var sym: Symbol = NoSymbol
debuglog("starting rebindsuper " + base + " " + member + ":" + member.tpe +
" " + mixinClass + " " + base.info.baseClasses + "/" + bcs)
while (!bcs.isEmpty && sym == NoSymbol) {
- if (settings.debug.value) {
- val other = bcs.head.info.nonPrivateDecl(member.name);
+ if (settings.debug) {
+ val other = bcs.head.info.nonPrivateDecl(member.name)
debuglog("rebindsuper " + bcs.head + " " + other + " " + other.tpe +
" " + other.isDeferred)
}
@@ -148,7 +148,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
sym =>
isConcreteAccessor(sym) &&
!sym.hasFlag(MIXEDIN) &&
- matchesType(sym.tpe, member.tpe, true))
+ matchesType(sym.tpe, member.tpe, alwaysMatchSimple = true))
}
( bcs.head != member.owner
&& (hasOverridingAccessor(bcs.head) || isOverriddenAccessor(member, bcs.tail))
@@ -165,7 +165,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
addMember(clazz, cloneBeforeErasure(mixinClass, mixinMember, clazz))
def cloneBeforeErasure(mixinClass: Symbol, mixinMember: Symbol, clazz: Symbol): Symbol = {
- val newSym = beforeErasure {
+ val newSym = enteringErasure {
// since we used `mixinMember` from the interface that represents the trait that's
// being mixed in, have to instantiate the interface type params (that may occur in mixinMember's
// info) as they are seen from the class. We can't use the member that we get from the
@@ -185,11 +185,6 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
newSym updateInfo (mixinMember.info cloneInfo newSym)
}
- def needsExpandedSetterName(field: Symbol) = !field.isLazy && (
- if (field.isMethod) field.hasStableFlag
- else !field.isMutable
- )
-
/** Add getters and setters for all non-module fields of an implementation
* class to its interface unless they are already present. This is done
* only once per class. The mixedin flag is used to remember whether late
@@ -197,32 +192,29 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
* - lazy fields don't get a setter.
*/
def addLateInterfaceMembers(clazz: Symbol) {
- def makeConcrete(member: Symbol) =
- member setPos clazz.pos resetFlag (DEFERRED | lateDEFERRED)
-
if (treatedClassInfos(clazz) != clazz.info) {
treatedClassInfos(clazz) = clazz.info
assert(phase == currentRun.mixinPhase, phase)
- /** Create a new getter. Getters are never private or local. They are
+ /* Create a new getter. Getters are never private or local. They are
* always accessors and deferred. */
def newGetter(field: Symbol): Symbol = {
// println("creating new getter for "+ field +" : "+ field.info +" at "+ field.locationString+(field hasFlag MUTABLE))
val newFlags = field.flags & ~PrivateLocal | ACCESSOR | lateDEFERRED | ( if (field.isMutable) 0 else STABLE )
// TODO preserve pre-erasure info?
- clazz.newMethod(nme.getterName(field.name), field.pos, newFlags) setInfo MethodType(Nil, field.info)
+ clazz.newMethod(field.getterName, field.pos, newFlags) setInfo MethodType(Nil, field.info)
}
- /** Create a new setter. Setters are never private or local. They are
- * always accessors and deferred. */
+ /* Create a new setter. Setters are never private or local. They are
+ * always accessors and deferred. */
def newSetter(field: Symbol): Symbol = {
//println("creating new setter for "+field+field.locationString+(field hasFlag MUTABLE))
- val setterName = nme.getterToSetter(nme.getterName(field.name))
+ val setterName = field.setterName
val newFlags = field.flags & ~PrivateLocal | ACCESSOR | lateDEFERRED
val setter = clazz.newMethod(setterName, field.pos, newFlags)
// TODO preserve pre-erasure info?
- setter setInfo MethodType(setter.newSyntheticValueParams(List(field.info)), UnitClass.tpe)
- if (needsExpandedSetterName(field))
+ setter setInfo MethodType(setter.newSyntheticValueParams(List(field.info)), UnitTpe)
+ if (field.needsExpandedSetterName)
setter.name = nme.expandedSetterName(setter.name, clazz)
setter
@@ -240,12 +232,12 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
val getter = member.getter(clazz)
if (getter == NoSymbol) addMember(clazz, newGetter(member))
if (!member.tpe.isInstanceOf[ConstantType] && !member.isLazy) {
- val setter = member.setter(clazz, needsExpandedSetterName(member))
+ val setter = member.setter(clazz)
if (setter == NoSymbol) addMember(clazz, newSetter(member))
}
}
}
- debuglog("new defs of " + clazz + " = " + clazz.info.decls);
+ debuglog("new defs of " + clazz + " = " + clazz.info.decls)
}
}
@@ -267,7 +259,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
resetFlag DEFERRED | lateDEFERRED
)
- /** Mix in members of implementation class mixinClass into class clazz */
+ /* Mix in members of implementation class mixinClass into class clazz */
def mixinImplClassMembers(mixinClass: Symbol, mixinInterface: Symbol) {
if (!mixinClass.isImplClass) debugwarn ("Impl class flag is not set " +
((mixinClass.debugLocationString, mixinInterface.debugLocationString)))
@@ -276,23 +268,23 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
val imember = member overriddenSymbol mixinInterface
imember overridingSymbol clazz match {
case NoSymbol =>
- if (clazz.info.findMember(member.name, 0, lateDEFERRED, false).alternatives contains imember)
+ if (clazz.info.findMember(member.name, 0, lateDEFERRED, stableOnly = false).alternatives contains imember)
cloneAndAddMixinMember(mixinInterface, imember).asInstanceOf[TermSymbol] setAlias member
case _ =>
}
}
}
- /** Mix in members of trait mixinClass into class clazz. Also,
- * for each lazy field in mixinClass, add a link from its mixed in member to its
- * initializer method inside the implclass.
+ /* Mix in members of trait mixinClass into class clazz. Also,
+ * for each lazy field in mixinClass, add a link from its mixed in member to its
+ * initializer method inside the implclass.
*/
def mixinTraitMembers(mixinClass: Symbol) {
// For all members of a trait's interface do:
for (mixinMember <- mixinClass.info.decls) {
if (isConcreteAccessor(mixinMember)) {
if (isOverriddenAccessor(mixinMember, clazz.info.baseClasses))
- debugwarn("!!! is overridden val: "+mixinMember.fullLocationString)
+ devWarning(s"Overridden concrete accessor: ${mixinMember.fullLocationString}")
else {
// mixin field accessors
val mixedInAccessor = cloneAndAddMixinMember(mixinClass, mixinMember)
@@ -311,14 +303,14 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
// mixinMember is a value of type unit. No field needed
;
case _ => // otherwise mixin a field as well
- // atPhase: the private field is moved to the implementation class by erasure,
+ // enteringPhase: the private field is moved to the implementation class by erasure,
// so it can no longer be found in the mixinMember's owner (the trait)
- val accessed = beforePickler(mixinMember.accessed)
+ val accessed = enteringPickler(mixinMember.accessed)
// #3857, need to retain info before erasure when cloning (since cloning only
// carries over the current entry in the type history)
- val sym = beforeErasure {
+ val sym = enteringErasure {
// so we have a type history entry before erasure
- clazz.newValue(nme.getterToLocal(mixinMember.name), mixinMember.pos).setInfo(mixinMember.tpe.resultType)
+ clazz.newValue(mixinMember.localName, mixinMember.pos).setInfo(mixinMember.tpe.resultType)
}
sym updateInfo mixinMember.tpe.resultType // info at current phase
@@ -379,35 +371,34 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
var parents1 = parents
var decls1 = decls
if (!clazz.isPackageClass) {
- afterMixin(clazz.owner.info)
+ exitingMixin(clazz.owner.info)
if (clazz.isImplClass) {
clazz setFlag lateMODULE
var sourceModule = clazz.owner.info.decls.lookup(sym.name.toTermName)
- if (sourceModule != NoSymbol) {
- sourceModule setPos sym.pos
- if (sourceModule.flags != MODULE) {
- log("!!! Directly setting sourceModule flags from %s to MODULE".format(flagsToString(sourceModule.flags)))
- sourceModule.flags = MODULE
- }
- }
- else {
+ if (sourceModule == NoSymbol) {
sourceModule = (
clazz.owner.newModuleSymbol(sym.name.toTermName, sym.pos, MODULE)
setModuleClass sym.asInstanceOf[ClassSymbol]
)
clazz.owner.info.decls enter sourceModule
}
+ else {
+ sourceModule setPos sym.pos
+ if (sourceModule.flags != MODULE) {
+ log("!!! Directly setting sourceModule flags from %s to MODULE".format(sourceModule.flagString))
+ sourceModule.flags = MODULE
+ }
+ }
sourceModule setInfo sym.tpe
// Companion module isn't visible for anonymous class at this point anyway
- assert(clazz.sourceModule != NoSymbol || clazz.isAnonymousClass,
- clazz + " has no sourceModule: sym = " + sym + " sym.tpe = " + sym.tpe)
+ assert(clazz.sourceModule != NoSymbol || clazz.isAnonymousClass, s"$clazz has no sourceModule: $sym ${sym.tpe}")
parents1 = List()
decls1 = newScopeWith(decls.toList filter isImplementedStatically: _*)
} else if (!parents.isEmpty) {
parents1 = parents.head :: (parents.tail map toInterface)
}
}
- //decls1 = atPhase(phase.next)(newScopeWith(decls1.toList: _*))//debug
+ //decls1 = enteringPhase(phase.next)(newScopeWith(decls1.toList: _*))//debug
if ((parents1 eq parents) && (decls1 eq decls)) tp
else ClassInfoType(parents1, decls1, clazz)
@@ -437,7 +428,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
tree match {
case Assign(lhs, rhs) => traverse(rhs) // assignments don't count
case _ =>
- if (tree.hasSymbol && tree.symbol != NoSymbol) {
+ if (tree.hasSymbolField && tree.symbol != NoSymbol) {
val sym = tree.symbol
if ((sym.hasAccessorFlag || (sym.isTerm && !sym.isMethod))
&& sym.isPrivate
@@ -515,7 +506,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
* - create a new method definition that also has a `self` parameter
* (which comes first) Iuli: this position is assumed by tail call elimination
* on a different receiver. Storing a new 'this' assumes it is located at
- * index 0 in the local variable table. See 'STORE_THIS' and GenJVM/GenMSIL.
+ * index 0 in the local variable table. See 'STORE_THIS' and GenASM.
* - Map implementation class types in type-apply's to their interfaces
* - Remove all fields in implementation classes
*/
@@ -524,7 +515,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
tree match {
case Template(parents, self, body) =>
localTyper = erasure.newTyper(rootContext.make(tree, currentOwner))
- afterMixin(currentOwner.owner.info)//todo: needed?
+ exitingMixin(currentOwner.owner.info)//todo: needed?
if (!currentOwner.isTrait && !isPrimitiveValueClass(currentOwner))
addMixedinMembers(currentOwner, unit)
@@ -543,17 +534,23 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
else EmptyTree
}
else {
- if (currentOwner.isTrait && sym.isSetter && !beforePickler(sym.isDeferred)) {
+ if (currentOwner.isTrait && sym.isSetter && !enteringPickler(sym.isDeferred)) {
sym.addAnnotation(TraitSetterAnnotationClass)
}
tree
}
+ // !!! What is this doing, and why is it only looking for exactly
+ // one type parameter? It would seem to be
+ // "Map implementation class types in type-apply's to their interfaces"
+ // from the comment on preTransform, but is there some way we should know
+ // that impl class types in type applies can only appear in single
+ // type parameter type constructors?
case Apply(tapp @ TypeApply(fn, List(arg)), List()) =>
if (arg.tpe.typeSymbol.isImplClass) {
val ifacetpe = toInterface(arg.tpe)
- arg.tpe = ifacetpe
- tapp.tpe = MethodType(List(), ifacetpe)
- tree.tpe = ifacetpe
+ arg setType ifacetpe
+ tapp setType MethodType(Nil, ifacetpe)
+ tree setType ifacetpe
}
tree
case ValDef(_, _, _, _) if currentOwner.isImplClass =>
@@ -590,8 +587,8 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
tree
}
- /** Create a static reference to given symbol <code>sym</code> of the
- * form <code>M.sym</code> where M is the symbol's implementation module.
+ /** Create a static reference to given symbol `sym` of the
+ * form `M.sym` where M is the symbol's implementation module.
*/
private def staticRef(sym: Symbol): Tree = {
sym.owner.info //todo: needed?
@@ -647,34 +644,34 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
private def addNewDefs(clazz: Symbol, stats: List[Tree]): List[Tree] = {
val newDefs = mutable.ListBuffer[Tree]()
- /** Attribute given tree and anchor at given position */
+ /* Attribute given tree and anchor at given position */
def attributedDef(pos: Position, tree: Tree): Tree = {
debuglog("add new def to " + clazz + ": " + tree)
typedPos(pos)(tree)
}
- /** The position of given symbol, or, if this is undefined,
- * the position of the current class.
+ /* The position of given symbol, or, if this is undefined,
+ * the position of the current class.
*/
def position(sym: Symbol) =
if (sym.pos == NoPosition) clazz.pos else sym.pos
- /** Add tree at given position as new definition */
+ /* Add tree at given position as new definition */
def addDef(pos: Position, tree: Tree) {
newDefs += attributedDef(pos, tree)
}
- /** Add new method definition.
+ /* Add new method definition.
*
- * @param sym The method symbol.
- * @param rhs The method body.
+ * @param sym The method symbol.
+ * @param rhs The method body.
*/
def addDefDef(sym: Symbol, rhs: Tree = EmptyTree) = addDef(position(sym), DefDef(sym, rhs))
def addValDef(sym: Symbol, rhs: Tree = EmptyTree) = addDef(position(sym), ValDef(sym, rhs))
- /** Add `newdefs` to `stats`, removing any abstract method definitions
- * in <code>stats</code> that are matched by some symbol defined in
- * <code>newDefs</code>.
+ /* Add `newdefs` to `stats`, removing any abstract method definitions
+ * in `stats` that are matched by some symbol defined in
+ * `newDefs`.
*/
def add(stats: List[Tree], newDefs: List[Tree]) = {
val newSyms = newDefs map (_.symbol)
@@ -690,30 +687,30 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
else newDefs ::: (stats filter isNotDuplicate)
}
- /** If `stat` is a superaccessor, complete it by adding a right-hand side.
- * Note: superaccessors are always abstract until this point.
- * The method to call in a superaccessor is stored in the accessor symbol's alias field.
- * The rhs is:
- * super.A(xs) where A is the super accessor's alias and xs are its formal parameters.
- * This rhs is typed and then mixin transformed.
+ /* If `stat` is a superaccessor, complete it by adding a right-hand side.
+ * Note: superaccessors are always abstract until this point.
+ * The method to call in a superaccessor is stored in the accessor symbol's alias field.
+ * The rhs is:
+ * super.A(xs) where A is the super accessor's alias and xs are its formal parameters.
+ * This rhs is typed and then mixin transformed.
*/
def completeSuperAccessor(stat: Tree) = stat match {
case DefDef(_, _, _, vparams :: Nil, _, EmptyTree) if stat.symbol.isSuperAccessor =>
val rhs0 = (Super(clazz, tpnme.EMPTY) DOT stat.symbol.alias)(vparams map (v => Ident(v.symbol)): _*)
val rhs1 = localTyped(stat.pos, rhs0, stat.symbol.tpe.resultType)
- deriveDefDef(stat)(_ => beforeMixin(transform(rhs1)))
+ deriveDefDef(stat)(_ => enteringMixin(transform(rhs1)))
case _ =>
stat
}
- /**
+ /*
* Return the bitmap field for 'offset'. Depending on the hierarchy it is possible to reuse
* the bitmap of its parents. If that does not exist yet we create one.
*/
def bitmapFor(clazz0: Symbol, offset: Int, field: Symbol): Symbol = {
val category = bitmapCategory(field)
- val bitmapName = nme.newBitmapName(category, offset / flagsPerBitmap(field))
+ val bitmapName = nme.newBitmapName(category, offset / flagsPerBitmap(field)).toTermName
val sym = clazz0.info.decl(bitmapName)
assert(!sym.isOverloaded, sym)
@@ -721,7 +718,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
def createBitmap: Symbol = {
val bitmapKind = bitmapKindForCategory(category)
val sym = clazz0.newVariable(bitmapName, clazz0.pos) setInfo bitmapKind.tpe
- beforeTyper(sym addAnnotation VolatileAttr)
+ enteringTyper(sym addAnnotation VolatileAttr)
category match {
case nme.BITMAP_TRANSIENT | nme.BITMAP_CHECKINIT_TRANSIENT => sym addAnnotation TransientAttr
@@ -749,7 +746,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
if (kind == LongClass ) LIT(1L << realOffset) else LIT(1 << realOffset)
}
- /** Return an (untyped) tree of the form 'Clazz.this.bmp = Clazz.this.bmp | mask'. */
+ /* Return an (untyped) tree of the form 'Clazz.this.bmp = Clazz.this.bmp | mask'. */
def mkSetFlag(clazz: Symbol, offset: Int, valSym: Symbol, kind: ClassSymbol): Tree = {
val bmp = bitmapFor(clazz, offset, valSym)
def mask = maskForOffset(offset, valSym, kind)
@@ -759,8 +756,8 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
x === newValue
}
- /** Return an (untyped) tree of the form 'clazz.this.bitmapSym & mask (==|!=) 0', the
- * precise comparison operator depending on the value of 'equalToZero'.
+ /* Return an (untyped) tree of the form 'clazz.this.bitmapSym & mask (==|!=) 0', the
+ * precise comparison operator depending on the value of 'equalToZero'.
*/
def mkTest(clazz: Symbol, mask: Tree, bitmapSym: Symbol, equalToZero: Boolean, kind: ClassSymbol): Tree = {
val bitmapTree = (This(clazz) DOT bitmapSym)
@@ -777,7 +774,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
def mkSlowPathDef(clazz: Symbol, lzyVal: Symbol, cond: Tree, syncBody: List[Tree],
stats: List[Tree], retVal: Tree, attrThis: Tree, args: List[Tree]): Symbol = {
- val defSym = clazz.newMethod(nme.newLazyValSlowComputeName(lzyVal.name), lzyVal.pos, PRIVATE)
+ val defSym = clazz.newMethod(nme.newLazyValSlowComputeName(lzyVal.name.toTermName), lzyVal.pos, PRIVATE)
val params = defSym newSyntheticValueParams args.map(_.symbol.tpe)
defSym setInfoAndEnter MethodType(params, lzyVal.tpe.resultType)
val rhs: Tree = (gen.mkSynchronizedCheck(attrThis, cond, syncBody, stats)).changeOwner(currentOwner -> defSym)
@@ -798,19 +795,19 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
}
- /** Always copy the tree if we are going to perform sym substitution,
- * otherwise we will side-effect on the tree that is used in the fast path
- */
- class TreeSymSubstituterWithCopying(from: List[Symbol], to: List[Symbol]) extends TreeSymSubstituter(from, to) {
- override def transform(tree: Tree): Tree =
- if (tree.hasSymbol && from.contains(tree.symbol))
- super.transform(tree.duplicate)
- else super.transform(tree.duplicate)
+ /* Always copy the tree if we are going to perform sym substitution,
+ * otherwise we will side-effect on the tree that is used in the fast path
+ */
+ class TreeSymSubstituterWithCopying(from: List[Symbol], to: List[Symbol]) extends TreeSymSubstituter(from, to) {
+ override def transform(tree: Tree): Tree =
+ if (tree.hasSymbolField && from.contains(tree.symbol))
+ super.transform(tree.duplicate)
+ else super.transform(tree.duplicate)
- override def apply[T <: Tree](tree: T): T = if (from.isEmpty) tree else super.apply(tree)
- }
+ override def apply[T <: Tree](tree: T): T = if (from.isEmpty) tree else super.apply(tree)
+ }
- /** return a 'lazified' version of rhs. It uses double-checked locking to ensure
+ /* return a 'lazified' version of rhs. It uses double-checked locking to ensure
* initialization is performed at most once. For performance reasons the double-checked
* locking is split into two parts, the first (fast) path checks the bitmap without
* synchronizing, and if that fails it initializes the lazy val within the
@@ -819,8 +816,8 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
* Private fields used only in this initializer are subsequently set to null.
*
* @param clazz The class symbol
+ * @param lzyVal The symbol of this lazy field
* @param init The tree which initializes the field ( f = <rhs> )
- * @param fieldSym The symbol of this lazy field
* @param offset The offset of this field in the flags bitmap
*
* The result will be a tree of the form
@@ -853,7 +850,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
val bitmapSym = bitmapFor(clazz, offset, lzyVal)
val kind = bitmapKind(lzyVal)
val mask = maskForOffset(offset, lzyVal, kind)
- def cond = mkTest(clazz, mask, bitmapSym, true, kind)
+ def cond = mkTest(clazz, mask, bitmapSym, equalToZero = true, kind)
val nulls = lazyValNullables(lzyVal).toList sortBy (_.id) map nullify
def syncBody = init ::: List(mkSetFlag(clazz, offset, lzyVal, kind), UNIT)
@@ -870,7 +867,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
val cond = Apply(Select(moduleVarRef, Object_eq), List(NULL))
mkFastPathBody(clazz, moduleSym, cond, List(assign), List(NULL), returnTree, attrThis, args)
case _ =>
- abort("Invalid getter " + rhs + " for module in class " + clazz)
+ abort(s"Invalid getter $rhs for module in $clazz")
}
def mkCheckedAccessor(clazz: Symbol, retVal: Tree, offset: Int, pos: Position, fieldSym: Symbol): Tree = {
@@ -878,20 +875,20 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
val bitmapSym = bitmapFor(clazz, offset, sym)
val kind = bitmapKind(sym)
val mask = maskForOffset(offset, sym, kind)
- val msg = "Uninitialized field: " + unit.source + ": " + pos.line
+ val msg = s"Uninitialized field: ${unit.source}: ${pos.line}"
val result =
- IF (mkTest(clazz, mask, bitmapSym, false, kind)) .
+ IF (mkTest(clazz, mask, bitmapSym, equalToZero = false, kind)) .
THEN (retVal) .
- ELSE (THROW(UninitializedErrorClass, LIT(msg)))
+ ELSE (Throw(NewFromConstructor(UninitializedFieldConstructor, LIT(msg))))
typedPos(pos)(BLOCK(result, retVal))
}
- /** Complete lazy field accessors. Applies only to classes,
- * for it's own (non inherited) lazy fields. If 'checkinit'
- * is enabled, getters that check for the initialized bit are
- * generated, and the class constructor is changed to set the
- * initialized bits.
+ /* Complete lazy field accessors. Applies only to classes,
+ * for it's own (non inherited) lazy fields. If 'checkinit'
+ * is enabled, getters that check for the initialized bit are
+ * generated, and the class constructor is changed to set the
+ * initialized bits.
*/
def addCheckedGetters(clazz: Symbol, stats: List[Tree]): List[Tree] = {
def dd(stat: DefDef) = {
@@ -922,7 +919,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
else if (sym.isConstructor) {
deriveDefDef(stat)(addInitBits(clazz, _))
}
- else if (settings.checkInit.value && !clazz.isTrait && sym.isSetter) {
+ else if (settings.checkInit && !clazz.isTrait && sym.isSetter) {
val getter = sym.getter(clazz)
if (needsInitFlag(getter) && fieldOffset.isDefinedAt(getter))
deriveDefDef(stat)(rhs => Block(List(rhs, localTyper.typed(mkSetFlag(clazz, fieldOffset(getter), getter, bitmapKind(getter)))), UNIT))
@@ -972,23 +969,17 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
}
}
- /** Adds statements to set the 'init' bit for each field initialized
- * in the body of a constructor.
+ /* Adds statements to set the 'init' bit for each field initialized
+ * in the body of a constructor.
*/
def addInitBits(clazz: Symbol, rhs: Tree): Tree =
new AddInitBitsTransformer(clazz) transform rhs
- def isCheckInitField(field: Symbol) =
- needsInitFlag(field) && !field.isDeferred
-
- def superClassesToCheck(clazz: Symbol) =
- clazz.ancestors filterNot (_ hasFlag TRAIT | JAVA)
-
// begin addNewDefs
- /** Fill the map from fields to offset numbers.
- * Instead of field symbols, the map keeps their getter symbols. This makes
- * code generation easier later.
+ /* Fill the map from fields to offset numbers.
+ * Instead of field symbols, the map keeps their getter symbols. This makes
+ * code generation easier later.
*/
def buildBitmapOffsets() {
def fold(fields: List[Symbol], category: Name) = {
@@ -1044,16 +1035,17 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
}
// if class is not a trait add accessor definitions
else if (!clazz.isTrait) {
- if (sym.hasAccessorFlag && (!sym.isDeferred || sym.hasFlag(lateDEFERRED))) {
+ // This needs to be a def to avoid sharing trees
+ def accessedRef = accessedReference(sym)
+ if (isConcreteAccessor(sym)) {
// add accessor definitions
addDefDef(sym, {
- val accessedRef = accessedReference(sym)
if (sym.isSetter) {
if (isOverriddenSetter(sym)) UNIT
else accessedRef match {
- case Literal(_) => accessedRef
- case _ =>
- val init = Assign(accessedRef, Ident(sym.firstParam))
+ case ref @ Literal(_) => ref
+ case ref =>
+ val init = Assign(ref, Ident(sym.firstParam))
val getter = sym.getter(clazz)
if (!needsInitFlag(getter)) init
@@ -1063,16 +1055,18 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
else if (needsInitFlag(sym))
mkCheckedAccessor(clazz, accessedRef, fieldOffset(sym), sym.pos, sym)
else
- gen.mkCheckInit(accessedRef)
+ accessedRef
})
}
else if (sym.isModule && !(sym hasFlag LIFTED | BRIDGE)) {
// add modules
- val vdef = gen.mkModuleVarDef(sym)
- addDef(position(sym), vdef)
+ val vsym = sym.owner.newModuleVarSymbol(sym)
+ addDef(position(sym), ValDef(vsym))
- val rhs = gen.newModule(sym, vdef.symbol.tpe)
- val assignAndRet = gen.mkAssignAndReturn(vdef.symbol, rhs)
+ // !!! TODO - unravel the enormous duplication between this code and
+ // eliminateModuleDefs in RefChecks.
+ val rhs = gen.newModule(sym, vsym.tpe)
+ val assignAndRet = gen.mkAssignAndReturn(vsym, rhs)
val attrThis = gen.mkAttributedThis(clazz)
val rhs1 = mkInnerClassAccessorDoubleChecked(attrThis, assignAndRet, sym, List())
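For orientation, a minimal sketch of the double-checked module accessor shape that the code above builds (names assumed; not the compiler's literal output):

class Host {
  private[this] var fooModule: Foo = null       // the module var entered above
  def Foo: Foo =
    if (fooModule ne null) fooModule
    else this.synchronized {                    // slow path: assign and return
      if (fooModule eq null) fooModule = new Foo
      fooModule
    }
  class Foo
}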
@@ -1090,7 +1084,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
// add forwarders
assert(sym.alias != NoSymbol, sym)
// debuglog("New forwarder: " + sym.defString + " => " + sym.alias.defString)
- if (!sym.isTermMacro) addDefDef(sym, Apply(staticRef(sym.alias), gen.mkAttributedThis(clazz) :: sym.paramss.head.map(Ident)))
+ if (!sym.isMacro) addDefDef(sym, Apply(staticRef(sym.alias), gen.mkAttributedThis(clazz) :: sym.paramss.head.map(Ident)))
}
}
}
@@ -1135,7 +1129,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
// change every node type that refers to an implementation class to its
// corresponding interface, unless the node's symbol is an implementation class.
if (tree.tpe.typeSymbol.isImplClass && ((sym eq null) || !sym.isImplClass))
- tree.tpe = toInterface(tree.tpe)
+ tree modifyType toInterface
tree match {
case templ @ Template(parents, self, body) =>
@@ -1151,9 +1145,9 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
qual
case Apply(Select(qual, _), args) =>
- /** Changes <code>qual.m(args)</code> where m refers to an implementation
+ /* Changes `qual.m(args)` where m refers to an implementation
* class method to Q.m(S, args) where Q is the implementation module of
- * <code>m</code> and S is the self parameter for the call, which
+ * `m` and S is the self parameter for the call, which
* is determined as follows:
* - if qual != super, qual itself
* - if qual == super, and we are in an implementation class,
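A hedged illustration of the Q.m(S, args) rewriting sketched in the comment above, under the old implementation-class encoding of traits:

trait Logger {
  def log(msg: String): Unit = println(msg)     // concrete trait method
}
class Service extends Logger
// The body of log lives in the implementation module Logger$class, and a call
// (s: Service).log("hi") is lowered to roughly Logger$class.log(s, "hi"),
// with the receiver s passed as the explicit self parameter S.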
@@ -1164,7 +1158,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
def implSym = implClass(sym.owner).info.member(sym.name)
assert(target ne NoSymbol,
List(sym + ":", sym.tpe, sym.owner, implClass(sym.owner), implSym,
- beforePrevPhase(implSym.tpe), phase) mkString " "
+ enteringPrevPhase(implSym.tpe), phase) mkString " "
)
typedPos(tree.pos)(Apply(staticRef(target), transformSuper(qual) :: args))
}
@@ -1193,7 +1187,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
typedPos(tree.pos)((transformSuper(qual) DOT sym1)())
}
else {
- staticCall(beforePrevPhase(sym.overridingSymbol(implClass(sym.owner))))
+ staticCall(enteringPrevPhase(sym.overridingSymbol(implClass(sym.owner))))
}
}
else {
@@ -1211,7 +1205,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
tree
case Select(qual, name) if sym.owner.isImplClass && !isStaticOnly(sym) =>
- assert(!sym.isMethod, "no method allowed here: %s%s %s".format(sym, sym.isImplOnly, flagsToString(sym.flags)))
+ assert(!sym.isMethod, "no method allowed here: %s%s %s".format(sym, sym.isImplOnly, sym.flagString))
// refer to fields in some implementation class via an abstract
// getter in the interface.
val iface = toInterface(sym.owner.tpe).typeSymbol
@@ -1237,10 +1231,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
case Assign(Apply(lhs @ Select(qual, _), List()), rhs) =>
// assign to fields in some implementation class via an abstract
// setter in the interface.
- def setter = lhs.symbol.setter(
- toInterface(lhs.symbol.owner.tpe).typeSymbol,
- needsExpandedSetterName(lhs.symbol)
- ) setPos lhs.pos
+ def setter = lhs.symbol.setter(toInterface(lhs.symbol.owner.tpe).typeSymbol) setPos lhs.pos
typedPos(tree.pos)((qual DOT setter)(rhs))
@@ -1258,7 +1249,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
val tree1 = super.transform(preTransform(tree))
// localTyper needed when not flattening inner classes. parts after an
// inner class will otherwise be typechecked with a wrong scope
- try afterMixin(postTransform(tree1))
+ try exitingMixin(postTransform(tree1))
finally localTyper = saved
}
}
diff --git a/src/compiler/scala/tools/nsc/transform/OverridingPairs.scala b/src/compiler/scala/tools/nsc/transform/OverridingPairs.scala
index 67be81bd3c..2610679542 100644
--- a/src/compiler/scala/tools/nsc/transform/OverridingPairs.scala
+++ b/src/compiler/scala/tools/nsc/transform/OverridingPairs.scala
@@ -31,11 +31,11 @@ abstract class OverridingPairs {
private val self = base.thisType
/** Symbols to exclude: Here these are constructors, private locals,
- * and bridges. But it may be refined in subclasses.
+ * and hidden symbols, including bridges. But it may be refined in subclasses.
*
*/
protected def exclude(sym: Symbol): Boolean =
- sym.isConstructor || sym.isPrivateLocal || sym.hasFlag(BRIDGE)
+ sym.isConstructor || sym.isPrivateLocal || sym.isArtifact
/** The parents of base (may also be refined).
*/
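A minimal example (assumed, not part of the patch) of the pairs this cursor is meant to yield under the exclusion rule above:

class A {
  def f: Int = 1
  private[this] val secret = 0     // private local: excluded from pairing
}
class B extends A {
  override def f: Int = 2          // the iterator yields the pair (B.f, A.f)
}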
@@ -86,10 +86,10 @@ abstract class OverridingPairs {
{ def fillDecls(bcs: List[Symbol], deferredflag: Int) {
if (!bcs.isEmpty) {
fillDecls(bcs.tail, deferredflag)
- var e = bcs.head.info.decls.elems;
+ var e = bcs.head.info.decls.elems
while (e ne null) {
if (e.sym.getFlag(DEFERRED) == deferredflag.toLong && !exclude(e.sym))
- decls enter e.sym;
+ decls enter e.sym
e = e.next
}
}
@@ -134,7 +134,7 @@ abstract class OverridingPairs {
private val subParents = new Array[BitSet](size)
{ for (i <- List.range(0, size))
- subParents(i) = new BitSet(size);
+ subParents(i) = new BitSet(size)
for (p <- parents) {
val pIndex = index(p.typeSymbol)
if (pIndex >= 0)
@@ -190,7 +190,7 @@ abstract class OverridingPairs {
if (nextEntry ne null) {
do {
do {
- nextEntry = decls.lookupNextEntry(nextEntry);
+ nextEntry = decls.lookupNextEntry(nextEntry)
/* DEBUG
if ((nextEntry ne null) &&
!(nextEntry.sym hasFlag PRIVATE) &&
@@ -208,19 +208,19 @@ abstract class OverridingPairs {
// overriding and nextEntry.sym
} while ((nextEntry ne null) && (hasCommonParentAsSubclass(overriding, nextEntry.sym)))
if (nextEntry ne null) {
- overridden = nextEntry.sym;
+ overridden = nextEntry.sym
//Console.println("yield: " + overriding + overriding.locationString + " / " + overridden + overridden.locationString);//DEBUG
} else {
do {
curEntry = curEntry.next
- } while ((curEntry ne null) && (visited contains curEntry));
+ } while ((curEntry ne null) && (visited contains curEntry))
nextEntry = curEntry
- next
+ next()
}
}
}
}
- next
+ next()
}
}
diff --git a/src/compiler/scala/tools/nsc/transform/PostErasure.scala b/src/compiler/scala/tools/nsc/transform/PostErasure.scala
index 3ef32caa2c..96263f3c0c 100644
--- a/src/compiler/scala/tools/nsc/transform/PostErasure.scala
+++ b/src/compiler/scala/tools/nsc/transform/PostErasure.scala
@@ -9,10 +9,10 @@ package transform
* performs peephole optimizations.
*/
trait PostErasure extends InfoTransform with TypingTransformers {
-
val global: Global
+
import global._
- import definitions._
+ import treeInfo._
val phaseName: String = "posterasure"
@@ -21,51 +21,33 @@ trait PostErasure extends InfoTransform with TypingTransformers {
object elimErasedValueType extends TypeMap {
def apply(tp: Type) = tp match {
- case ConstantType(Constant(tp: Type)) =>
- ConstantType(Constant(apply(tp)))
- case ErasedValueType(tref) =>
- atPhase(currentRun.erasurePhase)(erasure.erasedValueClassArg(tref))
- case _ => mapOver(tp)
+ case ConstantType(Constant(tp: Type)) => ConstantType(Constant(apply(tp)))
+ case ErasedValueType(tref) => enteringErasure(erasure.erasedValueClassArg(tref))
+ case _ => mapOver(tp)
}
}
def transformInfo(sym: Symbol, tp: Type) = elimErasedValueType(tp)
class PostErasureTransformer(unit: CompilationUnit) extends TypingTransformer(unit) {
+ override def transform(tree: Tree) = {
+ def finish(res: Tree) = logResult(s"Posterasure reduction\n Old: $tree\n New")(res)
+
+ /* We use the name of the operation being performed and not the symbol
+ * itself because the symbol hails from the boxed class, and this transformation
+ * exists to operate directly on the values. So we are for instance looking
+ * up == on an lhs of type Int, whereas the symbol which has been passed in
+ * is from java.lang.Integer.
+ */
+ def binop(lhs: Tree, op: Symbol, rhs: Tree) =
+ finish(localTyper typed (Apply(Select(lhs, op.name) setPos tree.pos, rhs :: Nil) setPos tree.pos))
- override def transform(tree: Tree) =
super.transform(tree) setType elimErasedValueType(tree.tpe) match {
- case // new C(arg).underlying ==> arg
- Apply(sel @ Select(
- Apply(Select(New(tpt), nme.CONSTRUCTOR), List(arg)),
- acc), List())
- if atPhase(currentRun.erasurePhase) {
- tpt.tpe.typeSymbol.isDerivedValueClass &&
- sel.symbol == tpt.tpe.typeSymbol.derivedValueClassUnbox
- } =>
- if (settings.debug.value) log("Removing "+tree+" -> "+arg)
- arg
- case // new C(arg1) == new C(arg2) ==> arg1 == arg2
- Apply(sel @ Select(
- Apply(Select(New(tpt1), nme.CONSTRUCTOR), List(arg1)),
- cmp),
- List(Apply(Select(New(tpt2), nme.CONSTRUCTOR), List(arg2))))
- if atPhase(currentRun.erasurePhase) {
- tpt1.tpe.typeSymbol.isDerivedValueClass &&
- (sel.symbol == Object_== || sel.symbol == Object_!=) &&
- tpt2.tpe.typeSymbol == tpt1.tpe.typeSymbol
- } =>
- val result = Apply(Select(arg1, cmp) setPos sel.pos, List(arg2)) setPos tree.pos
- log("shortcircuiting equality "+tree+" -> "+result)
- localTyper.typed(result)
-
- case // arg.asInstanceOf[T] ==> arg if arg.tpe == T
- Apply(TypeApply(cast @ Select(arg, asinstanceof), List(tpt)), List())
- if cast.symbol == Object_asInstanceOf && arg.tpe =:= tpt.tpe => // !!! <:< ?
- if (settings.debug.value) log("Shortening "+tree+" -> "+arg)
- arg
- case tree1 =>
- tree1
+ case AsInstanceOf(v, tpe) if v.tpe <:< tpe => finish(v) // x.asInstanceOf[X] ==> x
+ case ValueClass.BoxAndUnbox(v) => finish(v) // (new B(v)).unbox ==> v
+ case ValueClass.BoxAndCompare(v1, op, v2) => binop(v1, op, v2) // new B(v1) == new B(v2) ==> v1 == v2
+ case tree => tree
}
+ }
}
}
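A hedged, source-level view of the three peepholes the new extractors stand for; the actual rewrites run on erased trees, so the code below only shows the observable effect:

object PostErasureDemo {
  final class Meters(val underlying: Int) extends AnyVal

  def unwrap(n: Int): Int =
    new Meters(n).underlying          // (new B(v)).unbox       ==> v
  def same(a: Int, b: Int): Boolean =
    new Meters(a) == new Meters(b)    // new B(v1) == new B(v2) ==> v1 == v2
  def id(s: String): String =
    s.asInstanceOf[String]            // x.asInstanceOf[X]      ==> x, since s.tpe <:< X
}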
diff --git a/src/compiler/scala/tools/nsc/transform/SampleTransform.scala b/src/compiler/scala/tools/nsc/transform/SampleTransform.scala
index 44d8860916..cffb483072 100644
--- a/src/compiler/scala/tools/nsc/transform/SampleTransform.scala
+++ b/src/compiler/scala/tools/nsc/transform/SampleTransform.scala
@@ -11,9 +11,8 @@ package transform
abstract class SampleTransform extends Transform {
// inherits abstract value `global` and class `Phase` from Transform
- import global._ // the global environment
- import definitions._ // standard classes and methods
- import typer.{typed, atOwner} // methods to type trees
+ import global._ // the global environment
+ import typer.typed // method to type trees
/** the following two members override abstract members in Transform */
val phaseName: String = "sample-phase"
diff --git a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala
index 39716d4ed0..cc4b2d544b 100644
--- a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala
+++ b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala
@@ -3,13 +3,15 @@
* @author Iulian Dragos
*/
-package scala.tools.nsc
+package scala
+package tools.nsc
package transform
import scala.tools.nsc.symtab.Flags
import scala.collection.{ mutable, immutable }
import scala.language.postfixOps
import scala.language.existentials
+import scala.annotation.tailrec
/** Specialize code on types.
*
@@ -50,7 +52,9 @@ import scala.language.existentials
*/
abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
import global._
+ import definitions._
import Flags._
+
/** the name of the phase: */
val phaseName: String = "specialize"
@@ -66,13 +70,6 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
private implicit val typeOrdering: Ordering[Type] = Ordering[String] on ("" + _.typeSymbol.name)
- import definitions.{
- BooleanClass, UnitClass, ArrayClass,
- ScalaValueClasses, isPrimitiveValueClass, isPrimitiveValueType,
- SpecializedClass, UnspecializedClass, AnyRefClass, ObjectClass,
- GroupOfSpecializable, uncheckedVarianceClass, ScalaInlineClass
- }
- import rootMirror.RootClass
/** TODO - this is a lot of maps.
*/
@@ -101,7 +98,6 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
/** Concrete methods that use a specialized type, or override such methods. */
private val concreteSpecMethods = perRunCaches.newWeakSet[Symbol]()
- private def specializedTypes(tps: List[Symbol]) = tps filter (_.isSpecialized)
private def specializedOn(sym: Symbol): List[Symbol] = {
sym getAnnotation SpecializedClass match {
case Some(AnnotationInfo(_, Nil, _)) => specializableTypes.map(_.typeSymbol)
@@ -119,14 +115,30 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
}
}
- // If we replace `isBoundedGeneric` with (tp <:< AnyRefClass.tpe),
+ @annotation.tailrec private def findSymbol[T](candidates: List[T], f: T => Symbol): Symbol = {
+ if (candidates.isEmpty) NoSymbol
+ else f(candidates.head) match {
+ case NoSymbol => findSymbol(candidates.tail, f)
+ case sym => sym
+ }
+ }
+ private def hasNewParents(tree: Tree) = {
+ val parents = tree.symbol.info.parents
+ val prev = enteringPrevPhase(tree.symbol.info.parents)
+ (parents != prev) && {
+ debuglog(s"$tree parents changed from: $prev to: $parents")
+ true
+ }
+ }
+
+ // If we replace `isBoundedGeneric` with (tp <:< AnyRefTpe),
// then pos/spec-List.scala fails - why? Does this kind of check fail
// for similar reasons? Does `sym.isAbstractType` make a difference?
private def isSpecializedAnyRefSubtype(tp: Type, sym: Symbol) = {
specializedOn(sym).exists(s => !isPrimitiveValueClass(s)) &&
!isPrimitiveValueClass(tp.typeSymbol) &&
isBoundedGeneric(tp)
- //(tp <:< AnyRefClass.tpe)
+ //(tp <:< AnyRefTpe)
}
object TypeEnv {
@@ -147,7 +159,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
def includes(t1: TypeEnv, t2: TypeEnv) = t1 forall {
case (sym, tpe) =>
t2 get sym exists { t2tp =>
- (tpe == t2tp) || !(isPrimitiveValueType(tpe) || isPrimitiveValueType(t2tp)) // u.t.b. (t2tp <:< AnyRefClass.tpe)
+ (tpe == t2tp) || !(isPrimitiveValueType(tpe) || isPrimitiveValueType(t2tp)) // u.t.b. (t2tp <:< AnyRefTpe)
}
}
@@ -163,22 +175,15 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
env forall { case (tvar, tpe) =>
tvar.isSpecialized && (concreteTypes(tvar) contains tpe) && {
(sym.typeParams contains tvar) ||
- (sym.owner != RootClass && (sym.owner.typeParams contains tvar))
+ (sym.owner != rootMirror.RootClass && (sym.owner.typeParams contains tvar))
}
}
}
}
- /** Returns the generic class that was specialized to 'sClass', or
- * 'sClass' itself if sClass is not a specialized subclass.
- */
- def genericClass(sClass: Symbol): Symbol =
- if (sClass.isSpecialized) sClass.superClass
- else sClass
-
case class Overload(sym: Symbol, env: TypeEnv) {
override def toString = "specialized overload " + sym + " in " + env
- def matchesSym(other: Symbol) = sym.tpe =:= other.tpe
+ def matchesSym(sym1: Symbol) = sym.info =:= sym1.info
def matchesEnv(env1: TypeEnv) = TypeEnv.includes(env, env1)
}
private def newOverload(method: Symbol, specializedMethod: Symbol, env: TypeEnv) = {
@@ -207,8 +212,6 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
* type bounds of other @specialized type parameters (and not in its result type).
*/
def degenerate = false
-
- def isAccessor = false
}
/** Symbol is a special overloaded method of 'original', in the environment env. */
@@ -226,11 +229,14 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
def target = t
}
- /** Symbol is a specialized accessor for the `target` field. */
- case class SpecializedAccessor(target: Symbol) extends SpecializedInfo {
- override def isAccessor = true
+ /** Symbol is a special overload of the super accessor. */
+ case class SpecialSuperAccessor(t: Symbol) extends SpecializedInfo {
+ def target = t
}
+ /** Symbol is a specialized accessor for the `target` field. */
+ case class SpecializedAccessor(target: Symbol) extends SpecializedInfo { }
+
/** Symbol is a specialized method whose body should be the target's method body. */
case class Implementation(target: Symbol) extends SpecializedInfo
@@ -268,9 +274,6 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
def specializedParams(sym: Symbol): List[Symbol] =
sym.info.typeParams filter (_.isSpecialized)
- def splitParams(tps: List[Symbol]) =
- tps partition (_.isSpecialized)
-
/** Given an original class symbol and a list of types its type parameters are instantiated at
* returns a list of type parameters that should remain in the TypeRef when instantiating a
* specialized type.
@@ -286,7 +289,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
// when searching for a specialized class, take care to map all
// type parameters that are subtypes of AnyRef to AnyRef
val args1 = map2(args, sym.info.typeParams)((tp, orig) =>
- if (isSpecializedAnyRefSubtype(tp, orig)) AnyRefClass.tpe
+ if (isSpecializedAnyRefSubtype(tp, orig)) AnyRefTpe
else tp
)
specializedClass.get((sym, TypeEnv.fromSpecialization(sym, args1))) match {
@@ -315,20 +318,20 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
/** Specialize name for the two lists of types. The first one denotes
* specialization on method type parameters, the second on outer environment.
*/
- private def specializedName(name: Name, types1: List[Type], types2: List[Type]): TermName = {
- if (nme.INITIALIZER == name || (types1.isEmpty && types2.isEmpty))
- name
+ private def specializedName(name: Name, types1: List[Type], types2: List[Type]): TermName = (
+ if (name == nme.CONSTRUCTOR || (types1.isEmpty && types2.isEmpty))
+ name.toTermName
else if (nme.isSetterName(name))
- nme.getterToSetter(specializedName(nme.setterToGetter(name), types1, types2))
+ specializedName(name.getterName, types1, types2).setterName
else if (nme.isLocalName(name))
- nme.getterToLocal(specializedName(nme.localToGetter(name), types1, types2))
+ specializedName(name.getterName, types1, types2).localName
else {
val (base, cs, ms) = nme.splitSpecializedName(name)
newTermName(base.toString + "$"
- + "m" + ms + types1.map(t => definitions.abbrvTag(t.typeSymbol)).mkString("", "", "")
- + "c" + cs + types2.map(t => definitions.abbrvTag(t.typeSymbol)).mkString("", "", "$sp"))
+ + "m" + ms + types1.map(t => abbrvTag(t.typeSymbol)).mkString("", "", "")
+ + "c" + cs + types2.map(t => abbrvTag(t.typeSymbol)).mkString("", "", "$sp"))
}
- }
+ )
lazy val specializableTypes = ScalaValueClasses map (_.tpe) sorted
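For a concrete sense of the mangling: the Int-to-Int specialization of Function1.apply is the member apply$mcII$sp, where the empty "m" part encodes the method's own specialized type parameters and "cII" the class's (argument and result both Int). A small assumed usage:

object SpecializedNameDemo {
  val inc: Int => Int = _ + 1
  def two: Int = inc(1) + 1   // inc(1) dispatches to apply$mcII$sp, avoiding boxing
}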
@@ -352,7 +355,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
specializedOn(sym) map (s => specializesClass(s).tpe) sorted
if (isBoundedGeneric(sym.tpe) && (types contains AnyRefClass))
- reporter.warning(sym.pos, sym + " is always a subtype of " + AnyRefClass.tpe + ".")
+ reporter.warning(sym.pos, sym + " is always a subtype of " + AnyRefTpe + ".")
types
}
@@ -372,7 +375,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
// zip the keys with each permutation to create a TypeEnv.
// If we don't exclude the "all AnyRef" specialization, we will
// incur duplicate members and crash during mixin.
- loop(keys map concreteTypes) filterNot (_ forall (_ <:< AnyRefClass.tpe)) map (xss => Map(keys zip xss: _*))
+ loop(keys map concreteTypes) filterNot (_ forall (_ <:< AnyRefTpe)) map (xss => Map(keys zip xss: _*))
}
/** Does the given 'sym' need to be specialized in the environment 'env'?
@@ -397,11 +400,16 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
case _ => false
})
def specializedTypeVars(tpes: List[Type]): immutable.Set[Symbol] = {
- val buf = Set.newBuilder[Symbol]
- tpes foreach (tp => buf ++= specializedTypeVars(tp))
- buf.result
+ @tailrec def loop(result: immutable.Set[Symbol], xs: List[Type]): immutable.Set[Symbol] = {
+ if (xs.isEmpty) result
+ else loop(result ++ specializedTypeVars(xs.head), xs.tail)
+ }
+ loop(immutable.Set.empty, tpes)
}
- def specializedTypeVars(sym: Symbol): immutable.Set[Symbol] = beforeTyper(specializedTypeVars(sym.info))
+ def specializedTypeVars(sym: Symbol): immutable.Set[Symbol] = (
+ if (neverHasTypeParameters(sym)) immutable.Set.empty
+ else enteringTyper(specializedTypeVars(sym.info))
+ )
/** Return the set of @specialized type variables mentioned by the given type.
* It only counts type variables that appear:
@@ -412,7 +420,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
def specializedTypeVars(tpe: Type): immutable.Set[Symbol] = tpe match {
case TypeRef(pre, sym, args) =>
if (sym.isAliasType)
- specializedTypeVars(tpe.normalize)
+ specializedTypeVars(tpe.dealiasWiden)
else if (sym.isTypeParameter && sym.isSpecialized || (sym.isTypeSkolem && sym.deSkolemize.isSpecialized))
Set(sym)
else if (sym == ArrayClass)
@@ -430,7 +438,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
case AnnotatedType(_, tp, _) => specializedTypeVars(tp)
case TypeBounds(lo, hi) => specializedTypeVars(lo :: hi :: Nil)
case RefinedType(parents, _) => parents flatMap specializedTypeVars toSet
- case _ => Set()
+ case _ => immutable.Set.empty
}
/** Returns the type parameter in the specialized class `sClass` that corresponds to type parameter
@@ -441,7 +449,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
sClassMap.getOrElseUpdate(tparam,
tparam.cloneSymbol(sClass, tparam.flags, tparam.name append tpnme.SPECIALIZED_SUFFIX)
- modifyInfo (info => TypeBounds(info.bounds.lo, AnyRefClass.tpe))
+ modifyInfo (info => TypeBounds(info.bounds.lo, AnyRefTpe))
).tpe
}
@@ -475,7 +483,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
foreach2(syms, cloned) { (orig, cln) =>
cln.removeAnnotation(SpecializedClass)
if (env.contains(orig))
- cln modifyInfo (info => TypeBounds(info.bounds.lo, AnyRefClass.tpe))
+ cln modifyInfo (info => TypeBounds(info.bounds.lo, AnyRefTpe))
}
cloned map (_ substInfo (syms, cloned))
}
@@ -484,7 +492,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
* the specialized symbol (class (specialization) or member (normalization)), leaves everything else as-is.
*/
private def mapAnyRefsInSpecSym(env: TypeEnv, origsym: Symbol, specsym: Symbol): TypeEnv = env map {
- case (sym, tp) if tp == AnyRefClass.tpe && sym.owner == origsym => (sym, typeParamSubAnyRef(sym, specsym))
+ case (sym, AnyRefTpe) if sym.owner == origsym => (sym, typeParamSubAnyRef(sym, specsym))
case x => x
}
@@ -492,8 +500,8 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
* the original class, leaves everything else as-is.
*/
private def mapAnyRefsInOrigCls(env: TypeEnv, origcls: Symbol): TypeEnv = env map {
- case (sym, tp) if (tp == AnyRefClass.tpe) && sym.owner == origcls => (sym, sym.tpe)
- case x => x
+ case (sym, AnyRefTpe) if sym.owner == origcls => (sym, sym.tpe)
+ case x => x
}
/** Specialize 'clazz', in the environment `outerEnv`. The outer
@@ -506,9 +514,9 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
*/
def specializeClass(clazz: Symbol, outerEnv: TypeEnv): List[Symbol] = {
def specializedClass(env0: TypeEnv, normMembers: List[Symbol]): Symbol = {
- /** It gets hard to follow all the clazz and cls, and specializedClass
- * was both already used for a map and mucho long. So "sClass" is the
- * specialized subclass of "clazz" throughout this file.
+ /* It gets hard to follow all the clazz and cls, and specializedClass
+ * was both already used for a map and mucho long. So "sClass" is the
+ * specialized subclass of "clazz" throughout this file.
*/
// SI-5545: Eliminate classes with the same name loaded from the bytecode already present - all we need to do is
@@ -525,7 +533,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
def cloneInSpecializedClass(member: Symbol, flagFn: Long => Long, newName: Name = null) =
member.cloneSymbol(sClass, flagFn(member.flags | SPECIALIZED), newName)
- sClass.sourceFile = clazz.sourceFile
+ sClass.associatedFile = clazz.sourceFile
currentRun.symSource(sClass) = clazz.sourceFile // needed later on by mixin
val env = mapAnyRefsInSpecSym(env0, clazz, sClass)
@@ -537,7 +545,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
var newClassTParams: List[Symbol] = Nil // unspecialized type parameters of 'specializedClass' (cloned)
// has to be a val in order to be computed early. It is later called
- // within 'atPhase(next)', which would lead to an infinite cycle otherwise
+ // within 'enteringPhase(next)', which would lead to an infinite cycle otherwise
val specializedInfoType: Type = {
oldClassTParams = survivingParams(clazz.info.typeParams, env)
newClassTParams = produceTypeParameters(oldClassTParams, sClass, env) map subst(env)
@@ -546,18 +554,18 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
def applyContext(tpe: Type) =
subst(env, tpe).instantiateTypeParams(oldClassTParams, newClassTParams map (_.tpe))
- /** Return a list of specialized parents to be re-mixed in a specialized subclass.
- * Assuming env = [T -> Int] and
- * class Integral[@specialized T] extends Numeric[T]
- * and Numeric[U] is specialized on U, this produces List(Numeric$mcI).
+ /* Return a list of specialized parents to be re-mixed in a specialized subclass.
+ * Assuming env = [T -> Int] and
+ * class Integral[@specialized T] extends Numeric[T]
+ * and Numeric[U] is specialized on U, this produces List(Numeric$mcI).
*
- * so that class Integral$mci extends Integral[Int] with Numeric$mcI.
+ * so that class Integral$mci extends Integral[Int] with Numeric$mcI.
*/
def specializedParents(parents: List[Type]): List[Type] = {
var res: List[Type] = Nil
// log(specializedClass + ": seeking specialized parents of class with parents: " + parents.map(_.typeSymbol))
for (p <- parents) {
- val stp = afterSpecialize(specializedType(p))
+ val stp = exitingSpecialize(specializedType(p))
if (stp != p)
if (p.typeSymbol.isTrait) res ::= stp
else if (currentRun.compiles(clazz))
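A hedged sketch of the situation the comment above describes, using its own names:

trait Numeric[@specialized U]  { def plus(x: U, y: U): U }
trait Integral[@specialized T] extends Numeric[T]
// With env = [T -> Int] the specialized subclass corresponds to something like
//   class Integral$mcI extends Integral[Int] with Numeric$mcI
// i.e. the Int specialization of the parent trait is re-mixed in directly.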
@@ -567,7 +575,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
res
}
- var parents = List(applyContext(beforeTyper(clazz.tpe)))
+ var parents = List(applyContext(enteringTyper(clazz.tpe_*)))
// log("!!! Parents: " + parents + ", sym: " + parents.map(_.typeSymbol))
if (parents.head.typeSymbol.isTrait)
parents = parents.head.parents.head :: parents
@@ -589,13 +597,13 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
GenPolyType(newClassTParams, ClassInfoType(parents ::: extraSpecializedMixins, decls1, sClass))
}
- afterSpecialize(sClass setInfo specializedInfoType)
+ exitingSpecialize(sClass setInfo specializedInfoType)
val fullEnv = outerEnv ++ env
- /** Enter 'sym' in the scope of the current specialized class. It's type is
- * mapped through the active environment, binding type variables to concrete
- * types. The existing typeEnv for `sym` is composed with the current active
- * environment
+ /* Enter 'sym' in the scope of the current specialized class. Its type is
+ * mapped through the active environment, binding type variables to concrete
+ * types. The existing typeEnv for `sym` is composed with the current active
+ * environment
*/
def enterMember(sym: Symbol): Symbol = {
typeEnv(sym) = fullEnv ++ typeEnv(sym) // append the full environment
@@ -608,18 +616,18 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
decls1 enter subst(fullEnv)(sym)
}
- /** Create and enter in scope an overridden symbol m1 for `m` that forwards
- * to `om`. `om` is a fresh, special overload of m1 that is an implementation
- * of `m`. For example, for a
+ /* Create and enter in scope an overridden symbol m1 for `m` that forwards
+ * to `om`. `om` is a fresh, special overload of m1 that is an implementation
+ * of `m`. For example, for a
*
- * class Foo[@specialized A] {
- * def m(x: A) = <body> // m
- * }
- * , for class Foo$I extends Foo[Int], this method enters two new symbols in
- * the scope of Foo$I:
+ * class Foo[@specialized A] {
+ * def m(x: A) = <body> // m
+ * }
+ * , for class Foo$I extends Foo[Int], this method enters two new symbols in
+ * the scope of Foo$I:
*
- * def m(x: Int) = m$I(x) // m1
- * def m$I(x: Int) = <body>/adapted to env {A -> Int} // om
+ * def m(x: Int) = m$I(x) // m1
+ * def m$I(x: Int) = <body>/adapted to env {A -> Int} // om
*/
def forwardToOverload(m: Symbol): Symbol = {
val specMember = enterMember(cloneInSpecializedClass(m, f => (f | OVERRIDE) & ~(DEFERRED | CASEACCESSOR)))
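A runnable counterpart (assumed shape) of the Foo example in the comment above:

class Foo[@specialized(Int) A] {
  def m(x: A): A = x
}
// In the generated Foo$mcI$sp, m(x: Int) forwards to the fresh overload
// m$mcI$sp(x: Int), which carries the body adapted to env {A -> Int}, so
// (new Foo[Int]).m(1) can avoid boxing argument and result.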
@@ -683,7 +691,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
def mkAccessor(field: Symbol, name: Name) = {
val newFlags = (SPECIALIZED | m.getter(clazz).flags) & ~(LOCAL | CASEACCESSOR | PARAMACCESSOR)
// we rely on the super class to initialize param accessors
- val sym = sClass.newMethod(name, field.pos, newFlags)
+ val sym = sClass.newMethod(name.toTermName, field.pos, newFlags)
info(sym) = SpecializedAccessor(field)
sym
}
@@ -702,7 +710,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
// debuglog("m: " + m + " isLocal: " + nme.isLocalName(m.name) + " specVal: " + specVal.name + " isLocal: " + nme.isLocalName(specVal.name))
if (nme.isLocalName(m.name)) {
- val specGetter = mkAccessor(specVal, nme.localToGetter(specVal.name)) setInfo MethodType(Nil, specVal.info)
+ val specGetter = mkAccessor(specVal, specVal.getterName) setInfo MethodType(Nil, specVal.info)
val origGetter = overrideIn(sClass, m.getter(clazz))
info(origGetter) = Forward(specGetter)
enterMember(specGetter)
@@ -717,10 +725,10 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
}
if (specVal.isVariable && m.setter(clazz) != NoSymbol) {
- val specSetter = mkAccessor(specVal, nme.getterToSetter(specGetter.name))
+ val specSetter = mkAccessor(specVal, specGetter.setterName)
.resetFlag(STABLE)
specSetter.setInfo(MethodType(specSetter.newSyntheticValueParams(List(specVal.info)),
- UnitClass.tpe))
+ UnitTpe))
val origSetter = overrideIn(sClass, m.setter(clazz))
info(origSetter) = Forward(specSetter)
enterMember(specSetter)
@@ -777,7 +785,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
if (existing != NoSymbol)
clazz.owner.info.decls.unlink(existing)
- afterSpecialize(clazz.owner.info.decls enter spc) //!!! assumes fully specialized classes
+ exitingSpecialize(clazz.owner.info.decls enter spc) //!!! assumes fully specialized classes
}
if (subclasses.nonEmpty) clazz.resetFlag(FINAL)
cleanAnyRefSpecCache(clazz, decls1)
@@ -795,7 +803,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
*/
private def normalizeMember(owner: Symbol, sym: Symbol, outerEnv: TypeEnv): List[Symbol] = {
sym :: (
- if (!sym.isMethod || beforeTyper(sym.typeParams.isEmpty)) Nil
+ if (!sym.isMethod || enteringTyper(sym.typeParams.isEmpty)) Nil
else if (sym.hasDefault) {
/* Specializing default getters is useless, also see SI-7329 . */
sym.resetFlag(SPECIALIZED)
@@ -878,6 +886,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
}
val specMember = subst(outerEnv)(specializedOverload(owner, sym, spec))
+ owner.info.decls.enter(specMember)
typeEnv(specMember) = typeEnv(sym) ++ outerEnv ++ spec
wasSpecializedForTypeVars(specMember) ++= spec collect { case (s, tp) if s.tpe == tp => s }
@@ -906,10 +915,11 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
}
/** Return the specialized overload of `m`, in the given environment. */
- private def specializedOverload(owner: Symbol, sym: Symbol, env: TypeEnv): Symbol = {
+ private def specializedOverload(owner: Symbol, sym: Symbol, env: TypeEnv, nameSymbol: Symbol = NoSymbol): Symbol = {
val newFlags = (sym.flags | SPECIALIZED) & ~(DEFERRED | CASEACCESSOR)
// this method properly duplicates the symbol's info
- ( sym.cloneSymbol(owner, newFlags, newName = specializedName(sym, env))
+ val specname = specializedName(nameSymbol orElse sym, env)
+ ( sym.cloneSymbol(owner, newFlags, newName = specname)
modifyInfo (info => subst(env, info.asSeenFrom(owner.thisType, sym.owner)))
)
}
@@ -925,13 +935,13 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
* this method will return List('apply$mcII$sp')
*/
private def specialOverrides(clazz: Symbol) = logResultIf[List[Symbol]]("specialized overrides in " + clazz, _.nonEmpty) {
- /** Return the overridden symbol in syms that needs a specialized overriding symbol,
- * together with its specialization environment. The overridden symbol may not be
- * the closest to 'overriding', in a given hierarchy.
+ /* Return the overridden symbol in syms that needs a specialized overriding symbol,
+ * together with its specialization environment. The overridden symbol may not be
+ * the closest to 'overriding', in a given hierarchy.
*
- * An method m needs a special override if
- * * m overrides a method whose type contains specialized type variables
- * * there is a valid specialization environment that maps the overridden method type to m's type.
+ * A method m needs a special override if
+ * * m overrides a method whose type contains specialized type variables
+ * * there is a valid specialization environment that maps the overridden method type to m's type.
*/
def needsSpecialOverride(overriding: Symbol): (Symbol, TypeEnv) = {
def checkOverriddenTParams(overridden: Symbol) {
@@ -956,7 +966,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
checkOverriddenTParams(overridden)
val env = unify(overridden.info, overriding.info, emptyEnv, false, true)
- def atNext = afterSpecialize(overridden.owner.info.decl(specializedName(overridden, env)))
+ def atNext = exitingSpecialize(overridden.owner.info.decl(specializedName(overridden, env)))
if (TypeEnv.restrict(env, stvars).nonEmpty && TypeEnv.isValid(env, overridden) && atNext != NoSymbol) {
debuglog(" " + pp(env) + " found " + atNext)
@@ -969,18 +979,36 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
}
(clazz.info.decls flatMap { overriding =>
needsSpecialOverride(overriding) match {
- case (NoSymbol, _) => None
+ case (NoSymbol, _) =>
+ if (overriding.isSuperAccessor) {
+ val alias = overriding.alias
+ debuglog("checking special overload for super accessor: %s, alias for %s".format(overriding.fullName, alias.fullName))
+ needsSpecialOverride(alias) match {
+ case nope @ (NoSymbol, _) => None
+ case (overridden, env) =>
+ val om = specializedOverload(clazz, overriding, env, overridden)
+ om.setName(nme.superName(om.name))
+ om.asInstanceOf[TermSymbol].setAlias(info(alias).target)
+ om.owner.info.decls.enter(om)
+ info(om) = SpecialSuperAccessor(om)
+ om.makeNotPrivate(om.owner)
+ newOverload(overriding, om, env)
+ Some(om)
+ }
+ } else None
case (overridden, env) =>
val om = specializedOverload(clazz, overridden, env)
+ clazz.info.decls.enter(om)
foreachWithIndex(om.paramss) { (params, i) =>
foreachWithIndex(params) { (param, j) =>
param.name = overriding.paramss(i)(j).name // SI-6555 Retain the parameter names from the subclass.
}
}
debuglog("specialized overload %s for %s in %s: %s".format(om, overriding.name.decode, pp(env), om.info))
+ if (overriding.isAbstractOverride) om.setFlag(ABSOVERRIDE)
typeEnv(om) = env
addConcreteSpecMethod(overriding)
- if (overriding.isDeferred) { // abstract override
+ if (overriding.isDeferred) { // abstract override
debuglog("abstract override " + overriding.fullName + " with specialized " + om.fullName)
info(om) = Forward(overriding)
}
@@ -998,8 +1026,9 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
}
info(overriding) = Forward(om setPos overriding.pos)
}
+
newOverload(overriding, om, env)
- ifDebug(afterSpecialize(assert(
+ ifDebug(exitingSpecialize(assert(
overridden.owner.info.decl(om.name) != NoSymbol,
"Could not find " + om.name + " in " + overridden.owner.info.decls))
)
@@ -1028,7 +1057,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
if (isPrimitiveValueClass(tp2.typeSymbol) || isSpecializedAnyRefSubtype(tp2, sym1))
env + ((sym1, tp2))
else if (isSpecializedAnyRefSubtype(tp2, sym1))
- env + ((sym1, tp2)) // env + ((sym1, AnyRefClass.tpe))
+ env + ((sym1, tp2))
else if (strict)
unifyError(tp1, tp2)
else
@@ -1085,10 +1114,6 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
}
}
- /** Apply type bindings in the given environment `env` to all declarations. */
- private def subst(env: TypeEnv, decls: List[Symbol]): List[Symbol] =
- decls map subst(env)
-
/** Apply the type environment 'env' to the given type. All type
* bindings are supposed to be to primitive types. A type variable
* that is annotated with 'uncheckedVariance' is mapped to the corresponding
@@ -1115,35 +1140,12 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
private def subst(env: TypeEnv)(decl: Symbol): Symbol =
decl modifyInfo (info =>
- if (decl.isConstructor) MethodType(subst(env, info).params, decl.owner.tpe)
+ if (decl.isConstructor) MethodType(subst(env, info).params, decl.owner.tpe_*)
else subst(env, info)
)
- /** Checks if the type parameter symbol is not specialized
- * and is used as type parameters when extending a class with a specialized
- * type parameter.
- * At some point we may remove this restriction.
- *
- * Example:
- *
- * class Base[@specialized T]
- * class Derived[T] extends Base[T] // a non-specialized T is
- * // used as a type param for Base
- * // -> returning true
- */
- private def notSpecializedIn(tsym: Symbol, supertpe: Type) = supertpe match {
- case TypeRef(_, supersym, supertargs) =>
- val tspec = specializedOn(tsym).toSet
- for (supt <- supersym.typeParams) {
- val supspec = specializedOn(supt).toSet
- if (tspec != supspec && tspec.subsetOf(supspec))
- reporter.error(tsym.pos, "Type parameter has to be specialized at least for the same types as in the superclass. Missing types: " + (supspec.diff(tspec)).mkString(", "))
- }
- case _ => //log("nope")
- }
-
private def unspecializableClass(tp: Type) = (
- definitions.isRepeatedParamType(tp) // ???
+ isRepeatedParamType(tp) // ???
|| tp.typeSymbol.isJavaDefined
|| tp.typeSymbol.isPackageClass
)
@@ -1152,12 +1154,12 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
* If it is a 'no-specialization' run, it is applied only to loaded symbols.
*/
override def transformInfo(sym: Symbol, tpe: Type): Type = {
- if (settings.nospecialization.value && currentRun.compiles(sym)) tpe
+ if (settings.nospecialization && currentRun.compiles(sym)) tpe
else tpe.resultType match {
case cinfo @ ClassInfoType(parents, decls, clazz) if !unspecializableClass(cinfo) =>
val tparams = tpe.typeParams
if (tparams.isEmpty)
- afterSpecialize(parents map (_.typeSymbol.info))
+ exitingSpecialize(parents map (_.typeSymbol.info))
val parents1 = parents mapConserve specializedType
if (parents ne parents1) {
@@ -1178,7 +1180,6 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
*
* A conflicting type environment could still be satisfiable.
*/
- def conflicting(env: TypeEnv) = !nonConflicting(env)
def nonConflicting(env: TypeEnv) = env forall { case (tvar, tpe) =>
(subst(env, tvar.info.bounds.lo) <:< tpe) && (tpe <:< subst(env, tvar.info.bounds.hi))
}
@@ -1248,9 +1249,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
class BodyDuplicator(_context: Context) extends super.BodyDuplicator(_context) {
override def castType(tree: Tree, pt: Type): Tree = {
- // log(" expected type: " + pt)
- // log(" tree type: " + tree.tpe)
- tree.tpe = if (tree.tpe != null) fixType(tree.tpe) else null
+ tree modifyType fixType
// log(" tree type: " + tree.tpe)
val ntree = if (tree.tpe != null && !(tree.tpe <:< pt)) {
val casttpe = CastMap(tree.tpe)
@@ -1258,8 +1257,8 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
else if (casttpe <:< CastMap(pt)) gen.mkCast(tree, pt)
else tree
} else tree
- ntree.tpe = null
- ntree
+
+ ntree.clearType()
}
}
@@ -1301,7 +1300,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
if (sym.isPrivate) debuglog(
"seeing private member %s, currentClass: %s, owner: %s, isAccessible: %b, isLocalName: %b".format(
sym, currentClass, sym.owner.enclClass, isAccessible(sym), nme.isLocalName(sym.name))
- )
+ )
if (shouldMakePublic(sym) && !isAccessible(sym)) {
debuglog("changing private flag of " + sym)
sym.makeNotPrivate(sym.owner)
@@ -1386,59 +1385,72 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
def transform1(tree: Tree) = {
val symbol = tree.symbol
-
- /** The specialized symbol of 'tree.symbol' for tree.tpe, if there is one */
- def specSym(qual: Tree): Option[Symbol] = {
+ /* The specialized symbol of 'tree.symbol' for tree.tpe, if there is one */
+ def specSym(qual: Tree): Symbol = {
val env = unify(symbol.tpe, tree.tpe, emptyEnv, false)
- debuglog("[specSym] checking for rerouting: %s with \n\tsym.tpe: %s, \n\ttree.tpe: %s \n\tenv: %s \n\tname: %s"
- .format(tree, symbol.tpe, tree.tpe, env, specializedName(symbol, env)))
- if (!env.isEmpty) { // a method?
- val specCandidates = qual.tpe.member(specializedName(symbol, env))
- val specMember = specCandidates suchThat { s =>
- doesConform(symbol, tree.tpe, qual.tpe.memberType(s), env)
- }
+ def isMatch(member: Symbol) = (
+ doesConform(symbol, tree.tpe, qual.tpe memberType member, env)
+ && TypeEnv.includes(typeEnv(member), env)
+ )
+ if (env.isEmpty) NoSymbol
+ else qual.tpe member specializedName(symbol, env) suchThat isMatch
+ }
- debuglog("[specSym] found: " + specCandidates.tpe + ", instantiated as: " + tree.tpe)
- debuglog("[specSym] found specMember: " + specMember)
- if (specMember ne NoSymbol)
- if (TypeEnv.includes(typeEnv(specMember), env)) Some(specMember)
- else {
- debuglog("wrong environments for specialized member: \n\ttypeEnv(%s) = %s\n\tenv = %s".format(specMember, typeEnv(specMember), env))
- None
- }
- else None
- } else None
+ def matchingSymbolInPrefix(pre: Type, member: Symbol, env: TypeEnv): Symbol = {
+ pre member specializedName(member, env) suchThat (_.tpe matches subst(env, member.tpe))
+ }
+
+ def transformSelect(sel: Select) = {
+ val Select(qual, name) = sel
+ debuglog(s"specializing Select(sym=${symbol.defString}, tree.tpe=${tree.tpe})")
+
+ val qual1 = transform(qual)
+ def copySelect = treeCopy.Select(tree, qual1, name)
+ def newSelect(member: Symbol) = atPos(tree.pos)(Select(qual1, member))
+ def typedOp(member: Symbol) = localTyper typedOperator newSelect(member)
+ def typedTree(member: Symbol) = localTyper typed newSelect(member)
+
+ val ignoreEnv = specializedTypeVars(symbol.info).isEmpty || name == nme.CONSTRUCTOR
+ if (ignoreEnv) overloads(symbol) find (_ matchesSym symbol) match {
+ case Some(Overload(member, _)) => typedOp(member)
+ case _ => copySelect
+ }
+ else {
+ val env = unify(symbol.tpe, tree.tpe, emptyEnv, false)
+ overloads(symbol) find (_ matchesEnv env) match {
+ case Some(Overload(member, _)) => typedOp(member)
+ case _ =>
+ matchingSymbolInPrefix(qual1.tpe, symbol, env) match {
+ case NoSymbol => copySelect
+ case member if member.isMethod => typedOp(member)
+ case member => typedTree(member)
+ }
+ }
+ }
}
curTree = tree
tree match {
case Apply(Select(New(tpt), nme.CONSTRUCTOR), args) =>
def transformNew = {
- debuglog("Attempting to specialize new %s(%s)".format(tpt, args.mkString(", ")))
- val found = findSpec(tpt.tpe)
- if (found.typeSymbol ne tpt.tpe.typeSymbol) {
- // the ctor can be specialized
- debuglog("** instantiated specialized type: " + found)
- reportError {
- localTyper.typedPos(tree.pos)(New(found, transformTrees(args): _*))
- } {
- _ => super.transform(tree)
+ debuglog("Attempting to specialize new %s(%s)".format(tpt, args.mkString(", ")))
+ val found = specializedType(tpt.tpe)
+ if (found.typeSymbol ne tpt.tpe.typeSymbol) { // the ctor can be specialized
+ val inst = New(found, transformTrees(args): _*)
+ reportError(localTyper.typedPos(tree.pos)(inst))(_ => super.transform(tree))
}
- } else super.transform(tree)
+ else
+ super.transform(tree)
}
transformNew
- case Apply(sel @ Select(sup @ Super(qual, name), name1), args)
- if (sup.symbol.info.parents != beforePrevPhase(sup.symbol.info.parents)) =>
+ case Apply(sel @ Select(sup @ Super(qual, name), name1), args) if hasNewParents(sup) =>
def transformSuperApply = {
-
- def parents = sup.symbol.info.parents
- debuglog(tree + " parents changed from: " + beforePrevPhase(parents) + " to: " + parents)
-
- val res = localTyper.typed(
- Apply(Select(Super(qual, name) setPos sup.pos, name1) setPos sel.pos, transformTrees(args)) setPos tree.pos)
- debuglog("retyping call to super, from: " + symbol + " to " + res.symbol)
- res
+ val sup1 = Super(qual, name) setPos sup.pos
+ val tree1 = Apply(Select(sup1, name1) setPos sel.pos, transformTrees(args))
+ val res = localTyper.typedPos(tree.pos)(tree1)
+ debuglog(s"retyping call to super, from: $symbol to ${res.symbol}")
+ res
}
transformSuperApply
@@ -1449,7 +1461,10 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
val qual1 = transform(qual)
// log(">>> TypeApply: " + tree + ", qual1: " + qual1)
specSym(qual1) match {
- case Some(specMember) =>
+ case NoSymbol =>
+ // See pos/exponential-spec.scala - can't call transform on the whole tree again.
+ treeCopy.TypeApply(tree, treeCopy.Select(sel, qual1, name), transformTrees(targs))
+ case specMember =>
debuglog("found " + specMember.fullName)
ifDebug(assert(symbol.info.typeParams.length == targs.length, symbol.info.typeParams + " / " + targs))
@@ -1459,7 +1474,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
}
// See SI-5583. Don't know why it happens now if it didn't before.
if (specMember.info.typeParams.isEmpty && residualTargs.nonEmpty) {
- log("!!! Type args to be applied, but symbol says no parameters: " + ((specMember.defString, residualTargs)))
+ devWarning("Type args to be applied, but symbol says no parameters: " + ((specMember.defString, residualTargs)))
localTyper.typed(sel)
}
else {
@@ -1471,11 +1486,6 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
debuglog("rewrote " + tree + " to " + tree1)
localTyper.typedOperator(atPos(tree.pos)(tree1)) // being polymorphic, it must be a method
}
-
- case None =>
- treeCopy.TypeApply(tree, treeCopy.Select(sel, qual1, name), super.transformTrees(targs))
- // See pos/exponential-spec.scala - can't call transform on the whole tree again.
- // super.transform(tree)
}
}
transformTypeApply
@@ -1485,36 +1495,8 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
debuglog(pos.source.file.name+":"+pos.line+": not specializing call to super inside illegal specialized inheritance class.\n" + pos.lineContent)
tree
- case Select(qual, name) if name != nme.CONSTRUCTOR && specializedTypeVars(symbol.info).nonEmpty =>
- debuglog("specializing Select %s [tree.tpe: %s]".format(symbol.defString, tree.tpe))
- val env = unify(symbol.tpe, tree.tpe, emptyEnv, false)
- if (env.isEmpty) super.transform(tree)
- else {
- val qual1 = transform(qual)
- def reselect(member: Symbol) = {
- val newSelect = atPos(tree.pos)(Select(qual1, member))
- if (member.isMethod) localTyper typedOperator newSelect
- else localTyper typed newSelect
- }
- overloads(symbol) find (_ matchesEnv env) match {
- case Some(Overload(member, _)) => reselect(member)
- case _ =>
- val specMember = qual1.tpe.member(specializedName(symbol, env)).suchThat(_.tpe matches subst(env, symbol.tpe))
- if (specMember ne NoSymbol)
- reselect(specMember)
- else
- treeCopy.Select(tree, qual1, name)
- }
- }
- case Select(qual, _) =>
- overloads(symbol) find (_ matchesSym symbol) match {
- case Some(Overload(member, _)) =>
- val newTree = Select(transform(qual), member)
- debuglog(s"** routing $tree to ${member.fullName} tree: $newTree")
- localTyper.typedOperator(atPos(tree.pos)(newTree))
- case None =>
- super.transform(tree)
- }
+ case sel @ Select(_, _) =>
+ transformSelect(sel)
case PackageDef(pid, stats) =>
tree.symbol.info // make sure specializations have been performed
@@ -1539,47 +1521,37 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
transformTemplate
case ddef @ DefDef(_, _, _, vparamss, _, _) if info.isDefinedAt(symbol) =>
- def transformDefDef = {
- // log("--> method: " + ddef + " in " + ddef.symbol.owner + ", " + info(symbol))
- def reportTypeError(body: =>Tree) = reportError(body)(_ => ddef)
-
+ def transformDefDef = {
if (symbol.isConstructor) {
-
- val t = atOwner(symbol)(forwardCtorCall(tree.pos, gen.mkSuperSelect, vparamss, symbol.owner))
-
+ val t = atOwner(symbol)(forwardCtorCall(tree.pos, gen.mkSuperInitCall, vparamss, symbol.owner))
if (symbol.isPrimaryConstructor)
- localTyper.typedPos(symbol.pos)(deriveDefDef(tree)(_ => Block(List(t), Literal(Constant()))))
+ localTyper.typedPos(symbol.pos)(deriveDefDef(tree)(_ => Block(List(t), Literal(Constant(())))))
else // duplicate the original constructor
- reportTypeError(duplicateBody(ddef, info(symbol).target))
+ reportError(duplicateBody(ddef, info(symbol).target))(_ => ddef)
}
else info(symbol) match {
case Implementation(target) =>
assert(body.isDefinedAt(target), "sym: " + symbol.fullName + " target: " + target.fullName)
// we have an rhs, specialize it
- val tree1 = reportTypeError {
- duplicateBody(ddef, target)
- }
+ val tree1 = reportError(duplicateBody(ddef, target))(_ => ddef)
debuglog("implementation: " + tree1)
deriveDefDef(tree1)(transform)
case NormalizedMember(target) =>
- val constraints = satisfiabilityConstraints(typeEnv(symbol))
- log("constraints: " + constraints)
- if (target.isDeferred || constraints == None) {
- deriveDefDef(tree)(_ => localTyper typed gen.mkSysErrorCall("Fatal error in code generation: this should never be called."))
- } else {
- // we have an rhs, specialize it
- val tree1 = reportTypeError {
- duplicateBody(ddef, target, constraints.get)
- }
- debuglog("implementation: " + tree1)
- deriveDefDef(tree1)(transform)
+ logResult("constraints")(satisfiabilityConstraints(typeEnv(symbol))) match {
+ case Some(constraint) if !target.isDeferred =>
+ // we have an rhs, specialize it
+ val tree1 = reportError(duplicateBody(ddef, target, constraint))(_ => ddef)
+ debuglog("implementation: " + tree1)
+ deriveDefDef(tree1)(transform)
+ case _ =>
+ deriveDefDef(tree)(_ => localTyper typed gen.mkSysErrorCall("Fatal error in code generation: this should never be called."))
}
case SpecialOverride(target) =>
assert(body.isDefinedAt(target), "sym: " + symbol.fullName + " target: " + target.fullName)
//debuglog("moving implementation, body of target " + target + ": " + body(target))
- debuglog("%s is param accessor? %b".format(ddef.symbol, ddef.symbol.isParamAccessor))
+ log("%s is param accessor? %b".format(ddef.symbol, ddef.symbol.isParamAccessor))
// we have an rhs, specialize it
val tree1 = addBody(ddef, target)
(new ChangeOwnerTraverser(target, tree1.symbol))(tree1.rhs)
@@ -1627,6 +1599,10 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
case Abstract(targ) =>
debuglog("abstract: " + targ)
localTyper.typed(deriveDefDef(tree)(rhs => rhs))
+
+ case SpecialSuperAccessor(targ) =>
+ debuglog("special super accessor: " + targ + " for " + tree)
+ localTyper.typed(deriveDefDef(tree)(rhs => rhs))
}
}
transformDefDef
@@ -1648,7 +1624,6 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
deriveValDef(newValDef)(transform)
}
transformValDef
-
case _ =>
super.transform(tree)
}
@@ -1685,7 +1660,6 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
val symbol = tree.symbol
debuglog("specializing body of" + symbol.defString)
val DefDef(_, _, tparams, vparams :: Nil, tpt, _) = tree
-// val (_, origtparams) = splitParams(source.typeParams)
val env = typeEnv(symbol)
val boundTvars = env.keySet
val origtparams = source.typeParams.filter(tparam => !boundTvars(tparam) || !isPrimitiveValueType(env(tparam)))
@@ -1712,8 +1686,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
false) // don't make private fields public
val newBody = symSubstituter(body(source).duplicate)
- tpt.tpe = tpt.tpe.substSym(oldtparams, newtparams)
-
+ tpt modifyType (_.substSym(oldtparams, newtparams))
copyDefDef(tree)(vparamss = List(newSyms map ValDef), rhs = newBody)
}
@@ -1766,10 +1739,10 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
}
if (hasSpecializedFields) {
val isSpecializedInstance = sClass :: sClass.parentSymbols exists (_ hasFlag SPECIALIZED)
- val sym = sClass.newMethod(nme.SPECIALIZED_INSTANCE, sClass.pos) setInfoAndEnter MethodType(Nil, BooleanClass.tpe)
+ val sym = sClass.newMethod(nme.SPECIALIZED_INSTANCE, sClass.pos) setInfoAndEnter MethodType(Nil, BooleanTpe)
mbrs += atPos(sym.pos) {
- DefDef(sym, Literal(Constant(isSpecializedInstance)).setType(BooleanClass.tpe)).setType(NoType)
+ DefDef(sym, Literal(Constant(isSpecializedInstance)).setType(BooleanTpe)).setType(NoType)
}
}
mbrs.toList
@@ -1816,16 +1789,17 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
* }}
*/
private def forwardCtorCall(pos: scala.reflect.internal.util.Position, receiver: Tree, paramss: List[List[ValDef]], clazz: Symbol): Tree = {
+ log(s"forwardCtorCall($pos, $receiver, $paramss, $clazz)")
- /** A constructor parameter `f` initializes a specialized field
- * iff:
- * - it is specialized itself
- * - there is a getter for the original (non-specialized) field in the same class
- * - there is a getter for the specialized field in the same class
+ /* A constructor parameter `f` initializes a specialized field
+ * iff:
+ * - it is specialized itself
+ * - there is a getter for the original (non-specialized) field in the same class
+ * - there is a getter for the specialized field in the same class
*/
def initializesSpecializedField(f: Symbol) = (
(f.name endsWith nme.SPECIALIZED_SUFFIX)
- && clazz.info.member(nme.originalName(f.name)).isPublic
+ && clazz.info.member(f.unexpandedName).isPublic
&& clazz.info.decl(f.name).suchThat(_.isGetter) != NoSymbol
)
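A small assumed example of a constructor parameter that initializes a specialized field in the sense of the predicate above:

class Box[@specialized(Int) A](val value: A)
// In Box$mcI$sp the parameter backs a field whose name ends in the $sp suffix
// (something like value$mcI$sp); it only counts as initializing a specialized
// field when the class also has the original public getter `value` and a getter
// for the specialized field, which are the two checks performed above.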
@@ -1852,16 +1826,12 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
//! TODO: make sure the param types are seen from the right prefix
map2(fun.info.paramTypes, vparams)((tp, arg) => gen.maybeMkAsInstanceOf(Ident(arg), tp, arg.tpe))
)
- private def findSpec(tp: Type): Type = tp match {
- case TypeRef(pre, sym, _ :: _) => specializedType(tp)
- case _ => tp
- }
class SpecializationTransformer(unit: CompilationUnit) extends Transformer {
informProgress("specializing " + unit)
override def transform(tree: Tree) = {
- val resultTree = if (settings.nospecialization.value) tree
- else afterSpecialize(specializeCalls(unit).transform(tree))
+ val resultTree = if (settings.nospecialization) tree
+ else exitingSpecialize(specializeCalls(unit).transform(tree))
// Remove the final modifier and @inline annotation from anything in the
// original class (since it's being overridden in at least one subclass).
@@ -1879,13 +1849,5 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
}
resultTree
- }
- }
-
- def printSpecStats() {
- println(" concreteSpecMembers: %7d".format(concreteSpecMethods.size))
- println(" overloads: %7d".format(overloads.size))
- println(" typeEnv: %7d".format(typeEnv.size))
- println(" info: %7d".format(info.size))
- }
+ } }
}
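For readers skimming this SpecializeTypes patch: the hunks above concern the members the specialization phase synthesizes (the Boolean specInstance$ test, constructor forwarding into specialized fields). A minimal user-level sketch of what triggers that machinery, assuming only the standard @specialized annotation; the class below and the generated names mentioned in its comments are illustrative, not taken from this patch:

class Box[@specialized(Int, Double) T](val value: T) {
  // For Int and Double the compiler emits specialized subclasses (conventionally
  // named along the lines of Box$mcI$sp) whose fields hold unboxed primitives;
  // the synthesized members handled by the code above live in those subclasses.
  def map[@specialized(Int, Double) U](f: T => U): Box[U] = new Box(f(value))
}

object BoxDemo {
  def main(args: Array[String]): Unit = {
    val b = new Box(42)          // resolves to the Int specialization, no boxing of 42
    println(b.map(_ + 1).value)  // 43
  }
}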
diff --git a/src/compiler/scala/tools/nsc/transform/TailCalls.scala b/src/compiler/scala/tools/nsc/transform/TailCalls.scala
index 938499261e..6f422fcc90 100644
--- a/src/compiler/scala/tools/nsc/transform/TailCalls.scala
+++ b/src/compiler/scala/tools/nsc/transform/TailCalls.scala
@@ -3,7 +3,8 @@
* @author Iulian Dragos
*/
-package scala.tools.nsc
+package scala
+package tools.nsc
package transform
import symtab.Flags
@@ -17,7 +18,7 @@ import Flags.SYNTHETIC
abstract class TailCalls extends Transform {
import global._ // the global environment
import definitions._ // standard classes and methods
- import typer.{ typed, typedPos } // methods to type trees
+ import typer.typedPos // methods to type trees
val phaseName: String = "tailcalls"
@@ -31,7 +32,7 @@ abstract class TailCalls extends Transform {
class Phase(prev: scala.tools.nsc.Phase) extends StdPhase(prev) {
def apply(unit: global.CompilationUnit) {
if (!(settings.debuginfo.value == "notailcalls")) {
- newTransformer(unit).transformUnit(unit);
+ newTransformer(unit).transformUnit(unit)
}
}
}
@@ -82,104 +83,117 @@ abstract class TailCalls extends Transform {
* that label.
* </p>
* <p>
- * Assumes: <code>Uncurry</code> has been run already, and no multiple
+ * Assumes: `Uncurry` has been run already, and no multiple
* parameter lists exist.
* </p>
*/
class TailCallElimination(unit: CompilationUnit) extends Transformer {
- private val defaultReason = "it contains a recursive call not in tail position"
+ private def defaultReason = "it contains a recursive call not in tail position"
+ private val failPositions = perRunCaches.newMap[TailContext, Position]() withDefault (_.methodPos)
+ private val failReasons = perRunCaches.newMap[TailContext, String]() withDefaultValue defaultReason
+ private def tailrecFailure(ctx: TailContext) {
+ val method = ctx.method
+ val failReason = failReasons(ctx)
+ val failPos = failPositions(ctx)
+
+ unit.error(failPos, s"could not optimize @tailrec annotated $method: $failReason")
+ }
/** Has the label been accessed? Then its symbol is in this set. */
- private val accessed = new scala.collection.mutable.HashSet[Symbol]()
+ private val accessed = perRunCaches.newSet[Symbol]()
// `accessed` was stored as boolean in the current context -- this is no longer tenable
// with jumps to labels in tailpositions now considered in tailposition,
// a downstream context may access the label, and the upstream one will be none the wiser
// this is necessary because tail-calls may occur in places where syntactically they seem impossible
// (since we now consider jumps to labels that are in tailposition, such as matchEnd(x) {x})
+ sealed trait TailContext {
+ def method: Symbol // current method
+ def tparams: List[Symbol] // type parameters
+ def methodPos: Position // default position for failure reporting
+ def tailPos: Boolean // context is in tail position
+ def label: Symbol // new label, tail call target
+ def tailLabels: Set[Symbol]
+
+ def enclosingType = method.enclClass.typeOfThis
+ def isEligible = method.isEffectivelyFinal
+ def isMandatory = method.hasAnnotation(TailrecClass)
+ def isTransformed = isEligible && accessed(label)
+
+ def newThis(pos: Position) = {
+ def msg = "Creating new `this` during tailcalls\n method: %s\n current class: %s".format(
+ method.ownerChain.mkString(" -> "),
+ currentClass.ownerChain.mkString(" -> ")
+ )
+ logResult(msg)(method.newValue(nme.THIS, pos, SYNTHETIC) setInfo currentClass.typeOfThis)
+ }
+ override def toString = s"${method.name} tparams=$tparams tailPos=$tailPos label=$label label info=${label.info}"
+ }
- class Context() {
- /** The current method */
- var method: Symbol = NoSymbol
-
- // symbols of label defs in this method that are in tail position
- var tailLabels: Set[Symbol] = Set()
-
- /** The current tail-call label */
- var label: Symbol = NoSymbol
-
- /** The expected type arguments of self-recursive calls */
- var tparams: List[Symbol] = Nil
-
- /** Tells whether we are in a (possible) tail position */
- var tailPos = false
-
- /** The reason this method could not be optimized. */
- var failReason = defaultReason
- var failPos = method.pos
+ object EmptyTailContext extends TailContext {
+ def method = NoSymbol
+ def tparams = Nil
+ def methodPos = NoPosition
+ def tailPos = false
+ def label = NoSymbol
+ def tailLabels = Set.empty[Symbol]
+ }
- def this(that: Context) = {
- this()
- this.method = that.method
- this.tparams = that.tparams
- this.tailPos = that.tailPos
- this.failPos = that.failPos
- this.label = that.label
- this.tailLabels = that.tailLabels
+ class DefDefTailContext(dd: DefDef) extends TailContext {
+ def method = dd.symbol
+ def tparams = dd.tparams map (_.symbol)
+ def methodPos = dd.pos
+ def tailPos = true
+
+ lazy val label = mkLabel()
+ lazy val tailLabels = {
+ // labels are local to a method, so only traverse the rhs of a defdef
+ val collector = new TailPosLabelsTraverser
+ collector traverse dd.rhs
+ collector.tailLabels.toSet
}
- def this(dd: DefDef) {
- this()
- this.method = dd.symbol
- this.tparams = dd.tparams map (_.symbol)
- this.tailPos = true
- this.failPos = dd.pos
-
- /** Create a new method symbol for the current method and store it in
- * the label field.
- */
- this.label = {
- val label = method.newLabel(newTermName("_" + method.name), method.pos)
- val thisParam = method.newSyntheticValueParam(currentClass.typeOfThis)
- label setInfo MethodType(thisParam :: method.tpe.params, method.tpe.finalResultType)
- }
+
+ private def mkLabel() = {
+ val label = method.newLabel(newTermName("_" + method.name), method.pos)
+ val thisParam = method.newSyntheticValueParam(currentClass.typeOfThis)
+ label setInfo MethodType(thisParam :: method.tpe.params, method.tpe.finalResultType)
if (isEligible)
label substInfo (method.tpe.typeParams, tparams)
- }
- def enclosingType = method.enclClass.typeOfThis
- def methodTypeParams = method.tpe.typeParams
- def isEligible = method.isEffectivelyFinal
- // @tailrec annotation indicates mandatory transformation
- def isMandatory = method.hasAnnotation(TailrecClass) && !forMSIL
- def isTransformed = isEligible && accessed(label)
- def tailrecFailure() = unit.error(failPos, "could not optimize @tailrec annotated " + method + ": " + failReason)
-
- def newThis(pos: Position) = logResult("Creating new `this` during tailcalls\n method: %s\n current class: %s".format(
- method.ownerChain.mkString(" -> "), currentClass.ownerChain.mkString(" -> "))) {
- method.newValue(nme.THIS, pos, SYNTHETIC) setInfo currentClass.typeOfThis
+ label
}
-
- override def toString(): String = (
- "" + method.name + " tparams: " + tparams + " tailPos: " + tailPos +
- " Label: " + label + " Label type: " + label.info
- )
+ private def isRecursiveCall(t: Tree) = {
+ val receiver = t.symbol
+
+ ( (receiver != null)
+ && receiver.isMethod
+ && (method.name == receiver.name)
+ && (method.enclClass isSubClass receiver.enclClass)
+ )
+ }
+ def containsRecursiveCall(t: Tree) = t exists isRecursiveCall
}
-
- private var ctx: Context = new Context()
- private def noTailContext() = {
- val t = new Context(ctx)
- t.tailPos = false
- t
+ class ClonedTailContext(that: TailContext, override val tailPos: Boolean) extends TailContext {
+ def method = that.method
+ def tparams = that.tparams
+ def methodPos = that.methodPos
+ def tailLabels = that.tailLabels
+ def label = that.label
}
+ private var ctx: TailContext = EmptyTailContext
+ private def noTailContext() = new ClonedTailContext(ctx, tailPos = false)
+ private def yesTailContext() = new ClonedTailContext(ctx, tailPos = true)
+
/** Rewrite this tree to contain no tail recursive calls */
- def transform(tree: Tree, nctx: Context): Tree = {
+ def transform(tree: Tree, nctx: TailContext): Tree = {
val saved = ctx
ctx = nctx
try transform(tree)
finally this.ctx = saved
}
+ def yesTailTransform(tree: Tree): Tree = transform(tree, yesTailContext())
def noTailTransform(tree: Tree): Tree = transform(tree, noTailContext())
def noTailTransforms(trees: List[Tree]) = {
val nctx = noTailContext()
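The rewrite above replaces the old mutable Context with an immutable TailContext hierarchy plus per-run maps for failure positions and reasons; ClonedTailContext overrides only tailPos. A standalone sketch of that cloning pattern, using illustrative names that are not the compiler's own:

object ContextSketch {
  sealed trait Ctx {
    def method: String
    def tailPos: Boolean
  }
  final case class MethodCtx(method: String, tailPos: Boolean) extends Ctx
  // "Clone" by delegating everything and overriding a single member, instead of
  // copying mutable fields as the removed Context(that: Context) constructor did.
  final class Cloned(that: Ctx, override val tailPos: Boolean) extends Ctx {
    def method = that.method
  }

  def main(args: Array[String]): Unit = {
    val root   = MethodCtx("fib", tailPos = true)
    val noTail = new Cloned(root, tailPos = false)
    println((root.tailPos, noTail.tailPos))  // (true,false)
  }
}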
@@ -187,38 +201,33 @@ abstract class TailCalls extends Transform {
}
override def transform(tree: Tree): Tree = {
- /** A possibly polymorphic apply to be considered for tail call transformation.
- */
+ /* A possibly polymorphic apply to be considered for tail call transformation. */
def rewriteApply(target: Tree, fun: Tree, targs: List[Tree], args: List[Tree]) = {
val receiver: Tree = fun match {
case Select(qual, _) => qual
case _ => EmptyTree
}
-
def receiverIsSame = ctx.enclosingType.widen =:= receiver.tpe.widen
def receiverIsSuper = ctx.enclosingType.widen <:< receiver.tpe.widen
def isRecursiveCall = (ctx.method eq fun.symbol) && ctx.tailPos
def transformArgs = noTailTransforms(args)
def matchesTypeArgs = ctx.tparams sameElements (targs map (_.tpe.typeSymbol))
- /** Records failure reason in Context for reporting.
- * Position is unchanged (by default, the method definition.)
+ /* Records failure reason in Context for reporting.
+ * Position is unchanged (by default, the method definition.)
*/
def fail(reason: String) = {
debuglog("Cannot rewrite recursive call at: " + fun.pos + " because: " + reason)
-
- ctx.failReason = reason
+ failReasons(ctx) = reason
treeCopy.Apply(tree, noTailTransform(target), transformArgs)
}
- /** Position of failure is that of the tree being considered.
- */
+ /* Position of failure is that of the tree being considered. */
def failHere(reason: String) = {
- ctx.failPos = fun.pos
+ failPositions(ctx) = fun.pos
fail(reason)
}
def rewriteTailCall(recv: Tree): Tree = {
debuglog("Rewriting tail recursive call: " + fun.pos.lineContent.trim)
-
accessed += ctx.label
typedPos(fun.pos) {
val args = mapWithIndex(transformArgs)((arg, i) => mkAttributedCastHack(arg, ctx.label.info.params(i + 1).tpe))
@@ -228,12 +237,11 @@ abstract class TailCalls extends Transform {
if (!ctx.isEligible) fail("it is neither private nor final so can be overridden")
else if (!isRecursiveCall) {
- if (receiverIsSuper) failHere("it contains a recursive call targeting supertype " + receiver.tpe)
+ if (receiverIsSuper) failHere("it contains a recursive call targeting a supertype")
else failHere(defaultReason)
}
else if (!matchesTypeArgs) failHere("it is called recursively with different type arguments")
else if (receiver == EmptyTree) rewriteTailCall(This(currentClass))
- else if (forMSIL) fail("it cannot be optimized on MSIL")
else if (!receiverIsSame) failHere("it changes type of 'this' on a polymorphic recursive call")
else rewriteTailCall(receiver)
}
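The branches above decide whether a self-call can be rewritten into a jump, and they carry the user-facing @tailrec diagnostics. A small sketch of source that exercises the first check, assuming a standard scalac; the error text quoted in the comment is the one produced by the code above:

import scala.annotation.tailrec

class Counter {
  // Rejected: the method is neither private nor final, so it "can be overridden"
  // and the recursive call cannot be turned into a jump.
  //   @tailrec def countDown(n: Int): Int = if (n <= 0) 0 else countDown(n - 1)

  // Accepted: effectively final (private), recursive call in tail position.
  @tailrec private def countDownOk(n: Int): Int =
    if (n <= 0) 0 else countDownOk(n - 1)

  def run(): Int = countDownOk(10)
}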
@@ -245,37 +253,23 @@ abstract class TailCalls extends Transform {
super.transform(tree)
- case dd @ DefDef(_, _, _, vparamss0, _, rhs0) if !dd.symbol.hasAccessorFlag =>
- val newCtx = new Context(dd)
- def isRecursiveCall(t: Tree) = {
- val sym = t.symbol
- (sym != null) && {
- sym.isMethod && (dd.symbol.name == sym.name) && (dd.symbol.enclClass isSubClass sym.enclClass)
- }
- }
- if (newCtx.isMandatory) {
- if (!rhs0.exists(isRecursiveCall)) {
- unit.error(tree.pos, "@tailrec annotated method contains no recursive calls")
- }
- }
-
- // labels are local to a method, so only traverse the rhs of a defdef
- val collectTailPosLabels = new TailPosLabelsTraverser
- collectTailPosLabels traverse rhs0
- newCtx.tailLabels = collectTailPosLabels.tailLabels.toSet
+ case dd @ DefDef(_, name, _, vparamss0, _, rhs0) if !dd.symbol.hasAccessorFlag =>
+ val newCtx = new DefDefTailContext(dd)
+ if (newCtx.isMandatory && !(newCtx containsRecursiveCall rhs0))
+ unit.error(tree.pos, "@tailrec annotated method contains no recursive calls")
- debuglog("Considering " + dd.name + " for tailcalls, with labels in tailpos: "+ newCtx.tailLabels)
+ debuglog(s"Considering $name for tailcalls, with labels in tailpos: ${newCtx.tailLabels}")
val newRHS = transform(rhs0, newCtx)
- deriveDefDef(tree){rhs =>
+ deriveDefDef(tree) { rhs =>
if (newCtx.isTransformed) {
- /** We have rewritten the tree, but there may be nested recursive calls remaining.
- * If @tailrec is given we need to fail those now.
+ /* We have rewritten the tree, but there may be nested recursive calls remaining.
+ * If @tailrec is given we need to fail those now.
*/
if (newCtx.isMandatory) {
for (t @ Apply(fn, _) <- newRHS ; if fn.symbol == newCtx.method) {
- newCtx.failPos = t.pos
- newCtx.tailrecFailure()
+ failPositions(newCtx) = t.pos
+ tailrecFailure(newCtx)
}
}
val newThis = newCtx.newThis(tree.pos)
@@ -287,8 +281,8 @@ abstract class TailCalls extends Transform {
))
}
else {
- if (newCtx.isMandatory && newRHS.exists(isRecursiveCall))
- newCtx.tailrecFailure()
+ if (newCtx.isMandatory && (newCtx containsRecursiveCall newRHS))
+ tailrecFailure(newCtx)
newRHS
}
@@ -349,27 +343,25 @@ abstract class TailCalls extends Transform {
case Apply(tapply @ TypeApply(fun, targs), vargs) =>
rewriteApply(tapply, fun, targs, vargs)
- case Apply(fun, args) =>
- if (fun.symbol == Boolean_or || fun.symbol == Boolean_and)
- treeCopy.Apply(tree, fun, transformTrees(args))
- else if (fun.symbol.isLabel && args.nonEmpty && args.tail.isEmpty && ctx.tailLabels(fun.symbol)) {
- // this is to detect tailcalls in translated matches
- // it's a one-argument call to a label that is in a tailposition and that looks like label(x) {x}
- // thus, the argument to the call is in tailposition
- val saved = ctx.tailPos
- ctx.tailPos = true
- debuglog("in tailpos label: "+ args.head)
- val res = transform(args.head)
- ctx.tailPos = saved
- if (res ne args.head) {
- // we tail-called -- TODO: shield from false-positives where we rewrite but don't tail-call
- // must leave the jump to the original tailpos-label (fun)!
- // there might be *a* tailcall *in* res, but it doesn't mean res *always* tailcalls
- treeCopy.Apply(tree, fun, List(res))
- }
- else rewriteApply(fun, fun, Nil, args)
- } else rewriteApply(fun, fun, Nil, args)
+ case Apply(fun, args) if fun.symbol == Boolean_or || fun.symbol == Boolean_and =>
+ treeCopy.Apply(tree, fun, transformTrees(args))
+
+ // this is to detect tailcalls in translated matches
+ // it's a one-argument call to a label that is in a tailposition and that looks like label(x) {x}
+ // thus, the argument to the call is in tailposition
+ case Apply(fun, args @ (arg :: Nil)) if fun.symbol.isLabel && ctx.tailLabels(fun.symbol) =>
+ debuglog(s"in tailpos label: $arg")
+ val res = yesTailTransform(arg)
+ // we tail-called -- TODO: shield from false-positives where we rewrite but don't tail-call
+ // must leave the jump to the original tailpos-label (fun)!
+ // there might be *a* tailcall *in* res, but it doesn't mean res *always* tailcalls
+ if (res ne arg)
+ treeCopy.Apply(tree, fun, res :: Nil)
+ else
+ rewriteApply(fun, fun, Nil, args)
+ case Apply(fun, args) =>
+ rewriteApply(fun, fun, Nil, args)
case Alternative(_) | Star(_) | Bind(_, _) =>
sys.error("We should've never gotten inside a pattern")
case Select(qual, name) =>
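The new one-argument label case above is what keeps tail calls recognizable after pattern-matcher translation, where a case body is reached through a label shaped like matchEnd(x){x}. A user-level sketch of the situation it covers, assuming a standard scalac:

import scala.annotation.tailrec

object MatchTail {
  // The recursive call sits in a match case; after translation it is reached via a
  // label in tail position, which the Apply-on-label case above treats as a tail
  // position so the call can still be rewritten as a jump.
  @tailrec def length(xs: List[Int], acc: Int = 0): Int = xs match {
    case Nil     => acc
    case _ :: tl => length(tl, acc + 1)
  }

  def main(args: Array[String]): Unit =
    println(length(List(1, 2, 3)))  // 3
}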
@@ -404,7 +396,7 @@ abstract class TailCalls extends Transform {
finally maybeTail = saved
}
- def traverseNoTail(tree: Tree) = traverse(tree, false)
+ def traverseNoTail(tree: Tree) = traverse(tree, maybeTailNew = false)
def traverseTreesNoTail(trees: List[Tree]) = trees foreach traverseNoTail
override def traverse(tree: Tree) = tree match {
diff --git a/src/compiler/scala/tools/nsc/transform/TypingTransformers.scala b/src/compiler/scala/tools/nsc/transform/TypingTransformers.scala
index c7bc16f249..3feadcd9b2 100644
--- a/src/compiler/scala/tools/nsc/transform/TypingTransformers.scala
+++ b/src/compiler/scala/tools/nsc/transform/TypingTransformers.scala
@@ -6,8 +6,6 @@
package scala.tools.nsc
package transform
-import scala.collection.{ mutable, immutable }
-
/** A base class for transforms.
* A transform contains a compiler phase which applies a tree transformer.
*/
@@ -19,17 +17,15 @@ trait TypingTransformers {
abstract class TypingTransformer(unit: CompilationUnit) extends Transformer {
var localTyper: analyzer.Typer =
if (phase.erasedTypes)
- erasure.newTyper(erasure.rootContext(unit, EmptyTree, true)).asInstanceOf[analyzer.Typer]
+ erasure.newTyper(erasure.rootContext(unit, EmptyTree, erasedTypes = true)).asInstanceOf[analyzer.Typer]
else
analyzer.newTyper(analyzer.rootContext(unit, EmptyTree, true))
protected var curTree: Tree = _
- protected def typedPos(pos: Position)(tree: Tree) = localTyper typed { atPos(pos)(tree) }
override final def atOwner[A](owner: Symbol)(trans: => A): A = atOwner(curTree, owner)(trans)
def atOwner[A](tree: Tree, owner: Symbol)(trans: => A): A = {
val savedLocalTyper = localTyper
-// println("transformer atOwner: " + owner + " isPackage? " + owner.isPackage)
localTyper = localTyper.atOwner(tree, if (owner.isModule) owner.moduleClass else owner)
val result = super.atOwner(owner)(trans)
localTyper = savedLocalTyper
diff --git a/src/compiler/scala/tools/nsc/transform/UnCurry.scala b/src/compiler/scala/tools/nsc/transform/UnCurry.scala
index 430129aaff..64f4e579e1 100644
--- a/src/compiler/scala/tools/nsc/transform/UnCurry.scala
+++ b/src/compiler/scala/tools/nsc/transform/UnCurry.scala
@@ -3,7 +3,8 @@
* @author
*/
-package scala.tools.nsc
+package scala
+package tools.nsc
package transform
import symtab.Flags._
@@ -61,24 +62,6 @@ abstract class UnCurry extends InfoTransform
// uncurry and uncurryType expand type aliases
- /** Traverse tree omitting local method definitions.
- * If a `return` is encountered, set `returnFound` to true.
- * Used for MSIL only.
- */
- private object lookForReturns extends Traverser {
- var returnFound = false
- override def traverse(tree: Tree): Unit = tree match {
- case Return(_) => returnFound = true
- case DefDef(_, _, _, _, _, _) => ;
- case _ => super.traverse(tree)
- }
- def found(tree: Tree) = {
- returnFound = false
- traverse(tree)
- returnFound
- }
- }
-
class UnCurryTransformer(unit: CompilationUnit) extends TypingTransformer(unit) {
private var needTryLift = false
private var inPattern = false
@@ -112,8 +95,6 @@ abstract class UnCurry extends InfoTransform
private lazy val serialVersionUIDAnnotation =
AnnotationInfo(SerialVersionUIDAttr.tpe, List(Literal(Constant(0))), List())
- private var nprinted = 0
-
// I don't have a clue why I'm catching TypeErrors here, but it's better
// than spewing stack traces at end users for internal errors. Examples
// which hit at this point should not be hard to come by, but the immediate
@@ -134,7 +115,8 @@ abstract class UnCurry extends InfoTransform
def isByNameRef(tree: Tree) = (
tree.isTerm
&& !byNameArgs(tree)
- && tree.hasSymbolWhich(s => isByNameParamType(s.tpe))
+ && (tree.symbol ne null)
+ && (isByName(tree.symbol))
)
/** Uncurry a type of a tree node.
@@ -160,7 +142,7 @@ abstract class UnCurry extends InfoTransform
/** Return non-local return key for given method */
private def nonLocalReturnKey(meth: Symbol) =
nonLocalReturnKeys.getOrElseUpdate(meth,
- meth.newValue(unit.freshTermName("nonLocalReturnKey"), meth.pos, SYNTHETIC) setInfo ObjectClass.tpe
+ meth.newValue(unit.freshTermName("nonLocalReturnKey"), meth.pos, SYNTHETIC) setInfo ObjectTpe
)
/** Generate a non-local return throw with given return expression from given method.
@@ -202,9 +184,12 @@ abstract class UnCurry extends InfoTransform
THEN ((ex DOT nme.value)())
ELSE (Throw(Ident(ex)))
)
- val keyDef = ValDef(key, New(ObjectClass.tpe))
+ val keyDef = ValDef(key, New(ObjectTpe))
val tryCatch = Try(body, pat -> rhs)
+ for (Try(t, catches, _) <- body ; cdef <- catches ; if treeInfo catchesThrowable cdef)
+ unit.warning(body.pos, "catch block may intercept non-local return from " + meth)
+
Block(List(keyDef), tryCatch)
}
}
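For context: non-local returns are lowered here into a throw of a control exception keyed by the per-method nonLocalReturnKey object, which the enclosing method catches. The warning added above flags catch blocks that catch Throwable, since they can swallow that control exception. A sketch of both behaviours, assuming a standard scalac:

object NonLocalReturnDemo {
  // `return Some(x)` inside the closure is a non-local return: it throws a control
  // exception that the enclosing method's synthesized try/catch (the keyDef/tryCatch
  // machinery above) unwinds to.
  def find(xs: List[Int], p: Int => Boolean): Option[Int] = {
    xs.foreach { x => if (p(x)) return Some(x) }
    None
  }

  // A broad catch inside the same method traps that control exception first, which is
  // exactly what the new "catch block may intercept non-local return" warning is about.
  def findSwallowed(xs: List[Int], p: Int => Boolean): Option[Int] = {
    try xs.foreach { x => if (p(x)) return Some(x) }
    catch { case _: Throwable => () }
    None
  }

  def main(args: Array[String]): Unit = {
    println(find(List(1, 2, 3), _ > 1))          // Some(2)
    println(findSwallowed(List(1, 2, 3), _ > 1)) // None: the return was intercepted
  }
}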
@@ -228,8 +213,6 @@ abstract class UnCurry extends InfoTransform
* }
* new $anon()
*
- * If `settings.XoldPatmat.value`, also synthesized AbstractPartialFunction subclasses (see synthPartialFunction).
- *
*/
def transformFunction(fun: Function): Tree = {
fun.tpe match {
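transformFunction, entered here, rewrites a function literal into an anonymous class extending the matching AbstractFunctionN (made Serializable via addSerializable). Written out by hand, the lowering looks roughly like the sketch below; the AnonFun name stands in for the compiler's $anon and is purely illustrative:

object FunctionLowering {
  // What `val inc = (x: Int) => x + 1` becomes, approximately, after this phase:
  val inc: Int => Int = {
    class AnonFun extends scala.runtime.AbstractFunction1[Int, Int] with Serializable {
      def apply(x: Int): Int = x + 1
    }
    new AnonFun
  }

  def main(args: Array[String]): Unit =
    println(inc(41))  // 42
}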
@@ -245,9 +228,6 @@ abstract class UnCurry extends InfoTransform
deEta(fun) match {
// nullary or parameterless
case fun1 if fun1 ne fun => fun1
- case _ if fun.tpe.typeSymbol == PartialFunctionClass =>
- // only get here when running under -Xoldpatmat
- synthPartialFunction(fun)
case _ =>
val parents = addSerializable(abstractFunctionForFunctionType(fun.tpe))
val anonClass = fun.symbol.owner newAnonymousFunctionClass(fun.pos, inConstructorFlag) addAnnotation serialVersionUIDAnnotation
@@ -277,137 +257,13 @@ abstract class UnCurry extends InfoTransform
localTyper.typedPos(fun.pos) {
Block(
- List(ClassDef(anonClass, NoMods, ListOfNil, ListOfNil, List(applyMethodDef), fun.pos)),
+ List(ClassDef(anonClass, NoMods, ListOfNil, List(applyMethodDef), fun.pos)),
Typed(New(anonClass.tpe), TypeTree(fun.tpe)))
}
}
}
- /** Transform a function node (x => body) of type PartialFunction[T, R] where
- * body = expr match { case P_i if G_i => E_i }_i=1..n
- * to (assuming none of the cases is a default case):
- *
- * class $anon() extends AbstractPartialFunction[T, R] with Serializable {
- * def applyOrElse[A1 <: A, B1 >: B](x: A1, default: A1 => B1): B1 = (expr: @unchecked) match {
- * case P_1 if G_1 => E_1
- * ...
- * case P_n if G_n => E_n
- * case _ => default(expr)
- * }
- * def isDefinedAt(x: T): boolean = (x: @unchecked) match {
- * case P_1 if G_1 => true
- * ...
- * case P_n if G_n => true
- * case _ => false
- * }
- * }
- * new $anon()
- *
- * If there's a default case, the original match is used for applyOrElse, and isDefinedAt returns `true`
- */
- def synthPartialFunction(fun: Function) = {
- if (!settings.XoldPatmat.value) debugwarn("Under the new pattern matching scheme, PartialFunction should have been synthesized during typers.")
-
- val targs = fun.tpe.typeArgs
- val (formals, restpe) = (targs.init, targs.last)
-
- val anonClass = fun.symbol.owner newAnonymousFunctionClass(fun.pos, inConstructorFlag) addAnnotation serialVersionUIDAnnotation
- val parents = addSerializable(appliedType(AbstractPartialFunctionClass, targs: _*))
- anonClass setInfo ClassInfoType(parents, newScope, anonClass)
-
- // duplicate before applyOrElseMethodDef is run so that it does not mess up our trees and label symbols (we have a fresh set)
- // otherwise `TreeSymSubstituter(fun.vparams map (_.symbol), params)` won't work as the subst has been run already
- val bodyForIDA = {
- val duped = fun.body.duplicate
- val oldParams = new mutable.ListBuffer[Symbol]()
- val newParams = new mutable.ListBuffer[Symbol]()
-
- val oldSyms0 =
- duped filter {
- case l@LabelDef(_, params, _) =>
- params foreach {p =>
- val oldSym = p.symbol
- p.symbol = oldSym.cloneSymbol
- oldParams += oldSym
- newParams += p.symbol
- }
- true
- case _ => false
- } map (_.symbol)
- val oldSyms = oldParams.toList ++ oldSyms0
- val newSyms = newParams.toList ++ (oldSyms0 map (_.cloneSymbol))
- // println("duping "+ oldSyms +" --> "+ (newSyms map (_.ownerChain)))
-
- val substLabels = new TreeSymSubstituter(oldSyms, newSyms)
-
- substLabels(duped)
- }
-
- // def applyOrElse[A1 <: A, B1 >: B](x: A1, default: A1 => B1): B1 =
- val applyOrElseMethodDef = {
- val methSym = anonClass.newMethod(fun.pos, nme.applyOrElse) setFlag (FINAL | OVERRIDE)
-
- val List(argtpe) = formals
- val A1 = methSym newTypeParameter(newTypeName("A1")) setInfo TypeBounds.upper(argtpe)
- val B1 = methSym newTypeParameter(newTypeName("B1")) setInfo TypeBounds.lower(restpe)
- val methFormals = List(A1.tpe, functionType(List(A1.tpe), B1.tpe))
- val params@List(x, default) = methSym newSyntheticValueParams methFormals
- methSym setInfoAndEnter polyType(List(A1, B1), MethodType(params, B1.tpe))
-
- val substParam = new TreeSymSubstituter(fun.vparams map (_.symbol), List(x))
- val body = localTyper.typedPos(fun.pos) { import CODE._
- def defaultAction(scrut: Tree) = REF(default) APPLY (REF(x))
-
- substParam(fun.body) match {
- case orig@Match(selector, cases) =>
- if (cases exists treeInfo.isDefaultCase) orig
- else {
- val defaultCase = CaseDef(Ident(nme.WILDCARD), EmptyTree, defaultAction(selector.duplicate))
- Match(/*gen.mkUnchecked*/(selector), cases :+ defaultCase)
- }
-
- }
- }
- body.changeOwner(fun.symbol -> methSym)
-
- val methDef = DefDef(methSym, body)
-
- // Have to repack the type to avoid mismatches when existentials
- // appear in the result - see SI-4869.
- methDef.tpt setType localTyper.packedType(body, methSym)
- methDef
- }
-
- val isDefinedAtMethodDef = {
- val methSym = anonClass.newMethod(nme.isDefinedAt, fun.pos, FINAL | SYNTHETIC)
- val params = methSym newSyntheticValueParams formals
- methSym setInfoAndEnter MethodType(params, BooleanClass.tpe)
-
- val substParam = new TreeSymSubstituter(fun.vparams map (_.symbol), params)
- def doSubst(x: Tree) = substParam(resetLocalAttrsKeepLabels(x)) // see pos/t1761 for why `resetLocalAttrs`, but must keep label symbols around
-
- val body = bodyForIDA match {
- case Match(selector, cases) =>
- if (cases exists treeInfo.isDefaultCase) TRUE_typed
- else
- doSubst(Match(/*gen.mkUnchecked*/(selector),
- (cases map (c => deriveCaseDef(c)(x => TRUE_typed))) :+ (
- DEFAULT ==> FALSE_typed)))
-
- }
- body.changeOwner(fun.symbol -> methSym)
-
- DefDef(methSym, body)
- }
-
- localTyper.typedPos(fun.pos) {
- Block(
- List(ClassDef(anonClass, NoMods, ListOfNil, ListOfNil, List(applyOrElseMethodDef, isDefinedAtMethodDef), fun.pos)),
- Typed(New(anonClass.tpe), TypeTree(fun.tpe)))
- }
- }
-
def transformArgs(pos: Position, fun: Symbol, args: List[Tree], formals: List[Type]) = {
val isJava = fun.isJavaDefined
def transformVarargs(varargsElemType: Type) = {
@@ -416,7 +272,7 @@ abstract class UnCurry extends InfoTransform
// when calling into scala varargs, make sure it's a sequence.
def arrayToSequence(tree: Tree, elemtp: Type) = {
- afterUncurry {
+ exitingUncurry {
localTyper.typedPos(pos) {
val pt = arrayType(elemtp)
val adaptedTree = // might need to cast to Array[elemtp], as arrays are not covariant
@@ -435,7 +291,7 @@ abstract class UnCurry extends InfoTransform
def getClassTag(tp: Type): Tree = {
val tag = localTyper.resolveClassTag(tree.pos, tp)
// Don't want bottom types getting any further than this (SI-4024)
- if (tp.typeSymbol.isBottomClass) getClassTag(AnyClass.tpe)
+ if (tp.typeSymbol.isBottomClass) getClassTag(AnyTpe)
else if (!tag.isEmpty) tag
else if (tp.bounds.hi ne tp) getClassTag(tp.bounds.hi)
else localTyper.TyperErrorGen.MissingClassTagError(tree, tp)
@@ -446,7 +302,7 @@ abstract class UnCurry extends InfoTransform
case _ => EmptyTree
}
}
- afterUncurry {
+ exitingUncurry {
localTyper.typedPos(pos) {
gen.mkMethodCall(tree, toArraySym, Nil, List(traversableClassTag(tree.tpe)))
}
@@ -470,7 +326,7 @@ abstract class UnCurry extends InfoTransform
else arrayToSequence(mkArray, varargsElemType)
}
- afterUncurry {
+ exitingUncurry {
if (isJava && !isReferenceArray(suffix.tpe) && isArrayOfSymbol(fun.tpe.params.last.tpe, ObjectClass)) {
// The array isn't statically known to be a reference array, so call ScalaRuntime.toObjectArray.
suffix = localTyper.typedPos(pos) {
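These hunks adapt variable-argument call sites: an array passed to a Scala varargs parameter is wrapped into a sequence (arrayToSequence), while a sequence passed to a Java varargs method becomes an array (falling back to ScalaRunTime.toObjectArray when the element type is not statically a reference type). A user-level sketch of the call shapes involved, assuming the standard `: _*` ascription:

object VarargsCalls {
  def sum(xs: Int*): Int = xs.sum            // Scala varargs: xs arrives as a Seq[Int]

  def main(args: Array[String]): Unit = {
    println(sum(1, 2, 3))                    // individual arguments wrapped into a Seq
    val arr = Array(1, 2, 3)
    println(sum(arr: _*))                    // array-to-sequence adaptation as above
    // The reverse direction: a Java varargs method receives an Object[] built from
    // the Scala-side arguments.
    println(String.format("%d-%d", Int.box(1), Int.box(2)))
  }
}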
@@ -550,15 +406,7 @@ abstract class UnCurry extends InfoTransform
finally needTryLift = saved
}
- /** A try or synchronized needs to be lifted anyway for MSIL if it contains
- * return statements. These are disallowed in the CLR. By lifting
- * such returns will be converted to throws.
- */
- def shouldBeLiftedAnyway(tree: Tree) = false && // buggy, see #1981
- forMSIL && lookForReturns.found(tree)
-
- /** Transform tree `t` to { def f = t; f } where `f` is a fresh name
- */
+ /* Transform tree `t` to { def f = t; f } where `f` is a fresh name */
def liftTree(tree: Tree) = {
debuglog("lifting tree at: " + (tree.pos))
val sym = currentOwner.newMethod(unit.freshTermName("liftedTree"), tree.pos)
@@ -581,7 +429,7 @@ abstract class UnCurry extends InfoTransform
val result = (
// TODO - settings.noassertions.value temporarily retained to avoid
// breakage until a reasonable interface is settled upon.
- if ((sym ne null) && (sym.elisionLevel.exists (_ < settings.elidebelow.value || settings.noassertions.value)))
+ if ((sym ne null) && (sym.elisionLevel.exists (_ < settings.elidebelow.value || settings.noassertions)))
replaceElidableTree(tree)
else translateSynchronized(tree) match {
case dd @ DefDef(mods, name, tparams, _, tpt, rhs) =>
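The elision check above removes bodies of (and calls to) methods whose @elidable level falls below the -Xelide-below threshold, or assertions when they are disabled. A minimal sketch assuming the standard scala.annotation.elidable; whether `debug` survives depends entirely on the compiler flags used:

import scala.annotation.elidable
import scala.annotation.elidable._

object Logging {
  // Compiled with e.g. -Xelide-below WARNING, calls to debug are elided by the
  // branch above (replaceElidableTree); with default settings they run normally.
  @elidable(FINE) def debug(msg: String): Unit = println("[debug] " + msg)

  def main(args: Array[String]): Unit = {
    debug("about to start")
    println("done")
  }
}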
@@ -592,7 +440,7 @@ abstract class UnCurry extends InfoTransform
if (dd.symbol hasAnnotation VarargsClass) saveRepeatedParams(dd)
- withNeedLift(false) {
+ withNeedLift(needLift = false) {
if (dd.symbol.isClassConstructor) {
atOwner(sym) {
val rhs1 = (rhs: @unchecked) match {
@@ -616,11 +464,11 @@ abstract class UnCurry extends InfoTransform
case ValDef(_, _, _, rhs) =>
if (sym eq NoSymbol) throw new IllegalStateException("Encountered Valdef without symbol: "+ tree + " in "+ unit)
if (!sym.owner.isSourceMethod)
- withNeedLift(true) { super.transform(tree) }
+ withNeedLift(needLift = true) { super.transform(tree) }
else
super.transform(tree)
case UnApply(fn, args) =>
- val fn1 = withInPattern(false)(transform(fn))
+ val fn1 = withInPattern(value = false)(transform(fn))
val args1 = transformTrees(fn.symbol.name match {
case nme.unapply => args
case nme.unapplySeq => transformArgs(tree.pos, fn.symbol, args, analyzer.unapplyTypeList(fn.pos, fn.symbol, fn.tpe, args))
@@ -629,24 +477,20 @@ abstract class UnCurry extends InfoTransform
treeCopy.UnApply(tree, fn1, args1)
case Apply(fn, args) =>
- if (fn.symbol == Object_synchronized && shouldBeLiftedAnyway(args.head))
- transform(treeCopy.Apply(tree, fn, List(liftTree(args.head))))
- else {
- val needLift = needTryLift || !fn.symbol.isLabel // SI-6749, no need to lift in args to label jumps.
- withNeedLift(needLift) {
- val formals = fn.tpe.paramTypes
- treeCopy.Apply(tree, transform(fn), transformTrees(transformArgs(tree.pos, fn.symbol, args, formals)))
- }
+ val needLift = needTryLift || !fn.symbol.isLabel // SI-6749, no need to lift in args to label jumps.
+ withNeedLift(needLift) {
+ val formals = fn.tpe.paramTypes
+ treeCopy.Apply(tree, transform(fn), transformTrees(transformArgs(tree.pos, fn.symbol, args, formals)))
}
case Assign(_: RefTree, _) =>
- withNeedLift(true) { super.transform(tree) }
+ withNeedLift(needLift = true) { super.transform(tree) }
case Assign(lhs, _) if lhs.symbol.owner != currentMethod || lhs.symbol.hasFlag(LAZY | ACCESSOR) =>
- withNeedLift(true) { super.transform(tree) }
+ withNeedLift(needLift = true) { super.transform(tree) }
case ret @ Return(_) if (isNonLocalReturn(ret)) =>
- withNeedLift(true) { super.transform(ret) }
+ withNeedLift(needLift = true) { super.transform(ret) }
case Try(_, Nil, _) =>
// try-finally does not need lifting: lifting is needed only for try-catch
@@ -656,11 +500,11 @@ abstract class UnCurry extends InfoTransform
super.transform(tree)
case Try(block, catches, finalizer) =>
- if (needTryLift || shouldBeLiftedAnyway(tree)) transform(liftTree(tree))
+ if (needTryLift) transform(liftTree(tree))
else super.transform(tree)
case CaseDef(pat, guard, body) =>
- val pat1 = withInPattern(true)(transform(pat))
+ val pat1 = withInPattern(value = true)(transform(pat))
treeCopy.CaseDef(tree, pat1, transform(guard), transform(body))
case fun @ Function(_, _) =>
@@ -681,11 +525,11 @@ abstract class UnCurry extends InfoTransform
tree1
}
)
- assert(result.tpe != null, result + " tpe is null")
+ assert(result.tpe != null, result.shortClass + " tpe is null:\n" + result)
result setType uncurryTreeType(result.tpe)
}
- def postTransform(tree: Tree): Tree = afterUncurry {
+ def postTransform(tree: Tree): Tree = exitingUncurry {
def applyUnary(): Tree = {
// TODO_NMT: verify that the inner tree of a type-apply also gets parens if the
// whole tree is a polymorphic nullary method application
@@ -703,44 +547,13 @@ abstract class UnCurry extends InfoTransform
def isThrowable(pat: Tree): Boolean = pat match {
case Typed(Ident(nme.WILDCARD), tpt) =>
- tpt.tpe =:= ThrowableClass.tpe
+ tpt.tpe =:= ThrowableTpe
case Bind(_, pat) =>
isThrowable(pat)
case _ =>
false
}
- def isDefaultCatch(cdef: CaseDef) = isThrowable(cdef.pat) && cdef.guard.isEmpty
-
- def postTransformTry(tree: Try) = {
- val body = tree.block
- val catches = tree.catches
- val finalizer = tree.finalizer
- if (opt.virtPatmat) {
- if (catches exists (cd => !treeInfo.isCatchCase(cd)))
- debugwarn("VPM BUG! illegal try/catch " + catches)
- tree
- } else if (catches forall treeInfo.isCatchCase) {
- tree
- } else {
- val exname = unit.freshTermName("ex$")
- val cases =
- if ((catches exists treeInfo.isDefaultCase) || isDefaultCatch(catches.last)) catches
- else catches :+ CaseDef(Ident(nme.WILDCARD), EmptyTree, Throw(Ident(exname)))
- val catchall =
- atPos(tree.pos) {
- CaseDef(
- Bind(exname, Ident(nme.WILDCARD)),
- EmptyTree,
- Match(Ident(exname), cases))
- }
- debuglog("rewrote try: " + catches + " ==> " + catchall);
- val catches1 = localTyper.typedCases(
- List(catchall), ThrowableClass.tpe, WildcardType)
- treeCopy.Try(tree, body, catches1, finalizer)
- }
- }
-
tree match {
/* Some uncurry post transformations add members to templates.
*
@@ -779,7 +592,9 @@ abstract class UnCurry extends InfoTransform
addJavaVarargsForwarders(dd, flatdd)
case tree: Try =>
- postTransformTry(tree)
+ if (tree.catches exists (cd => !treeInfo.isCatchCase(cd)))
+ devWarning("VPM BUG - illegal try/catch " + tree.catches)
+ tree
case Apply(Apply(fn, args), args1) =>
treeCopy.Apply(tree, fn, args ::: args1)
@@ -837,7 +652,7 @@ abstract class UnCurry extends InfoTransform
final case class Packed(param: ValDef, tempVal: ValDef) extends ParamTransform
def isDependent(dd: DefDef): Boolean =
- beforeUncurry {
+ enteringUncurry {
val methType = dd.symbol.info
methType.isDependentMethodType && mexists(methType.paramss)(_.info exists (_.isImmediatelyDependent))
}
@@ -912,10 +727,6 @@ abstract class UnCurry extends InfoTransform
if (!dd.symbol.hasAnnotation(VarargsClass) || !repeatedParams.contains(dd.symbol))
return flatdd
- def toSeqType(tp: Type): Type = {
- val arg = elementType(ArrayClass, tp)
- seqType(arg)
- }
def toArrayType(tp: Type): Type = {
val arg = elementType(SeqClass, tp)
// to prevent generation of an `Object` parameter from `Array[T]` parameter later
@@ -924,7 +735,7 @@ abstract class UnCurry extends InfoTransform
// becomes def foo[T](a: Int, b: Array[Object])
// instead of def foo[T](a: Int, b: Array[T]) ===> def foo[T](a: Int, b: Object)
arrayType(
- if (arg.typeSymbol.isTypeParameterOrSkolem) ObjectClass.tpe
+ if (arg.typeSymbol.isTypeParameterOrSkolem) ObjectTpe
else arg
)
}
@@ -950,7 +761,7 @@ abstract class UnCurry extends InfoTransform
}
// create the symbol
- val forwsym = currentClass.newMethod(dd.name, dd.pos, VARARGS | SYNTHETIC | flatdd.symbol.flags) setInfo forwtype
+ val forwsym = currentClass.newMethod(dd.name.toTermName, dd.pos, VARARGS | SYNTHETIC | flatdd.symbol.flags) setInfo forwtype
// create the tree
val forwtree = theTyper.typedPos(dd.pos) {
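For context, the forwarder being built here (addJavaVarargsForwarders) is what the @varargs annotation requests: a synthetic, VARARGS-flagged overload taking an array, so Java callers can use Java varargs syntax against a Scala T* method. A sketch assuming the standard scala.annotation.varargs:

import scala.annotation.varargs

object JavaFriendly {
  // Alongside the Seq-based method, the phase emits a synthetic overload roughly
  // shaped like `def labels(xs: Array[String]): Int`, so Java code can call
  // JavaFriendly.labels("a", "b") directly.
  @varargs def labels(xs: String*): Int = xs.length

  def main(args: Array[String]): Unit =
    println(labels("a", "b", "c"))  // 3
}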
diff --git a/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala b/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala
index dbe08315f4..069484ff65 100644
--- a/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala
+++ b/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala
@@ -4,9 +4,9 @@
* @author Adriaan Moors
*/
-package scala.tools.nsc.transform.patmat
+package scala
+package tools.nsc.transform.patmat
-import scala.tools.nsc.symtab._
import scala.language.postfixOps
import scala.collection.mutable
import scala.reflect.internal.util.Statistics
@@ -113,7 +113,7 @@ trait Logic extends Debugging {
case object False extends Prop
// symbols are propositions
- abstract case class Sym(val variable: Var, val const: Const) extends Prop {
+ abstract case class Sym(variable: Var, const: Const) extends Prop {
private[this] val id = Sym.nextSymId
override def toString = variable +"="+ const +"#"+ id
@@ -212,7 +212,7 @@ trait Logic extends Debugging {
}
props foreach gatherEqualities.apply
- if (modelNull) vars foreach (_.registerNull)
+ if (modelNull) vars foreach (_.registerNull())
val pure = props map (p => eqFreePropToSolvable(rewriteEqualsToProp(p)))
@@ -321,7 +321,7 @@ trait ScalaLogic extends Interface with Logic with TreeAndTypeAnalysis {
val staticTpCheckable: Type = checkableType(staticTp)
private[this] var _mayBeNull = false
- def registerNull(): Unit = { ensureCanModify; if (NullTp <:< staticTpCheckable) _mayBeNull = true }
+ def registerNull(): Unit = { ensureCanModify(); if (NullTp <:< staticTpCheckable) _mayBeNull = true }
def mayBeNull: Boolean = _mayBeNull
// case None => domain is unknown,
@@ -345,16 +345,16 @@ trait ScalaLogic extends Interface with Logic with TreeAndTypeAnalysis {
} else
subConsts
- observed; allConsts
+ observed(); allConsts
}
// populate equalitySyms
// don't care about the result, but want only one fresh symbol per distinct constant c
- def registerEquality(c: Const): Unit = {ensureCanModify; symForEqualsTo getOrElseUpdate(c, Sym(this, c))}
+ def registerEquality(c: Const): Unit = {ensureCanModify(); symForEqualsTo getOrElseUpdate(c, Sym(this, c))}
// return the symbol that represents this variable being equal to the constant `c`, if it exists, otherwise False (for robustness)
// (registerEquality(c) must have been called prior, either when constructing the domain or from outside)
- def propForEqualsTo(c: Const): Prop = {observed; symForEqualsTo.getOrElse(c, False)}
+ def propForEqualsTo(c: Const): Prop = {observed(); symForEqualsTo.getOrElse(c, False)}
// [implementation NOTE: don't access until all potential equalities have been registered using registerEquality]
/** the information needed to construct the boolean proposition that encodes the equality proposition (V = C)
@@ -366,7 +366,7 @@ trait ScalaLogic extends Interface with Logic with TreeAndTypeAnalysis {
* and thus in this variable's equality symbols), but reachability also requires us to model things like V = 1 precluding V = "1"
*/
lazy val implications = {
- /** when we know V = C, which other equalities must hold
+ /* when we know V = C, which other equalities must hold
*
* in general, equality to some type implies equality to its supertypes
* (this multi-valued kind of equality is necessary for unreachability)
@@ -479,7 +479,7 @@ trait ScalaLogic extends Interface with Logic with TreeAndTypeAnalysis {
lazy val symForStaticTp: Option[Sym] = symForEqualsTo.get(TypeConst(staticTpCheckable))
// don't access until all potential equalities have been registered using registerEquality
- private lazy val equalitySyms = {observed; symForEqualsTo.values.toList}
+ private lazy val equalitySyms = {observed(); symForEqualsTo.values.toList}
// don't call until all equalities have been registered and registerNull has been called (if needed)
def describe = {
@@ -514,11 +514,11 @@ trait ScalaLogic extends Interface with Logic with TreeAndTypeAnalysis {
uniques.get(tp).getOrElse(
uniques.find {case (oldTp, oldC) => oldTp =:= tp} match {
case Some((_, c)) =>
- debug.patmat("unique const: "+ (tp, c))
+ debug.patmat("unique const: "+ ((tp, c)))
c
case _ =>
val fresh = mkFresh
- debug.patmat("uniqued const: "+ (tp, fresh))
+ debug.patmat("uniqued const: "+ ((tp, fresh)))
uniques(tp) = fresh
fresh
})
@@ -534,12 +534,12 @@ trait ScalaLogic extends Interface with Logic with TreeAndTypeAnalysis {
if (!t.symbol.isStable) t.tpe.narrow
else trees find (a => a.correspondsStructure(t)(sameValue)) match {
case Some(orig) =>
- debug.patmat("unique tp for tree: "+ (orig, orig.tpe))
+ debug.patmat("unique tp for tree: "+ ((orig, orig.tpe)))
orig.tpe
case _ =>
// duplicate, don't mutate old tree (TODO: use a map tree -> type instead?)
val treeWithNarrowedType = t.duplicate setType t.tpe.narrow
- debug.patmat("uniqued: "+ (t, t.tpe, treeWithNarrowedType.tpe))
+ debug.patmat("uniqued: "+ ((t, t.tpe, treeWithNarrowedType.tpe)))
trees += treeWithNarrowedType
treeWithNarrowedType.tpe
}
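Many hunks in this file only add a second pair of parentheses around tuple arguments to `+`. Without them, an argument list like (tp, c) passed to the single-parameter `+` is silently adapted into a tuple, which newer compilers flag (adapted-args lint); the extra parentheses build the Tuple2 explicitly. A tiny sketch of the two spellings:

object TuplingDemo {
  def main(args: Array[String]): Unit = {
    val tp = "Int"; val c = 1
    // println("unique const: " + (tp, c))  // relies on auto-tupling of the argument list
    println("unique const: " + ((tp, c)))   // explicit Tuple2; prints: unique const: (Int,1)
  }
}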
diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchAnalysis.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchAnalysis.scala
index 9558542533..f527c30b8a 100644
--- a/src/compiler/scala/tools/nsc/transform/patmat/MatchAnalysis.scala
+++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchAnalysis.scala
@@ -12,18 +12,14 @@ import scala.reflect.internal.util.Statistics
import scala.reflect.internal.util.Position
trait TreeAndTypeAnalysis extends Debugging {
- import global.{Tree, Type, Symbol, definitions, analyzer,
- ConstantType, Literal, Constant, appliedType, WildcardType, TypeRef, ModuleClassSymbol,
- nestedMemberType, TypeMap, Ident}
-
+ import global._
import definitions._
import analyzer.Typer
-
// we use subtyping as a model for implication between instanceof tests
// i.e., when S <:< T we assume x.isInstanceOf[S] implies x.isInstanceOf[T]
// unfortunately this is not true in general:
- // SI-6022 expects instanceOfTpImplies(ProductClass.tpe, AnyRefClass.tpe)
+ // SI-6022 expects instanceOfTpImplies(ProductClass.tpe, AnyRefTpe)
def instanceOfTpImplies(tp: Type, tpImplied: Type) = {
val tpValue = tp.typeSymbol.isPrimitiveValueClass
@@ -32,7 +28,7 @@ trait TreeAndTypeAnalysis extends Debugging {
// this allows us to reuse subtyping as a model for implication between instanceOf tests
// the latter don't see a difference between AnyRef, Object or Any when comparing non-value types -- SI-6022
val tpImpliedNormalizedToAny =
- if (tpImplied =:= (if (tpValue) AnyValClass.tpe else AnyRefClass.tpe)) AnyClass.tpe
+ if (tpImplied =:= (if (tpValue) AnyValTpe else AnyRefTpe)) AnyTpe
else tpImplied
tp <:< tpImpliedNormalizedToAny
@@ -52,7 +48,7 @@ trait TreeAndTypeAnalysis extends Debugging {
tp.typeSymbol match {
// TODO case _ if tp.isTupleType => // recurse into component types?
case UnitClass =>
- Some(List(UnitClass.tpe))
+ Some(List(UnitTpe))
case BooleanClass =>
Some((List(ConstantType(Constant(true)), ConstantType(Constant(false)))))
// TODO case _ if tp.isTupleType => // recurse into component types
@@ -60,7 +56,7 @@ trait TreeAndTypeAnalysis extends Debugging {
Some(List(tp))
// make sure it's not a primitive, else (5: Byte) match { case 5 => ... } sees no Byte
case sym if !sym.isSealed || isPrimitiveValueClass(sym) =>
- debug.patmat("enum unsealed "+ (tp, sym, sym.isSealed, isPrimitiveValueClass(sym)))
+ debug.patmat("enum unsealed "+ ((tp, sym, sym.isSealed, isPrimitiveValueClass(sym))))
None
case sym =>
val subclasses = (
@@ -68,7 +64,7 @@ trait TreeAndTypeAnalysis extends Debugging {
// symbols which are both sealed and abstract need not be covered themselves, because
// all of their children must be and they cannot otherwise be created.
filterNot (x => x.isSealed && x.isAbstractClass && !isPrimitiveValueClass(x)))
- debug.patmat("enum sealed -- subclasses: "+ (sym, subclasses))
+ debug.patmat("enum sealed -- subclasses: "+ ((sym, subclasses)))
val tpApprox = typer.infer.approximateAbstracts(tp)
val pre = tpApprox.prefix
@@ -86,7 +82,7 @@ trait TreeAndTypeAnalysis extends Debugging {
if (subTpApprox <:< tpApprox) Some(checkableType(subTp))
else None
})
- debug.patmat("enum sealed "+ (tp, tpApprox) + " as "+ validSubTypes)
+ debug.patmat("enum sealed "+ ((tp, tpApprox)) + " as "+ validSubTypes)
Some(validSubTypes)
}
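enumerateSubtypes, edited above, is what lets the analysis treat a sealed symbol as a closed set of children, which is the basis of exhaustivity checking. A user-level sketch of the hierarchy shape it reasons about:

object Exhaustivity {
  sealed trait Shape
  final case class Circle(r: Double)    extends Shape
  final case class Square(side: Double) extends Shape

  // Because Shape is sealed, the analyzer can enumerate {Circle, Square} and prove
  // this match exhaustive; removing a case would produce a non-exhaustiveness warning.
  def area(s: Shape): Double = s match {
    case Circle(r)    => math.Pi * r * r
    case Square(side) => side * side
  }

  def main(args: Array[String]): Unit =
    println(area(Circle(1.0)))
}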
@@ -110,7 +106,7 @@ trait TreeAndTypeAnalysis extends Debugging {
}
val res = typeArgsToWildcardsExceptArray(tp)
- debug.patmat("checkable "+(tp, res))
+ debug.patmat("checkable "+((tp, res)))
res
}
@@ -128,8 +124,8 @@ trait TreeAndTypeAnalysis extends Debugging {
}
trait MatchApproximation extends TreeAndTypeAnalysis with ScalaLogic with MatchTreeMaking {
- import global.{Tree, Type, NoType, Symbol, NoSymbol, ConstantType, Literal, Constant, Ident, UniqueType, RefinedType, EmptyScope}
- import global.definitions.{ListClass, NilModule}
+ import global._
+ import global.definitions._
/**
* Represent a match as a formula in propositional logic that encodes whether the match matches (abstractly: we only consider types)
@@ -346,8 +342,8 @@ trait MatchApproximation extends TreeAndTypeAnalysis with ScalaLogic with MatchT
trait MatchAnalysis extends MatchApproximation {
import PatternMatchingStats._
- import global.{Tree, Type, Symbol, NoSymbol, Ident, Select}
- import global.definitions.{isPrimitiveValueClass, ConsClass, isTupleSymbol}
+ import global._
+ import global.definitions._
trait MatchAnalyzer extends MatchApproximator {
def uncheckedWarning(pos: Position, msg: String) = global.currentUnit.uncheckedWarning(pos, msg)
@@ -638,7 +634,7 @@ trait MatchAnalysis extends MatchApproximation {
def toCounterExample(beBrief: Boolean = false): CounterExample =
if (!allFieldAssignmentsLegal) NoExample
else {
- debug.patmat("describing "+ (variable, equalTo, notEqualTo, fields, cls, allFieldAssignmentsLegal))
+ debug.patmat("describing "+ ((variable, equalTo, notEqualTo, fields, cls, allFieldAssignmentsLegal)))
val res = prunedEqualTo match {
// a definite assignment to a value
case List(eq: ValueConst) if fields.isEmpty => ValueExample(eq)
@@ -659,7 +655,7 @@ trait MatchAnalysis extends MatchApproximation {
cls match {
case ConsClass => ListExample(args())
- case _ if isTupleSymbol(cls) => TupleExample(args(true))
+ case _ if isTupleSymbol(cls) => TupleExample(args(brevity = true))
case _ => ConstructorExample(cls, args())
}
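The ValueExample/TupleExample/ConstructorExample cases above render the counter-example that accompanies a non-exhaustiveness warning. A sketch of source that elicits one, assuming a standard scalac; the warning wording in the comment is approximate:

object MissingCase {
  sealed trait Expr
  final case class Lit(value: Int)       extends Expr
  final case class Add(l: Expr, r: Expr) extends Expr

  // Expected warning, roughly: "match may not be exhaustive. It would fail on the
  // following input: Add(_, _)" -- the Add(_, _) text is a ConstructorExample-style
  // counter-example assembled by the code above.
  def eval(e: Expr): Int = e match {
    case Lit(v) => v
  }

  def main(args: Array[String]): Unit =
    println(eval(Lit(3)))
}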
diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchCodeGen.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchCodeGen.scala
index 57fab4eafa..1e4c56529c 100644
--- a/src/compiler/scala/tools/nsc/transform/patmat/MatchCodeGen.scala
+++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchCodeGen.scala
@@ -10,7 +10,6 @@ import scala.tools.nsc.symtab.Flags.SYNTHETIC
import scala.language.postfixOps
import scala.reflect.internal.util.Statistics
import scala.reflect.internal.util.Position
-import scala.reflect.internal.util.NoPosition
/** Factory methods used by TreeMakers to make the actual trees.
*
@@ -18,10 +17,7 @@ import scala.reflect.internal.util.NoPosition
* and pure (aka "virtualized": match is parametric in its monad).
*/
trait MatchCodeGen extends Interface {
- import PatternMatchingStats._
- import global.{nme, treeInfo, definitions, gen, Tree, Type, Symbol, NoSymbol,
- appliedType, NoType, MethodType, newTermName, Name,
- Block, Literal, Constant, EmptyTree, Function, Typed, ValDef, LabelDef}
+ import global._
import definitions._
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
@@ -74,12 +70,12 @@ trait MatchCodeGen extends Interface {
// the force is needed mainly to deal with the GADT typing hack (we can't detect it otherwise as tp nor pt need contain an abstract type, we're just casting wildly)
def _asInstanceOf(b: Symbol, tp: Type): Tree = if (b.info <:< tp) REF(b) else gen.mkCastPreservingAnnotations(REF(b), tp)
- def _isInstanceOf(b: Symbol, tp: Type): Tree = gen.mkIsInstanceOf(REF(b), tp.withoutAnnotations, true, false)
+ def _isInstanceOf(b: Symbol, tp: Type): Tree = gen.mkIsInstanceOf(REF(b), tp.withoutAnnotations, any = true, wrapInApply = false)
// duplicated out of frustration with cast generation
def mkZero(tp: Type): Tree = {
tp.typeSymbol match {
- case UnitClass => Literal(Constant())
+ case UnitClass => Literal(Constant(()))
case BooleanClass => Literal(Constant(false))
case FloatClass => Literal(Constant(0.0f))
case DoubleClass => Literal(Constant(0.0d))
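mkZero supplies the default value used to pre-initialize temporaries hoisted by the optimized code generator. The mapping mirrors the language's default initializers; a sketch that simply spells them out:

object Defaults {
  // The same per-type defaults mkZero emits:
  def main(args: Array[String]): Unit = {
    val u: Unit    = ()      // Unit      -> ()
    val b: Boolean = false   // Boolean   -> false
    val f: Float   = 0.0f    // Float     -> 0.0f
    val d: Double  = 0.0     // Double    -> 0.0d
    val s: String  = null    // reference -> null
    println((u, b, f, d, s)) // ((),false,0.0,0.0,null)
  }
}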
@@ -132,7 +128,7 @@ trait MatchCodeGen extends Interface {
// __match.guard(`cond`, `res`).flatMap(`nextBinder` => `next`)
def flatMapCond(cond: Tree, res: Tree, nextBinder: Symbol, next: Tree): Tree = flatMap(guard(cond, res), nextBinder, next)
// __match.guard(`guardTree`, ()).flatMap((_: P[Unit]) => `next`)
- def flatMapGuard(guardTree: Tree, next: Tree): Tree = flatMapCond(guardTree, CODE.UNIT, freshSym(guardTree.pos, pureType(UnitClass.tpe)), next)
+ def flatMapGuard(guardTree: Tree, next: Tree): Tree = flatMapCond(guardTree, CODE.UNIT, freshSym(guardTree.pos, pureType(UnitTpe)), next)
}
}
@@ -255,4 +251,4 @@ trait MatchCodeGen extends Interface {
}
}
-} \ No newline at end of file
+}
diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchOptimization.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchOptimization.scala
index c570dd8572..9854e4ef62 100644
--- a/src/compiler/scala/tools/nsc/transform/patmat/MatchOptimization.scala
+++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchOptimization.scala
@@ -11,7 +11,6 @@ import scala.language.postfixOps
import scala.collection.mutable
import scala.reflect.internal.util.Statistics
import scala.reflect.internal.util.Position
-import scala.reflect.internal.util.NoPosition
/** Optimize and analyze matches based on their TreeMaker-representation.
*
@@ -20,15 +19,9 @@ import scala.reflect.internal.util.NoPosition
*/
// TODO: split out match analysis
trait MatchOptimization extends MatchTreeMaking with MatchAnalysis {
- import PatternMatchingStats._
- import global.{Tree, Type, Symbol, NoSymbol, CaseDef, atPos,
- ConstantType, Literal, Constant, gen, EmptyTree, distinctBy,
- Typed, treeInfo, nme, Ident,
- Apply, If, Bind, lub, Alternative, deriveCaseDef, Match, MethodType, LabelDef, TypeTree, Throw}
-
+ import global._
import global.definitions._
-
////
trait CommonSubconditionElimination extends OptimizedCodegen with MatchApproximator {
/** a flow-sensitive, generalised, common sub-expression elimination
@@ -150,7 +143,7 @@ trait MatchOptimization extends MatchTreeMaking with MatchAnalysis {
}
class ReusedCondTreeMaker(prevBinder: Symbol, val nextBinder: Symbol, cond: Tree, res: Tree, val pos: Position) extends TreeMaker { import CODE._
lazy val localSubstitution = Substitution(List(prevBinder), List(CODE.REF(nextBinder)))
- lazy val storedCond = freshSym(pos, BooleanClass.tpe, "rc") setFlag MUTABLE
+ lazy val storedCond = freshSym(pos, BooleanTpe, "rc") setFlag MUTABLE
lazy val treesToHoist: List[Tree] = {
nextBinder setFlag MUTABLE
List(storedCond, nextBinder) map { b => VAL(b) === codegen.mkZero(b.info) }
@@ -160,7 +153,7 @@ trait MatchOptimization extends MatchTreeMaking with MatchAnalysis {
def chainBefore(next: Tree)(casegen: Casegen): Tree = // assert(codegen eq optimizedCodegen)
atPos(pos)(casegen.asInstanceOf[optimizedCodegen.OptimizedCasegen].flatMapCondStored(cond, storedCond, res, nextBinder, substitution(next).duplicate))
- override def toString = "Memo"+(nextBinder.name, storedCond.name, cond, res, substitution)
+ override def toString = "Memo"+((nextBinder.name, storedCond.name, cond, res, substitution))
}
case class ReusingCondTreeMaker(sharedPrefix: List[Test], toReused: TreeMaker => TreeMaker) extends TreeMaker { import CODE._
@@ -199,7 +192,7 @@ trait MatchOptimization extends MatchTreeMaking with MatchAnalysis {
// and in its confusion it emits illegal casts (diagnosed by Grzegorz: checkcast T ; invokevirtual S.m, where T not a subtype of S)
casegen.ifThenElseZero(REF(lastReusedTreeMaker.storedCond), substitution(next).duplicate)
}
- override def toString = "R"+(lastReusedTreeMaker.storedCond.name, substitution)
+ override def toString = "R"+((lastReusedTreeMaker.storedCond.name, substitution))
}
}
@@ -510,7 +503,7 @@ trait MatchOptimization extends MatchTreeMaking with MatchAnalysis {
}
class RegularSwitchMaker(scrutSym: Symbol, matchFailGenOverride: Option[Tree => Tree], val unchecked: Boolean) extends SwitchMaker {
- val switchableTpe = Set(ByteClass.tpe, ShortClass.tpe, IntClass.tpe, CharClass.tpe)
+ val switchableTpe = Set(ByteTpe, ShortTpe, IntTpe, CharTpe)
val alternativesSupported = true
val canJump = true
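RegularSwitchMaker only kicks in when the scrutinee's type is in the switchableTpe set above (Byte, Short, Int, Char) and the cases are constants, possibly with alternatives since alternativesSupported is true. A sketch of a match that qualifies for that path:

object SwitchDemo {
  // An Int scrutinee with constant alternatives is eligible for the switch emission
  // path above rather than a chain of equality tests.
  def dayKind(d: Int): String = d match {
    case 0 | 6             => "weekend"
    case 1 | 2 | 3 | 4 | 5 => "weekday"
    case _                 => "invalid"
  }

  def main(args: Array[String]): Unit =
    println(dayKind(3))  // weekday
}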
@@ -550,7 +543,7 @@ trait MatchOptimization extends MatchTreeMaking with MatchAnalysis {
else {
// match on scrutSym -- converted to an int if necessary -- not on scrut directly (to avoid duplicating scrut)
val scrutToInt: Tree =
- if (scrutSym.tpe =:= IntClass.tpe) REF(scrutSym)
+ if (scrutSym.tpe =:= IntTpe) REF(scrutSym)
else (REF(scrutSym) DOT (nme.toInt))
Some(BLOCK(
VAL(scrutSym) === scrut,
@@ -578,16 +571,16 @@ trait MatchOptimization extends MatchTreeMaking with MatchAnalysis {
}
def isDefault(x: CaseDef): Boolean = x match {
- case CaseDef(Typed(Ident(nme.WILDCARD), tpt), EmptyTree, _) if (tpt.tpe =:= ThrowableClass.tpe) => true
- case CaseDef(Bind(_, Typed(Ident(nme.WILDCARD), tpt)), EmptyTree, _) if (tpt.tpe =:= ThrowableClass.tpe) => true
+ case CaseDef(Typed(Ident(nme.WILDCARD), tpt), EmptyTree, _) if (tpt.tpe =:= ThrowableTpe) => true
+ case CaseDef(Bind(_, Typed(Ident(nme.WILDCARD), tpt)), EmptyTree, _) if (tpt.tpe =:= ThrowableTpe) => true
case CaseDef(Ident(nme.WILDCARD), EmptyTree, _) => true
case _ => false
}
- lazy val defaultSym: Symbol = freshSym(NoPosition, ThrowableClass.tpe)
+ lazy val defaultSym: Symbol = freshSym(NoPosition, ThrowableTpe)
def defaultBody: Tree = Throw(CODE.REF(defaultSym))
def defaultCase(scrutSym: Symbol = defaultSym, guard: Tree = EmptyTree, body: Tree = defaultBody): CaseDef = { import CODE._; atPos(body.pos) {
- (CASE (Bind(scrutSym, Typed(Ident(nme.WILDCARD), TypeTree(ThrowableClass.tpe)))) IF guard) ==> body
+ (CASE (Bind(scrutSym, Typed(Ident(nme.WILDCARD), TypeTree(ThrowableTpe)))) IF guard) ==> body
}}
}
diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala
index 90c52e3eb6..fcee142932 100644
--- a/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala
+++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala
@@ -14,14 +14,8 @@ import scala.reflect.internal.util.Statistics
*/
trait MatchTranslation { self: PatternMatching =>
import PatternMatchingStats._
- import global.{phase, currentRun, Symbol,
- Apply, Bind, CaseDef, ClassInfoType, Ident, Literal, Match,
- Alternative, Constant, EmptyTree, Select, Star, This, Throw, Typed, UnApply,
- Type, MethodType, WildcardType, PolyType, ErrorType, NoType, TypeRef, typeRef,
- Name, NoSymbol, Position, Tree, atPos, glb, rootMirror, treeInfo, nme, Transformer,
- elimAnonymousClass, asCompactDebugString, hasLength}
- import global.definitions.{ThrowableClass, SeqClass, ScalaPackageClass, BooleanClass, UnitClass, RepeatedParamClass,
- repeatedToSeq, isRepeatedParamType, getProductArgs}
+ import global._
+ import definitions._
import global.analyzer.{ErrorUtils, formalTypes}
trait MatchTranslator extends TreeMakers {
@@ -71,7 +65,7 @@ trait MatchTranslation { self: PatternMatching =>
}
while (it.hasNext) {
- val cdef = it.next
+ val cdef = it.next()
// If a default case has been seen, then every succeeding case is unreachable.
if (vpat != null)
context.unit./*error*/warning(cdef.body.pos, "unreachable code due to " + vpat + addendum(cdef.pat))
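The loop above walks the case definitions and, once a default (variable or wildcard) pattern has been seen, reports every later case as unreachable. A sketch of the shape it warns about, assuming a standard scalac:

object Unreachable {
  // After `case other` matches everything, any further case is dead code; the check
  // above reports "unreachable code due to ..." for it.
  def describe(x: Any): String = x match {
    case i: Int => "an Int: " + i
    case other  => "something else: " + other
    // case s: String => "a String"  // would be flagged as unreachable
  }

  def main(args: Array[String]): Unit =
    println(describe(42))
}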
@@ -169,20 +163,20 @@ trait MatchTranslation { self: PatternMatching =>
val bindersAndCases = caseDefs map { caseDef =>
// generate a fresh symbol for each case, hoping we'll end up emitting a type-switch (we don't have a global scrut there)
// if we fail to emit a fine-grained switch, have to do translateCase again with a single scrutSym (TODO: uniformize substitution on treemakers so we can avoid this)
- val caseScrutSym = freshSym(pos, pureType(ThrowableClass.tpe))
+ val caseScrutSym = freshSym(pos, pureType(ThrowableTpe))
(caseScrutSym, propagateSubstitution(translateCase(caseScrutSym, pt)(caseDef), EmptySubstitution))
}
- for(cases <- emitTypeSwitch(bindersAndCases, pt).toList;
+ for(cases <- emitTypeSwitch(bindersAndCases, pt).toList
if cases forall treeInfo.isCatchCase; // must check again, since it's not guaranteed -- TODO: can we eliminate this? e.g., a type test could test for a trait or a non-trivial prefix, which are not handled by the back-end
cse <- cases) yield fixerUpper(matchOwner, pos)(cse).asInstanceOf[CaseDef]
}
val catches = if (swatches.nonEmpty) swatches else {
- val scrutSym = freshSym(pos, pureType(ThrowableClass.tpe))
+ val scrutSym = freshSym(pos, pureType(ThrowableTpe))
val casesNoSubstOnly = caseDefs map { caseDef => (propagateSubstitution(translateCase(scrutSym, pt)(caseDef), EmptySubstitution))}
- val exSym = freshSym(pos, pureType(ThrowableClass.tpe), "ex")
+ val exSym = freshSym(pos, pureType(ThrowableTpe), "ex")
List(
atPos(pos) {
@@ -194,7 +188,7 @@ trait MatchTranslation { self: PatternMatching =>
})
}
- typer.typedCases(catches, ThrowableClass.tpe, WildcardType)
+ typer.typedCases(catches, ThrowableTpe, WildcardType)
}
@@ -243,7 +237,7 @@ trait MatchTranslation { self: PatternMatching =>
if (!extractor.isTyped) ErrorUtils.issueNormalTypeError(patTree, "Could not typecheck extractor call: "+ extractor)(context)
// if (extractor.resultInMonad == ErrorType) throw new TypeError(pos, "Unsupported extractor type: "+ extractor.tpe)
- debug.patmat("translateExtractorPattern checking parameter type: "+ (patBinder, patBinder.info.widen, extractor.paramType, patBinder.info.widen <:< extractor.paramType))
+ debug.patmat("translateExtractorPattern checking parameter type: "+ ((patBinder, patBinder.info.widen, extractor.paramType, patBinder.info.widen <:< extractor.paramType)))
// must use type `tp`, which is provided by extractor's result, not the type expected by binder,
// as b.info may be based on a Typed type ascription, which has not been taken into account yet by the translation
@@ -309,7 +303,7 @@ trait MatchTranslation { self: PatternMatching =>
// debug.patmat("unfun: "+ (unfun.tpe, unfun.symbol.ownerChain, unfun.symbol.info, patBinder.info))
translateExtractorPattern(ExtractorCall(unfun, args))
- /** A constructor pattern is of the form c(p1, ..., pn) where n ≥ 0.
+ /* A constructor pattern is of the form c(p1, ..., pn) where n ≥ 0.
It consists of a stable identifier c, followed by element patterns p1, ..., pn.
The constructor c is a simple or qualified name which denotes a case class (§5.3.2).
@@ -328,22 +322,22 @@ trait MatchTranslation { self: PatternMatching =>
noFurtherSubPats()
}
- /** A typed pattern x : T consists of a pattern variable x and a type pattern T.
- The type of x is the type pattern T, where each type variable and wildcard is replaced by a fresh, unknown type.
- This pattern matches any value matched by the type pattern T (§8.2); it binds the variable name to that value.
- **/
+ /* A typed pattern x : T consists of a pattern variable x and a type pattern T.
+ The type of x is the type pattern T, where each type variable and wildcard is replaced by a fresh, unknown type.
+ This pattern matches any value matched by the type pattern T (§8.2); it binds the variable name to that value.
+ */
// must treat Typed and Bind together -- we need to know the patBinder of the Bind pattern to get at the actual type
case MaybeBoundTyped(subPatBinder, pt) =>
val next = glb(List(dealiasWiden(patBinder.info), pt)).normalize
// a typed pattern never has any subtrees
noFurtherSubPats(TypeTestTreeMaker(subPatBinder, patBinder, pt, next)(pos))
- /** A pattern binder x@p consists of a pattern variable x and a pattern p.
- The type of the variable x is the static type T of the pattern p.
- This pattern matches any value v matched by the pattern p,
- provided the run-time type of v is also an instance of T, <-- TODO! https://issues.scala-lang.org/browse/SI-1503
- and it binds the variable name to that value.
- **/
+ /* A pattern binder x@p consists of a pattern variable x and a pattern p.
+ The type of the variable x is the static type T of the pattern p.
+ This pattern matches any value v matched by the pattern p,
+ provided the run-time type of v is also an instance of T, <-- TODO! https://issues.scala-lang.org/browse/SI-1503
+ and it binds the variable name to that value.
+ */
case Bound(subpatBinder, p) =>
// replace subpatBinder by patBinder (as if the Bind was not there)
withSubPats(List(SubstOnlyTreeMaker(subpatBinder, patBinder)),
@@ -351,14 +345,14 @@ trait MatchTranslation { self: PatternMatching =>
(patBinder, p)
)
- /** 8.1.4 Literal Patterns
- A literal pattern L matches any value that is equal (in terms of ==) to the literal L.
- The type of L must conform to the expected type of the pattern.
+ /* 8.1.4 Literal Patterns
+ A literal pattern L matches any value that is equal (in terms of ==) to the literal L.
+ The type of L must conform to the expected type of the pattern.
- 8.1.5 Stable Identifier Patterns (a stable identifier r (see §3.1))
- The pattern matches any value v such that r == v (§12.1).
- The type of r must conform to the expected type of the pattern.
- **/
+ 8.1.5 Stable Identifier Patterns (a stable identifier r (see §3.1))
+ The pattern matches any value v such that r == v (§12.1).
+ The type of r must conform to the expected type of the pattern.
+ */
case Literal(Constant(_)) | Ident(_) | Select(_, _) | This(_) =>
noFurtherSubPats(EqualityTestTreeMaker(patBinder, patTree, pos))
@@ -595,7 +589,7 @@ trait MatchTranslation { self: PatternMatching =>
def treeMaker(patBinderOrCasted: Symbol, binderKnownNonNull: Boolean, pos: Position): TreeMaker = {
// the extractor call (applied to the binder bound by the flatMap corresponding to the previous (i.e., enclosing/outer) pattern)
val extractorApply = atPos(pos)(spliceApply(patBinderOrCasted))
- val binder = freshSym(pos, pureType(resultInMonad)) // can't simplify this when subPatBinders.isEmpty, since UnitClass.tpe is definitely wrong when isSeq, and resultInMonad should always be correct since it comes directly from the extractor's result type
+ val binder = freshSym(pos, pureType(resultInMonad)) // can't simplify this when subPatBinders.isEmpty, since UnitTpe is definitely wrong when isSeq, and resultInMonad should always be correct since it comes directly from the extractor's result type
ExtractorTreeMaker(extractorApply, lengthGuard(binder), binder)(subPatBinders, subPatRefs(binder), resultType.typeSymbol == BooleanClass, checkedLength, patBinderOrCasted, ignoredSubPatBinders)
}
@@ -623,7 +617,7 @@ trait MatchTranslation { self: PatternMatching =>
// what's the extractor's result type in the monad?
// turn an extractor's result type into something `monadTypeToSubPatTypesAndRefs` understands
protected lazy val resultInMonad: Type = if(!hasLength(tpe.paramTypes, 1)) ErrorType else {
- if (resultType.typeSymbol == BooleanClass) UnitClass.tpe
+ if (resultType.typeSymbol == BooleanClass) UnitTpe
else matchMonadResult(resultType)
}
@@ -671,4 +665,4 @@ trait MatchTranslation { self: PatternMatching =>
}
}
}
-}
\ No newline at end of file
+}
diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala
index 202f3444f8..bce0a077fb 100644
--- a/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala
+++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala
@@ -11,7 +11,6 @@ import scala.language.postfixOps
import scala.collection.mutable
import scala.reflect.internal.util.Statistics
import scala.reflect.internal.util.Position
-import scala.reflect.internal.util.NoPosition
/** Translate our IR (TreeMakers) into actual Scala Trees using the factory methods in MatchCodeGen.
*
@@ -19,13 +18,8 @@ import scala.reflect.internal.util.NoPosition
* mostly agnostic to whether we're in optimized/pure (virtualized) mode.
*/
trait MatchTreeMaking extends MatchCodeGen with Debugging {
- import PatternMatchingStats._
- import global.{Tree, Type, Symbol, CaseDef, atPos, settings,
- Select, Block, ThisType, SingleType, NoPrefix, NoType, needsOuterTest,
- ConstantType, Literal, Constant, gen, This, EmptyTree, map2, NoSymbol, Traverser,
- Function, Typed, treeInfo, TypeRef, DefTree, Ident, nme}
-
- import global.definitions.{SomeClass, AnyRefClass, UncheckedClass, BooleanClass}
+ import global._
+ import definitions._
final case class Suppression(exhaustive: Boolean, unreachable: Boolean)
object Suppression {
@@ -60,7 +54,7 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging {
private[TreeMakers] def incorporateOuterSubstitution(outerSubst: Substitution): Unit = {
if (currSub ne null) {
- debug.patmat("BUG: incorporateOuterSubstitution called more than once for "+ (this, currSub, outerSubst))
+ debug.patmat("BUG: incorporateOuterSubstitution called more than once for "+ ((this, currSub, outerSubst)))
Thread.dumpStack()
}
else currSub = outerSubst >> substitution
@@ -100,7 +94,7 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging {
def chainBefore(next: Tree)(casegen: Casegen): Tree = // assert(next eq EmptyTree)
atPos(body.pos)(casegen.one(substitution(body))) // since SubstOnly treemakers are dropped, need to do it here
- override def toString = "B"+(body, matchPt)
+ override def toString = "B"+((body, matchPt))
}
case class SubstOnlyTreeMaker(prevBinder: Symbol, nextBinder: Symbol) extends TreeMaker {
@@ -220,7 +214,7 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging {
)
}
- override def toString = "X"+(extractor, nextBinder.name)
+ override def toString = "X"+((extractor, nextBinder.name))
}
/**
@@ -274,7 +268,7 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging {
}
}
- override def toString = "P"+(prevBinder.name, extraCond getOrElse "", localSubstitution)
+ override def toString = "P"+((prevBinder.name, extraCond getOrElse "", localSubstitution))
}
object IrrefutableExtractorTreeMaker {
@@ -389,7 +383,7 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging {
**/
case class TypeTestTreeMaker(prevBinder: Symbol, testedBinder: Symbol, expectedTp: Type, nextBinderTp: Type)(override val pos: Position, extractorArgTypeTest: Boolean = false) extends CondTreeMaker {
import TypeTestTreeMaker._
- debug.patmat("TTTM"+(prevBinder, extractorArgTypeTest, testedBinder, expectedTp, nextBinderTp))
+ debug.patmat("TTTM"+((prevBinder, extractorArgTypeTest, testedBinder, expectedTp, nextBinderTp)))
lazy val outerTestNeeded = (
!((expectedTp.prefix eq NoPrefix) || expectedTp.prefix.typeSymbol.isPackageClass)
@@ -420,7 +414,7 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging {
case SingleType(_, sym) => and(equalsTest(gen.mkAttributedQualifier(expectedTp), testedBinder), typeTest(testedBinder, expectedTp.widen))
// must use == to support e.g. List() == Nil
case ThisType(sym) if sym.isModule => and(equalsTest(CODE.REF(sym), testedBinder), typeTest(testedBinder, expectedTp.widen))
- case ConstantType(Constant(null)) if testedBinder.info.widen <:< AnyRefClass.tpe
+ case ConstantType(Constant(null)) if testedBinder.info.widen <:< AnyRefTpe
=> eqTest(expTp(CODE.NULL), testedBinder)
case ConstantType(const) => equalsTest(expTp(Literal(const)), testedBinder)
case ThisType(sym) => eqTest(expTp(This(sym)), testedBinder)
@@ -434,7 +428,7 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging {
// since the types conform, no further checking is required
if (expectedTp.typeSymbol.isPrimitiveValueClass) tru
// have to test outer and non-null only when it's a reference type
- else if (expectedTp <:< AnyRefClass.tpe) {
+ else if (expectedTp <:< AnyRefTpe) {
// do non-null check first to ensure we won't select outer on null
if (outerTestNeeded) and(nonNullTest(testedBinder), outerTest(testedBinder, expectedTp))
else nonNullTest(testedBinder)
@@ -452,7 +446,7 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging {
def impliesBinderNonNull(binder: Symbol) = renderCondition(nonNullImpliedByTestChecker(binder))
- override def toString = "TT"+(expectedTp, testedBinder.name, nextBinderTp)
+ override def toString = "TT"+((expectedTp, testedBinder.name, nextBinderTp))
}
// need to substitute to deal with existential types -- TODO: deal with existentials better, don't substitute (see RichClass during quick.comp)
@@ -463,7 +457,7 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging {
// equals need not be well-behaved, so don't intersect with pattern's (stabilized) type (unlike MaybeBoundTyped's accumType, where it's required)
val cond = codegen._equals(patTree, prevBinder)
val res = CODE.REF(prevBinder)
- override def toString = "ET"+(prevBinder.name, patTree)
+ override def toString = "ET"+((prevBinder.name, patTree))
}
case class AlternativesTreeMaker(prevBinder: Symbol, var altss: List[List[TreeMaker]], pos: Position) extends TreeMaker with NoNewBinders {
@@ -474,7 +468,7 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging {
altss = altss map (alts => propagateSubstitution(alts, substitution))
}
- def chainBefore(next: Tree)(codegenAlt: Casegen): Tree = { import CODE._
+ def chainBefore(next: Tree)(codegenAlt: Casegen): Tree = {
atPos(pos){
// one alternative may still generate multiple trees (e.g., an extractor call + equality test)
// (for now,) alternatives may not bind variables (except wildcards), so we don't care about the final substitution built internally by makeTreeMakers
@@ -482,7 +476,7 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging {
((casegen: Casegen) => combineExtractors(altTreeMakers :+ TrivialTreeMaker(casegen.one(mkTRUE)))(casegen))
)
- val findAltMatcher = codegenAlt.matcher(EmptyTree, NoSymbol, BooleanClass.tpe)(combinedAlts, Some(x => mkFALSE))
+ val findAltMatcher = codegenAlt.matcher(EmptyTree, NoSymbol, BooleanTpe)(combinedAlts, Some(x => mkFALSE))
codegenAlt.ifThenElseZero(findAltMatcher, substitution(next))
}
}
@@ -528,7 +522,7 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging {
debug.patmat("combining cases: "+ (casesNoSubstOnly.map(_.mkString(" >> ")).mkString("{", "\n", "}")))
val (suppression, requireSwitch): (Suppression, Boolean) =
- if (settings.XnoPatmatAnalysis.value) (Suppression.NoSuppression, false)
+ if (settings.XnoPatmatAnalysis) (Suppression.NoSuppression, false)
else scrut match {
case Typed(tree, tpt) =>
val suppressExhaustive = tpt.tpe hasAnnotation UncheckedClass
@@ -587,18 +581,18 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging {
t match {
case Function(_, _) if t.symbol == NoSymbol =>
t.symbol = currentOwner.newAnonymousFunctionValue(t.pos)
- debug.patmat("new symbol for "+ (t, t.symbol.ownerChain))
+ debug.patmat("new symbol for "+ ((t, t.symbol.ownerChain)))
case Function(_, _) if (t.symbol.owner == NoSymbol) || (t.symbol.owner == origOwner) =>
- debug.patmat("fundef: "+ (t, t.symbol.ownerChain, currentOwner.ownerChain))
+ debug.patmat("fundef: "+ ((t, t.symbol.ownerChain, currentOwner.ownerChain)))
t.symbol.owner = currentOwner
case d : DefTree if (d.symbol != NoSymbol) && ((d.symbol.owner == NoSymbol) || (d.symbol.owner == origOwner)) => // don't indiscriminately change existing owners! (see e.g., pos/t3440, pos/t3534, pos/unapplyContexts2)
- debug.patmat("def: "+ (d, d.symbol.ownerChain, currentOwner.ownerChain))
+ debug.patmat("def: "+ ((d, d.symbol.ownerChain, currentOwner.ownerChain)))
if(d.symbol.moduleClass ne NoSymbol)
d.symbol.moduleClass.owner = currentOwner
d.symbol.owner = currentOwner
// case _ if (t.symbol != NoSymbol) && (t.symbol ne null) =>
- debug.patmat("untouched "+ (t, t.getClass, t.symbol.ownerChain, currentOwner.ownerChain))
+ debug.patmat("untouched "+ ((t, t.getClass, t.symbol.ownerChain, currentOwner.ownerChain)))
case _ =>
}
super.traverse(t)
@@ -611,4 +605,4 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging {
// currentRun.trackerFactory.snapshot()
}
}
-}
\ No newline at end of file
+}
diff --git a/src/compiler/scala/tools/nsc/transform/patmat/PatternMatching.scala b/src/compiler/scala/tools/nsc/transform/patmat/PatternMatching.scala
index df4e699620..63834ae51e 100644
--- a/src/compiler/scala/tools/nsc/transform/patmat/PatternMatching.scala
+++ b/src/compiler/scala/tools/nsc/transform/patmat/PatternMatching.scala
@@ -48,9 +48,7 @@ trait PatternMatching extends Transform with TypingTransformers
val phaseName: String = "patmat"
- def newTransformer(unit: CompilationUnit): Transformer =
- if (opt.virtPatmat) new MatchTransformer(unit)
- else noopTransformer
+ def newTransformer(unit: CompilationUnit): Transformer = new MatchTransformer(unit)
class MatchTransformer(unit: CompilationUnit) extends TypingTransformer(unit) {
override def transform(tree: Tree): Tree = tree match {
@@ -105,11 +103,10 @@ trait Interface extends ast.TreeDSL {
import analyzer.Typer
// 2.10/2.11 compatibility
- protected final def dealiasWiden(tp: Type) = tp.dealias // 2.11: dealiasWiden
- protected final def mkTRUE = CODE.TRUE_typed // 2.11: CODE.TRUE
- protected final def mkFALSE = CODE.FALSE_typed // 2.11: CODE.FALSE
- protected final def hasStableSymbol(p: Tree) = p.hasSymbol && p.symbol.isStable // 2.11: p.hasSymbolField && p.symbol.isStable
- protected final def devWarning(str: String) = global.debugwarn(str) // 2.11: omit
+ protected final def dealiasWiden(tp: Type) = tp.dealiasWiden
+ protected final def mkTRUE = CODE.TRUE
+ protected final def mkFALSE = CODE.FALSE
+ protected final def hasStableSymbol(p: Tree) = p.hasSymbolField && p.symbol.isStable
object vpmName {
val one = newTermName("one")
@@ -132,8 +129,9 @@ trait Interface extends ast.TreeDSL {
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/** Interface with user-defined match monad?
- * if there's a `__match` in scope, we use this as the match strategy, assuming it conforms to MatchStrategy as defined below:
+ * if there's a <code>__match</code> in scope, we use this as the match strategy, assuming it conforms to MatchStrategy as defined below:
+ {{{
type Matcher[P[_], M[+_], A] = {
def flatMap[B](f: P[A] => M[B]): M[B]
def orElse[B >: A](alternative: => M[B]): M[B]
@@ -147,12 +145,14 @@ trait Interface extends ast.TreeDSL {
def one[T](x: P[T]): M[T]
def guard[T](cond: P[Boolean], then: => P[T]): M[T]
}
+ }}}
* P and M are derived from one's signature (`def one[T](x: P[T]): M[T]`)
- * if no `__match` is found, we assume the following implementation (and generate optimized code accordingly)
+ * if no <code>__match</code> is found, we assume the following implementation (and generate optimized code accordingly)
+ {{{
object __match extends MatchStrategy[({type Id[x] = x})#Id, Option] {
def zero = None
def one[T](x: T) = Some(x)
@@ -160,6 +160,7 @@ trait Interface extends ast.TreeDSL {
def guard[T](cond: Boolean, then: => T): Option[T] = if(cond) Some(then) else None
def runOrElse[T, U](x: T)(f: T => Option[U]): U = f(x) getOrElse (throw new MatchError(x))
}
+ }}}
*/
trait MatchMonadInterface {
diff --git a/src/compiler/scala/tools/nsc/transform/patmat/Solving.scala b/src/compiler/scala/tools/nsc/transform/patmat/Solving.scala
index 843f831ea1..3c7dc79636 100644
--- a/src/compiler/scala/tools/nsc/transform/patmat/Solving.scala
+++ b/src/compiler/scala/tools/nsc/transform/patmat/Solving.scala
@@ -8,18 +8,12 @@ package scala.tools.nsc.transform.patmat
import scala.collection.mutable
import scala.reflect.internal.util.Statistics
+import scala.language.postfixOps
// naive CNF translation and simple DPLL solver
trait Solving extends Logic {
import PatternMatchingStats._
trait CNF extends PropositionalLogic {
-
- /** Override Array creation for efficiency (to not go through reflection). */
- private implicit val clauseTag: scala.reflect.ClassTag[Clause] = new scala.reflect.ClassTag[Clause] {
- def runtimeClass: java.lang.Class[Clause] = classOf[Clause]
- final override def newArray(len: Int): Array[Clause] = new Array[Clause](len)
- }
-
import scala.collection.mutable.ArrayBuffer
type FormulaBuilder = ArrayBuffer[Clause]
def formulaBuilder = ArrayBuffer[Clause]()
@@ -71,7 +65,7 @@ trait Solving extends Logic {
val TrueF = formula()
val FalseF = formula(clause())
def lit(s: Sym) = formula(clause(Lit(s)))
- def negLit(s: Sym) = formula(clause(Lit(s, false)))
+ def negLit(s: Sym) = formula(clause(Lit(s, pos = false)))
def conjunctiveNormalForm(p: Prop, budget: Int = AnalysisBudget.max): Formula = {
def distribute(a: Formula, b: Formula, budget: Int): Formula =
@@ -164,7 +158,7 @@ trait Solving extends Logic {
else Nil
}
val forced = unassigned flatMap { s =>
- force(Lit(s, true)) ++ force(Lit(s, false))
+ force(Lit(s, pos = true)) ++ force(Lit(s, pos = false))
}
debug.patmat("forced "+ forced)
val negated = negateModel(model)
diff --git a/src/compiler/scala/tools/nsc/typechecker/Adaptations.scala b/src/compiler/scala/tools/nsc/typechecker/Adaptations.scala
index 62c584e97b..567d5d0ecd 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Adaptations.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Adaptations.scala
@@ -66,9 +66,9 @@ trait Adaptations {
)
}
- if (settings.noAdaptedArgs.value)
+ if (settings.noAdaptedArgs)
adaptWarning("No automatic adaptation here: use explicit parentheses.")
- else if (settings.warnAdaptedArgs.value)
+ else if (settings.warnAdaptedArgs)
adaptWarning(
if (args.isEmpty) "Adapting argument list by inserting (): " + (
if (isLeakyTarget) "leaky (Object-receiving) target makes this especially dangerous."
@@ -77,7 +77,7 @@ trait Adaptations {
else "Adapting argument list by creating a " + args.size + "-tuple: this may not be what you want."
)
- !settings.noAdaptedArgs.value
+ !settings.noAdaptedArgs
}
}
}
diff --git a/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala b/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala
index b50486306d..02e1eb6f00 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala
@@ -16,7 +16,6 @@ trait Analyzer extends AnyRef
with Typers
with Infer
with Implicits
- with Variances
with EtaExpansion
with SyntheticMethods
with Unapplies
@@ -88,22 +87,25 @@ trait Analyzer extends AnyRef
override def run() {
val start = if (Statistics.canEnable) Statistics.startTimer(typerNanos) else null
global.echoPhaseSummary(this)
- currentRun.units foreach applyPhase
- undoLog.clear()
- // need to clear it after as well or 10K+ accumulated entries are
- // uncollectable the rest of the way.
+ for (unit <- currentRun.units) {
+ applyPhase(unit)
+ undoLog.clear()
+ }
if (Statistics.canEnable) Statistics.stopTimer(typerNanos, start)
}
def apply(unit: CompilationUnit) {
try {
- unit.body = newTyper(rootContext(unit)).typed(unit.body)
- if (global.settings.Yrangepos.value && !global.reporter.hasErrors) global.validatePositions(unit.body)
+ val typer = newTyper(rootContext(unit))
+ unit.body = typer.typed(unit.body)
+ if (global.settings.Yrangepos && !global.reporter.hasErrors) global.validatePositions(unit.body)
for (workItem <- unit.toCheck) workItem()
- } finally {
+ if (settings.lint)
+ typer checkUnused unit
+ }
+ finally {
unit.toCheck.clear()
}
}
}
}
}
-
diff --git a/src/compiler/scala/tools/nsc/typechecker/AnalyzerPlugins.scala b/src/compiler/scala/tools/nsc/typechecker/AnalyzerPlugins.scala
index 28f620dbb5..54e4fefc15 100644
--- a/src/compiler/scala/tools/nsc/typechecker/AnalyzerPlugins.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/AnalyzerPlugins.scala
@@ -33,7 +33,7 @@ trait AnalyzerPlugins { self: Analyzer =>
/**
* Let analyzer plugins change the expected type before type checking a tree.
*/
- def pluginsPt(pt: Type, typer: Typer, tree: Tree, mode: Int): Type = pt
+ def pluginsPt(pt: Type, typer: Typer, tree: Tree, mode: Mode): Type = pt
/**
* Let analyzer plugins modify the type that has been computed for a tree.
@@ -44,7 +44,7 @@ trait AnalyzerPlugins { self: Analyzer =>
* @param mode Mode that was used for typing `tree`
* @param pt Expected type that was used for typing `tree`
*/
- def pluginsTyped(tpe: Type, typer: Typer, tree: Tree, mode: Int, pt: Type): Type = tpe
+ def pluginsTyped(tpe: Type, typer: Typer, tree: Tree, mode: Mode, pt: Type): Type = tpe
/**
* Let analyzer plugins change the types assigned to definitions. For definitions that have
@@ -133,7 +133,7 @@ trait AnalyzerPlugins { self: Analyzer =>
* Decide whether this analyzer plugin can adapt a tree that has an annotated type to the
* given type tp, taking into account the given mode (see method adapt in trait Typers).
*/
- def canAdaptAnnotations(tree: Tree, typer: Typer, mode: Int, pt: Type): Boolean = false
+ def canAdaptAnnotations(tree: Tree, typer: Typer, mode: Mode, pt: Type): Boolean = false
/**
* Adapt a tree that has an annotated type to the given type tp, taking into account the given
@@ -142,11 +142,11 @@ trait AnalyzerPlugins { self: Analyzer =>
* An implementation cannot rely on canAdaptAnnotations being called before. If the implementing
* class cannot do the adapting, it should return the tree unchanged.
*/
- def adaptAnnotations(tree: Tree, typer: Typer, mode: Int, pt: Type): Tree = tree
+ def adaptAnnotations(tree: Tree, typer: Typer, mode: Mode, pt: Type): Tree = tree
/**
* Modify the type of a return expression. By default, return expressions have type
- * NothingClass.tpe.
+ * NothingTpe.
*
* @param tpe The type of the return expression
* @param typer The typer that was used for typing the return tree
@@ -169,13 +169,13 @@ trait AnalyzerPlugins { self: Analyzer =>
/** @see AnalyzerPlugin.pluginsPt */
- def pluginsPt(pt: Type, typer: Typer, tree: Tree, mode: Int): Type =
+ def pluginsPt(pt: Type, typer: Typer, tree: Tree, mode: Mode): Type =
if (analyzerPlugins.isEmpty) pt
else analyzerPlugins.foldLeft(pt)((pt, plugin) =>
if (!plugin.isActive()) pt else plugin.pluginsPt(pt, typer, tree, mode))
/** @see AnalyzerPlugin.pluginsTyped */
- def pluginsTyped(tpe: Type, typer: Typer, tree: Tree, mode: Int, pt: Type): Type = {
+ def pluginsTyped(tpe: Type, typer: Typer, tree: Tree, mode: Mode, pt: Type): Type = {
// support deprecated methods in annotation checkers
val annotCheckersTpe = addAnnotations(tree, tpe)
if (analyzerPlugins.isEmpty) annotCheckersTpe
@@ -196,7 +196,7 @@ trait AnalyzerPlugins { self: Analyzer =>
if (!plugin.isActive()) tpe else plugin.pluginsTypeSigAccessor(tpe, typer, tree, sym))
/** @see AnalyzerPlugin.canAdaptAnnotations */
- def canAdaptAnnotations(tree: Tree, typer: Typer, mode: Int, pt: Type): Boolean = {
+ def canAdaptAnnotations(tree: Tree, typer: Typer, mode: Mode, pt: Type): Boolean = {
// support deprecated methods in annotation checkers
val annotCheckersExists = global.canAdaptAnnotations(tree, mode, pt)
annotCheckersExists || {
@@ -207,7 +207,7 @@ trait AnalyzerPlugins { self: Analyzer =>
}
/** @see AnalyzerPlugin.adaptAnnotations */
- def adaptAnnotations(tree: Tree, typer: Typer, mode: Int, pt: Type): Tree = {
+ def adaptAnnotations(tree: Tree, typer: Typer, mode: Mode, pt: Type): Tree = {
// support deprecated methods in annotation checkers
val annotCheckersTree = global.adaptAnnotations(tree, mode, pt)
if (analyzerPlugins.isEmpty) annotCheckersTree
diff --git a/src/compiler/scala/tools/nsc/typechecker/Checkable.scala b/src/compiler/scala/tools/nsc/typechecker/Checkable.scala
index d30b5c2601..31a31df764 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Checkable.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Checkable.scala
@@ -6,12 +6,8 @@
package scala.tools.nsc
package typechecker
-import scala.collection.{ mutable, immutable }
-import scala.collection.mutable.ListBuffer
-import scala.util.control.ControlThrowable
-import symtab.Flags._
-import scala.annotation.tailrec
import Checkability._
+import scala.language.postfixOps
/** On pattern matcher checkability:
*
@@ -66,6 +62,9 @@ trait Checkable {
bases foreach { bc =>
val tps1 = (from baseType bc).typeArgs
val tps2 = (tvarType baseType bc).typeArgs
+ if (tps1.size != tps2.size)
+ devWarning(s"Unequally sized type arg lists in propagateKnownTypes($from, $to): ($tps1, $tps2)")
+
(tps1, tps2).zipped foreach (_ =:= _)
// Alternate, variance respecting formulation causes
// neg/unchecked3.scala to fail (abstract types). TODO -
@@ -82,7 +81,7 @@ trait Checkable {
val resArgs = tparams zip tvars map {
case (_, tvar) if tvar.instValid => tvar.constr.inst
- case (tparam, _) => tparam.tpe
+ case (tparam, _) => tparam.tpeHK
}
appliedType(to, resArgs: _*)
}
@@ -112,7 +111,7 @@ trait Checkable {
private class CheckabilityChecker(val X: Type, val P: Type) {
def Xsym = X.typeSymbol
def Psym = P.typeSymbol
- def XR = propagateKnownTypes(X, Psym)
+ def XR = if (Xsym == AnyClass) classExistentialType(Psym) else propagateKnownTypes(X, Psym)
// sadly the spec says (new java.lang.Boolean(true)).isInstanceOf[scala.Boolean]
def P1 = X matchesPattern P
def P2 = !Psym.isPrimitiveValueClass && isNeverSubType(X, P)
@@ -134,7 +133,7 @@ trait Checkable {
else if (P3) RuntimeCheckable
else if (uncheckableType == NoType) {
// Avoid warning (except ourselves) if we can't pinpoint the uncheckable type
- debugwarn("Checkability checker says 'Uncheckable', but uncheckable type cannot be found:\n" + summaryString)
+ debuglog("Checkability checker says 'Uncheckable', but uncheckable type cannot be found:\n" + summaryString)
CheckabilityError
}
else Uncheckable
@@ -154,6 +153,7 @@ trait Checkable {
def neverSubClass = isNeverSubClass(Xsym, Psym)
def neverMatches = result == StaticallyFalse
def isUncheckable = result == Uncheckable
+ def isCheckable = !isUncheckable
def uncheckableMessage = uncheckableType match {
case NoType => "something"
case tp @ RefinedType(_, _) => "refinement " + tp
@@ -195,19 +195,27 @@ trait Checkable {
* so I will consult with moors about the optimal time to be doing this.
*/
def areIrreconcilableAsParents(sym1: Symbol, sym2: Symbol): Boolean = areUnrelatedClasses(sym1, sym2) && (
- sym1.initialize.isEffectivelyFinal // initialization important
- || sym2.initialize.isEffectivelyFinal
+ isEffectivelyFinal(sym1) // initialization important
+ || isEffectivelyFinal(sym2)
|| !sym1.isTrait && !sym2.isTrait
|| sym1.isSealed && sym2.isSealed && allChildrenAreIrreconcilable(sym1, sym2) && !currentRun.compiles(sym1) && !currentRun.compiles(sym2)
)
+ private def isEffectivelyFinal(sym: Symbol): Boolean = (
+ // initialization important
+ sym.initialize.isEffectivelyFinal || (
+ settings.future && isTupleSymbol(sym) // SI-7294 step into the future and treat TupleN as final.
+ )
+ )
+
def isNeverSubClass(sym1: Symbol, sym2: Symbol) = areIrreconcilableAsParents(sym1, sym2)
private def isNeverSubArgs(tps1: List[Type], tps2: List[Type], tparams: List[Symbol]): Boolean = /*logResult(s"isNeverSubArgs($tps1, $tps2, $tparams)")*/ {
- def isNeverSubArg(t1: Type, t2: Type, variance: Int) = {
- if (variance > 0) isNeverSubType(t2, t1)
- else if (variance < 0) isNeverSubType(t1, t2)
- else isNeverSameType(t1, t2)
- }
+ def isNeverSubArg(t1: Type, t2: Type, variance: Variance) = (
+ if (variance.isInvariant) isNeverSameType(t1, t2)
+ else if (variance.isCovariant) isNeverSubType(t2, t1)
+ else if (variance.isContravariant) isNeverSubType(t1, t2)
+ else false
+ )
exists3(tps1, tps2, tparams map (_.variance))(isNeverSubArg)
}
private def isNeverSameType(tp1: Type, tp2: Type): Boolean = (tp1, tp2) match {
@@ -232,6 +240,17 @@ trait Checkable {
trait InferCheckable {
self: Inferencer =>
+ def isUncheckable(P0: Type) = !isCheckable(P0)
+
+ def isCheckable(P0: Type): Boolean = (
+ uncheckedOk(P0) || (P0.widen match {
+ case TypeRef(_, NothingClass | NullClass | AnyValClass, _) => false
+ case RefinedType(_, decls) if !decls.isEmpty => false
+ case p =>
+ new CheckabilityChecker(AnyTpe, p) isCheckable
+ })
+ )
+
/** TODO: much better error positions.
* Kind of stuck right now because they just pass us the one tree.
* TODO: Eliminate inPattern, canRemedy, which have no place here.
diff --git a/src/compiler/scala/tools/nsc/typechecker/ConstantFolder.scala b/src/compiler/scala/tools/nsc/typechecker/ConstantFolder.scala
index 89e2ee44be..56ed0ee16c 100644
--- a/src/compiler/scala/tools/nsc/typechecker/ConstantFolder.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/ConstantFolder.scala
@@ -3,10 +3,10 @@
* @author Martin Odersky
*/
-package scala.tools.nsc
+package scala
+package tools.nsc
package typechecker
-
import java.lang.ArithmeticException
/** This class ...
@@ -18,7 +18,6 @@ abstract class ConstantFolder {
val global: Global
import global._
- import definitions._
/** If tree is a constant operation, replace with result. */
def apply(tree: Tree): Tree = fold(tree, tree match {
@@ -29,9 +28,6 @@ abstract class ConstantFolder {
/** If tree is a constant value that can be converted to type `pt`, perform
* the conversion.
- *
- * @param tree ...
- * @param pt ...
*/
def apply(tree: Tree, pt: Type): Tree = fold(apply(tree), tree.tpe match {
case ConstantType(x) => x convertTo pt
diff --git a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala
index b9cff5b2d3..fe1607c631 100644
--- a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala
@@ -6,55 +6,54 @@
package scala.tools.nsc
package typechecker
-import scala.collection.{ mutable, immutable }
import scala.reflect.internal.util.StringOps.{ countElementsAsString, countAsString }
-import symtab.Flags.{ PRIVATE, PROTECTED, IS_ERROR }
+import symtab.Flags.IS_ERROR
import scala.compat.Platform.EOL
import scala.reflect.runtime.ReflectionUtils
import scala.reflect.macros.runtime.AbortMacroException
import scala.util.control.NonFatal
import scala.tools.nsc.util.stackTraceString
+import scala.reflect.io.NoAbstractFile
trait ContextErrors {
self: Analyzer =>
import global._
import definitions._
- import treeInfo._
- object ErrorKinds extends Enumeration {
- type ErrorKind = Value
- val Normal, Access, Ambiguous, Divergent = Value
- }
-
- import ErrorKinds.ErrorKind
-
- trait AbsTypeError extends Throwable {
+ abstract class AbsTypeError extends Throwable {
def errPos: Position
def errMsg: String
- def kind: ErrorKind
+ override def toString() = "[Type error at:" + errPos + "] " + errMsg
}
- case class NormalTypeError(underlyingTree: Tree, errMsg: String, kind: ErrorKind = ErrorKinds.Normal)
- extends AbsTypeError {
-
- def errPos:Position = underlyingTree.pos
- override def toString() = "[Type error at:" + underlyingTree.pos + "] " + errMsg
+ abstract class TreeTypeError extends AbsTypeError {
+ def underlyingTree: Tree
+ def errPos = underlyingTree.pos
}
- case class SymbolTypeError(underlyingSym: Symbol, errMsg: String, kind: ErrorKind = ErrorKinds.Normal)
+ case class NormalTypeError(underlyingTree: Tree, errMsg: String)
+ extends TreeTypeError
+
+ case class AccessTypeError(underlyingTree: Tree, errMsg: String)
+ extends TreeTypeError
+
+ case class AmbiguousTypeError(errPos: Position, errMsg: String)
+ extends AbsTypeError
+
+ case class SymbolTypeError(underlyingSym: Symbol, errMsg: String)
extends AbsTypeError {
def errPos = underlyingSym.pos
}
- case class TypeErrorWrapper(ex: TypeError, kind: ErrorKind = ErrorKinds.Normal)
+ case class TypeErrorWrapper(ex: TypeError)
extends AbsTypeError {
def errMsg = ex.msg
def errPos = ex.pos
}
- case class TypeErrorWithUnderlyingTree(tree: Tree, ex: TypeError, kind: ErrorKind = ErrorKinds.Normal)
+ case class TypeErrorWithUnderlyingTree(tree: Tree, ex: TypeError)
extends AbsTypeError {
def errMsg = ex.msg
def errPos = tree.pos
@@ -68,19 +67,19 @@ trait ContextErrors {
// (pt at the point of divergence gives less information to the user)
// Note: it is safe to delay error message generation in this case
// becasue we don't modify implicits' infos.
- // only issued when -Xdivergence211 is turned on
- case class DivergentImplicitTypeError(tree: Tree, pt0: Type, sym: Symbol) extends AbsTypeError {
- def errPos: Position = tree.pos
+ case class DivergentImplicitTypeError(underlyingTree: Tree, pt0: Type, sym: Symbol)
+ extends TreeTypeError {
def errMsg: String = errMsgForPt(pt0)
- def kind = ErrorKinds.Divergent
- def withPt(pt: Type): AbsTypeError = NormalTypeError(tree, errMsgForPt(pt), kind)
+ def withPt(pt: Type): AbsTypeError = this.copy(pt0 = pt)
private def errMsgForPt(pt: Type) =
s"diverging implicit expansion for type ${pt}\nstarting with ${sym.fullLocationString}"
}
- case class AmbiguousTypeError(underlyingTree: Tree, errPos: Position, errMsg: String, kind: ErrorKind = ErrorKinds.Ambiguous) extends AbsTypeError
+ case class AmbiguousImplicitTypeError(underlyingTree: Tree, errMsg: String)
+ extends TreeTypeError
- case class PosAndMsgTypeError(errPos: Position, errMsg: String, kind: ErrorKind = ErrorKinds.Normal) extends AbsTypeError
+ case class PosAndMsgTypeError(errPos: Position, errMsg: String)
+ extends AbsTypeError
object ErrorUtils {
def issueNormalTypeError(tree: Tree, msg: String)(implicit context: Context) {
@@ -91,22 +90,13 @@ trait ContextErrors {
issueTypeError(SymbolTypeError(sym, msg))
}
- // only called when -Xdivergence211 is turned off
- def issueDivergentImplicitsError(tree: Tree, msg: String)(implicit context: Context) {
- issueTypeError(NormalTypeError(tree, msg, ErrorKinds.Divergent))
- }
-
def issueAmbiguousTypeError(pre: Type, sym1: Symbol, sym2: Symbol, err: AmbiguousTypeError)(implicit context: Context) {
context.issueAmbiguousError(pre, sym1, sym2, err)
}
def issueTypeError(err: AbsTypeError)(implicit context: Context) { context.issue(err) }
- def typeErrorMsg(found: Type, req: Type, possiblyMissingArgs: Boolean) = {
- def missingArgsMsg = if (possiblyMissingArgs) "\n possible cause: missing arguments for method or constructor" else ""
-
- "type mismatch" + foundReqMsg(found, req) + missingArgsMsg
- }
+ def typeErrorMsg(found: Type, req: Type) = "type mismatch" + foundReqMsg(found, req)
}
def notAnyRefMessage(found: Type): String = {
@@ -147,7 +137,7 @@ trait ContextErrors {
}
issueNormalTypeError(tree,
"stable identifier required, but "+tree+" found." + (
- if (isStableExceptVolatile(tree)) addendum else ""))
+ if (treeInfo.hasVolatileType(tree)) addendum else ""))
setError(tree)
}
@@ -172,11 +162,10 @@ trait ContextErrors {
// members present, then display along with the expected members. This is done here because
// this is the last point where we still have access to the original tree, rather than just
// the found/req types.
- val foundType: Type = req.normalize match {
+ val foundType: Type = req.dealiasWiden match {
case RefinedType(parents, decls) if !decls.isEmpty && found.typeSymbol.isAnonOrRefinementClass =>
- val retyped = typed (tree.duplicate setType null)
+ val retyped = typed (tree.duplicate.clearType())
val foundDecls = retyped.tpe.decls filter (sym => !sym.isConstructor && !sym.isSynthetic)
-
if (foundDecls.isEmpty || (found.typeSymbol eq NoSymbol)) found
else {
// The members arrive marked private, presumably because there was no
@@ -190,11 +179,10 @@ trait ContextErrors {
case _ =>
found
}
- assert(!found.isErroneous && !req.isErroneous, (found, req))
+ assert(!foundType.isErroneous && !req.isErroneous, (foundType, req))
- issueNormalTypeError(tree, withAddendum(tree.pos)(typeErrorMsg(found, req, infer.isPossiblyMissingArgs(found, req))) )
- if (settings.explaintypes.value)
- explainTypes(found, req)
+ issueNormalTypeError(tree, withAddendum(tree.pos)(typeErrorMsg(foundType, req)))
+ infer.explainTypes(foundType, req)
}
def WithFilterError(tree: Tree, ex: AbsTypeError) = {
@@ -203,14 +191,18 @@ trait ContextErrors {
}
def ParentTypesError(templ: Template, ex: TypeError) = {
- templ.tpe = null
- issueNormalTypeError(templ, ex.getMessage())
+ templ.clearType()
+ issueNormalTypeError(templ, ex.getMessage())
+ setError(templ)
}
// additional parentTypes errors
- def ConstrArgsInTraitParentTpeError(arg: Tree, parent: Symbol) =
+ def ConstrArgsInParentWhichIsTraitError(arg: Tree, parent: Symbol) =
issueNormalTypeError(arg, parent + " is a trait; does not take constructor arguments")
+ def ConstrArgsInParentOfTraitError(arg: Tree, parent: Symbol) =
+ issueNormalTypeError(arg, "parents of traits may not have parameters")
+
def MissingTypeArgumentsParentTpeError(supertpt: Tree) =
issueNormalTypeError(supertpt, "missing type arguments")
@@ -318,7 +310,7 @@ trait ContextErrors {
val target = qual.tpe.widen
def targetKindString = if (owner.isTypeParameterOrSkolem) "type parameter " else ""
def nameString = decodeWithKind(name, owner)
- /** Illuminating some common situations and errors a bit further. */
+ /* Illuminating some common situations and errors a bit further. */
def addendum = {
val companion = {
if (name.isTermName && owner.isPackageClass) {
@@ -468,7 +460,7 @@ trait ContextErrors {
def AbstractionFromVolatileTypeError(vd: ValDef) =
issueNormalTypeError(vd, "illegal abstraction from value with volatile type "+vd.symbol.tpe)
- private[ContextErrors] def TypedApplyWrongNumberOfTpeParametersErrorMessage(fun: Tree) =
+ private[scala] def TypedApplyWrongNumberOfTpeParametersErrorMessage(fun: Tree) =
"wrong number of type parameters for "+treeSymTypeMsg(fun)
def TypedApplyWrongNumberOfTpeParametersError(tree: Tree, fun: Tree) = {
@@ -484,7 +476,7 @@ trait ContextErrors {
// doTypeApply
//tryNamesDefaults
def NamedAndDefaultArgumentsNotSupportedForMacros(tree: Tree, fun: Tree) =
- NormalTypeError(tree, "macros application do not support named and/or default arguments")
+ NormalTypeError(tree, "macro applications do not support named and/or default arguments")
def TooManyArgsNamesDefaultsError(tree: Tree, fun: Tree) =
NormalTypeError(tree, "too many arguments for "+treeSymTypeMsg(fun))
@@ -532,7 +524,7 @@ trait ContextErrors {
NormalTypeError(tree, fun.tpe+" does not take parameters")
// Dynamic
- def DynamicVarArgUnsupported(tree: Tree, name: String) =
+ def DynamicVarArgUnsupported(tree: Tree, name: Name) =
issueNormalTypeError(tree, name+ " does not support passing a vararg parameter")
def DynamicRewriteError(tree: Tree, err: AbsTypeError) = {
@@ -578,11 +570,13 @@ trait ContextErrors {
//adapt
def MissingArgsForMethodTpeError(tree: Tree, meth: Symbol) = {
- issueNormalTypeError(tree,
- "missing arguments for " + meth.fullLocationString + (
+ val message =
+ if (meth.isMacro) MacroTooFewArgumentListsMessage
+ else "missing arguments for " + meth.fullLocationString + (
if (meth.isConstructor) ""
else ";\nfollow this method with `_' if you want to treat it as a partially applied function"
- ))
+ )
+ issueNormalTypeError(tree, message)
setError(tree)
}
@@ -663,7 +657,7 @@ trait ContextErrors {
val addendums = List(
if (sym0.associatedFile eq sym1.associatedFile)
Some("conflicting symbols both originated in file '%s'".format(sym0.associatedFile.canonicalPath))
- else if ((sym0.associatedFile ne null) && (sym1.associatedFile ne null))
+ else if ((sym0.associatedFile ne NoAbstractFile) && (sym1.associatedFile ne NoAbstractFile))
Some("conflicting symbols originated in files '%s' and '%s'".format(sym0.associatedFile.canonicalPath, sym1.associatedFile.canonicalPath))
else None ,
if (isBug) Some("Note: this may be due to a bug in the compiler involving wildcards in package objects") else None
@@ -680,8 +674,8 @@ trait ContextErrors {
def CyclicAliasingOrSubtypingError(errPos: Position, sym0: Symbol) =
issueTypeError(PosAndMsgTypeError(errPos, "cyclic aliasing or subtyping involving "+sym0))
- def CyclicReferenceError(errPos: Position, lockedSym: Symbol) =
- issueTypeError(PosAndMsgTypeError(errPos, "illegal cyclic reference involving " + lockedSym))
+ def CyclicReferenceError(errPos: Position, tp: Type, lockedSym: Symbol) =
+ issueTypeError(PosAndMsgTypeError(errPos, s"illegal cyclic reference involving $tp and $lockedSym"))
// macro-related errors (also see MacroErrors below)
@@ -690,26 +684,42 @@ trait ContextErrors {
setError(tree)
}
- // same reason as for MacroBodyTypecheckException
+ def MacroTooManyArgumentListsError(expandee: Tree, fun: Symbol) = {
+ NormalTypeError(expandee, "too many argument lists for " + fun)
+ }
+
+ def MacroInvalidExpansionError(expandee: Tree, role: String, allowedExpansions: String) = {
+ issueNormalTypeError(expandee, s"macro in $role role can only expand into $allowedExpansions")
+ }
+
case object MacroExpansionException extends Exception with scala.util.control.ControlThrowable
- private def macroExpansionError(expandee: Tree, msg: String = null, pos: Position = NoPosition) = {
+ protected def macroExpansionError(expandee: Tree, msg: String, pos: Position = NoPosition) = {
def msgForLog = if (msg != null && (msg contains "exception during macro expansion")) msg.split(EOL).drop(1).headOption.getOrElse("?") else msg
macroLogLite("macro expansion has failed: %s".format(msgForLog))
- val errorPos = if (pos != NoPosition) pos else (if (expandee.pos != NoPosition) expandee.pos else enclosingMacroPosition)
if (msg != null) context.error(pos, msg) // issueTypeError(PosAndMsgTypeError(..)) won't work => swallows positions
setError(expandee)
throw MacroExpansionException
}
- def MacroPartialApplicationError(expandee: Tree) = {
+ private def macroExpansionError2(expandee: Tree, msg: String) = {
// macroExpansionError won't work => swallows positions, hence needed to do issueTypeError
// kinda contradictory to the comment in `macroExpansionError`, but this is how it works
- issueNormalTypeError(expandee, "macros cannot be partially applied")
+ issueNormalTypeError(expandee, msg)
setError(expandee)
throw MacroExpansionException
}
+ private def MacroTooFewArgumentListsMessage = "too few argument lists for macro invocation"
+ def MacroTooFewArgumentListsError(expandee: Tree) = macroExpansionError2(expandee, MacroTooFewArgumentListsMessage)
+
+ private def MacroTooManyArgumentListsMessage = "too many argument lists for macro invocation"
+ def MacroTooManyArgumentListsError(expandee: Tree) = macroExpansionError2(expandee, MacroTooManyArgumentListsMessage)
+
+ def MacroTooFewArgumentsError(expandee: Tree) = macroExpansionError2(expandee, "too few arguments for macro invocation")
+
+ def MacroTooManyArgumentsError(expandee: Tree) = macroExpansionError2(expandee, "too many arguments for macro invocation")
+
def MacroGeneratedAbort(expandee: Tree, ex: AbortMacroException) = {
// errors have been reported by the macro itself, so we do nothing here
macroLogVerbose("macro expansion has been aborted")
@@ -771,23 +781,26 @@ trait ContextErrors {
macroExpansionError(expandee, template(sym.name.nameKind).format(sym.name + " " + sym.origin, forgotten))
}
- def MacroExpansionIsNotExprError(expandee: Tree, expanded: Any) =
+ def MacroExpansionHasInvalidTypeError(expandee: Tree, expanded: Any) = {
+ val expected = "expr"
+ val isPathMismatch = expanded != null && expanded.isInstanceOf[scala.reflect.api.Exprs#Expr[_]]
macroExpansionError(expandee,
- "macro must return a compiler-specific expr; returned value is " + (
+ s"macro must return a compiler-specific $expected; returned value is " + (
if (expanded == null) "null"
- else if (expanded.isInstanceOf[Expr[_]]) " Expr, but it doesn't belong to this compiler's universe"
+ else if (isPathMismatch) s" $expected, but it doesn't belong to this compiler"
else " of " + expanded.getClass
))
-
- def MacroImplementationNotFoundError(expandee: Tree) = {
- val message =
- "macro implementation not found: " + expandee.symbol.name + " " +
- "(the most common reason for that is that you cannot use macro implementations in the same compilation run that defines them)" +
- (if (forScaladoc) ". When generating scaladocs for multiple projects at once, consider using -Ymacro-no-expand to disable macro expansions altogether."
- else "")
- macroExpansionError(expandee, message)
}
+
+ def MacroImplementationNotFoundError(expandee: Tree) =
+ macroExpansionError(expandee, macroImplementationNotFoundMessage(expandee.symbol.name))
}
+
+ /** This file will be the death of me. */
+ protected def macroImplementationNotFoundMessage(name: Name): String = (
+ s"""|macro implementation not found: $name
+ |(the most common reason for that is that you cannot use macro implementations in the same compilation run that defines them)""".stripMargin
+ )
}
trait InferencerContextErrors {
@@ -829,14 +842,17 @@ trait ContextErrors {
)
}
- def AccessError(tree: Tree, sym: Symbol, pre: Type, owner0: Symbol, explanation: String) = {
+ def AccessError(tree: Tree, sym: Symbol, ctx: Context, explanation: String): AbsTypeError =
+ AccessError(tree, sym, ctx.enclClass.owner.thisType, ctx.enclClass.owner, explanation)
+
+ def AccessError(tree: Tree, sym: Symbol, pre: Type, owner0: Symbol, explanation: String): AbsTypeError = {
def errMsg = {
val location = if (sym.isClassConstructor) owner0 else pre.widen.directObjectString
underlyingSymbol(sym).fullLocationString + " cannot be accessed in " +
location + explanation
}
- NormalTypeError(tree, errMsg, ErrorKinds.Access)
+ AccessTypeError(tree, errMsg)
}
def NoMethodInstanceError(fn: Tree, args: List[Tree], msg: String) =
@@ -881,7 +897,7 @@ trait ContextErrors {
"argument types " + argtpes.mkString("(", ",", ")") +
(if (pt == WildcardType) "" else " and expected result type " + pt)
val (pos, msg) = ambiguousErrorMsgPos(tree.pos, pre, best, firstCompeting, msg0)
- issueAmbiguousTypeError(pre, best, firstCompeting, AmbiguousTypeError(tree, pos, msg))
+ issueAmbiguousTypeError(pre, best, firstCompeting, AmbiguousTypeError(pos, msg))
setErrorOnLastTry(lastTry, tree)
} else setError(tree) // do not even try further attempts because they should all fail
// even if this is not the last attempt (because of the SO's possibility on the horizon)
@@ -889,13 +905,13 @@ trait ContextErrors {
}
def NoBestExprAlternativeError(tree: Tree, pt: Type, lastTry: Boolean) = {
- issueNormalTypeError(tree, withAddendum(tree.pos)(typeErrorMsg(tree.symbol.tpe, pt, isPossiblyMissingArgs(tree.symbol.tpe, pt))))
+ issueNormalTypeError(tree, withAddendum(tree.pos)(typeErrorMsg(tree.symbol.tpe, pt)))
setErrorOnLastTry(lastTry, tree)
}
def AmbiguousExprAlternativeError(tree: Tree, pre: Type, best: Symbol, firstCompeting: Symbol, pt: Type, lastTry: Boolean) = {
val (pos, msg) = ambiguousErrorMsgPos(tree.pos, pre, best, firstCompeting, "expected type " + pt)
- issueAmbiguousTypeError(pre, best, firstCompeting, AmbiguousTypeError(tree, pos, msg))
+ issueAmbiguousTypeError(pre, best, firstCompeting, AmbiguousTypeError(pos, msg))
setErrorOnLastTry(lastTry, tree)
}
@@ -909,7 +925,7 @@ trait ContextErrors {
kindErrors.toList.mkString("\n", ", ", ""))
}
- private[ContextErrors] def NotWithinBoundsErrorMessage(prefix: String, targs: List[Type], tparams: List[Symbol], explaintypes: Boolean) = {
+ private[scala] def NotWithinBoundsErrorMessage(prefix: String, targs: List[Type], tparams: List[Symbol], explaintypes: Boolean) = {
if (explaintypes) {
val bounds = tparams map (tp => tp.info.instantiateTypeParams(tparams, targs).bounds)
(targs, bounds).zipped foreach ((targ, bound) => explainTypes(bound.lo, targ))
@@ -925,7 +941,7 @@ trait ContextErrors {
def NotWithinBounds(tree: Tree, prefix: String, targs: List[Type],
tparams: List[Symbol], kindErrors: List[String]) =
issueNormalTypeError(tree,
- NotWithinBoundsErrorMessage(prefix, targs, tparams, settings.explaintypes.value))
+ NotWithinBoundsErrorMessage(prefix, targs, tparams, settings.explaintypes))
//substExpr
def PolymorphicExpressionInstantiationError(tree: Tree, undetparams: List[Symbol], pt: Type) =
@@ -1033,20 +1049,14 @@ trait ContextErrors {
val s1 = if (prevSym.isModule) "case class companion " else ""
val s2 = if (prevSym.isSynthetic) "(compiler-generated) " + s1 else ""
val s3 = if (prevSym.isCase) "case class " + prevSym.name else "" + prevSym
- val where = if (currentSym.owner.isPackageClass != prevSym.owner.isPackageClass) {
- val inOrOut = if (prevSym.owner.isPackageClass) "outside of" else "in"
+ val where = if (currentSym.isTopLevel != prevSym.isTopLevel) {
+ val inOrOut = if (prevSym.isTopLevel) "outside of" else "in"
" %s package object %s".format(inOrOut, ""+prevSym.effectiveOwner.name)
} else ""
issueSymbolTypeError(currentSym, prevSym.name + " is already defined as " + s2 + s3 + where)
}
- def MaxParametersCaseClassError(tree: Tree) =
- issueNormalTypeError(tree, "Implementation restriction: case classes cannot have more than " + definitions.MaxFunctionArity + " parameters.")
-
- def InheritsItselfError(tree: Tree) =
- issueNormalTypeError(tree, tree.tpe.typeSymbol+" inherits itself")
-
def MissingParameterOrValTypeError(vparam: Tree) =
issueNormalTypeError(vparam, "missing parameter type")
@@ -1097,11 +1107,11 @@ trait ContextErrors {
def AbstractMemberWithModiferError(sym: Symbol, flag: Int) =
- issueSymbolTypeError(sym, "abstract member may not have " + Flags.flagsToString(flag) + " modifier")
+ issueSymbolTypeError(sym, "abstract member may not have " + Flags.flagsToString(flag.toLong) + " modifier")
def IllegalModifierCombination(sym: Symbol, flag1: Int, flag2: Int) =
issueSymbolTypeError(sym, "illegal combination of modifiers: %s and %s for: %s".format(
- Flags.flagsToString(flag1), Flags.flagsToString(flag2), sym))
+ Flags.flagsToString(flag1.toLong), Flags.flagsToString(flag2.toLong), sym))
def IllegalDependentMethTpeError(sym: Symbol)(context: Context) = {
val errorAddendum =
@@ -1143,7 +1153,7 @@ trait ContextErrors {
// failures which have nothing to do with implicit conversions
// per se, but which manifest as implicit conversion conflicts
// involving Any, are further explained from foundReqMsg.
- if (AnyRefClass.tpe <:< req) (
+ if (AnyRefTpe <:< req) (
if (sym == AnyClass || sym == UnitClass) (
sm"""|Note: ${sym.name} is not implicitly converted to AnyRef. You can safely
|pattern match `x: AnyRef` or cast `x.asInstanceOf[AnyRef]` to do so."""
@@ -1159,11 +1169,11 @@ trait ContextErrors {
sm"""|Note that implicit conversions are not applicable because they are ambiguous:
|${coreMsg}are possible conversion functions from $found to $req"""
}
- typeErrorMsg(found, req, infer.isPossiblyMissingArgs(found, req)) + (
+ typeErrorMsg(found, req) + (
if (explanation == "") "" else "\n" + explanation
)
}
- context.issueAmbiguousError(AmbiguousTypeError(tree, tree.pos,
+ context.issueAmbiguousError(AmbiguousImplicitTypeError(tree,
if (isView) viewMsg
else s"ambiguous implicit values:\n${coreMsg}match expected type $pt")
)
@@ -1171,13 +1181,7 @@ trait ContextErrors {
}
def DivergingImplicitExpansionError(tree: Tree, pt: Type, sym: Symbol)(implicit context0: Context) =
- if (settings.Xdivergence211.value) {
- issueTypeError(DivergentImplicitTypeError(tree, pt, sym))
- } else {
- issueDivergentImplicitsError(tree,
- "diverging implicit expansion for type "+pt+"\nstarting with "+
- sym.fullLocationString)
- }
+ issueTypeError(DivergentImplicitTypeError(tree, pt, sym))
}
object NamesDefaultsErrorsGen {
@@ -1228,141 +1232,4 @@ trait ContextErrors {
setError(arg)
}
}
-
- // using an exception here is actually a good idea
- // because the lifespan of this exception is extremely small and controlled
- // moreover exceptions let us avoid an avalanche of "if (!hasError) do stuff" checks
- case object MacroBodyTypecheckException extends Exception with scala.util.control.ControlThrowable
-
- trait MacroErrors {
- self: MacroTyper =>
-
- private implicit val context0 = typer.context
- val context = typer.context
-
- // helpers
-
- private def lengthMsg(flavor: String, violation: String, extra: Symbol) = {
- val noun = if (flavor == "value") "parameter" else "type parameter"
- val message = noun + " lists have different length, " + violation + " extra " + noun
- val suffix = if (extra ne NoSymbol) " " + extra.defString else ""
- message + suffix
- }
-
- private def abbreviateCoreAliases(s: String): String = List("WeakTypeTag", "Expr").foldLeft(s)((res, x) => res.replace("c.universe." + x, "c." + x))
-
- private def showMeth(pss: List[List[Symbol]], restpe: Type, abbreviate: Boolean) = {
- var argsPart = (pss map (ps => ps map (_.defString) mkString ("(", ", ", ")"))).mkString
- if (abbreviate) argsPart = abbreviateCoreAliases(argsPart)
- var retPart = restpe.toString
- if (abbreviate || macroDdef.tpt.tpe == null) retPart = abbreviateCoreAliases(retPart)
- argsPart + ": " + retPart
- }
-
- // not exactly an error generator, but very related
- // and I dearly wanted to push it away from Macros.scala
- private def checkSubType(slot: String, rtpe: Type, atpe: Type) = {
- val ok = if (macroDebugVerbose || settings.explaintypes.value) {
- if (rtpe eq atpe) println(rtpe + " <: " + atpe + "?" + EOL + "true")
- withTypesExplained(rtpe <:< atpe)
- } else rtpe <:< atpe
- if (!ok) {
- compatibilityError("type mismatch for %s: %s does not conform to %s".format(slot, abbreviateCoreAliases(rtpe.toString), abbreviateCoreAliases(atpe.toString)))
- }
- }
-
- // errors
-
- private def fail() = {
- // need to set the IS_ERROR flag to prohibit spurious expansions
- if (macroDef != null) macroDef setFlag IS_ERROR
- // not setting ErrorSymbol as in `infer.setError`, because we still need to know that it's a macro
- // otherwise assignTypeToTree in Namers might fail if macroDdef.tpt == EmptyTree
- macroDdef setType ErrorType
- throw MacroBodyTypecheckException
- }
-
- private def genericError(tree: Tree, message: String) = {
- issueNormalTypeError(tree, message)
- fail()
- }
-
- private def implRefError(message: String) = {
- val treeInfo.Applied(implRef, _, _) = macroDdef.rhs
- genericError(implRef, message)
- }
-
- private def compatibilityError(message: String) =
- implRefError(
- "macro implementation has wrong shape:"+
- "\n required: " + showMeth(rparamss, rret, abbreviate = true) +
- "\n found : " + showMeth(aparamss, aret, abbreviate = false) +
- "\n" + message)
-
- // Phase I: sanity checks
-
- def MacroDefIsFastTrack() = {
- macroLogVerbose("typecheck terminated unexpectedly: macro is fast track")
- assert(!macroDdef.tpt.isEmpty, "fast track macros must provide result type")
- throw MacroBodyTypecheckException // don't call fail, because we don't need IS_ERROR
- }
-
- def MacroDefIsQmarkQmarkQmark() = {
- macroLogVerbose("typecheck terminated unexpectedly: macro is ???")
- throw MacroBodyTypecheckException
- }
-
- def MacroFeatureNotEnabled() = {
- macroLogVerbose("typecheck terminated unexpectedly: language.experimental.macros feature is not enabled")
- fail()
- }
-
- // Phase II: typecheck the right-hand side of the macro def
-
- // do nothing, just fail. relevant typecheck errors have already been reported
- def MacroDefUntypeableBodyError() = fail()
-
- def MacroDefInvalidBodyError() = genericError(macroDdef, "macro body has wrong shape:\n required: macro [<implementation object>].<method name>[[<type args>]]")
-
- def MacroImplNotPublicError() = implRefError("macro implementation must be public")
-
- def MacroImplOverloadedError() = implRefError("macro implementation cannot be overloaded")
-
- def MacroImplWrongNumberOfTypeArgumentsError(macroImplRef: Tree) = implRefError(typer.TyperErrorGen.TypedApplyWrongNumberOfTpeParametersErrorMessage(macroImplRef))
-
- def MacroImplNotStaticError() = implRefError("macro implementation must be in statically accessible object")
-
- // Phase III: check compatibility between the macro def and its macro impl
- // aXXX (e.g. aparams) => characteristics of the macro impl ("a" stands for "actual")
- // rXXX (e.g. rparams) => characteristics of a reference macro impl signature synthesized from the macro def ("r" stands for "reference")
-
- def MacroImplNonTagImplicitParameters(params: List[Symbol]) = compatibilityError("macro implementations cannot have implicit parameters other than WeakTypeTag evidences")
-
- def MacroImplParamssMismatchError() = compatibilityError("number of parameter sections differ")
-
- def MacroImplExtraParamsError(aparams: List[Symbol], rparams: List[Symbol]) = compatibilityError(lengthMsg("value", "found", aparams(rparams.length)))
-
- def MacroImplMissingParamsError(aparams: List[Symbol], rparams: List[Symbol]) = compatibilityError(abbreviateCoreAliases(lengthMsg("value", "required", rparams(aparams.length))))
-
- def checkMacroImplParamTypeMismatch(atpe: Type, rparam: Symbol) = checkSubType("parameter " + rparam.name, rparam.tpe, atpe)
-
- def checkMacroImplResultTypeMismatch(atpe: Type, rret: Type) = checkSubType("return type", atpe, rret)
-
- def MacroImplParamNameMismatchError(aparam: Symbol, rparam: Symbol) = compatibilityError("parameter names differ: " + rparam.name + " != " + aparam.name)
-
- def MacroImplVarargMismatchError(aparam: Symbol, rparam: Symbol) = {
- if (isRepeated(rparam) && !isRepeated(aparam))
- compatibilityError("types incompatible for parameter " + rparam.name + ": corresponding is not a vararg parameter")
- if (!isRepeated(rparam) && isRepeated(aparam))
- compatibilityError("types incompatible for parameter " + aparam.name + ": corresponding is not a vararg parameter")
- }
-
- def MacroImplTargMismatchError(atargs: List[Type], atparams: List[Symbol]) =
- compatibilityError(typer.infer.InferErrorGen.NotWithinBoundsErrorMessage("", atargs, atparams, macroDebugVerbose || settings.explaintypes.value))
-
- def MacroImplTparamInstantiationError(atparams: List[Symbol], ex: NoInstance) =
- compatibilityError(
- "type parameters "+(atparams map (_.defString) mkString ", ")+" cannot be instantiated\n"+
- ex.getMessage)
- }
}
diff --git a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala
index 3fe98ed127..61967d4cee 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala
@@ -6,9 +6,9 @@
package scala.tools.nsc
package typechecker
-import symtab.Flags._
-import scala.collection.mutable.{LinkedHashSet, Set}
+import scala.collection.{ immutable, mutable }
import scala.annotation.tailrec
+import scala.reflect.internal.util.shortClassOfInstance
/**
* @author Martin Odersky
@@ -16,25 +16,35 @@ import scala.annotation.tailrec
*/
trait Contexts { self: Analyzer =>
import global._
+ import definitions.{ JavaLangPackage, ScalaPackage, PredefModule }
+ import ContextMode._
- object NoContext extends Context {
- outer = this
+ object NoContext
+ extends Context(EmptyTree, NoSymbol, EmptyScope, NoCompilationUnit,
+ null) { // We can't pass the uninitialized `this`. Instead, we treat null specially in `Context#outer`
enclClass = this
enclMethod = this
+ override val depth = 0
override def nextEnclosing(p: Context => Boolean): Context = this
override def enclosingContextChain: List[Context] = Nil
override def implicitss: List[List[ImplicitInfo]] = Nil
+ override def imports: List[ImportInfo] = Nil
+ override def firstImport: Option[ImportInfo] = None
override def toString = "NoContext"
}
private object RootImports {
- import definitions._
// Possible lists of root imports
val javaList = JavaLangPackage :: Nil
val javaAndScalaList = JavaLangPackage :: ScalaPackage :: Nil
val completeList = JavaLangPackage :: ScalaPackage :: PredefModule :: Nil
}
+ def ambiguousImports(imp1: ImportInfo, imp2: ImportInfo) =
+ LookupAmbiguous(s"it is imported twice in the same scope by\n$imp1\nand $imp2")
+ def ambiguousDefnAndImport(owner: Symbol, imp: ImportInfo) =
+ LookupAmbiguous(s"it is both defined in $owner and imported subsequently by \n$imp")
+
private lazy val startContext = {
NoContext.make(
Template(List(), emptyValDef, List()) setSymbol global.NoSymbol setType global.NoType,
@@ -42,6 +52,25 @@ trait Contexts { self: Analyzer =>
rootMirror.RootClass.info.decls)
}
+ private lazy val allUsedSelectors =
+ mutable.Map[ImportInfo, Set[ImportSelector]]() withDefaultValue Set()
+ private lazy val allImportInfos =
+ mutable.Map[CompilationUnit, List[ImportInfo]]() withDefaultValue Nil
+
+ def warnUnusedImports(unit: CompilationUnit) = {
+ for (imps <- allImportInfos.remove(unit)) {
+ for (imp <- imps.reverse.distinct) {
+ val used = allUsedSelectors(imp)
+ def isMask(s: ImportSelector) = s.name != nme.WILDCARD && s.rename == nme.WILDCARD
+
+ imp.tree.selectors filterNot (s => isMask(s) || used(s)) foreach { sel =>
+ unit.warning(imp posOf sel, "Unused import")
+ }
+ }
+ allUsedSelectors --= imps
+ }
+ }
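
For readers skimming the `isMask` check above: a selector that renames a member to `_` masks it rather than importing it, so it is intentionally exempt from the unused-import warning. A minimal, self-contained illustration (object and member names are invented for the example):

// Illustrative sketch only: the kind of selector `isMask` matches.
object MaskingImportExample {
  object util {
    def Map: String    = "util.Map"
    def Buffer: String = "util.Buffer"
  }
  // `Map => _` is a masking selector (name != WILDCARD, rename == WILDCARD):
  // it hides util.Map instead of importing it, so it is never reported as unused.
  import util.{Map => _, _}

  def main(args: Array[String]): Unit =
    println(Buffer) // Buffer arrives via the trailing wildcard; Map stays hidden
}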
+
var lastAccessCheckDetails: String = ""
/** List of symbols to import from in a root context. Typically that
@@ -55,292 +84,393 @@ trait Contexts { self: Analyzer =>
protected def rootImports(unit: CompilationUnit): List[Symbol] = {
assert(definitions.isDefinitionsInitialized, "definitions uninitialized")
- if (settings.noimports.value) Nil
+ if (settings.noimports) Nil
else if (unit.isJava) RootImports.javaList
- else if (settings.nopredef.value || treeInfo.noPredefImportForUnit(unit.body)) RootImports.javaAndScalaList
+ else if (settings.nopredef || treeInfo.noPredefImportForUnit(unit.body)) {
+ debuglog("Omitted import of Predef._ for " + unit)
+ RootImports.javaAndScalaList
+ }
else RootImports.completeList
}
- def rootContext(unit: CompilationUnit): Context = rootContext(unit, EmptyTree, false)
- def rootContext(unit: CompilationUnit, tree: Tree): Context = rootContext(unit, tree, false)
- def rootContext(unit: CompilationUnit, tree: Tree, erasedTypes: Boolean): Context = {
- import definitions._
- var sc = startContext
- for (sym <- rootImports(unit)) {
- sc = sc.makeNewImport(sym)
- sc.depth += 1
- }
- val c = sc.make(unit, tree, sc.owner, sc.scope, sc.imports)
+ def rootContext(unit: CompilationUnit, tree: Tree = EmptyTree, erasedTypes: Boolean = false): Context = {
+ val rootImportsContext = (startContext /: rootImports(unit))((c, sym) => c.make(gen.mkWildcardImport(sym)))
+ val c = rootImportsContext.make(tree, unit = unit)
if (erasedTypes) c.setThrowErrors() else c.setReportErrors()
- c.implicitsEnabled = !erasedTypes
- c.enrichmentEnabled = c.implicitsEnabled
+ c(EnrichmentEnabled | ImplicitsEnabled) = !erasedTypes
c
}
def resetContexts() {
- var sc = startContext
- while (sc != NoContext) {
- sc.tree match {
- case Import(qual, _) => qual.tpe = singleType(qual.symbol.owner.thisType, qual.symbol)
- case _ =>
+ startContext.enclosingContextChain foreach { context =>
+ context.tree match {
+ case Import(qual, _) => qual setType singleType(qual.symbol.owner.thisType, qual.symbol)
+ case _ =>
}
- sc.flushAndReturnBuffer()
- sc.flushAndReturnWarningsBuffer()
- sc = sc.outer
+ context.reportBuffer.clearAll()
}
}
- private object Errors {
- final val ReportErrors = 1 << 0
- final val BufferErrors = 1 << 1
- final val AmbiguousErrors = 1 << 2
- final val notThrowMask = ReportErrors | BufferErrors
- final val AllMask = ReportErrors | BufferErrors | AmbiguousErrors
- }
+ /**
+ * A motley collection of the state and loosely associated behaviour of the type checker.
+ * Each `Typer` has an associated context, and as it descends into the tree new `(Typer, Context)`
+ * pairs are spawned.
+ *
+ * Meet the crew; first the state:
+ *
+ * - A tree, symbol, and scope representing the focus of the typechecker
+ * - An enclosing context, `outer`.
+ * - The current compilation unit.
+ * - A variety of bits that track the current error reporting policy (more on this later);
+ * whether or not implicits/macros are enabled, whether we are in a self or super call or
+ * in a constructor suffix. These are represented as bits in the mask `contextMode`.
+ * - Some odds and ends: undetermined type parameters of the current line of type inference;
+ * contextual augmentation for error messages, tracking of the nesting depth.
+ *
+ * And behaviour:
+ *
+ * - The central point for issuing errors and warnings from the typechecker, with a means
+ * to buffer these for use in 'silent' type checking, when some recovery might be possible.
+ * - `Context` is something of a Zipper for the tree we are typechecking: its `enclosingContextChain`
+ * is the path back to the root. This is exactly what we need to resolve names (`lookupSymbol`)
+ * and to collect in-scope implicit definitions (`implicitss`).
+ * Supporting these are `imports`, which represents all `Import` trees in the enclosing context chain.
+ * - In a similar vein, we can assess accessibility (`isAccessible`).
+ *
+ * More on error buffering:
+ * When are type errors recoverable? In quite a few places, it turns out. Some examples:
+ * trying to type an application with/without the expected type, or with/without implicit views
+ * enabled. This is usually mediated by `Typer.silent`, `Inferencer#tryTwice`.
+ *
+ * Initially, starting from the `typer` phase, the contexts either buffer or report errors;
+ * afterwards errors are thrown. This is configured in `rootContext`. Additionally, more
+ * fine-grained control is needed based on the kind of error; ambiguity errors are often
+ * suppressed during exploratory typing, such as determining whether `a == b` in an argument
+ * position is an assignment or a named argument, when `Inferencer#isApplicableSafe` type checks
+ * applications with and without an expected type, or when `Typer#tryTypedApply` tries to fit arguments to
+ * a function type with/without implicit views.
+ *
+ * When the error policy entails error/warning buffering, the mutable [[ReportBuffer]] records
+ * everything that is issued. It is important to note that child Contexts created with `make`
+ * "inherit" the very same `ReportBuffer` instance, whereas children spawned through `makeSilent`
+ * receive a separate, fresh buffer.
+ *
+ * @param tree Tree associated with this context
+ * @param owner The current owner
+ * @param scope The current scope
+ * @param _outer The next outer context.
+ */
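
A compact way to see the buffering rule described above (children made with `make` share the parent's buffer, `makeSilent` children get a fresh one) is the following self-contained sketch; `Ctx` is a stand-in written for illustration, not the compiler's `Context` or `ReportBuffer`:

import scala.collection.mutable

// Editorial sketch only: models buffer sharing, not the real compiler API.
object BufferSharingSketch {
  final class Ctx(val errors: mutable.ListBuffer[String]) {
    def make: Ctx                = new Ctx(errors)                   // child shares this buffer
    def makeSilent: Ctx          = new Ctx(mutable.ListBuffer.empty) // child gets a fresh buffer
    def issue(msg: String): Unit = errors += msg
  }

  def main(args: Array[String]): Unit = {
    val root   = new Ctx(mutable.ListBuffer.empty)
    val child  = root.make
    val silent = root.makeSilent

    child.issue("visible to root")   // same buffer instance as root
    silent.issue("kept to itself")   // never leaks into root

    println(root.errors.toList)      // List(visible to root)
    println(silent.errors.toList)    // List(kept to itself)
  }
}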
+ class Context private[typechecker](val tree: Tree, val owner: Symbol, val scope: Scope,
+ val unit: CompilationUnit, _outer: Context) {
+ private def outerIsNoContext = _outer eq null
+ final def outer: Context = if (outerIsNoContext) NoContext else _outer
- class Context private[typechecker] {
- import Errors._
-
- var unit: CompilationUnit = NoCompilationUnit
- var tree: Tree = _ // Tree associated with this context
- var owner: Symbol = NoSymbol // The current owner
- var scope: Scope = _ // The current scope
- var outer: Context = _ // The next outer context
- var enclClass: Context = _ // The next outer context whose tree is a
- // template or package definition
- @inline final def savingEnclClass[A](c: Context)(a: => A): A = {
+ /** The next outer context whose tree is a template or package definition */
+ var enclClass: Context = _
+
+ @inline private def savingEnclClass[A](c: Context)(a: => A): A = {
val saved = enclClass
enclClass = c
try a finally enclClass = saved
}
- var enclMethod: Context = _ // The next outer context whose tree is a method
- var variance: Int = _ // Variance relative to enclosing class
- private var _undetparams: List[Symbol] = List() // Undetermined type parameters,
- // not inherited to child contexts
- var depth: Int = 0
- var imports: List[ImportInfo] = List() // currently visible imports
- var openImplicits: List[OpenImplicit] = List() // types for which implicit arguments
- // are currently searched
- // for a named application block (Tree) the corresponding NamedApplyInfo
+ /** A bitmask containing all the boolean flags in a context, e.g. are implicit views enabled */
+ var contextMode: ContextMode = ContextMode.DefaultMode
+
+ /** Update all modes in `mask` to `value` */
+ def update(mask: ContextMode, value: Boolean) {
+ contextMode = contextMode.set(value, mask)
+ }
+
+ /** Set all modes in the mask `enable` to true, and all in `disable` to false. */
+ def set(enable: ContextMode = NOmode, disable: ContextMode = NOmode): this.type = {
+ contextMode = contextMode.set(true, enable).set(false, disable)
+ this
+ }
+
+ /** Is this context in all modes in the given `mask`? */
+ def apply(mask: ContextMode): Boolean = contextMode.inAll(mask)
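
For readers unfamiliar with the bitmask style used by `contextMode`, here is a minimal, self-contained sketch of the same idea; the `Mode` class and flag names below are invented for illustration and are not the compiler's `ContextMode`:

// Editorial sketch only: a toy bitmask with set/inAll/inNone-style operations.
object ContextModeSketch {
  final case class Mode(bits: Int) extends AnyVal {
    def |(other: Mode): Mode = Mode(bits | other.bits)
    def set(value: Boolean, mask: Mode): Mode =
      if (value) Mode(bits | mask.bits) else Mode(bits & ~mask.bits)
    def inAll(mask: Mode): Boolean  = (bits & mask.bits) == mask.bits
    def inNone(mask: Mode): Boolean = (bits & mask.bits) == 0
  }
  val ImplicitsEnabled  = Mode(1 << 0)
  val EnrichmentEnabled = Mode(1 << 1)
  val MacrosEnabled     = Mode(1 << 2)

  def main(args: Array[String]): Unit = {
    var mode = Mode(0).set(value = true, mask = ImplicitsEnabled | MacrosEnabled)
    mode = mode.set(value = false, mask = MacrosEnabled)     // like `this(MacrosEnabled) = false`
    println(mode.inAll(ImplicitsEnabled))                    // true
    println(mode.inNone(MacrosEnabled | EnrichmentEnabled))  // true
  }
}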
+
+ /** The next outer context whose tree is a method */
+ var enclMethod: Context = _
+
+ /** Variance relative to enclosing class */
+ var variance: Variance = Variance.Invariant
+
+ private var _undetparams: List[Symbol] = List()
+
+ protected def outerDepth = if (outerIsNoContext) 0 else outer.depth
+
+ val depth: Int = {
+ val increasesDepth = isRootImport || outerIsNoContext || (outer.scope != scope)
+ ( if (increasesDepth) 1 else 0 ) + outerDepth
+ }
+
+ /** The currently visible imports */
+ def imports: List[ImportInfo] = outer.imports
+ /** Equivalent to `imports.headOption`, but more efficient */
+ def firstImport: Option[ImportInfo] = outer.firstImport
+ def isRootImport: Boolean = false
+
+ /** Types for which implicit arguments are currently searched */
+ var openImplicits: List[OpenImplicit] = List()
+
+ /* For a named application block (`Tree`) the corresponding `NamedApplyInfo`. */
var namedApplyBlockInfo: Option[(Tree, NamedApplyInfo)] = None
var prefix: Type = NoPrefix
- var inConstructorSuffix = false // are we in a secondary constructor
- // after the this constructor call?
- var returnsSeen = false // for method context: were returns encountered?
- var inSelfSuperCall = false // is this context (enclosed in) a constructor call?
- // (the call to the super or self constructor in the first line of a constructor)
- // in this context the object's fields should not be in scope
-
- var diagnostic: List[String] = Nil // these messages are printed when issuing an error
- var implicitsEnabled = false
- var macrosEnabled = true
- var enrichmentEnabled = false // to selectively allow enrichment in patterns, where other kinds of implicit conversions are not allowed
- var checking = false
- var retyping = false
-
- var savedTypeBounds: List[(Symbol, Type)] = List() // saved type bounds
- // for type parameters which are narrowed in a GADT
+ def inSuperInit_=(value: Boolean) = this(SuperInit) = value
+ def inSuperInit = this(SuperInit)
+ def inConstructorSuffix_=(value: Boolean) = this(ConstructorSuffix) = value
+ def inConstructorSuffix = this(ConstructorSuffix)
+ def inPatAlternative_=(value: Boolean) = this(PatternAlternative) = value
+ def inPatAlternative = this(PatternAlternative)
+ def starPatterns_=(value: Boolean) = this(StarPatterns) = value
+ def starPatterns = this(StarPatterns)
+ def returnsSeen_=(value: Boolean) = this(ReturnsSeen) = value
+ def returnsSeen = this(ReturnsSeen)
+ def inSelfSuperCall_=(value: Boolean) = this(SelfSuperCall) = value
+ def inSelfSuperCall = this(SelfSuperCall)
+ def implicitsEnabled_=(value: Boolean) = this(ImplicitsEnabled) = value
+ def implicitsEnabled = this(ImplicitsEnabled)
+ def macrosEnabled_=(value: Boolean) = this(MacrosEnabled) = value
+ def macrosEnabled = this(MacrosEnabled)
+ def enrichmentEnabled_=(value: Boolean) = this(EnrichmentEnabled) = value
+ def enrichmentEnabled = this(EnrichmentEnabled)
+ def checking_=(value: Boolean) = this(Checking) = value
+ def checking = this(Checking)
+ def retyping_=(value: Boolean) = this(ReTyping) = value
+ def retyping = this(ReTyping)
+ def inSecondTry = this(SecondTry)
+ def inSecondTry_=(value: Boolean) = this(SecondTry) = value
+ def inReturnExpr = this(ReturnExpr)
+ def inTypeConstructorAllowed = this(TypeConstructorAllowed)
+
+ def defaultModeForTyped: Mode = if (inTypeConstructorAllowed) Mode.NOmode else Mode.EXPRmode
+
+ /** These messages are printed when issuing an error */
+ var diagnostic: List[String] = Nil
+
+ /** Saved type bounds for type parameters which are narrowed in a GADT. */
+ var savedTypeBounds: List[(Symbol, Type)] = List()
+
+ /** Indentation level, in columns, for output under -Ytyper-debug */
var typingIndentLevel: Int = 0
def typingIndent = " " * typingIndentLevel
- var buffer: Set[AbsTypeError] = _
- var warningsBuffer: Set[(Position, String)] = _
-
+ /** The next enclosing context (potentially `this`) that is owned by a class or method */
def enclClassOrMethod: Context =
if ((owner eq NoSymbol) || (owner.isClass) || (owner.isMethod)) this
else outer.enclClassOrMethod
+ /** The next enclosing context (potentially `this`) that has a `CaseDef` as a tree */
+ def enclosingCaseDef = nextEnclosing(_.tree.isInstanceOf[CaseDef])
+
+ /** ...or an Apply. */
+ def enclosingApply = nextEnclosing(_.tree.isInstanceOf[Apply])
+
+ //
+ // Tracking undetermined type parameters for type argument inference.
+ //
def undetparamsString =
if (undetparams.isEmpty) ""
else undetparams.mkString("undetparams=", ", ", "")
- def undetparams = _undetparams
+ /** Undetermined type parameters. See `Infer#{inferExprInstance, adjustTypeArgs}`. Not inherited to child contexts */
+ def undetparams: List[Symbol] = _undetparams
def undetparams_=(ps: List[Symbol]) = { _undetparams = ps }
- def extractUndetparams() = {
+ /** Return and clear the undetermined type parameters */
+ def extractUndetparams(): List[Symbol] = {
val tparams = undetparams
undetparams = List()
tparams
}
- private[this] var mode = 0
-
- def errBuffer = buffer
- def hasErrors = buffer.nonEmpty
- def hasWarnings = warningsBuffer.nonEmpty
-
- def state: Int = mode
- def restoreState(state0: Int) = mode = state0
-
- def reportErrors = (state & ReportErrors) != 0
- def bufferErrors = (state & BufferErrors) != 0
- def ambiguousErrors = (state & AmbiguousErrors) != 0
- def throwErrors = (state & notThrowMask) == 0
-
- def setReportErrors() = mode = (ReportErrors | AmbiguousErrors)
- def setBufferErrors() = {
- //assert(bufferErrors || !hasErrors, "When entering the buffer state, context has to be clean. Current buffer: " + buffer)
- mode = BufferErrors
+ /** Run `body` with this context with no undetermined type parameters, restore the original
+ * list afterwards.
+ * @param reportAmbiguous Should ambiguous errors be reported during evaluation of `body`?
+ */
+ def savingUndeterminedTypeParams[A](reportAmbiguous: Boolean = ambiguousErrors)(body: => A): A = {
+ withMode() {
+ this(AmbiguousErrors) = reportAmbiguous
+ val saved = extractUndetparams()
+ try body
+ finally undetparams = saved
+ }
}
- def setThrowErrors() = mode &= (~AllMask)
- def setAmbiguousErrors(report: Boolean) = if (report) mode |= AmbiguousErrors else mode &= notThrowMask
- def updateBuffer(errors: Set[AbsTypeError]) = buffer ++= errors
- def condBufferFlush(removeP: AbsTypeError => Boolean) {
- val elems = buffer.filter(removeP)
- buffer --= elems
- }
- def flushBuffer() { buffer.clear() }
- def flushAndReturnBuffer(): Set[AbsTypeError] = {
- val current = buffer.clone()
- buffer.clear()
+ //
+ // Error reporting policies and buffer.
+ //
+
+ private var _reportBuffer: ReportBuffer = new ReportBuffer
+ /** A buffer for errors and warnings, used with `this.bufferErrors == true` */
+ def reportBuffer = _reportBuffer
+ /** Discard the current report buffer, and replace with an empty one */
+ def useFreshReportBuffer() = _reportBuffer = new ReportBuffer
+ /** Discard the current report buffer, and replace with `other` */
+ def restoreReportBuffer(other: ReportBuffer) = _reportBuffer = other
+
+ /** The first error, if any, in the report buffer */
+ def firstError: Option[AbsTypeError] = reportBuffer.firstError
+ /** Does the report buffer contain any errors? */
+ def hasErrors = reportBuffer.hasErrors
+
+ def reportErrors = this(ReportErrors)
+ def bufferErrors = this(BufferErrors)
+ def ambiguousErrors = this(AmbiguousErrors)
+ def throwErrors = contextMode.inNone(ReportErrors | BufferErrors)
+
+ def setReportErrors(): Unit = set(enable = ReportErrors | AmbiguousErrors, disable = BufferErrors)
+ def setBufferErrors(): Unit = set(enable = BufferErrors, disable = ReportErrors | AmbiguousErrors)
+ def setThrowErrors(): Unit = this(ReportErrors | AmbiguousErrors | BufferErrors) = false
+ def setAmbiguousErrors(report: Boolean): Unit = this(AmbiguousErrors) = report
+
+ /** Append the given errors to the report buffer */
+ def updateBuffer(errors: Traversable[AbsTypeError]) = reportBuffer ++= errors
+ /** Clear all errors from the report buffer */
+ def flushBuffer() { reportBuffer.clearAllErrors() }
+ /** Return and clear all errors from the report buffer */
+ def flushAndReturnBuffer(): immutable.Seq[AbsTypeError] = {
+ val current = reportBuffer.errors
+ reportBuffer.clearAllErrors()
current
}
- def flushAndReturnWarningsBuffer(): Set[(Position, String)] = {
- val current = warningsBuffer.clone()
- warningsBuffer.clear()
- current
+
+ /** Issue and clear all warnings from the report buffer */
+ def flushAndIssueWarnings() {
+ reportBuffer.warnings foreach {
+ case (pos, msg) => unit.warning(pos, msg)
+ }
+ reportBuffer.clearAllWarnings()
}
- def logError(err: AbsTypeError) = buffer += err
+ //
+ // Temporary mode adjustment
+ //
- def withImplicitsEnabled[T](op: => T): T = {
- val saved = implicitsEnabled
- implicitsEnabled = true
+ @inline def withMode[T](enabled: ContextMode = NOmode, disabled: ContextMode = NOmode)(op: => T): T = {
+ val saved = contextMode
+ set(enabled, disabled)
try op
- finally implicitsEnabled = saved
+ finally contextMode = saved
}
- def withImplicitsDisabled[T](op: => T): T = {
- val saved = implicitsEnabled
- implicitsEnabled = false
- val savedP = enrichmentEnabled
- enrichmentEnabled = false
- try op
- finally {
- implicitsEnabled = saved
- enrichmentEnabled = savedP
- }
+ @inline final def withImplicitsEnabled[T](op: => T): T = withMode(enabled = ImplicitsEnabled)(op)
+ @inline final def withImplicitsDisabled[T](op: => T): T = withMode(disabled = ImplicitsEnabled | EnrichmentEnabled)(op)
+ @inline final def withImplicitsDisabledAllowEnrichment[T](op: => T): T = withMode(enabled = EnrichmentEnabled, disabled = ImplicitsEnabled)(op)
+ @inline final def withMacrosEnabled[T](op: => T): T = withMode(enabled = MacrosEnabled)(op)
+ @inline final def withMacrosDisabled[T](op: => T): T = withMode(disabled = MacrosEnabled)(op)
+ @inline final def withinStarPatterns[T](op: => T): T = withMode(enabled = StarPatterns)(op)
+ @inline final def withinSuperInit[T](op: => T): T = withMode(enabled = SuperInit)(op)
+ @inline final def withinSecondTry[T](op: => T): T = withMode(enabled = SecondTry)(op)
+ @inline final def withinPatAlternative[T](op: => T): T = withMode(enabled = PatternAlternative)(op)
+
+ /** TypeConstructorAllowed is enabled when we are typing a higher-kinded type.
+ * adapt should then check kind-arity based on the prototypical type's kind
+ * arity. Type arguments should not be inferred.
+ */
+ @inline final def withinTypeConstructorAllowed[T](op: => T): T = withMode(enabled = TypeConstructorAllowed)(op)
+
+ /* TODO - consolidate returnsSeen (which seems only to be used by checkDead)
+ * and ReturnExpr.
+ */
+ @inline final def withinReturnExpr[T](op: => T): T = {
+ enclMethod.returnsSeen = true
+ withMode(enabled = ReturnExpr)(op)
}
- def withImplicitsDisabledAllowEnrichment[T](op: => T): T = {
- val saved = implicitsEnabled
- implicitsEnabled = false
- val savedP = enrichmentEnabled
- enrichmentEnabled = true
- try op
- finally {
- implicitsEnabled = saved
- enrichmentEnabled = savedP
+ // See comment on FormerNonStickyModes.
+ @inline final def withOnlyStickyModes[T](op: => T): T = withMode(disabled = FormerNonStickyModes)(op)
+
+ /** @return true if the `expr` evaluates to true within a silent Context that incurs no errors */
+ @inline final def inSilentMode(expr: => Boolean): Boolean = {
+ withMode() { // withMode with no arguments to restore the mode mutated by `setBufferErrors`.
+ setBufferErrors()
+ try expr && !hasErrors
+ finally reportBuffer.clearAll()
}
}
- def withMacrosEnabled[T](op: => T): T = {
- val saved = macrosEnabled
- macrosEnabled = true
- try op
- finally macrosEnabled = saved
- }
+ //
+ // Child Context Creation
+ //
- def withMacrosDisabled[T](op: => T): T = {
- val saved = macrosEnabled
- macrosEnabled = false
- try op
- finally macrosEnabled = saved
- }
-
- def make(unit: CompilationUnit, tree: Tree, owner: Symbol,
- scope: Scope, imports: List[ImportInfo]): Context = {
- val c = new Context
- c.unit = unit
- c.tree = tree
- c.owner = owner
- c.scope = scope
- c.outer = this
-
- tree match {
- case Template(_, _, _) | PackageDef(_, _) =>
- c.enclClass = c
- c.prefix = c.owner.thisType
- c.inConstructorSuffix = false
- case _ =>
- c.enclClass = this.enclClass
- c.prefix =
- if (c.owner != this.owner && c.owner.isTerm) NoPrefix
- else this.prefix
- c.inConstructorSuffix = this.inConstructorSuffix
+ /**
+ * Construct a child context. The parent and child will share the report buffer.
+ * Compare with `makeSilent`, in which the child has a fresh report buffer.
+ *
+ * If `tree` is an `Import`, that import will be available at the head of
+ * `Context#imports`.
+ */
+ def make(tree: Tree = tree, owner: Symbol = owner,
+ scope: Scope = scope, unit: CompilationUnit = unit): Context = {
+ val isTemplateOrPackage = tree match {
+ case _: Template | _: PackageDef => true
+ case _ => false
+ }
+ val isDefDef = tree match {
+ case _: DefDef => true
+ case _ => false
}
- tree match {
- case DefDef(_, _, _, _, _, _) =>
- c.enclMethod = c
- case _ =>
- c.enclMethod = this.enclMethod
+ val isImport = tree match {
+ case _: Import => true
+ case _ => false
}
- c.variance = this.variance
- c.depth = if (scope == this.scope) this.depth else this.depth + 1
- c.imports = imports
- c.inSelfSuperCall = inSelfSuperCall
- c.restoreState(this.state)
- c.diagnostic = this.diagnostic
- c.typingIndentLevel = typingIndentLevel
- c.implicitsEnabled = this.implicitsEnabled
- c.macrosEnabled = this.macrosEnabled
- c.enrichmentEnabled = this.enrichmentEnabled
- c.checking = this.checking
- c.retyping = this.retyping
- c.openImplicits = this.openImplicits
- c.buffer = if (this.buffer == null) LinkedHashSet[AbsTypeError]() else this.buffer // need to initialize
- c.warningsBuffer = if (this.warningsBuffer == null) LinkedHashSet[(Position, String)]() else this.warningsBuffer
+ val sameOwner = owner == this.owner
+ val prefixInChild =
+ if (isTemplateOrPackage) owner.thisType
+ else if (!sameOwner && owner.isTerm) NoPrefix
+ else prefix
+
+ // The blank canvas
+ val c = if (isImport)
+ new Context(tree, owner, scope, unit, this) with ImportContext
+ else
+ new Context(tree, owner, scope, unit, this)
+
+ // Fields that are directly propagated
+ c.variance = variance
+ c.diagnostic = diagnostic
+ c.typingIndentLevel = typingIndentLevel
+ c.openImplicits = openImplicits
+ c.contextMode = contextMode // note: ConstructorSuffix, a bit within `contextMode`, is conditionally overwritten below.
+ c._reportBuffer = reportBuffer
+
+ // Fields that may take on a different value in the child
+ c.prefix = prefixInChild
+ c.enclClass = if (isTemplateOrPackage) c else enclClass
+ c(ConstructorSuffix) = !isTemplateOrPackage && c(ConstructorSuffix)
+ c.enclMethod = if (isDefDef) c else enclMethod
+
registerContext(c.asInstanceOf[analyzer.Context])
debuglog("[context] ++ " + c.unit + " / " + tree.summaryString)
c
}
- // TODO: remove? Doesn't seem to be used
- def make(unit: CompilationUnit): Context = {
- val c = make(unit, EmptyTree, owner, scope, imports)
- c.setReportErrors()
- c.implicitsEnabled = true
- c.macrosEnabled = true
- c
- }
-
- def makeNewImport(sym: Symbol): Context =
- makeNewImport(gen.mkWildcardImport(sym))
-
- def makeNewImport(imp: Import): Context =
- make(unit, imp, owner, scope, new ImportInfo(imp, depth) :: imports)
-
def make(tree: Tree, owner: Symbol, scope: Scope): Context =
+ // TODO SI-7345 Moving this optimization into the main overload of `make` causes all tests to fail,
+ // even if it is extended to check that `unit == this.unit`. Why is this?
if (tree == this.tree && owner == this.owner && scope == this.scope) this
- else make0(tree, owner, scope)
-
- private def make0(tree: Tree, owner: Symbol, scope: Scope): Context =
- make(unit, tree, owner, scope, imports)
+ else make(tree, owner, scope, unit)
+ /** Make a child context that represents a new nested scope */
def makeNewScope(tree: Tree, owner: Symbol): Context =
make(tree, owner, newNestedScope(scope))
- // IDE stuff: distinguish between scopes created for typing and scopes created for naming.
-
- def make(tree: Tree, owner: Symbol): Context =
- make0(tree, owner, scope)
- def make(tree: Tree): Context =
- make(tree, owner)
-
- def makeSilent(reportAmbiguousErrors: Boolean, newtree: Tree = tree): Context = {
+ /** Make a child context that buffers errors and warnings into a fresh report buffer. */
+ def makeSilent(reportAmbiguousErrors: Boolean = ambiguousErrors, newtree: Tree = tree): Context = {
val c = make(newtree)
c.setBufferErrors()
c.setAmbiguousErrors(reportAmbiguousErrors)
- c.buffer = new LinkedHashSet[AbsTypeError]()
+ c._reportBuffer = new ReportBuffer // A fresh buffer so as not to leak errors/warnings into `this`.
c
}
+ /** Make a silent child context that does not allow implicits. Used to prevent chaining of implicit views. */
def makeImplicit(reportAmbiguousErrors: Boolean) = {
val c = makeSilent(reportAmbiguousErrors)
- c.implicitsEnabled = false
- c.enrichmentEnabled = false
+ c(ImplicitsEnabled | EnrichmentEnabled) = false
c
}
@@ -355,12 +485,10 @@ trait Contexts { self: Analyzer =>
* accessible.
*/
def makeConstructorContext = {
- var baseContext = enclClass.outer
- while (baseContext.tree.isInstanceOf[Template])
- baseContext = baseContext.outer
+ val baseContext = enclClass.outer.nextEnclosing(!_.tree.isInstanceOf[Template])
val argContext = baseContext.makeNewScope(tree, owner)
+ argContext.contextMode = contextMode
argContext.inSelfSuperCall = true
- argContext.restoreState(this.state)
def enterElems(c: Context) {
def enterLocalElems(e: ScopeEntry) {
if (e != null && e.owner == c.scope) {
@@ -368,7 +496,7 @@ trait Contexts { self: Analyzer =>
argContext.scope enter e.sym
}
}
- if (c.owner.isTerm && !c.owner.isLocalDummy) {
+ if (c.isLocal && !c.owner.isLocalDummy) {
enterElems(c.outer)
enterLocalElems(c.scope.elems)
}
@@ -379,6 +507,10 @@ trait Contexts { self: Analyzer =>
argContext
}
+ //
+ // Error and warning issuance
+ //
+
private def addDiagString(msg: String) = {
val ds =
if (diagnostic.isEmpty) ""
@@ -390,19 +522,23 @@ trait Contexts { self: Analyzer =>
unit.error(pos, if (checking) "\n**** ERROR DURING INTERNAL CHECKING ****\n" + msg else msg)
@inline private def issueCommon(err: AbsTypeError)(pf: PartialFunction[AbsTypeError, Unit]) {
- debugwarn("issue error: " + err.errMsg)
- if (settings.Yissuedebug.value) (new Exception).printStackTrace()
+ if (settings.Yissuedebug) {
+ log("issue error: " + err.errMsg)
+ (new Exception).printStackTrace()
+ }
if (pf isDefinedAt err) pf(err)
- else if (bufferErrors) { buffer += err }
+ else if (bufferErrors) { reportBuffer += err }
else throw new TypeError(err.errPos, err.errMsg)
}
+ /** Issue/buffer/throw the given type error according to the current mode for error reporting. */
def issue(err: AbsTypeError) {
issueCommon(err) { case _ if reportErrors =>
unitError(err.errPos, addDiagString(err.errMsg))
}
}
+ /** Issue/buffer/throw the given implicit ambiguity error according to the current mode for error reporting. */
def issueAmbiguousError(pre: Type, sym1: Symbol, sym2: Symbol, err: AbsTypeError) {
issueCommon(err) { case _ if ambiguousErrors =>
if (!pre.isErroneous && !sym1.isErroneous && !sym2.isErroneous)
@@ -410,44 +546,31 @@ trait Contexts { self: Analyzer =>
}
}
+ /** Issue/buffer/throw the given implicit ambiguity error according to the current mode for error reporting. */
def issueAmbiguousError(err: AbsTypeError) {
issueCommon(err) { case _ if ambiguousErrors => unitError(err.errPos, addDiagString(err.errMsg)) }
}
- // TODO remove
+ /** Issue/throw the given `err` according to the current mode for error reporting. */
def error(pos: Position, err: Throwable) =
if (reportErrors) unitError(pos, addDiagString(err.getMessage()))
else throw err
+ /** Issue/throw the given error message according to the current mode for error reporting. */
def error(pos: Position, msg: String) = {
val msg1 = addDiagString(msg)
if (reportErrors) unitError(pos, msg1)
else throw new TypeError(pos, msg1)
}
- def warning(pos: Position, msg: String): Unit = warning(pos, msg, false)
- def warning(pos: Position, msg: String, force: Boolean) {
+ /** Issue or buffer the given warning message according to the current mode for error reporting. */
+ def warning(pos: Position, msg: String, force: Boolean = false) {
if (reportErrors || force) unit.warning(pos, msg)
- else if (bufferErrors) warningsBuffer += ((pos, msg))
+ else if (bufferErrors) reportBuffer += (pos -> msg)
}
- def isLocal(): Boolean = tree match {
- case Block(_,_) => true
- case PackageDef(_, _) => false
- case EmptyTree => false
- case _ => outer.isLocal()
- }
-
- /** Fast path for some slow checks (ambiguous assignment in Refchecks, and
- * existence of __match for MatchTranslation in virtpatmat.) This logic probably
- * needs improvement.
- */
- def isNameInScope(name: Name) = (
- enclosingContextChain exists (ctx =>
- (ctx.scope.lookupEntry(name) != null)
- || (ctx.owner.rawInfo.member(name) != NoSymbol)
- )
- )
+ /** Is the owning symbol of this context a term? */
+ final def isLocal: Boolean = owner.isTerm
// nextOuter determines which context is searched next for implicits
// (after `this`, which contributes `newImplicits` below.) In
@@ -473,26 +596,35 @@ trait Contexts { self: Analyzer =>
def enclosingContextChain: List[Context] = this :: outer.enclosingContextChain
- override def toString = "Context(%s@%s unit=%s scope=%s errors=%b, reportErrors=%b, throwErrors=%b)".format(
- owner.fullName, tree.shortClass, unit, scope.##, hasErrors, reportErrors, throwErrors
- )
- /** Is `sub` a subclass of `base` or a companion object of such a subclass?
- */
- def isSubClassOrCompanion(sub: Symbol, base: Symbol) =
- sub.isNonBottomSubClass(base) ||
- sub.isModuleClass && sub.linkedClassOfClass.isNonBottomSubClass(base)
-
- /** Return closest enclosing context that defines a superclass of `clazz`, or a
- * companion module of a superclass of `clazz`, or NoContext if none exists */
- def enclosingSuperClassContext(clazz: Symbol): Context = {
- var c = this.enclClass
- while (c != NoContext &&
- !clazz.isNonBottomSubClass(c.owner) &&
- !(c.owner.isModuleClass && clazz.isNonBottomSubClass(c.owner.companionClass)))
- c = c.outer.enclClass
- c
+ private def treeTruncated = tree.toString.replaceAll("\\s+", " ").lines.mkString("\\n").take(70)
+ private def treeIdString = if (settings.uniqid.value) "#" + System.identityHashCode(tree).toString.takeRight(3) else ""
+ private def treeString = tree match {
+ case x: Import => "" + x
+ case Template(parents, `emptyValDef`, body) =>
+ val pstr = if ((parents eq null) || parents.isEmpty) "Nil" else parents mkString " "
+ val bstr = if (body eq null) "" else body.length + " stats"
+ s"""Template($pstr, _, $bstr)"""
+ case x => s"${tree.shortClass}${treeIdString}:${treeTruncated}"
}
+ override def toString =
+ sm"""|Context($unit) {
+ | owner = $owner
+ | tree = $treeString
+ | scope = ${scope.size} decls
+ | contextMode = $contextMode
+ | outer.owner = ${outer.owner}
+ |}"""
+
+ //
+ // Accessibility checking
+ //
+
+ /** Is `sub` a subclass of `base` or a companion object of such a subclass? */
+ private def isSubClassOrCompanion(sub: Symbol, base: Symbol) =
+ sub.isNonBottomSubClass(base) ||
+ sub.isModuleClass && sub.linkedClassOfClass.isNonBottomSubClass(base)
+
/** Return the closest enclosing context that defines a subclass of `clazz`
* or a companion object thereof, or `NoContext` if no such context exists.
*/
@@ -503,9 +635,7 @@ trait Contexts { self: Analyzer =>
c
}
- /** Is `sym` accessible as a member of tree `site` with type
- * `pre` in current context?
- */
+ /** Is `sym` accessible as a member of `pre` in current context? */
def isAccessible(sym: Symbol, pre: Type, superAccess: Boolean = false): Boolean = {
lastAccessCheckDetails = ""
// Console.println("isAccessible(%s, %s, %s)".format(sym, pre, superAccess))
@@ -518,7 +648,7 @@ trait Contexts { self: Analyzer =>
(linked ne NoSymbol) && accessWithin(linked)
}
- /** Are we inside definition of `ab`? */
+ /* Are we inside definition of `ab`? */
def accessWithin(ab: Symbol) = {
// #3663: we must disregard package nesting if sym isJavaDefined
if (sym.isJavaDefined) {
@@ -530,26 +660,12 @@ trait Contexts { self: Analyzer =>
} else (owner hasTransOwner ab)
}
-/*
- var c = this
- while (c != NoContext && c.owner != owner) {
- if (c.outer eq null) abort("accessWithin(" + owner + ") " + c);//debug
- if (c.outer.enclClass eq null) abort("accessWithin(" + owner + ") " + c);//debug
- c = c.outer.enclClass
- }
- c != NoContext
- }
-*/
- /** Is `clazz` a subclass of an enclosing class? */
- def isSubClassOfEnclosing(clazz: Symbol): Boolean =
- enclosingSuperClassContext(clazz) != NoContext
-
def isSubThisType(pre: Type, clazz: Symbol): Boolean = pre match {
case ThisType(pclazz) => pclazz isNonBottomSubClass clazz
case _ => false
}
- /** Is protected access to target symbol permitted */
+ /* Is protected access to target symbol permitted */
def isProtectedAccessOK(target: Symbol) = {
val c = enclosingSubClassContext(sym.owner)
if (c == NoContext)
@@ -589,8 +705,7 @@ trait Contexts { self: Analyzer =>
( superAccess
|| pre.isInstanceOf[ThisType]
|| phase.erasedTypes
- || isProtectedAccessOK(sym)
- || (sym.allOverriddenSymbols exists isProtectedAccessOK)
+ || (sym.overrideChain exists isProtectedAccessOK)
// that last condition makes protected access via self types work.
)
)
@@ -600,26 +715,50 @@ trait Contexts { self: Analyzer =>
}
}
+ //
+ // Type bound management
+ //
+
def pushTypeBounds(sym: Symbol) {
+ sym.info match {
+ case tb: TypeBounds => if (!tb.isEmptyBounds) log(s"Saving $sym info=$tb")
+ case info => devWarning(s"Something other than a TypeBounds seen in pushTypeBounds: $info is a ${shortClassOfInstance(info)}")
+ }
savedTypeBounds ::= ((sym, sym.info))
}
def restoreTypeBounds(tp: Type): Type = {
- var current = tp
- for ((sym, info) <- savedTypeBounds) {
- debuglog("resetting " + sym + " to " + info);
- sym.info match {
- case TypeBounds(lo, hi) if (hi <:< lo && lo <:< hi) =>
- current = current.instantiateTypeParams(List(sym), List(lo))
-//@M TODO: when higher-kinded types are inferred, probably need a case PolyType(_, TypeBounds(...)) if ... =>
- case _ =>
- }
- sym.setInfo(info)
+ def restore(): Type = savedTypeBounds.foldLeft(tp) { case (current, (sym, savedInfo)) =>
+ def bounds_s(tb: TypeBounds) = if (tb.isEmptyBounds) "<empty bounds>" else s"TypeBounds(lo=${tb.lo}, hi=${tb.hi})"
+ //@M TODO: when higher-kinded types are inferred, probably need a case PolyType(_, TypeBounds(...)) if ... =>
+ val TypeBounds(lo, hi) = sym.info.bounds
+ val isUnique = lo <:< hi && hi <:< lo
+ val isPresent = current contains sym
+ def saved_s = bounds_s(savedInfo.bounds)
+ def current_s = bounds_s(sym.info.bounds)
+
+ if (isUnique && isPresent)
+ devWarningResult(s"Preserving inference: ${sym.nameString}=$hi in $current (based on $current_s) before restoring $sym to saved $saved_s")(
+ current.instantiateTypeParams(List(sym), List(hi))
+ )
+ else if (isPresent)
+ devWarningResult(s"Discarding inferred $current_s because it does not uniquely determine $sym in")(current)
+ else
+ logResult(s"Discarding inferred $current_s because $sym does not appear in")(current)
+ }
+ try restore()
+ finally {
+ for ((sym, savedInfo) <- savedTypeBounds)
+ sym setInfo debuglogResult(s"Discarding inferred $sym=${sym.info}, restoring saved info")(savedInfo)
+
+ savedTypeBounds = Nil
}
- savedTypeBounds = List()
- current
}
+ //
+ // Implicit collection
+ //
+
private var implicitsCache: List[List[ImplicitInfo]] = null
private var implicitsRunId = NoRunId
@@ -662,7 +801,7 @@ trait Contexts { self: Analyzer =>
case ImportSelector(from, _, to, _) :: sels1 =>
var impls = collect(sels1) filter (info => info.name != from)
if (to != nme.WILDCARD) {
- for (sym <- imp.importedSymbol(to).alternatives)
+ for (sym <- importedAccessibleSymbol(imp, to).alternatives)
if (isQualifyingImplicit(to, sym, pre, imported = true))
impls = new ImplicitInfo(to, pre, sym) :: impls
}
@@ -679,6 +818,8 @@ trait Contexts { self: Analyzer =>
* filtered out later by `eligibleInfos` (SI-4270 / 9129cfe9), as they don't type-check.
*/
def implicitss: List[List[ImplicitInfo]] = {
+ val imports = this.imports
+ val nextOuter = this.nextOuter
if (implicitsRunId != currentRunId) {
implicitsRunId = currentRunId
implicitsCache = List()
@@ -695,8 +836,8 @@ trait Contexts { self: Analyzer =>
} else if (scope != nextOuter.scope && !owner.isPackageClass) {
debuglog("collect local implicits " + scope.toList)//DEBUG
collectImplicits(scope, NoPrefix)
- } else if (imports != nextOuter.imports) {
- assert(imports.tail == nextOuter.imports, (imports, nextOuter.imports))
+ } else if (firstImport != nextOuter.firstImport) {
+ assert(imports.tail.headOption == nextOuter.firstImport, (imports, nextOuter.imports))
collectImplicitImports(imports.head)
} else if (owner.isPackageClass) {
// the corresponding package object may contain implicit members.
@@ -708,6 +849,304 @@ trait Contexts { self: Analyzer =>
implicitsCache
}
+ //
+ // Imports and symbol lookup
+ //
+
+ /** It's possible that seemingly conflicting identifiers are
+ * identifiably the same after type normalization. In such cases,
+ * allow compilation to proceed. A typical example is:
+ * package object foo { type InputStream = java.io.InputStream }
+ * import foo._, java.io._
+ */
+ private def resolveAmbiguousImport(name: Name, imp1: ImportInfo, imp2: ImportInfo): Option[ImportInfo] = {
+ val imp1Explicit = imp1 isExplicitImport name
+ val imp2Explicit = imp2 isExplicitImport name
+ val ambiguous = if (imp1.depth == imp2.depth) imp1Explicit == imp2Explicit else !imp1Explicit && imp2Explicit
+ val imp1Symbol = (imp1 importedSymbol name).initialize filter (s => isAccessible(s, imp1.qual.tpe, superAccess = false))
+ val imp2Symbol = (imp2 importedSymbol name).initialize filter (s => isAccessible(s, imp2.qual.tpe, superAccess = false))
+
+ // The types of the qualifiers from which the ambiguous imports come.
+ // If the ambiguous name is a value, these must be the same.
+ def t1 = imp1.qual.tpe
+ def t2 = imp2.qual.tpe
+ // The types of the ambiguous symbols, seen as members of their qualifiers.
+ // If the ambiguous name is a monomorphic type, we can relax this far.
+ def mt1 = t1 memberType imp1Symbol
+ def mt2 = t2 memberType imp2Symbol
+
+ def characterize = List(
+ s"types: $t1 =:= $t2 ${t1 =:= t2} members: ${mt1 =:= mt2}",
+ s"member type 1: $mt1",
+ s"member type 2: $mt2"
+ ).mkString("\n ")
+
+ if (!ambiguous || !imp2Symbol.exists) Some(imp1)
+ else if (!imp1Symbol.exists) Some(imp2)
+ else (
+ // The symbol names are checked rather than the symbols themselves because
+ // each time an overloaded member is looked up it receives a new symbol.
+ // So foo.member("x") != foo.member("x") if x is overloaded. This seems
+ // likely to be the cause of other bugs too...
+ if (t1 =:= t2 && imp1Symbol.name == imp2Symbol.name) {
+ log(s"Suppressing ambiguous import: $t1 =:= $t2 && $imp1Symbol == $imp2Symbol")
+ Some(imp1)
+ }
+ // Monomorphism restriction on types is in part because type aliases could have the
+ // same target type but attach different variance to the parameters. Maybe it can be
+ // relaxed, but doesn't seem worth it at present.
+ else if (mt1 =:= mt2 && name.isTypeName && imp1Symbol.isMonomorphicType && imp2Symbol.isMonomorphicType) {
+ log(s"Suppressing ambiguous import: $mt1 =:= $mt2 && $imp1Symbol and $imp2Symbol are equivalent")
+ Some(imp1)
+ }
+ else {
+ log(s"Import is genuinely ambiguous:\n " + characterize)
+ None
+ }
+ )
+ }
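
The scaladoc above sketches the motivating case; written out as one self-contained compilation unit it looks roughly like this (package and object names are invented for the example):

// Illustrative only: two wildcard imports both supply `InputStream`, but they are not
// reported as ambiguous because both resolve to java.io.InputStream after normalization.
package object foo {
  type InputStream = java.io.InputStream
}

package client {
  object Reader {
    import foo._
    import java.io._

    // Either import could bind `InputStream`; since both denote the same monomorphic
    // type, resolution proceeds instead of failing with an ambiguity error.
    def firstByte(in: InputStream): Int = in.read()
  }
}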
+
+ /** The symbol with name `name` imported via the import in `imp`,
+ * if any such symbol is accessible from this context.
+ */
+ def importedAccessibleSymbol(imp: ImportInfo, name: Name): Symbol =
+ importedAccessibleSymbol(imp, name, requireExplicit = false)
+
+ private def importedAccessibleSymbol(imp: ImportInfo, name: Name, requireExplicit: Boolean): Symbol =
+ imp.importedSymbol(name, requireExplicit) filter (s => isAccessible(s, imp.qual.tpe, superAccess = false))
+
+ /** Is `sym` defined in package object of package `pkg`?
+ * Since sym may be defined in some parent of the package object,
+ * we cannot inspect its owner only; we have to go through the
+ * info of the package object. However, to avoid cycles we try whatever
+ * other checks we can before going that route.
+ */
+ def isInPackageObject(sym: Symbol, pkg: Symbol): Boolean = {
+ def uninitialized(what: String) = {
+ log(s"Cannot look for $sym in package object of $pkg; $what is not initialized.")
+ false
+ }
+ def pkgClass = if (pkg.isTerm) pkg.moduleClass else pkg
+ def matchesInfo = (
+ // need to be careful here to not get a cyclic reference during bootstrap
+ if (pkg.isInitialized) {
+ val module = pkg.info member nme.PACKAGEkw
+ if (module.isInitialized)
+ module.info.member(sym.name).alternatives contains sym
+ else
+ uninitialized("" + module)
+ }
+ else uninitialized("" + pkg)
+ )
+ def inPackageObject(sym: Symbol) = (
+ // To be in the package object, one of these must be true:
+ // 1) sym.owner is a package object class, and sym.owner.owner is the package class for `pkg`
+ // 2) sym.owner is inherited by the correct package object class
+ // We try to establish 1) by inspecting the owners directly, and then we try
+ // to rule out 2), and only if both those fail do we resort to looking in the info.
+ !sym.isPackage && (sym.owner ne NoSymbol) && (
+ if (sym.owner.isPackageObjectClass)
+ sym.owner.owner == pkgClass
+ else
+ !sym.owner.isPackageClass && matchesInfo
+ )
+ )
+
+ // An overloaded symbol might not have the expected owner!
+ // The alternatives must be inspected directly.
+ pkgClass.isPackageClass && (
+ if (sym.isOverloaded)
+ sym.alternatives forall (isInPackageObject(_, pkg))
+ else
+ inPackageObject(sym)
+ )
+ }
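
As a concrete (illustrative, self-contained) instance of the situation the check above handles: a member defined only in a package object is still reachable as a member of the package, so the lookup may need to consult the package object's info:

// Illustrative only: `p.greeting` is found via the package object of `p`.
package object p {
  def greeting: String = "hello from p's package object"
}

package q {
  object Main {
    def main(args: Array[String]): Unit =
      println(p.greeting)
  }
}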
+
+ def isNameInScope(name: Name) = lookupSymbol(name, _ => true).isSuccess
+
+ /** Find the symbol of a simple name starting from this context.
+ * All names are filtered through the "qualifies" predicate,
+ * the search continuing as long as no qualifying name is found.
+ */
+ def lookupSymbol(name: Name, qualifies: Symbol => Boolean): NameLookup = {
+ var lookupError: NameLookup = null // set to non-null if a definite error is encountered
+ var inaccessible: NameLookup = null // records inaccessible symbol for error reporting in case none is found
+ var defSym: Symbol = NoSymbol // the directly found symbol
+ var pre: Type = NoPrefix // the prefix type of defSym, if a class member
+ var cx: Context = this // the context under consideration
+ var symbolDepth: Int = -1 // the depth of the directly found symbol
+
+ def finish(qual: Tree, sym: Symbol): NameLookup = (
+ if (lookupError ne null) lookupError
+ else sym match {
+ case NoSymbol if inaccessible ne null => inaccessible
+ case NoSymbol => LookupNotFound
+ case _ => LookupSucceeded(qual, sym)
+ }
+ )
+ def finishDefSym(sym: Symbol, pre0: Type): NameLookup =
+ if (requiresQualifier(sym))
+ finish(gen.mkAttributedQualifier(pre0), sym)
+ else
+ finish(EmptyTree, sym)
+
+ def isPackageOwnedInDifferentUnit(s: Symbol) = (
+ s.isDefinedInPackage && (
+ !currentRun.compiles(s)
+ || unit.exists && s.sourceFile != unit.source.file
+ )
+ )
+ def requiresQualifier(s: Symbol) = (
+ s.owner.isClass
+ && !s.owner.isPackageClass
+ && !s.isTypeParameterOrSkolem
+ )
+ def lookupInPrefix(name: Name) = pre member name filter qualifies
+ def accessibleInPrefix(s: Symbol) = isAccessible(s, pre, superAccess = false)
+
+ def searchPrefix = {
+ cx = cx.enclClass
+ val found0 = lookupInPrefix(name)
+ val found1 = found0 filter accessibleInPrefix
+ if (found0.exists && !found1.exists && inaccessible == null)
+ inaccessible = LookupInaccessible(found0, analyzer.lastAccessCheckDetails)
+
+ found1
+ }
+
+ def lookupInScope(scope: Scope) =
+ (scope lookupUnshadowedEntries name filter (e => qualifies(e.sym))).toList
+
+ def newOverloaded(owner: Symbol, pre: Type, entries: List[ScopeEntry]) =
+ logResult(s"!!! lookup overloaded")(owner.newOverloaded(pre, entries map (_.sym)))
+
+ // Constructor lookup should only look in the decls of the enclosing class
+ // not in the self-type, nor in the enclosing context, nor in imports (SI-4460, SI-6745)
+ if (name == nme.CONSTRUCTOR) return {
+ val enclClassSym = cx.enclClass.owner
+ val scope = cx.enclClass.prefix.baseType(enclClassSym).decls
+ val constructorSym = lookupInScope(scope) match {
+ case Nil => NoSymbol
+ case hd :: Nil => hd.sym
+ case entries => newOverloaded(enclClassSym, cx.enclClass.prefix, entries)
+ }
+ finishDefSym(constructorSym, cx.enclClass.prefix)
+ }
+
+ // cx.scope eq null arises during FixInvalidSyms in Duplicators
+ while (defSym == NoSymbol && (cx ne NoContext) && (cx.scope ne null)) {
+ pre = cx.enclClass.prefix
+ defSym = lookupInScope(cx.scope) match {
+ case Nil => searchPrefix
+ case entries @ (hd :: tl) =>
+ // we have a winner: record the symbol depth
+ symbolDepth = (cx.depth - cx.scope.nestingLevel) + hd.depth
+ if (tl.isEmpty) hd.sym
+ else newOverloaded(cx.owner, pre, entries)
+ }
+ if (!defSym.exists)
+ cx = cx.outer // push further outward
+ }
+ if (symbolDepth < 0)
+ symbolDepth = cx.depth
+
+ var impSym: Symbol = NoSymbol
+ var imports = Context.this.imports
+ def imp1 = imports.head
+ def imp2 = imports.tail.head
+ def sameDepth = imp1.depth == imp2.depth
+ def imp1Explicit = imp1 isExplicitImport name
+ def imp2Explicit = imp2 isExplicitImport name
+
+ def lookupImport(imp: ImportInfo, requireExplicit: Boolean) =
+ importedAccessibleSymbol(imp, name, requireExplicit) filter qualifies
+
+ // Java: A single-type-import declaration d in a compilation unit c of package p
+ // that imports a type named n shadows, throughout c, the declarations of:
+ //
+ // 1) any top level type named n declared in another compilation unit of p
+ //
+ // A type-import-on-demand declaration never causes any other declaration to be shadowed.
+ //
+ // Scala: Bindings of different kinds have a precedence defined on them:
+ //
+ // 1) Definitions and declarations that are local, inherited, or made available by a
+ // package clause in the same compilation unit where the definition occurs have
+ // highest precedence.
+ // 2) Explicit imports have next highest precedence.
+ def depthOk(imp: ImportInfo) = (
+ imp.depth > symbolDepth
+ || (unit.isJava && imp.isExplicitImport(name) && imp.depth == symbolDepth)
+ )
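
The Scala side of the precedence rules quoted above can be exercised with a small self-contained example (all names below are invented for illustration):

// Illustrative only: local definitions beat explicit imports, which beat wildcard imports.
object PrecedenceExample {
  object a { val x = "a.x"; val y = "a.y" }
  object b { val x = "b.x" }

  def demo(): (String, String) = {
    import a._        // wildcard import: lowest precedence of the three bindings below
    import b.x        // explicit import: shadows the wildcard's `x`
    val y = "local.y" // local definition: shadows the wildcard's `y`
    (x, y)            // ("b.x", "local.y")
  }

  def main(args: Array[String]): Unit = println(demo())
}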
+
+ while (!impSym.exists && imports.nonEmpty && depthOk(imports.head)) {
+ impSym = lookupImport(imp1, requireExplicit = false)
+ if (!impSym.exists)
+ imports = imports.tail
+ }
+
+ if (defSym.exists && impSym.exists) {
+ // imported symbols take precedence over package-owned symbols in different compilation units.
+ if (isPackageOwnedInDifferentUnit(defSym))
+ defSym = NoSymbol
+ // Defined symbols take precedence over erroneous imports.
+ else if (impSym.isError || impSym.name == nme.CONSTRUCTOR)
+ impSym = NoSymbol
+ // Otherwise they are irreconcilably ambiguous
+ else
+ return ambiguousDefnAndImport(defSym.owner, imp1)
+ }
+
+ // At this point only one or the other of defSym and impSym might be set.
+ if (defSym.exists)
+ finishDefSym(defSym, pre)
+ else if (impSym.exists) {
+ // We continue walking down the imports as long as the tail is non-empty, which gives us:
+ // imports == imp1 :: imp2 :: _
+ // And at least one of the following is true:
+ // - imp1 and imp2 are at the same depth
+ // - imp1 is a wildcard import, so all explicit imports from outer scopes must be checked
+ def keepLooking = (
+ lookupError == null
+ && imports.tail.nonEmpty
+ && (sameDepth || !imp1Explicit)
+ )
+ // If we find a competitor imp2 which imports the same name, possible outcomes are:
+ //
+ // - same depth, imp1 wild, imp2 explicit: imp2 wins, drop imp1
+ // - same depth, imp1 wild, imp2 wild: ambiguity check
+ // - same depth, imp1 explicit, imp2 explicit: ambiguity check
+ // - differing depth, imp1 wild, imp2 explicit: ambiguity check
+ // - all others: imp1 wins, drop imp2
+ //
+ // The ambiguity check is: if we can verify that both imports refer to the same
+ // symbol (e.g. import foo.X followed by import foo._) then we discard imp2
+ // and proceed. If we cannot, issue an ambiguity error.
+ while (keepLooking) {
+ // If not at the same depth, limit the lookup to explicit imports.
+ // This is desirable from a performance standpoint (compare to
+ // filtering after the fact) but also necessary to keep the unused
+ // import check from being misled by symbol lookups which are not
+ // actually used.
+ val other = lookupImport(imp2, requireExplicit = !sameDepth)
+ def imp1wins() = { imports = imp1 :: imports.tail.tail }
+ def imp2wins() = { impSym = other ; imports = imports.tail }
+
+ if (!other.exists) // imp1 wins; drop imp2 and continue.
+ imp1wins()
+ else if (sameDepth && !imp1Explicit && imp2Explicit) // imp2 wins; drop imp1 and continue.
+ imp2wins()
+ else resolveAmbiguousImport(name, imp1, imp2) match {
+ case Some(imp) => if (imp eq imp1) imp1wins() else imp2wins()
+ case _ => lookupError = ambiguousImports(imp1, imp2)
+ }
+ }
+ // optimization: don't write out package prefixes
+ finish(resetPos(imp1.qual.duplicate), impSym)
+ }
+ else finish(EmptyTree, NoSymbol)
+ }
+
/**
* Find a symbol in this context or one of its outers.
*
@@ -731,12 +1170,84 @@ trait Contexts { self: Analyzer =>
}
} //class Context
+ /** A `Context` focussed on an `Import` tree */
+ trait ImportContext extends Context {
+ private val impInfo: ImportInfo = {
+ val info = new ImportInfo(tree.asInstanceOf[Import], outerDepth)
+ if (settings.lint && !isRootImport) // excludes java.lang/scala/Predef imports
+ allImportInfos(unit) ::= info
+ info
+ }
+ override final def imports = impInfo :: super.imports
+ override final def firstImport = Some(impInfo)
+ override final def isRootImport = !tree.pos.isDefined
+ override final def toString = s"ImportContext { $impInfo; outer.owner = ${outer.owner} }"
+ }
+
+ /** A buffer for warnings and errors that are accumulated during speculative type checking. */
+ final class ReportBuffer {
+ type Error = AbsTypeError
+ type Warning = (Position, String)
+
+ private def newBuffer[A] = mutable.LinkedHashSet.empty[A] // Important to use LinkedHS for stable results.
+
+ // [JZ] Contexts, pre- the SI-7345 refactor, avoided allocating the buffers until needed. This
+ // is replicated here out of conservatism.
+ private var _errorBuffer: mutable.LinkedHashSet[Error] = _
+ private def errorBuffer = {if (_errorBuffer == null) _errorBuffer = newBuffer; _errorBuffer}
+ def errors: immutable.Seq[Error] = errorBuffer.toVector
+
+ private var _warningBuffer: mutable.LinkedHashSet[Warning] = _
+ private def warningBuffer = {if (_warningBuffer == null) _warningBuffer = newBuffer; _warningBuffer}
+ def warnings: immutable.Seq[Warning] = warningBuffer.toVector
+
+ def +=(error: AbsTypeError): this.type = {
+ errorBuffer += error
+ this
+ }
+ def ++=(errors: Traversable[AbsTypeError]): this.type = {
+ errorBuffer ++= errors
+ this
+ }
+ def +=(warning: Warning): this.type = {
+ warningBuffer += warning
+ this
+ }
+
+ def clearAll(): this.type = {
+ clearAllErrors(); clearAllWarnings();
+ }
+
+ def clearAllErrors(): this.type = {
+ errorBuffer.clear()
+ this
+ }
+ def clearErrors(removeF: PartialFunction[AbsTypeError, Boolean]): this.type = {
+ errorBuffer.retain(!PartialFunction.cond(_)(removeF))
+ this
+ }
+ def retainErrors(leaveF: PartialFunction[AbsTypeError, Boolean]): this.type = {
+ errorBuffer.retain(PartialFunction.cond(_)(leaveF))
+ this
+ }
+ def clearAllWarnings(): this.type = {
+ warningBuffer.clear()
+ this
+ }
+
+ def hasErrors = errorBuffer.nonEmpty
+ def firstError = errorBuffer.headOption
+ }
+
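The "LinkedHashSet" remark above is about determinism: a mutable.LinkedHashSet iterates in insertion order, so buffered errors are replayed in the order they were issued. A small self-contained sketch of that property and of the PartialFunction.cond-based retain used by retainErrors (the strings stand in for AbsTypeErrors; this is illustrative code, not part of the patch):

import scala.collection.mutable

object ReportBufferSketch extends App {
  val buffer = mutable.LinkedHashSet.empty[String]
  buffer += "error: first"
  buffer += "error: second"
  buffer += "error: third"
  println(buffer.toVector)   // Vector(error: first, error: second, error: third) -- insertion order

  // retainErrors keeps only the errors matched by the partial function:
  val keepSecond: PartialFunction[String, Boolean] = { case s if s contains "second" => true }
  buffer.retain(PartialFunction.cond(_)(keepSecond))
  println(buffer.toVector)   // Vector(error: second)
}
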
class ImportInfo(val tree: Import, val depth: Int) {
+ def pos = tree.pos
+ def posOf(sel: ImportSelector) = tree.pos withPoint sel.namePos
+
/** The prefix expression */
def qual: Tree = tree.symbol.info match {
case ImportType(expr) => expr
- case ErrorType => tree setType NoType // fix for #2870
- case _ => throw new FatalError("symbol " + tree.symbol + " has bad type: " + tree.symbol.info) //debug
+ case ErrorType => tree setType NoType // fix for #2870
+ case _ => throw new FatalError("symbol " + tree.symbol + " has bad type: " + tree.symbol.info) //debug
}
/** Is name imported explicitly, not via wildcard? */
@@ -745,25 +1256,53 @@ trait Contexts { self: Analyzer =>
/** The symbol with name `name` imported from import clause `tree`.
*/
- def importedSymbol(name: Name): Symbol = {
+ def importedSymbol(name: Name): Symbol = importedSymbol(name, requireExplicit = false)
+
+ private def recordUsage(sel: ImportSelector, result: Symbol) {
+ def posstr = pos.source.file.name + ":" + posOf(sel).safeLine
+ def resstr = if (tree.symbol.hasCompleteInfo) s"(qual=$qual, $result)" else s"(expr=${tree.expr}, ${result.fullLocationString})"
+ debuglog(s"In $this at $posstr, selector '${selectorString(sel)}' resolved to $resstr")
+ allUsedSelectors(this) += sel
+ }
+
+ /** If requireExplicit is true, wildcard imports are not considered. */
+ def importedSymbol(name: Name, requireExplicit: Boolean): Symbol = {
var result: Symbol = NoSymbol
var renamed = false
var selectors = tree.selectors
- while (selectors != Nil && result == NoSymbol) {
- if (selectors.head.rename == name.toTermName)
+ def current = selectors.head
+ while (selectors.nonEmpty && result == NoSymbol) {
+ if (current.rename == name.toTermName)
result = qual.tpe.nonLocalMember( // new to address #2733: consider only non-local members for imports
- if (name.isTypeName) selectors.head.name.toTypeName else selectors.head.name)
- else if (selectors.head.name == name.toTermName)
+ if (name.isTypeName) current.name.toTypeName else current.name)
+ else if (current.name == name.toTermName)
renamed = true
- else if (selectors.head.name == nme.WILDCARD && !renamed)
+ else if (current.name == nme.WILDCARD && !renamed && !requireExplicit)
result = qual.tpe.nonLocalMember(name)
- selectors = selectors.tail
+
+ if (result == NoSymbol)
+ selectors = selectors.tail
}
- result
+ if (settings.lint && selectors.nonEmpty && result != NoSymbol && pos != NoPosition)
+ recordUsage(current, result)
+
+ // Harden against the fallout from bugs like SI-6745
+ //
+ // [JZ] I considered issuing a devWarning and moving the
+ // check inside the above loop, as I believe that
+ // this always represents a mistake on the part of
+ // the caller.
+ if (definitions isImportable result) result
+ else NoSymbol
+ }
+ private def selectorString(s: ImportSelector): String = {
+ if (s.name == nme.WILDCARD && s.rename == null) "_"
+ else if (s.name == s.rename) "" + s.name
+ else s.name + " => " + s.rename
}
def allImportedSymbols: Iterable[Symbol] =
- qual.tpe.members flatMap (transformImport(tree.selectors, _))
+ importableMembers(qual.tpe) flatMap (transformImport(tree.selectors, _))
private def transformImport(selectors: List[ImportSelector], sym: Symbol): List[Symbol] = selectors match {
case List() => List()
@@ -774,10 +1313,123 @@ trait Contexts { self: Analyzer =>
case _ :: rest => transformImport(rest, sym)
}
- override def toString() = tree.toString()
+ override def hashCode = tree.##
+ override def equals(other: Any) = other match {
+ case that: ImportInfo => (tree == that.tree)
+ case _ => false
+ }
+ override def toString = tree.toString
}
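
The renamed flag in importedSymbol above implements the rule that renaming a member consumes its original name: a later wildcard in the same clause does not re-import it under the old name. A user-level sketch (the object p is hypothetical, not compiler code):

object SelectorSketch {
  object p { val X = 1; val Y = 2 }

  import p.{X => Z, _}
  val z = Z     // X is available only under its new name
  val y = Y     // the wildcard still brings in the remaining members
  // val x = X  // does not compile: X was renamed away and the wildcard does not restore it
}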
case class ImportType(expr: Tree) extends Type {
override def safeToString = "ImportType("+expr+")"
}
}
+
+object ContextMode {
+ private implicit def liftIntBitsToContextState(bits: Int): ContextMode = apply(bits)
+ def apply(bits: Int): ContextMode = new ContextMode(bits)
+ final val NOmode: ContextMode = 0
+
+ final val ReportErrors: ContextMode = 1 << 0
+ final val BufferErrors: ContextMode = 1 << 1
+ final val AmbiguousErrors: ContextMode = 1 << 2
+
+ /** Are we in a secondary constructor, after the self-constructor call (`this(...)`)? */
+ final val ConstructorSuffix: ContextMode = 1 << 3
+
+ /** For method context: were returns encountered? */
+ final val ReturnsSeen: ContextMode = 1 << 4
+
+ /** Is this context (enclosed in) a constructor call?
+ * (the call to the super or self constructor in the first line of a constructor.)
+ * In such a context, the object's fields should not be in scope
+ */
+ final val SelfSuperCall: ContextMode = 1 << 5
+
+ // TODO harvest documentation for this
+ final val ImplicitsEnabled: ContextMode = 1 << 6
+
+ final val MacrosEnabled: ContextMode = 1 << 7
+
+ /** To selectively allow enrichment in patterns, where other kinds of implicit conversions are not allowed */
+ final val EnrichmentEnabled: ContextMode = 1 << 8
+
+ /** Are we in a run of [[scala.tools.nsc.typechecker.TreeCheckers]]? */
+ final val Checking: ContextMode = 1 << 9
+
+ /** Are we retypechecking arguments independently from the function applied to them? See `Typer.tryTypedApply`
+ * TODO - iron out distinction/overlap with SecondTry.
+ */
+ final val ReTyping: ContextMode = 1 << 10
+
+ /** Are we typechecking pattern alternatives? Formerly ALTmode. */
+ final val PatternAlternative: ContextMode = 1 << 11
+
+ /** Are star patterns allowed? Formerly STARmode. */
+ final val StarPatterns: ContextMode = 1 << 12
+
+ /** Are we typing the "super" in a superclass constructor call super.<init>? Formerly SUPERCONSTRmode. */
+ final val SuperInit: ContextMode = 1 << 13
+
+ /* Is this the second attempt to type this tree? In that case functions
+ * may no longer be coerced with implicit views. Formerly SNDTRYmode.
+ */
+ final val SecondTry: ContextMode = 1 << 14
+
+ /** Are we in return position? Formerly RETmode. */
+ final val ReturnExpr: ContextMode = 1 << 15
+
+ /** Are unapplied type constructors allowed here? Formerly HKmode. */
+ final val TypeConstructorAllowed: ContextMode = 1 << 16
+
+ /** TODO: The "sticky modes" are EXPRmode, PATTERNmode, TYPEmode.
+ * To mimic the sticky mode behavior, when captain stickyfingers
+ * comes around we need to propagate those modes but forget the other
+ * context modes which were once mode bits; those being so far the
+ * ones listed here.
+ */
+ final val FormerNonStickyModes: ContextMode = (
+ PatternAlternative | StarPatterns | SuperInit | SecondTry | ReturnExpr | TypeConstructorAllowed
+ )
+
+ final val DefaultMode: ContextMode = MacrosEnabled
+
+ private val contextModeNameMap = Map(
+ ReportErrors -> "ReportErrors",
+ BufferErrors -> "BufferErrors",
+ AmbiguousErrors -> "AmbiguousErrors",
+ ConstructorSuffix -> "ConstructorSuffix",
+ SelfSuperCall -> "SelfSuperCall",
+ ImplicitsEnabled -> "ImplicitsEnabled",
+ MacrosEnabled -> "MacrosEnabled",
+ Checking -> "Checking",
+ ReTyping -> "ReTyping",
+ PatternAlternative -> "PatternAlternative",
+ StarPatterns -> "StarPatterns",
+ SuperInit -> "SuperInit",
+ SecondTry -> "SecondTry",
+ TypeConstructorAllowed -> "TypeConstructorAllowed"
+ )
+}
+
+/**
+ * A value class to carry the boolean flags of a context, such as whether errors should
+ * be buffered or reported.
+ */
+final class ContextMode private (val bits: Int) extends AnyVal {
+ import ContextMode._
+
+ def &(other: ContextMode): ContextMode = new ContextMode(bits & other.bits)
+ def |(other: ContextMode): ContextMode = new ContextMode(bits | other.bits)
+ def &~(other: ContextMode): ContextMode = new ContextMode(bits & ~(other.bits))
+ def set(value: Boolean, mask: ContextMode) = if (value) |(mask) else &~(mask)
+
+ def inAll(required: ContextMode) = (this & required) == required
+ def inAny(required: ContextMode) = (this & required) != NOmode
+ def inNone(prohibited: ContextMode) = (this & prohibited) == NOmode
+
+ override def toString =
+ if (bits == 0) "NOmode"
+ else (contextModeNameMap filterKeys inAll).values.toList.sorted mkString " "
+}
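
ContextMode packs its flags into a single Int behind an AnyVal wrapper, so setting and querying flags is plain bit arithmetic, typically without allocation. A cut-down standalone sketch of the same pattern (names are illustrative, not the compiler's):

final class Flags private (val bits: Int) extends AnyVal {
  def |(other: Flags): Flags  = new Flags(bits | other.bits)
  def &(other: Flags): Flags  = new Flags(bits & other.bits)
  def &~(other: Flags): Flags = new Flags(bits & ~other.bits)
  def set(value: Boolean, mask: Flags): Flags = if (value) this | mask else this &~ mask
  def inAll(required: Flags): Boolean    = (this & required).bits == required.bits
  def inAny(required: Flags): Boolean    = (this & required).bits != 0
  def inNone(prohibited: Flags): Boolean = (this & prohibited).bits == 0
}

object Flags {
  val NoFlags: Flags      = new Flags(0)
  val ReportErrors: Flags = new Flags(1 << 0)
  val BufferErrors: Flags = new Flags(1 << 1)

  // e.g. switching from reporting to buffering:
  // NoFlags.set(value = true, ReportErrors)
  //        .set(value = false, ReportErrors)
  //        .set(value = true, BufferErrors)
  //        .inAll(BufferErrors)   // true
}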
diff --git a/src/compiler/scala/tools/nsc/typechecker/DestructureTypes.scala b/src/compiler/scala/tools/nsc/typechecker/DestructureTypes.scala
index 3e249e57bb..73572bcae9 100644
--- a/src/compiler/scala/tools/nsc/typechecker/DestructureTypes.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/DestructureTypes.scala
@@ -6,8 +6,6 @@
package scala.tools.nsc
package typechecker
-import scala.language.implicitConversions
-
/** A generic means of breaking down types into their subcomponents.
* Types are decomposed top down, and recognizable substructure is
* dispatched via self-apparently named methods. Those methods can
@@ -37,8 +35,6 @@ trait DestructureTypes {
def wrapSequence(nodes: List[Node]): Node
def wrapAtom[U](value: U): Node
- private implicit def liftToTerm(name: String): TermName = newTermName(name)
-
private val openSymbols = scala.collection.mutable.Set[Symbol]()
private def nodeList[T](elems: List[T], mkNode: T => Node): Node =
@@ -68,15 +64,6 @@ trait DestructureTypes {
},
tree.productPrefix
)
- def wrapSymbol(label: String, sym: Symbol): Node = {
- if (sym eq NoSymbol) wrapEmpty
- else atom(label, sym)
- }
- def wrapInfo(sym: Symbol) = sym.info match {
- case TypeBounds(lo, hi) => typeBounds(lo, hi)
- case PolyType(tparams, restpe) => polyFunction(tparams, restpe)
- case _ => wrapEmpty
- }
def wrapSymbolInfo(sym: Symbol): Node = {
if ((sym eq NoSymbol) || openSymbols(sym)) wrapEmpty
else {
@@ -99,7 +86,6 @@ trait DestructureTypes {
def constant(label: String, const: Constant): Node = atom(label, const)
def scope(decls: Scope): Node = node("decls", scopeMemberList(decls.toList))
- def const[T](named: (String, T)): Node = constant(named._1, Constant(named._2))
def resultType(restpe: Type): Node = this("resultType", restpe)
def typeParams(tps: List[Symbol]): Node = node("typeParams", symbolList(tps))
@@ -188,7 +174,6 @@ trait DestructureTypes {
case AntiPolyType(pre, targs) => product(tp, prefix(pre), typeArgs(targs))
case ClassInfoType(parents, decls, clazz) => product(tp, parentList(parents), scope(decls), wrapAtom(clazz))
case ConstantType(const) => product(tp, constant("value", const))
- case DeBruijnIndex(level, index, args) => product(tp, const("level" -> level), const("index" -> index), typeArgs(args))
case OverloadedType(pre, alts) => product(tp, prefix(pre), node("alts", typeList(alts map pre.memberType)))
case RefinedType(parents, decls) => product(tp, parentList(parents), scope(decls))
case SingleType(pre, sym) => product(tp, prefix(pre), wrapAtom(sym))
diff --git a/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala b/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala
index f6142a81be..95b771a8a5 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala
@@ -17,12 +17,7 @@ import scala.collection.{ mutable, immutable }
*/
abstract class Duplicators extends Analyzer {
import global._
- import definitions.{ AnyRefClass, AnyValClass }
-
- def retyped(context: Context, tree: Tree): Tree = {
- resetClassOwners
- (newBodyDuplicator(context)).typed(tree)
- }
+ import definitions._
/** Retype the given tree in the given context. Use this method when retyping
* a method in a different class. The typer will replace references to the this of
@@ -33,7 +28,7 @@ abstract class Duplicators extends Analyzer {
if (oldThis ne newThis) {
oldClassOwner = oldThis
newClassOwner = newThis
- } else resetClassOwners
+ } else resetClassOwners()
envSubstitution = new SubstSkolemsTypeMap(env.keysIterator.toList, env.valuesIterator.toList)
debuglog("retyped with env: " + env)
@@ -42,9 +37,6 @@ abstract class Duplicators extends Analyzer {
protected def newBodyDuplicator(context: Context) = new BodyDuplicator(context)
- def retypedMethod(context: Context, tree: Tree, oldThis: Symbol, newThis: Symbol): Tree =
- (newBodyDuplicator(context)).retypedMethod(tree.asInstanceOf[DefDef], oldThis, newThis)
-
/** Return the special typer for duplicate method bodies. */
override def newTyper(context: Context): Typer =
newBodyDuplicator(context)
@@ -88,7 +80,7 @@ abstract class Duplicators extends Analyzer {
BodyDuplicator.super.silent(_.typedType(Ident(sym.name))) match {
case SilentResultValue(t) =>
sym1 = t.symbol
- debuglog("fixed by trying harder: "+(sym, sym1, context))
+ debuglog("fixed by trying harder: "+((sym, sym1, context)))
case _ =>
}
}
@@ -186,31 +178,6 @@ abstract class Duplicators extends Analyzer {
stats.foreach(invalidate(_, owner))
}
- def retypedMethod(ddef: DefDef, oldThis: Symbol, newThis: Symbol): Tree = {
- oldClassOwner = oldThis
- newClassOwner = newThis
- invalidateAll(ddef.tparams)
- mforeach(ddef.vparamss) { vdef =>
- invalidate(vdef)
- vdef.tpe = null
- }
- ddef.symbol = NoSymbol
- enterSym(context, ddef)
- debuglog("remapping this of " + oldClassOwner + " to " + newClassOwner)
- typed(ddef)
- }
-
- private def inspectTpe(tpe: Type) = {
- tpe match {
- case MethodType(_, res) =>
- res + ", " + res.bounds.hi + ", " + (res.bounds.hi match {
- case TypeRef(_, _, args) if (args.length > 0) => args(0) + ", " + args(0).bounds.hi
- case _ => "non-tref: " + res.bounds.hi.getClass
- })
- case _ =>
- }
- }
-
/** Optionally cast this tree into some other type, if required.
* Unless overridden, just returns the tree.
*/
@@ -230,10 +197,10 @@ abstract class Duplicators extends Analyzer {
* their symbols are recreated ad-hoc and their types are fixed inline, instead of letting the
* namer/typer handle them, or Idents that refer to them.
*/
- override def typed(tree: Tree, mode: Int, pt: Type): Tree = {
+ override def typed(tree: Tree, mode: Mode, pt: Type): Tree = {
debuglog("typing " + tree + ": " + tree.tpe + ", " + tree.getClass)
val origtreesym = tree.symbol
- if (tree.hasSymbol && tree.symbol != NoSymbol
+ if (tree.hasSymbolField && tree.symbol != NoSymbol
&& !tree.symbol.isLabel // labels cannot be retyped by the type checker as LabelDef has no ValDef/return type trees
&& invalidSyms.isDefinedAt(tree.symbol)) {
debuglog("removed symbol " + tree.symbol)
@@ -243,40 +210,35 @@ abstract class Duplicators extends Analyzer {
tree match {
case ttree @ TypeTree() =>
// log("fixing tpe: " + tree.tpe + " with sym: " + tree.tpe.typeSymbol)
- ttree.tpe = fixType(ttree.tpe)
- ttree
+ ttree modifyType fixType
case Block(stats, res) =>
debuglog("invalidating block")
invalidateAll(stats)
invalidate(res)
- tree.tpe = null
- super.typed(tree, mode, pt)
+ super.typed(tree.clearType(), mode, pt)
case ClassDef(_, _, _, tmpl @ Template(parents, _, stats)) =>
// log("invalidating classdef " + tree)
tmpl.symbol = tree.symbol.newLocalDummy(tree.pos)
invalidateAll(stats, tree.symbol)
- tree.tpe = null
- super.typed(tree, mode, pt)
+ super.typed(tree.clearType(), mode, pt)
case ddef @ DefDef(_, _, _, _, tpt, rhs) =>
- ddef.tpt.tpe = fixType(ddef.tpt.tpe)
- ddef.tpe = null
- super.typed(ddef, mode, pt)
+ ddef.tpt modifyType fixType
+ super.typed(ddef.clearType(), mode, pt)
case vdef @ ValDef(mods, name, tpt, rhs) =>
// log("vdef fixing tpe: " + tree.tpe + " with sym: " + tree.tpe.typeSymbol + " and " + invalidSyms)
//if (mods.hasFlag(Flags.LAZY)) vdef.symbol.resetFlag(Flags.MUTABLE) // Martin to Iulian: lazy vars can now appear because they are no longer boxed; Please check that deleting this statement is OK.
- vdef.tpt.tpe = fixType(vdef.tpt.tpe)
- vdef.tpe = null
- super.typed(vdef, mode, pt)
+ vdef.tpt modifyType fixType
+ super.typed(vdef.clearType(), mode, pt)
case ldef @ LabelDef(name, params, rhs) =>
// log("label def: " + ldef)
// in case the rhs contains any definitions -- TODO: is this necessary?
invalidate(rhs)
- ldef.tpe = null
+ ldef.clearType()
// is this LabelDef generated by tailcalls?
val isTailLabel = (ldef.params.length >= 1) && (ldef.params.head.name == nme.THIS)
@@ -294,27 +256,23 @@ abstract class Duplicators extends Analyzer {
val params1 = params map newParam
val rhs1 = (new TreeSubstituter(params map (_.symbol), params1) transform rhs) // TODO: duplicate?
- rhs1.tpe = null
- super.typed(treeCopy.LabelDef(tree, name, params1, rhs1), mode, pt)
+ super.typed(treeCopy.LabelDef(tree, name, params1, rhs1.clearType()), mode, pt)
case Bind(name, _) =>
// log("bind: " + tree)
invalidate(tree)
- tree.tpe = null
- super.typed(tree, mode, pt)
+ super.typed(tree.clearType(), mode, pt)
case Ident(_) if tree.symbol.isLabel =>
debuglog("Ident to labeldef " + tree + " switched to ")
tree.symbol = updateSym(tree.symbol)
- tree.tpe = null
- super.typed(tree, mode, pt)
+ super.typed(tree.clearType(), mode, pt)
case Ident(_) if (origtreesym ne null) && origtreesym.isLazy =>
debuglog("Ident to a lazy val " + tree + ", " + tree.symbol + " updated to " + origtreesym)
tree.symbol = updateSym(origtreesym)
- tree.tpe = null
- super.typed(tree, mode, pt)
+ super.typed(tree.clearType(), mode, pt)
case Select(th @ This(_), sel) if (oldClassOwner ne null) && (th.symbol == oldClassOwner) =>
// We use the symbol name instead of the tree name because the symbol
@@ -336,9 +294,15 @@ abstract class Duplicators extends Analyzer {
case ((alt, tpe)) :: Nil =>
log(s"Arrested overloaded type in Duplicators, narrowing to ${alt.defStringSeenAs(tpe)}\n Overload was: $memberString")
Select(This(newClassOwner), alt)
- case _ =>
- log(s"Could not disambiguate $memberString in Duplicators. Attempting name-based selection, but this may not end well...")
- nameSelection
+ case xs =>
+ alts filter (alt => (alt.paramss corresponds tree.symbol.paramss)(_.size == _.size)) match {
+ case alt :: Nil =>
+ log(s"Resorted to parameter list arity to disambiguate to $alt\n Overload was: $memberString")
+ Select(This(newClassOwner), alt)
+ case _ =>
+ log(s"Could not disambiguate $memberTypes. Attempting name-based selection, but we may crash later.")
+ nameSelection
+ }
}
}
else nameSelection
@@ -367,7 +331,7 @@ abstract class Duplicators extends Analyzer {
super.typed(atPos(tree.pos)(tree1))
*/
case Match(scrut, cases) =>
- val scrut1 = typed(scrut, EXPRmode | BYVALmode, WildcardType)
+ val scrut1 = typedByValueExpr(scrut)
val scrutTpe = scrut1.tpe.widen
val cases1 = {
if (scrutTpe.isFinalType) cases filter {
@@ -382,8 +346,8 @@ abstract class Duplicators extends Analyzer {
// Without this, AnyRef specializations crash on patterns like
// case _: Boolean => ...
// Not at all sure this is safe.
- else if (scrutTpe <:< AnyRefClass.tpe)
- cases filterNot (_.pat.tpe <:< AnyValClass.tpe)
+ else if (scrutTpe <:< AnyRefTpe)
+ cases filterNot (_.pat.tpe <:< AnyValTpe)
else
cases
}
@@ -397,7 +361,7 @@ abstract class Duplicators extends Analyzer {
case _ =>
debuglog("Duplicators default case: " + tree.summaryString)
debuglog(" ---> " + tree)
- if (tree.hasSymbol && tree.symbol != NoSymbol && (tree.symbol.owner == definitions.AnyClass)) {
+ if (tree.hasSymbolField && tree.symbol != NoSymbol && (tree.symbol.owner == AnyClass)) {
tree.symbol = NoSymbol // maybe we can find a more specific member in a subclass of Any (see AnyVal members, like ==)
}
val ntree = castType(tree, pt)
diff --git a/src/compiler/scala/tools/nsc/typechecker/EtaExpansion.scala b/src/compiler/scala/tools/nsc/typechecker/EtaExpansion.scala
index 57b9dfe3e4..7092f00bff 100644
--- a/src/compiler/scala/tools/nsc/typechecker/EtaExpansion.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/EtaExpansion.scala
@@ -33,7 +33,7 @@ trait EtaExpansion { self: Analyzer =>
}
/** <p>
- * Expand partial function applications of type <code>type</code>.
+ * Expand partial function applications of type `type`.
* </p><pre>
* p.f(es_1)...(es_n)
* ==> {
@@ -56,11 +56,8 @@ trait EtaExpansion { self: Analyzer =>
}
val defs = new ListBuffer[Tree]
- /** Append to <code>defs</code> value definitions for all non-stable
- * subexpressions of the function application <code>tree</code>.
- *
- * @param tree ...
- * @return ...
+ /* Append to `defs` value definitions for all non-stable
+ * subexpressions of the function application `tree`.
*/
def liftoutPrefix(tree: Tree): Tree = {
def liftout(tree: Tree, byName: Boolean): Tree =
@@ -97,12 +94,12 @@ trait EtaExpansion { self: Analyzer =>
// with repeated params, there might be more or fewer args than params
liftout(arg, byName(i).getOrElse(false))
}
- treeCopy.Apply(tree, liftoutPrefix(fn), newArgs) setType null
+ treeCopy.Apply(tree, liftoutPrefix(fn), newArgs).clearType()
case TypeApply(fn, args) =>
- treeCopy.TypeApply(tree, liftoutPrefix(fn), args) setType null
+ treeCopy.TypeApply(tree, liftoutPrefix(fn), args).clearType()
case Select(qual, name) =>
val name = tree.symbol.name // account for renamed imports, SI-7233
- treeCopy.Select(tree, liftout(qual, false), name) setSymbol NoSymbol setType null
+ treeCopy.Select(tree, liftout(qual, byName = false), name).clearType() setSymbol NoSymbol
case Ident(name) =>
tree
}
@@ -110,8 +107,7 @@ trait EtaExpansion { self: Analyzer =>
tree1
}
- /** Eta-expand lifted tree.
- */
+ /* Eta-expand lifted tree. */
def expand(tree: Tree, tpe: Type): Tree = tpe match {
case mt @ MethodType(paramSyms, restpe) if !mt.isImplicit =>
val params: List[(ValDef, Boolean)] = paramSyms.map {
@@ -119,7 +115,7 @@ trait EtaExpansion { self: Analyzer =>
val origTpe = sym.tpe
val isRepeated = definitions.isRepeatedParamType(origTpe)
// SI-4176 Don't leak A* in eta-expanded function types. See t4176b.scala
- val droppedStarTpe = if (settings.etaExpandKeepsStar.value) origTpe else dropRepeatedParamType(origTpe)
+ val droppedStarTpe = if (settings.etaExpandKeepsStar) origTpe else dropIllegalStarTypes(origTpe)
val valDef = ValDef(Modifiers(SYNTHETIC | PARAM), sym.name.toTermName, TypeTree(droppedStarTpe), EmptyTree)
(valDef, isRepeated)
}
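
The rewrite outlined in EtaExpansion's scaladoc above (lift unstable prefixes into vals, then wrap the application in a function) is what the user observes when a method is converted to a function value. A small illustration under that reading, not compiler code:

object EtaExpansionSketch extends App {
  def add(x: Int, y: Int): Int = x + y

  // Requesting a function value from a method triggers eta-expansion, conceptually:
  //   add  ==>  (x: Int, y: Int) => add(x, y)
  val f: (Int, Int) => Int = add _
  println(f(1, 2))   // 3
}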
diff --git a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
index 35a4461ccc..b53efafdd4 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
@@ -8,7 +8,8 @@
//todo: disallow C#D in superclass
//todo: treat :::= correctly
-package scala.tools.nsc
+package scala
+package tools.nsc
package typechecker
import scala.annotation.tailrec
@@ -30,11 +31,11 @@ trait Implicits {
import global._
import definitions._
import ImplicitsStats._
- import typeDebug.{ ptTree, ptBlock, ptLine }
+ import typeDebug.{ ptBlock, ptLine }
import global.typer.{ printTyping, deindentTyping, indentTyping, printInference }
def inferImplicit(tree: Tree, pt: Type, reportAmbiguous: Boolean, isView: Boolean, context: Context): SearchResult =
- inferImplicit(tree, pt, reportAmbiguous, isView, context, true, tree.pos)
+ inferImplicit(tree, pt, reportAmbiguous, isView, context, saveAmbiguousDivergent = true, tree.pos)
def inferImplicit(tree: Tree, pt: Type, reportAmbiguous: Boolean, isView: Boolean, context: Context, saveAmbiguousDivergent: Boolean): SearchResult =
inferImplicit(tree, pt, reportAmbiguous, isView, context, saveAmbiguousDivergent, tree.pos)
@@ -80,9 +81,12 @@ trait Implicits {
printTyping("typing implicit: %s %s".format(tree, context.undetparamsString))
val implicitSearchContext = context.makeImplicit(reportAmbiguous)
val result = new ImplicitSearch(tree, pt, isView, implicitSearchContext, pos).bestImplicit
- if ((result.isFailure || !settings.Xdivergence211.value) && saveAmbiguousDivergent && implicitSearchContext.hasErrors) {
- context.updateBuffer(implicitSearchContext.errBuffer.filter(err => err.kind == ErrorKinds.Ambiguous || err.kind == ErrorKinds.Divergent))
- debugwarn("update buffer: " + implicitSearchContext.errBuffer)
+ if (result.isFailure && saveAmbiguousDivergent && implicitSearchContext.hasErrors) {
+ context.updateBuffer(implicitSearchContext.reportBuffer.errors.collect {
+ case dte: DivergentImplicitTypeError => dte
+ case ate: AmbiguousImplicitTypeError => ate
+ })
+ debuglog("update buffer: " + implicitSearchContext.reportBuffer.errors)
}
printInference("[infer implicit] inferred " + result)
context.undetparams = context.undetparams filterNot result.subst.from.contains
@@ -101,24 +105,14 @@ trait Implicits {
def inferImplicit(tree: Tree, pt: Type, isView: Boolean, context: Context, silent: Boolean, withMacrosDisabled: Boolean, pos: Position, onError: (Position, String) => Unit): Tree = {
val wrapper1 = if (!withMacrosDisabled) (context.withMacrosEnabled[SearchResult] _) else (context.withMacrosDisabled[SearchResult] _)
def wrapper(inference: => SearchResult) = wrapper1(inference)
- def fail(reason: Option[String]) = {
- if (!silent) {
- if (context.hasErrors) onError(context.errBuffer.head.errPos, context.errBuffer.head.errMsg)
- else onError(pos, reason getOrElse "implicit search has failed. to find out the reason, turn on -Xlog-implicits")
- }
- EmptyTree
- }
- try {
- wrapper(inferImplicit(tree, pt, reportAmbiguous = true, isView = isView, context = context, saveAmbiguousDivergent = !silent, pos = pos)) match {
- case failure if failure.tree.isEmpty => fail(None)
- case success => success.tree
- }
- } catch {
- case ex: DivergentImplicit =>
- if (settings.Xdivergence211.value)
- debugwarn("this shouldn't happen. DivergentImplicit exception has been thrown with -Xdivergence211 turned on: "+ex)
- fail(Some("divergent implicit expansion"))
+ val result = wrapper(inferImplicit(tree, pt, reportAmbiguous = true, isView = isView, context = context, saveAmbiguousDivergent = !silent, pos = pos))
+ if (result.isFailure && !silent) {
+ val err = context.firstError
+ val errPos = err.map(_.errPos).getOrElse(pos)
+ val errMsg = err.map(_.errMsg).getOrElse("implicit search has failed. to find out the reason, turn on -Xlog-implicits")
+ onError(errPos, errMsg)
}
+ result.tree
}
/** Find all views from type `tp` (in which `tpars` are free)
@@ -137,7 +131,7 @@ trait Implicits {
val tvars = tpars map (TypeVar untouchable _)
val tpSubsted = tp.subst(tpars, tvars)
- val search = new ImplicitSearch(EmptyTree, functionType(List(tpSubsted), AnyClass.tpe), true, context.makeImplicit(false))
+ val search = new ImplicitSearch(EmptyTree, functionType(List(tpSubsted), AnyTpe), true, context.makeImplicit(reportAmbiguousErrors = false))
search.allImplicitsPoly(tvars)
}
@@ -157,7 +151,7 @@ trait Implicits {
}
/* Map a polytype to one in which all type parameters and argument-dependent types are replaced by wildcards.
- * Consider `implicit def b(implicit x: A): x.T = error("")`. We need to approximate DebruijnIndex types
+ * Consider `implicit def b(implicit x: A): x.T = error("")`. We need to approximate de Bruijn index types
* when checking whether `b` is a valid implicit, as we haven't even searched a value for the implicit arg `x`,
* so we have to approximate (otherwise it is excluded a priori).
*/
@@ -177,7 +171,6 @@ trait Implicits {
def isFailure = false
def isAmbiguousFailure = false
- // only used when -Xdivergence211 is turned on
def isDivergent = false
final def isSuccess = !isFailure
}
@@ -186,7 +179,6 @@ trait Implicits {
override def isFailure = true
}
- // only used when -Xdivergence211 is turned on
lazy val DivergentSearchFailure = new SearchResult(EmptyTree, EmptyTreeTypeSubstituter) {
override def isFailure = true
override def isDivergent = true
@@ -231,15 +223,7 @@ trait Implicits {
tp.isError
}
- /** Todo reconcile with definition of stability given in Types.scala */
- private def isStable(tp: Type): Boolean = tp match {
- case TypeRef(pre, sym, _) =>
- sym.isPackageClass ||
- sym.isModuleClass && isStable(pre) /*||
- sym.isAliasType && isStable(tp.normalize)*/
- case _ => tp.isStable
- }
- def isStablePrefix = isStable(pre)
+ def isStablePrefix = pre.isStable
override def equals(other: Any) = other match {
case that: ImplicitInfo =>
@@ -249,7 +233,10 @@ trait Implicits {
case _ => false
}
override def hashCode = name.## + pre.## + sym.##
- override def toString = name + ": " + tpe
+ override def toString = (
+ if (tpeCache eq null) name + ": ?"
+ else name + ": " + tpe
+ )
}
/** A class which is used to track pending implicits to prevent infinite implicit searches.
@@ -281,16 +268,13 @@ trait Implicits {
object HasMember {
private val hasMemberCache = perRunCaches.newMap[Name, Type]()
def apply(name: Name): Type = hasMemberCache.getOrElseUpdate(name, memberWildcardType(name, WildcardType))
- def unapply(pt: Type): Option[Name] = pt match {
- case RefinedType(List(WildcardType), Scope(sym)) if sym.tpe == WildcardType => Some(sym.name)
- case _ => None
}
- }
/** An extractor for types of the form ? { name: (? >: argtpe <: Any*)restp }
*/
object HasMethodMatching {
- val dummyMethod = NoSymbol.newTermSymbol(newTermName("typer$dummy"))
+ val dummyMethod = NoSymbol.newTermSymbol("typer$dummy") setInfo NullaryMethodType(AnyTpe)
+
def templateArgType(argtpe: Type) = new BoundedWildcardType(TypeBounds.lower(argtpe))
def apply(name: Name, argtpes: List[Type], restpe: Type): Type = {
@@ -317,7 +301,7 @@ trait Implicits {
*/
object Function1 {
val Sym = FunctionClass(1)
- def unapply(tp: Type) = tp match {
+ def unapply(tp: Type) = tp baseType Sym match {
case TypeRef(_, Sym, arg1 :: arg2 :: _) => Some((arg1, arg2))
case _ => None
}
@@ -347,7 +331,7 @@ trait Implicits {
def pos = if (pos0 != NoPosition) pos0 else tree.pos
def failure(what: Any, reason: String, pos: Position = this.pos): SearchResult = {
- if (settings.XlogImplicits.value)
+ if (settings.XlogImplicits)
reporter.echo(pos, what+" is not a valid implicit value for "+pt+" because:\n"+reason)
SearchFailure
}
@@ -360,7 +344,7 @@ trait Implicits {
(info2 == NoImplicitInfo) ||
(info1 != NoImplicitInfo) && {
if (info1.sym.isStatic && info2.sym.isStatic) {
- improvesCache get (info1, info2) match {
+ improvesCache get ((info1, info2)) match {
case Some(b) => if (Statistics.canEnable) Statistics.incCounter(improvesCachedCount); b
case None =>
val result = isStrictlyMoreSpecific(info1.tpe, info2.tpe, info1.sym, info2.sym)
@@ -388,7 +372,7 @@ trait Implicits {
* if one or both are intersection types with a pair of overlapping parent types.
*/
private def dominates(dtor: Type, dted: Type): Boolean = {
- def core(tp: Type): Type = tp.normalize match {
+ def core(tp: Type): Type = tp.dealiasWiden match {
case RefinedType(parents, defs) => intersectionType(parents map core, tp.typeSymbol.owner)
case AnnotatedType(annots, tp, selfsym) => core(tp)
case ExistentialType(tparams, result) => core(result).subst(tparams, tparams map (t => core(t.info.bounds.hi)))
@@ -403,11 +387,11 @@ trait Implicits {
deriveTypeWithWildcards(syms.distinct)(tp)
}
def sum(xs: List[Int]) = (0 /: xs)(_ + _)
- def complexity(tp: Type): Int = tp.normalize match {
+ def complexity(tp: Type): Int = tp.dealiasWiden match {
case NoPrefix =>
0
case SingleType(pre, sym) =>
- if (sym.isPackage) 0 else complexity(tp.normalize.widen)
+ if (sym.isPackage) 0 else complexity(tp.dealiasWiden)
case TypeRef(pre, sym, args) =>
complexity(pre) + sum(args map complexity) + 1
case RefinedType(parents, _) =>
@@ -458,45 +442,21 @@ trait Implicits {
(context.openImplicits find { case OpenImplicit(info, tp, tree1) => !info.sym.isMacro && tree1.symbol == tree.symbol && dominates(pt, tp)}) match {
case Some(pending) =>
//println("Pending implicit "+pending+" dominates "+pt+"/"+undetParams) //@MDEBUG
- if (settings.Xdivergence211.value) DivergentSearchFailure
- else throw DivergentImplicit
+ DivergentSearchFailure
case None =>
- def pre211DivergenceLogic() = {
try {
context.openImplicits = OpenImplicit(info, pt, tree) :: context.openImplicits
// println(" "*context.openImplicits.length+"typed implicit "+info+" for "+pt) //@MDEBUG
- typedImplicit0(info, ptChecked, isLocal)
- } catch {
- case ex: DivergentImplicit =>
+ val result = typedImplicit0(info, ptChecked, isLocal)
+ if (result.isDivergent) {
//println("DivergentImplicit for pt:"+ pt +", open implicits:"+context.openImplicits) //@MDEBUG
- if (context.openImplicits.tail.isEmpty) {
- if (!pt.isErroneous && !info.sym.isMacro)
- DivergingImplicitExpansionError(tree, pt, info.sym)(context)
- SearchFailure
- } else {
- throw DivergentImplicit
- }
+ if (context.openImplicits.tail.isEmpty && !pt.isErroneous)
+ DivergingImplicitExpansionError(tree, pt, info.sym)(context)
+ }
+ result
} finally {
context.openImplicits = context.openImplicits.tail
}
- }
- def post211DivergenceLogic() = {
- try {
- context.openImplicits = OpenImplicit(info, pt, tree) :: context.openImplicits
- // println(" "*context.openImplicits.length+"typed implicit "+info+" for "+pt) //@MDEBUG
- val result = typedImplicit0(info, ptChecked, isLocal)
- if (result.isDivergent) {
- //println("DivergentImplicit for pt:"+ pt +", open implicits:"+context.openImplicits) //@MDEBUG
- if (context.openImplicits.tail.isEmpty && !pt.isErroneous)
- DivergingImplicitExpansionError(tree, pt, info.sym)(context)
- }
- result
- } finally {
- context.openImplicits = context.openImplicits.tail
- }
- }
- if (settings.Xdivergence211.value) post211DivergenceLogic()
- else pre211DivergenceLogic()
}
}
@@ -512,10 +472,8 @@ trait Implicits {
val start = if (Statistics.canEnable) Statistics.startTimer(matchesPtNanos) else null
val result = normSubType(tp, pt) || isView && {
pt match {
- case TypeRef(_, Function1.Sym, arg1 :: arg2 :: Nil) =>
- matchesPtView(tp, arg1, arg2, undet)
- case _ =>
- false
+ case Function1(arg1, arg2) => matchesPtView(tp, arg1, arg2, undet)
+ case _ => false
}
}
if (Statistics.canEnable) Statistics.stopTimer(matchesPtNanos, start)
@@ -656,21 +614,23 @@ trait Implicits {
)
def fail(reason: String): SearchResult = failure(itree, reason)
+ def fallback = typed1(itree, EXPRmode, wildPt)
try {
- val itree1 =
- if (isView) {
- val arg1 :: arg2 :: _ = pt.typeArgs
+ val itree1 = if (!isView) fallback else pt match {
+ case Function1(arg1, arg2) =>
typed1(
atPos(itree.pos)(Apply(itree, List(Ident("<argument>") setType approximate(arg1)))),
EXPRmode,
approximate(arg2)
)
- }
- else
- typed1(itree, EXPRmode, wildPt)
-
- if (context.hasErrors)
- return fail(context.errBuffer.head.errMsg)
+ case _ => fallback
+ }
+ context.firstError match { // using match rather than foreach to avoid non local return.
+ case Some(err) =>
+ log("implicit adapt failed: " + err.errMsg)
+ return fail(err.errMsg)
+ case None =>
+ }
if (Statistics.canEnable) Statistics.incCounter(typedImplicits)
@@ -692,7 +652,7 @@ trait Implicits {
}
if (context.hasErrors)
- fail("hasMatchingSymbol reported error: " + context.errBuffer.head.errMsg)
+ fail("hasMatchingSymbol reported error: " + context.firstError.get.errMsg)
else if (isLocal && !hasMatchingSymbol(itree1))
fail("candidate implicit %s is shadowed by %s".format(
info.sym.fullLocationString, itree1.symbol.fullLocationString))
@@ -711,12 +671,15 @@ trait Implicits {
printTyping(ptLine("" + info.sym, "tvars" -> tvars, "tvars.constr" -> tvars.map(_.constr)))
val targs = solvedTypes(tvars, undetParams, undetParams map varianceInType(pt),
- false, lubDepth(List(itree2.tpe, pt)))
+ upper = false, lubDepth(List(itree2.tpe, pt)))
// #2421: check that we correctly instantiated type parameters outside of the implicit tree:
checkBounds(itree2, NoPrefix, NoSymbol, undetParams, targs, "inferred ")
- if (context.hasErrors)
- return fail("type parameters weren't correctly instantiated outside of the implicit tree: " + context.errBuffer.head.errMsg)
+ context.firstError match {
+ case Some(err) =>
+ return fail("type parameters weren't correctly instantiated outside of the implicit tree: " + err.errMsg)
+ case None =>
+ }
// filter out failures from type inference, don't want to remove them from undetParams!
// we must be conservative in leaving type params in undetparams
@@ -741,23 +704,24 @@ trait Implicits {
// duplicating the code here, but this is probably a
// hotspot (and you can't just call typed, need to force
// re-typecheck)
- // TODO: the return tree is ignored. This seems to make
- // no difference, but it's bad practice regardless.
-
-
- val checked = itree2 match {
+ //
+ // This is just called for the side effect of error detection,
+ // see SI-6966 to see what goes wrong if we use the result of this
+ // as the SearchResult.
+ itree2 match {
case TypeApply(fun, args) => typedTypeApply(itree2, EXPRmode, fun, args)
case Apply(TypeApply(fun, args), _) => typedTypeApply(itree2, EXPRmode, fun, args) // t2421c
case t => t
}
- if (context.hasErrors)
- fail("typing TypeApply reported errors for the implicit tree: " + context.errBuffer.head.errMsg)
- else {
- val result = new SearchResult(itree2, subst)
- if (Statistics.canEnable) Statistics.incCounter(foundImplicits)
- printInference("[success] found %s for pt %s".format(result, ptInstantiated))
- result
+ context.firstError match {
+ case Some(err) =>
+ fail("typing TypeApply reported errors for the implicit tree: " + err.errMsg)
+ case None =>
+ val result = new SearchResult(itree2, subst)
+ if (Statistics.canEnable) Statistics.incCounter(foundImplicits)
+ printInference("[success] found %s for pt %s".format(result, ptInstantiated))
+ result
}
}
else fail("incompatible: %s does not match expected type %s".format(itree2.tpe, ptInstantiated))
@@ -865,26 +829,6 @@ trait Implicits {
/** Preventing a divergent implicit from terminating implicit search,
* so that if there is a best candidate it can still be selected.
- *
- * The old way of handling divergence.
- * Only enabled when -Xdivergence211 is turned off.
- */
- private var divergence = false
- private val divergenceHandler: PartialFunction[Throwable, SearchResult] = {
- var remaining = 1;
- { case x: DivergentImplicit if remaining > 0 =>
- remaining -= 1
- divergence = true
- log("discarding divergent implicit during implicit search")
- SearchFailure
- }
- }
-
- /** Preventing a divergent implicit from terminating implicit search,
- * so that if there is a best candidate it can still be selected.
- *
- * The new way of handling divergence.
- * Only enabled when -Xdivergence211 is turned on.
*/
object DivergentImplicitRecovery {
// symbol of the implicit that caused the divergence.
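
DivergentImplicitRecovery deals with searches of the following shape, where each candidate application needs an implicit for a strictly larger type and the expansion would never terminate. A user-level sketch of such a divergence (hypothetical types, not from this patch):

object DivergenceSketch {
  case class Wrap[A](a: A)

  // To build a Wrap[A] this candidate first needs a Wrap[Wrap[A]], which in turn
  // needs a Wrap[Wrap[Wrap[A]]], and so on: the search diverges.
  implicit def unwrap[A](implicit w: Wrap[Wrap[A]]): Wrap[A] = Wrap(w.a.a)

  // implicitly[Wrap[Int]]   // error: diverging implicit expansion for type Wrap[Int]
}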
@@ -928,24 +872,15 @@ trait Implicits {
@tailrec private def rankImplicits(pending: Infos, acc: Infos): Infos = pending match {
case Nil => acc
case i :: is =>
- def pre211tryImplicitInfo(i: ImplicitInfo) =
- try typedImplicit(i, ptChecked = true, isLocal)
- catch divergenceHandler
-
- def post211tryImplicitInfo(i: ImplicitInfo) =
- DivergentImplicitRecovery(typedImplicit(i, ptChecked = true, isLocal), i)
-
- {
- if (settings.Xdivergence211.value) post211tryImplicitInfo(i)
- else pre211tryImplicitInfo(i)
- } match {
- // only used if -Xdivergence211 is turned on
+ DivergentImplicitRecovery(typedImplicit(i, ptChecked = true, isLocal), i) match {
case sr if sr.isDivergent =>
Nil
case sr if sr.isFailure =>
// We don't want errors that occur during checking implicit info
// to influence the check of further infos.
- context.condBufferFlush(_.kind != ErrorKinds.Divergent)
+ context.reportBuffer.retainErrors {
+ case err: DivergentImplicitTypeError => true
+ }
rankImplicits(is, acc)
case newBest =>
best = newBest
@@ -990,12 +925,11 @@ trait Implicits {
}
if (best.isFailure) {
- /** If there is no winner, and we witnessed and caught divergence,
- * now we can throw it for the error message.
+ /* If there is no winner, and we witnessed and caught divergence,
+ * now we can throw it for the error message.
*/
- if (divergence || DivergentImplicitRecovery.sym != null) {
- if (settings.Xdivergence211.value) DivergingImplicitExpansionError(tree, pt, DivergentImplicitRecovery.sym)(context)
- else throw DivergentImplicit
+ if (DivergentImplicitRecovery.sym != null) {
+ DivergingImplicitExpansionError(tree, pt, DivergentImplicitRecovery.sym)(context)
}
if (invalidImplicits.nonEmpty)
@@ -1053,8 +987,8 @@ trait Implicits {
*/
private def companionImplicitMap(tp: Type): InfoMap = {
- /** Populate implicit info map by traversing all parts of type `tp`.
- * Parameters as for `getParts`.
+ /* Populate implicit info map by traversing all parts of type `tp`.
+ * Parameters as for `getParts`.
*/
def getClassParts(tp: Type)(implicit infoMap: InfoMap, seen: mutable.Set[Type], pending: Set[Symbol]) = tp match {
case TypeRef(pre, sym, args) =>
@@ -1086,13 +1020,13 @@ trait Implicits {
}
}
- /** Populate implicit info map by traversing all parts of type `tp`.
- * This method is performance critical.
- * @param tp The type for which we want to traverse parts
- * @param infoMap The infoMap in which implicit infos corresponding to parts are stored
- * @param seen The types that were already visited previously when collecting parts for the given infoMap
- * @param pending The set of static symbols for which we are currently trying to collect their parts
- * in order to cache them in infoMapCache
+ /* Populate implicit info map by traversing all parts of type `tp`.
+ * This method is performance critical.
+ * @param tp The type for which we want to traverse parts
+ * @param infoMap The infoMap in which implicit infos corresponding to parts are stored
+ * @param seen The types that were already visited previously when collecting parts for the given infoMap
+ * @param pending The set of static symbols for which we are currently trying to collect their parts
+ * in order to cache them in infoMapCache
*/
def getParts(tp: Type)(implicit infoMap: InfoMap, seen: mutable.Set[Type], pending: Set[Symbol]) {
if (seen(tp))
@@ -1110,7 +1044,7 @@ trait Implicits {
case Some(imap) => imap
case None =>
val result = new InfoMap
- getClassParts(sym.tpe)(result, new mutable.HashSet(), pending + sym)
+ getClassParts(sym.tpeHK)(result, new mutable.HashSet(), pending + sym)
infoMapCache(sym) = result
result
}
@@ -1204,8 +1138,10 @@ trait Implicits {
try {
val tree1 = typedPos(pos.focus)(arg)
- if (context.hasErrors) processMacroExpansionError(context.errBuffer.head.errPos, context.errBuffer.head.errMsg)
- else new SearchResult(tree1, EmptyTreeTypeSubstituter)
+ context.firstError match {
+ case Some(err) => processMacroExpansionError(err.errPos, err.errMsg)
+ case None => new SearchResult(tree1, EmptyTreeTypeSubstituter)
+ }
} catch {
case ex: TypeError =>
processMacroExpansionError(ex.pos, ex.msg)
@@ -1222,7 +1158,7 @@ trait Implicits {
case ThisType(thisSym) =>
gen.mkAttributedThis(thisSym)
case _ =>
- // if ``pre'' is not a PDT, e.g. if someone wrote
+ // if `pre` is not a PDT, e.g. if someone wrote
// implicitly[scala.reflect.macros.Context#TypeTag[Int]]
// then we need to fail, because we don't know the prefix to use during type reification
// upd. we also need to fail silently, because this is a very common situation
@@ -1236,8 +1172,8 @@ trait Implicits {
}
)
// todo. migrate hardcoded materialization in Implicits to corresponding implicit macros
- var materializer = atPos(pos.focus)(gen.mkMethodCall(TagMaterializers(tagClass), List(tp), if (prefix != EmptyTree) List(prefix) else List()))
- if (settings.XlogImplicits.value) reporter.echo(pos, "materializing requested %s.%s[%s] using %s".format(pre, tagClass.name, tp, materializer))
+ val materializer = atPos(pos.focus)(gen.mkMethodCall(TagMaterializers(tagClass), List(tp), if (prefix != EmptyTree) List(prefix) else List()))
+ if (settings.XlogImplicits) reporter.echo(pos, "materializing requested %s.%s[%s] using %s".format(pre, tagClass.name, tp, materializer))
if (context.macrosEnabled) success(materializer)
// don't call `failure` here. if macros are disabled, we just fail silently
// otherwise -Xlog-implicits will spam the log with zillions of "macros are disabled"
@@ -1255,23 +1191,23 @@ trait Implicits {
val full = flavor == FullManifestClass
val opt = flavor == OptManifestClass
- /** Creates a tree that calls the factory method called constructor in object scala.reflect.Manifest */
+ /* Creates a tree that calls the factory method called constructor in object scala.reflect.Manifest */
def manifestFactoryCall(constructor: String, tparg: Type, args: Tree*): Tree =
if (args contains EmptyTree) EmptyTree
else typedPos(tree.pos.focus) {
val mani = gen.mkManifestFactoryCall(full, constructor, tparg, args.toList)
- if (settings.debug.value) println("generated manifest: "+mani) // DEBUG
+ if (settings.debug) println("generated manifest: "+mani) // DEBUG
mani
}
- /** Creates a tree representing one of the singleton manifests.*/
+ /* Creates a tree representing one of the singleton manifests.*/
def findSingletonManifest(name: String) = typedPos(tree.pos.focus) {
Select(gen.mkAttributedRef(FullManifestModule), name)
}
- /** Re-wraps a type in a manifest before calling inferImplicit on the result */
+ /* Re-wraps a type in a manifest before calling inferImplicit on the result */
def findManifest(tp: Type, manifestClass: Symbol = if (full) FullManifestClass else PartialManifestClass) =
- inferImplicit(tree, appliedType(manifestClass, tp), true, false, context).tree
+ inferImplicit(tree, appliedType(manifestClass, tp), reportAmbiguous = true, isView = false, context).tree
def findSubManifest(tp: Type) = findManifest(tp, if (full) FullManifestClass else OptManifestClass)
def mot(tp0: Type, from: List[Symbol], to: List[Type]): SearchResult = {
@@ -1313,8 +1249,8 @@ trait Implicits {
// looking for a manifest of a type parameter that hasn't been inferred by now,
// can't do much, but let's not fail
else if (undetParams contains sym) {
- // #3859: need to include the mapping from sym -> NothingClass.tpe in the SearchResult
- mot(NothingClass.tpe, sym :: from, NothingClass.tpe :: to)
+ // #3859: need to include the mapping from sym -> NothingTpe in the SearchResult
+ mot(NothingTpe, sym :: from, NothingTpe :: to)
} else {
// a manifest should have been found by normal searchImplicit
EmptyTree
@@ -1406,7 +1342,7 @@ trait Implicits {
val failstart = if (Statistics.canEnable) Statistics.startTimer(inscopeFailNanos) else null
val succstart = if (Statistics.canEnable) Statistics.startTimer(inscopeSucceedNanos) else null
- var result = searchImplicit(context.implicitss, true)
+ var result = searchImplicit(context.implicitss, isLocal = true)
if (result.isFailure) {
if (Statistics.canEnable) Statistics.stopTimer(inscopeFailNanos, failstart)
@@ -1424,23 +1360,18 @@ trait Implicits {
// `materializeImplicit` does some preprocessing for `pt`
// is it only meant for manifests/tags or we need to do the same for `implicitsOfExpectedType`?
- if (result.isFailure) result = searchImplicit(implicitsOfExpectedType, false)
+ if (result.isFailure && !wasAmbigious) result = searchImplicit(implicitsOfExpectedType, isLocal = false)
if (result.isFailure) {
context.updateBuffer(previousErrs)
if (Statistics.canEnable) Statistics.stopTimer(oftypeFailNanos, failstart)
} else {
- if (wasAmbigious && settings.lint.value)
- reporter.warning(tree.pos,
- "Search of in-scope implicits was ambiguous, and the implicit scope was searched. In Scala 2.11.0, this code will not compile. See SI-6667. \n" +
- previousErrs.map(_.errMsg).mkString("\n"))
-
if (Statistics.canEnable) Statistics.stopTimer(oftypeSucceedNanos, succstart)
if (Statistics.canEnable) Statistics.incCounter(oftypeImplicitHits)
}
}
- if (result.isFailure && settings.debug.value)
+ if (result.isFailure && settings.debug)
log("no implicits found for "+pt+" "+pt.typeSymbol.info.baseClasses+" "+implicitsOfExpectedType)
result
@@ -1448,7 +1379,7 @@ trait Implicits {
def allImplicits: List[SearchResult] = {
def search(iss: Infoss, isLocal: Boolean) = applicableInfos(iss, isLocal).values
- (search(context.implicitss, true) ++ search(implicitsOfExpectedType, false)).toList.filter(_.tree ne EmptyTree)
+ (search(context.implicitss, isLocal = true) ++ search(implicitsOfExpectedType, isLocal = false)).toList.filter(_.tree ne EmptyTree)
}
// find all implicits for some type that contains type variables
@@ -1509,7 +1440,6 @@ trait Implicits {
interpolate(msg, Map((typeParamNames zip typeArgs): _*)) // TODO: give access to the name and type of the implicit argument, etc?
def validate: Option[String] = {
- import scala.util.matching.Regex; import scala.collection.breakOut
// is there a shorter way to avoid the intermediate toList?
val refs = """\$\{([^}]+)\}""".r.findAllIn(msg).matchData.map(_ group 1).toSet
val decls = typeParamNames.toSet
@@ -1535,9 +1465,7 @@ object ImplicitsStats {
val subtypeImpl = Statistics.newSubCounter(" of which in implicit", subtypeCount)
val findMemberImpl = Statistics.newSubCounter(" of which in implicit", findMemberCount)
val subtypeAppInfos = Statistics.newSubCounter(" of which in app impl", subtypeCount)
- val subtypeImprovCount = Statistics.newSubCounter(" of which in improves", subtypeCount)
val implicitSearchCount = Statistics.newCounter ("#implicit searches", "typer")
- val triedImplicits = Statistics.newSubCounter(" #tried", implicitSearchCount)
val plausiblyCompatibleImplicits
= Statistics.newSubCounter(" #plausibly compatible", implicitSearchCount)
val matchingImplicits = Statistics.newSubCounter(" #matching", implicitSearchCount)
@@ -1557,7 +1485,3 @@ object ImplicitsStats {
val implicitCacheAccs = Statistics.newCounter ("implicit cache accesses", "typer")
val implicitCacheHits = Statistics.newSubCounter("implicit cache hits", implicitCacheAccs)
}
-
-// only used when -Xdivergence211 is turned off
-class DivergentImplicit extends Exception
-object DivergentImplicit extends DivergentImplicit
diff --git a/src/compiler/scala/tools/nsc/typechecker/Infer.scala b/src/compiler/scala/tools/nsc/typechecker/Infer.scala
index 55e0a954f0..961ef484d8 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Infer.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Infer.scala
@@ -7,12 +7,10 @@ package scala.tools.nsc
package typechecker
import scala.collection.{ mutable, immutable }
-import scala.collection.mutable.ListBuffer
import scala.util.control.ControlThrowable
import symtab.Flags._
-import scala.annotation.tailrec
-/** This trait ...
+/** This trait contains methods related to type parameter inference.
*
* @author Martin Odersky
* @version 1.0
@@ -25,13 +23,8 @@ trait Infer extends Checkable {
import typer.printInference
import typeDebug.ptBlock
-/* -- Type parameter inference utility functions --------------------------- */
-
- private def assertNonCyclic(tvar: TypeVar) =
- assert(tvar.constr.inst != tvar, tvar.origin)
-
- /** The formal parameter types corresponding to <code>formals</code>.
- * If <code>formals</code> has a repeated last parameter, a list of
+ /** The formal parameter types corresponding to `formals`.
+ * If `formals` has a repeated last parameter, a list of
* (nargs - params.length + 1) copies of its type is returned.
* By-name types are replaced with their underlying type.
*
@@ -39,16 +32,41 @@ trait Infer extends Checkable {
* @param removeRepeated allows keeping repeated parameter (if there's one argument). Used in NamesDefaults.
*/
def formalTypes(formals: List[Type], nargs: Int, removeByName: Boolean = true, removeRepeated: Boolean = true): List[Type] = {
- val formals1 = if (removeByName) formals mapConserve {
- case TypeRef(_, ByNameParamClass, List(arg)) => arg
- case formal => formal
- } else formals
+ val formals1 = if (removeByName) formals mapConserve dropByName else formals
if (isVarArgTypes(formals1) && (removeRepeated || formals.length != nargs)) {
val ft = formals1.last.dealiasWiden.typeArgs.head
formals1.init ::: (for (i <- List.range(formals1.length - 1, nargs)) yield ft)
} else formals1
}
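
For a repeated last parameter, the doc comment above says the element type is copied (nargs - params.length + 1) times. A standalone sketch of that expansion over plain strings, purely illustrative (the real method works on Types, not strings):

object FormalTypesSketch extends App {
  // e.g. a method (x: Int, ys: String*) has formals List("Int", "String*")
  def formalTypes(formals: List[String], nargs: Int): List[String] = formals match {
    case init :+ last if last.endsWith("*") && nargs >= init.length =>
      init ++ List.fill(nargs - init.length)(last.dropRight(1))
    case _ => formals
  }

  println(formalTypes(List("Int", "String*"), 4))   // List(Int, String, String, String)
  println(formalTypes(List("Int", "String*"), 1))   // List(Int) -- zero varargs supplied
}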
+ /** Sorts the alternatives according to the given comparison function.
+ * Returns a list containing the best alternative as well as any which
+ * the best fails to improve upon.
+ */
+ private def bestAlternatives(alternatives: List[Symbol])(isBetter: (Symbol, Symbol) => Boolean): List[Symbol] = {
+ def improves(sym1: Symbol, sym2: Symbol) = (
+ (sym2 eq NoSymbol)
+ || sym2.isError
+ || (sym2 hasAnnotation BridgeClass)
+ || isBetter(sym1, sym2)
+ )
+
+ alternatives sortWith improves match {
+ case best :: rest if rest.nonEmpty => best :: rest.filterNot(alt => improves(best, alt))
+ case bests => bests
+ }
+ }
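
The shape of bestAlternatives above (sort by the improvement relation, then keep the best plus anything it fails to improve upon) can be exercised on plain values. An illustrative standalone sketch, ignoring the NoSymbol/error/bridge cases the real improves check also handles:

object BestAlternativesSketch extends App {
  def bestAlternatives[A](alternatives: List[A])(isBetter: (A, A) => Boolean): List[A] =
    alternatives.sortWith(isBetter) match {
      case best :: rest if rest.nonEmpty => best :: rest.filterNot(alt => isBetter(best, alt))
      case bests                         => bests
    }

  // "better" = strictly greater: 9 improves on every other element, so it is the sole survivor
  println(bestAlternatives(List(3, 9, 1))(_ > _))   // List(9)
  // a tie the best cannot improve upon is kept, which is how overload ambiguity surfaces
  println(bestAlternatives(List(3, 9, 9))(_ > _))   // List(9, 9)
}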
+
+ // we must not allow CyclicReference to be thrown when sym.info is called
+ // in checkAccessible, because that would mark the symbol erroneous, which it
+ // is not. But if it's a true CyclicReference then macro def will report it.
+ // See comments to TypeSigError for an explanation of this special case.
+ // [Eugene] is there a better way?
+ private object CheckAccessibleMacroCycle extends TypeCompleter {
+ val tree = EmptyTree
+ override def complete(sym: Symbol) = ()
+ }
+
/** Returns `(formals, formalsExpanded)` where `formalsExpanded` are the expected types
* for the `nbSubPats` sub-patterns of an extractor pattern, of which the corresponding
* unapply[Seq] call is assumed to have result type `resTp`.
@@ -119,7 +137,7 @@ trait Infer extends Checkable {
else if (optionArgs.nonEmpty)
if (nbSubPats == 1) {
val productArity = productArgs.size
- if (settings.lint.value && productArity > 1 && productArity != effectiveNbSubPats)
+ if (productArity > 1 && productArity != effectiveNbSubPats && settings.lint)
global.currentUnit.warning(pos,
s"extractor pattern binds a single value to a Product${productArity} of type ${optionArgs.head}")
optionArgs
@@ -139,21 +157,7 @@ trait Infer extends Checkable {
else (formals, formalsExpanded)
}
- def actualTypes(actuals: List[Type], nformals: Int): List[Type] =
- if (nformals == 1 && !hasLength(actuals, 1))
- List(if (actuals.isEmpty) UnitClass.tpe else tupleType(actuals))
- else actuals
-
- def actualArgs(pos: Position, actuals: List[Tree], nformals: Int): List[Tree] = {
- val inRange = nformals == 1 && !hasLength(actuals, 1) && actuals.lengthCompare(MaxTupleArity) <= 0
- if (inRange && !phase.erasedTypes) List(atPos(pos)(gen.mkTuple(actuals)))
- else actuals
- }
-
/** A fresh type variable with given type parameter as origin.
- *
- * @param tparam ...
- * @return ...
*/
def freshVar(tparam: Symbol): TypeVar = TypeVar(tparam)
@@ -170,50 +174,34 @@ trait Infer extends Checkable {
*/
object instantiate extends TypeMap {
private var excludedVars = immutable.Set[TypeVar]()
+ private def applyTypeVar(tv: TypeVar): Type = tv match {
+ case TypeVar(origin, constr) if !constr.instValid => throw new DeferredNoInstance(() => s"no unique instantiation of type variable $origin could be found")
+ case _ if excludedVars(tv) => throw new NoInstance("cyclic instantiation")
+ case TypeVar(_, constr) =>
+ excludedVars += tv
+ try apply(constr.inst)
+ finally excludedVars -= tv
+ }
def apply(tp: Type): Type = tp match {
- case WildcardType | BoundedWildcardType(_) | NoType =>
- throw new NoInstance("undetermined type")
- case tv @ TypeVar(origin, constr) if !tv.untouchable =>
- if (constr.inst == NoType) {
- throw new DeferredNoInstance(() =>
- "no unique instantiation of type variable " + origin + " could be found")
- } else if (excludedVars(tv)) {
- throw new NoInstance("cyclic instantiation")
- } else {
- excludedVars += tv
- val res = apply(constr.inst)
- excludedVars -= tv
- res
- }
- case _ =>
- mapOver(tp)
+ case WildcardType | BoundedWildcardType(_) | NoType => throw new NoInstance("undetermined type")
+ case tv: TypeVar if !tv.untouchable => applyTypeVar(tv)
+ case _ => mapOver(tp)
}
}
+ @inline final def falseIfNoInstance(body: => Boolean): Boolean =
+ try body catch { case _: NoInstance => false }
+
/** Is type fully defined, i.e. no embedded anytypes or wildcards in it?
- *
- * @param tp ...
- * @return ...
*/
private[typechecker] def isFullyDefined(tp: Type): Boolean = tp match {
- case WildcardType | BoundedWildcardType(_) | NoType =>
- false
- case NoPrefix | ThisType(_) | ConstantType(_) =>
- true
- case TypeRef(pre, sym, args) =>
- isFullyDefined(pre) && (args forall isFullyDefined)
- case SingleType(pre, sym) =>
- isFullyDefined(pre)
- case RefinedType(ts, decls) =>
- ts forall isFullyDefined
- case TypeVar(origin, constr) if (constr.inst == NoType) =>
- false
- case _ =>
- try {
- instantiate(tp); true
- } catch {
- case ex: NoInstance => false
- }
+ case WildcardType | BoundedWildcardType(_) | NoType => false
+ case NoPrefix | ThisType(_) | ConstantType(_) => true
+ case TypeRef(pre, _, args) => isFullyDefined(pre) && (args forall isFullyDefined)
+ case SingleType(pre, _) => isFullyDefined(pre)
+ case RefinedType(ts, _) => ts forall isFullyDefined
+ case TypeVar(_, constr) if constr.inst == NoType => false
+ case _ => falseIfNoInstance({ instantiate(tp) ; true })
}
/** Solve constraint collected in types `tvars`.
@@ -226,7 +214,7 @@ trait Infer extends Checkable {
* @throws NoInstance
*/
def solvedTypes(tvars: List[TypeVar], tparams: List[Symbol],
- variances: List[Int], upper: Boolean, depth: Int): List[Type] = {
+ variances: List[Variance], upper: Boolean, depth: Int): List[Type] = {
if (tvars.nonEmpty)
printInference("[solve types] solving for " + tparams.map(_.name).mkString(", ") + " in " + tvars.mkString(", "))
@@ -265,16 +253,12 @@ trait Infer extends Checkable {
* This method seems to be performance critical.
*/
def normalize(tp: Type): Type = tp match {
- case mt @ MethodType(params, restpe) if mt.isImplicit =>
- normalize(restpe)
- case mt @ MethodType(_, restpe) if !mt.isDependentMethodType =>
- functionType(mt.paramTypes, normalize(restpe))
- case NullaryMethodType(restpe) =>
- normalize(restpe)
- case ExistentialType(tparams, qtpe) =>
- newExistentialType(tparams, normalize(qtpe))
- case tp1 =>
- tp1 // @MAT aliases already handled by subtyping
+ case PolyType(_, restpe) => logResult(s"Normalizing $tp in infer")(normalize(restpe))
+ case mt @ MethodType(_, restpe) if mt.isImplicit => normalize(restpe)
+ case mt @ MethodType(_, restpe) if !mt.isDependentMethodType => functionType(mt.paramTypes, normalize(restpe))
+ case NullaryMethodType(restpe) => normalize(restpe)
+ case ExistentialType(tparams, qtpe) => newExistentialType(tparams, normalize(qtpe))
+ case _ => tp // @MAT aliases already handled by subtyping
}
private lazy val stdErrorClass = rootMirror.RootClass.newErrorClass(tpnme.ERROR)
@@ -286,12 +270,8 @@ trait Infer extends Checkable {
/* -- Error Messages --------------------------------------------------- */
def setError[T <: Tree](tree: T): T = {
- debuglog("set error: "+ tree)
- // this breaks -Ydebug pretty radically
- // if (settings.debug.value) { // DEBUG
- // println("set error: "+tree);
- // throw new Error()
- // }
+ // SI-7388, one can incur a cycle calling sym.toString
+ // (but it'd be nicer if that weren't so)
def name = {
val sym = tree.symbol
val nameStr = try sym.toString catch { case _: CyclicReference => sym.nameString }
@@ -301,7 +281,7 @@ trait Infer extends Checkable {
def errorValue = if (context.reportErrors) context.owner.newErrorValue(name) else stdErrorValue
def errorSym = if (tree.isType) errorClass else errorValue
- if (tree.hasSymbol)
+ if (tree.hasSymbolField)
tree setSymbol errorSym
tree setType ErrorType
@@ -311,102 +291,87 @@ trait Infer extends Checkable {
def issue(err: AbsTypeError): Unit = context.issue(err)
- def isPossiblyMissingArgs(found: Type, req: Type) = (
- false
- /** However it is that this condition is expected to imply
- * "is possibly missing args", it is too weak. It is
- * better to say nothing than to offer misleading guesses.
+ def explainTypes(tp1: Type, tp2: Type) = {
+ if (context.reportErrors)
+ withDisambiguation(List(), tp1, tp2)(global.explainTypes(tp1, tp2))
+ }
- (found.resultApprox ne found)
- && isWeaklyCompatible(found.resultApprox, req)
- */
- )
+ // Issued when filtering sym down to the accessible alternatives leaves us empty-handed.
+ private def checkAccessibleError(tree: Tree, sym: Symbol, pre: Type, site: Tree): Tree = {
+ if (settings.debug) {
+ Console.println(context)
+ Console.println(tree)
+ Console.println("" + pre + " " + sym.owner + " " + context.owner + " " + context.outer.enclClass.owner + " " + sym.owner.thisType + (pre =:= sym.owner.thisType))
+ }
+ ErrorUtils.issueTypeError(AccessError(tree, sym, pre, context.enclClass.owner,
+ if (settings.check.isDefault)
+ analyzer.lastAccessCheckDetails
+ else
+ ptBlock("because of an internal error (no accessible symbol)",
+ "sym.ownerChain" -> sym.ownerChain,
+ "underlyingSymbol(sym)" -> underlyingSymbol(sym),
+ "pre" -> pre,
+ "site" -> site,
+ "tree" -> tree,
+ "sym.accessBoundary(sym.owner)" -> sym.accessBoundary(sym.owner),
+ "context.owner" -> context.owner,
+ "context.outer.enclClass.owner" -> context.outer.enclClass.owner
+ )
+ ))(context)
- def explainTypes(tp1: Type, tp2: Type) =
- withDisambiguation(List(), tp1, tp2)(global.explainTypes(tp1, tp2))
+ setError(tree)
+ }
/* -- Tests & Checks---------------------------------------------------- */
- /** Check that <code>sym</code> is defined and accessible as a member of
- * tree <code>site</code> with type <code>pre</code> in current context.
+ /** Check that `sym` is defined and accessible as a member of
+ * tree `site` with type `pre` in current context.
+ * @PP: In case it's not abundantly obvious to anyone who might read
+ * this, the method does a lot more than "check" these things, as does
+ * nearly every method in the compiler, so don't act all shocked.
+ * This particular example "checks" its way to assigning both the
+ * symbol and type of the incoming tree, in addition to forcing lots
+ * of symbol infos on its way to transforming java raw types (but
+ * only of terms - why?)
*
* Note: pre is not refchecked -- moreover, refchecking the resulting tree may not refcheck pre,
* since pre may not occur in its type (callers should wrap the result in a TypeTreeWithDeferredRefCheck)
*/
- def checkAccessible(tree: Tree, sym: Symbol, pre: Type, site: Tree): Tree =
- if (sym.isError) {
- tree setSymbol sym setType ErrorType
- } else {
- val topClass = context.owner.enclosingTopLevelClass
- if (context.unit.exists)
- context.unit.depends += sym.enclosingTopLevelClass
-
- var sym1 = sym filter (alt => context.isAccessible(alt, pre, site.isInstanceOf[Super]))
- // Console.println("check acc " + (sym, sym1) + ":" + (sym.tpe, sym1.tpe) + " from " + pre);//DEBUG
- if (sym1 == NoSymbol && sym.isJavaDefined && context.unit.isJava) // don't try to second guess Java; see #4402
- sym1 = sym
-
- if (sym1 == NoSymbol) {
- if (settings.debug.value) {
- Console.println(context)
- Console.println(tree)
- Console.println("" + pre + " " + sym.owner + " " + context.owner + " " + context.outer.enclClass.owner + " " + sym.owner.thisType + (pre =:= sym.owner.thisType))
- }
- ErrorUtils.issueTypeError(AccessError(tree, sym, pre, context.enclClass.owner,
- if (settings.check.isDefault)
- analyzer.lastAccessCheckDetails
- else
- ptBlock("because of an internal error (no accessible symbol)",
- "sym.ownerChain" -> sym.ownerChain,
- "underlyingSymbol(sym)" -> underlyingSymbol(sym),
- "pre" -> pre,
- "site" -> site,
- "tree" -> tree,
- "sym.accessBoundary(sym.owner)" -> sym.accessBoundary(sym.owner),
- "context.owner" -> context.owner,
- "context.outer.enclClass.owner" -> context.outer.enclClass.owner
- )
- ))(context)
- setError(tree)
- }
- else {
- if (context.owner.isTermMacro && (sym1 hasFlag LOCKED)) {
- // we must not let CyclicReference to be thrown from sym1.info
- // because that would mark sym1 erroneous, which it is not
- // but if it's a true CyclicReference then macro def will report it
- // see comments to TypeSigError for an explanation of this special case
- // [Eugene] is there a better way?
- val dummy = new TypeCompleter { val tree = EmptyTree; override def complete(sym: Symbol) {} }
- throw CyclicReference(sym1, dummy)
- }
+ def checkAccessible(tree: Tree, sym: Symbol, pre: Type, site: Tree): Tree = {
+ def malformed(ex: MalformedType, instance: Type): Type = {
+ val what = if (ex.msg contains "malformed type") "is malformed" else s"contains a ${ex.msg}"
+ val message = s"\n because its instance type $instance $what"
+ val error = AccessError(tree, sym, pre, context.enclClass.owner, message)
+ ErrorUtils.issueTypeError(error)(context)
+ ErrorType
+ }
+ def accessible = sym filter (alt => context.isAccessible(alt, pre, site.isInstanceOf[Super])) match {
+ case NoSymbol if sym.isJavaDefined && context.unit.isJava => sym // don't try to second guess Java; see #4402
+ case sym1 => sym1
+ }
+ // XXX So... what's this for exactly?
+ if (context.unit.exists)
+ context.unit.depends += sym.enclosingTopLevelClass
- if (sym1.isTerm)
- sym1.cookJavaRawInfo() // xform java rawtypes into existentials
-
- val owntype = {
- try pre.memberType(sym1)
- catch {
- case ex: MalformedType =>
- if (settings.debug.value) ex.printStackTrace
- val sym2 = underlyingSymbol(sym1)
- val itype = pre.memberType(sym2)
- ErrorUtils.issueTypeError(
- AccessError(tree, sym, pre, context.enclClass.owner,
- "\n because its instance type "+itype+
- (if ("malformed type: "+itype.toString==ex.msg) " is malformed"
- else " contains a "+ex.msg)))(context)
- ErrorType
- }
- }
- tree setSymbol sym1 setType {
+ if (sym.isError)
+ tree setSymbol sym setType ErrorType
+ else accessible match {
+ case NoSymbol => checkAccessibleError(tree, sym, pre, site)
+ case sym if context.owner.isTermMacro && (sym hasFlag LOCKED) => throw CyclicReference(sym, CheckAccessibleMacroCycle)
+ case sym =>
+ val sym1 = if (sym.isTerm) sym.cookJavaRawInfo() else sym // xform java rawtypes into existentials
+ val owntype = (
+ try pre memberType sym1
+ catch { case ex: MalformedType => malformed(ex, pre memberType underlyingSymbol(sym)) }
+ )
+ tree setSymbol sym1 setType (
pre match {
case _: SuperType => owntype map (tp => if (tp eq pre) site.symbol.thisType else tp)
case _ => owntype
}
- }
- }
+ )
}
-
+ }
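// Illustrative aside (hypothetical user code, not from the patch): the kind of failure
// the accessibility check above reports.
class HasSecret { private def secret: Int = 42 }
// (new HasSecret).secret   // rejected: `secret` is not accessible from this context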
/** "Compatible" means conforming after conversions.
* "Raising to a thunk" is not implicit; therefore, for purposes of applicability and
@@ -417,45 +382,38 @@ trait Infer extends Checkable {
* since that induces a tie between m(=>A) and m(=>A,B*) [SI-3761]
*/
private def isCompatible(tp: Type, pt: Type): Boolean = {
- def isCompatibleByName(tp: Type, pt: Type): Boolean = pt match {
- case TypeRef(_, ByNameParamClass, List(res)) if !isByNameParamType(tp) => isCompatible(tp, res)
- case _ => false
- }
+ def isCompatibleByName(tp: Type, pt: Type): Boolean = (
+ isByNameParamType(pt)
+ && !isByNameParamType(tp)
+ && isCompatible(tp, dropByName(pt))
+ )
val tp1 = normalize(tp)
- (tp1 weak_<:< pt) || isCoercible(tp1, pt) || isCompatibleByName(tp, pt)
+
+ ( (tp1 weak_<:< pt)
+ || isCoercible(tp1, pt)
+ || isCompatibleByName(tp, pt)
+ )
}
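// Illustrative aside (hypothetical user code, not from the patch): by-name compatibility
// as seen from the call site. An ordinary value of type A is compatible with a `=> A` parameter.
def twice(x: => Int): Int = x + x
val six = twice(1 + 2)   // Int expression accepted where => Int is expected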
- def isCompatibleArgs(tps: List[Type], pts: List[Type]) =
- (tps corresponds pts)(isCompatible)
+ def isCompatibleArgs(tps: List[Type], pts: List[Type]) = (tps corresponds pts)(isCompatible)
- def isWeaklyCompatible(tp: Type, pt: Type): Boolean =
- pt.typeSymbol == UnitClass || // can perform unit coercion
- isCompatible(tp, pt) ||
- tp.isInstanceOf[MethodType] && // can perform implicit () instantiation
- tp.params.isEmpty && isCompatible(tp.resultType, pt)
+ def isWeaklyCompatible(tp: Type, pt: Type): Boolean = {
+ def isCompatibleNoParamsMethod = tp match {
+ case MethodType(Nil, restpe) => isCompatible(restpe, pt)
+ case _ => false
+ }
+ ( pt.typeSymbol == UnitClass // can perform unit coercion
+ || isCompatible(tp, pt)
+ || isCompatibleNoParamsMethod // can perform implicit () instantiation
+ )
+ }
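// Illustrative aside (hypothetical user code, not from the patch): adaptations that the
// compatibility checks above are meant to admit.
val d: Double = 1        // weak conformance: Int is weakly compatible with Double
def answer() = 42
val a: Int = answer      // implicit () instantiation of an empty-parameter-list method
def discard: Unit = 42   // unit coercion: the value is discarded for an expected Unit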
- /** Like weakly compatible but don't apply any implicit conversions yet.
+ /* Like weakly compatible but don't apply any implicit conversions yet.
* Used when comparing the result type of a method with its prototype.
- *
- * [Martin] I think Infer is also created by Erasure, with the default
- * implementation of isCoercible
- * [Paulp] (Assuming the above must refer to my comment on isCoercible)
- * Nope, I examined every occurrence of Inferencer in trunk. It
- * appears twice as a self-type, once at its definition, and once
- * where it is instantiated in Typers. There are no others.
- *
- % ack -A0 -B0 --no-filename '\bInferencer\b' src
- self: Inferencer =>
- self: Inferencer =>
- class Inferencer(context: Context) extends InferencerContextErrors with InferCheckable {
- val infer = new Inferencer(context0) {
*/
def isConservativelyCompatible(tp: Type, pt: Type): Boolean =
context.withImplicitsDisabled(isWeaklyCompatible(tp, pt))
- /** This is overridden in the Typer.infer with some logic, but since
- * that's the only place in the compiler an Inferencer is ever created,
- * I suggest this should either be abstract or have the implementation.
- */
+ // Overridden at the point of instantiation, where inferView is visible.
def isCoercible(tp: Type, pt: Type): Boolean = false
/* -- Type instantiation------------------------------------------------ */
@@ -464,112 +422,99 @@ trait Infer extends Checkable {
* by existentially bound variables.
*/
def makeFullyDefined(tp: Type): Type = {
- val tparams = new ListBuffer[Symbol]
+ var tparams: List[Symbol] = Nil
def addTypeParam(bounds: TypeBounds): Type = {
val tparam = context.owner.newExistential(newTypeName("_"+tparams.size), context.tree.pos.focus) setInfo bounds
- tparams += tparam
+ tparams ::= tparam
tparam.tpe
}
val tp1 = tp map {
- case WildcardType =>
- addTypeParam(TypeBounds.empty)
- case BoundedWildcardType(bounds) =>
- addTypeParam(bounds)
- case t => t
+ case WildcardType => addTypeParam(TypeBounds.empty)
+ case BoundedWildcardType(bounds) => addTypeParam(bounds)
+ case t => t
}
- existentialAbstraction(tparams.toList, tp1)
+ if (tp eq tp1) tp
+ else existentialAbstraction(tparams.reverse, tp1)
}
+ def ensureFullyDefined(tp: Type): Type = if (isFullyDefined(tp)) tp else makeFullyDefined(tp)
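// Illustrative aside (hypothetical user code, not from the patch): in source syntax, the
// completion performed here corresponds to reading a wildcard as a fresh existential.
val m: Map[_, Int]                    = Map("a" -> 1)
val n: Map[x, Int] forSome { type x } = m   // the two annotations spell the same type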
/** Return inferred type arguments of polymorphic expression, given
- * its type parameters and result type and a prototype <code>pt</code>.
- * If no minimal type variables exist that make the
- * instantiated type a subtype of <code>pt</code>, return null.
- *
- * @param tparams ...
- * @param restpe ...
- * @param pt ...
- * @return ...
+ * type vars, its type parameters and result type and a prototype `pt`.
+ * If the type variables cannot be instantiated such that the type
+ * conforms to `pt`, return null.
*/
- private def exprTypeArgs(tparams: List[Symbol], restpe: Type, pt: Type, useWeaklyCompatible: Boolean = false): (List[Type], List[TypeVar]) = {
- val tvars = tparams map freshVar
- val instResTp = restpe.instantiateTypeParams(tparams, tvars)
- if ( if (useWeaklyCompatible) isWeaklyCompatible(instResTp, pt) else isCompatible(instResTp, pt) ) {
- try {
- // If the restpe is an implicit method, and the expected type is fully defined
- // optimize type variables wrt to the implicit formals only; ignore the result type.
- // See test pos/jesper.scala
- val varianceType = restpe match {
- case mt: MethodType if mt.isImplicit && isFullyDefined(pt) =>
- MethodType(mt.params, AnyClass.tpe)
- case _ =>
- restpe
- }
- //println("try to solve "+tvars+" "+tparams)
- (solvedTypes(tvars, tparams, tparams map varianceInType(varianceType),
- false, lubDepth(List(restpe, pt))), tvars)
- } catch {
- case ex: NoInstance => (null, null)
- }
- } else (null, null)
+ private def exprTypeArgs(tvars: List[TypeVar], tparams: List[Symbol], restpe: Type, pt: Type, useWeaklyCompatible: Boolean): List[Type] = {
+ def restpeInst = restpe.instantiateTypeParams(tparams, tvars)
+ def conforms = if (useWeaklyCompatible) isWeaklyCompatible(restpeInst, pt) else isCompatible(restpeInst, pt)
+ // If the restpe is an implicit method, and the expected type is fully defined
+ // optimize type variables wrt to the implicit formals only; ignore the result type.
+ // See test pos/jesper.scala
+ def variance = restpe match {
+ case mt: MethodType if mt.isImplicit && isFullyDefined(pt) => MethodType(mt.params, AnyTpe)
+ case _ => restpe
+ }
+ def solve() = solvedTypes(tvars, tparams, tparams map varianceInType(variance), upper = false, lubDepth(restpe :: pt :: Nil))
+
+ if (conforms)
+ try solve() catch { case _: NoInstance => null }
+ else
+ null
}
+ /** Overload which allocates fresh type vars.
+ * The other one exists because apparently inferExprInstance needs access to the typevars
+ * after the call, and it's wasteful to return a tuple and throw it away almost every time.
+ */
+ private def exprTypeArgs(tparams: List[Symbol], restpe: Type, pt: Type, useWeaklyCompatible: Boolean): List[Type] =
+ exprTypeArgs(tparams map freshVar, tparams, restpe, pt, useWeaklyCompatible)
/** Return inferred proto-type arguments of function, given
* its type and value parameters and result type, and a
- * prototype <code>pt</code> for the function result.
+ * prototype `pt` for the function result.
* Type arguments need to be either determined precisely by
* the prototype, or they are maximized, if they occur only covariantly
* in the value parameter list.
* If instantiation of a type parameter fails,
* take WildcardType for the proto-type argument.
- *
- * @param tparams ...
- * @param formals ...
- * @param restype ...
- * @param pt ...
- * @return ...
*/
- def protoTypeArgs(tparams: List[Symbol], formals: List[Type], restpe: Type,
- pt: Type): List[Type] = {
- /** Map type variable to its instance, or, if `variance` is covariant/contravariant,
- * to its upper/lower bound */
- def instantiateToBound(tvar: TypeVar, variance: Int): Type = try {
+ def protoTypeArgs(tparams: List[Symbol], formals: List[Type], restpe: Type, pt: Type): List[Type] = {
+ // Map type variable to its instance, or, if `variance` is variant,
+ // to its upper or lower bound
+ def instantiateToBound(tvar: TypeVar, variance: Variance): Type = {
lazy val hiBounds = tvar.constr.hiBounds
lazy val loBounds = tvar.constr.loBounds
- lazy val upper = glb(hiBounds)
- lazy val lower = lub(loBounds)
+ lazy val upper = glb(hiBounds)
+ lazy val lower = lub(loBounds)
def setInst(tp: Type): Type = {
tvar setInst tp
- assertNonCyclic(tvar)//debug
+ assert(tvar.constr.inst != tvar, tvar.origin)
instantiate(tvar.constr.inst)
}
- //Console.println("instantiate "+tvar+tvar.constr+" variance = "+variance);//DEBUG
- if (tvar.constr.inst != NoType)
+ if (tvar.constr.instValid)
instantiate(tvar.constr.inst)
- else if ((variance & COVARIANT) != 0 && hiBounds.nonEmpty)
- setInst(upper)
- else if ((variance & CONTRAVARIANT) != 0 && loBounds.nonEmpty)
+ else if (loBounds.nonEmpty && variance.isContravariant)
setInst(lower)
- else if (hiBounds.nonEmpty && loBounds.nonEmpty && upper <:< lower)
+ else if (hiBounds.nonEmpty && (variance.isPositive || loBounds.nonEmpty && upper <:< lower))
setInst(upper)
else
WildcardType
- } catch {
- case ex: NoInstance => WildcardType
}
+
val tvars = tparams map freshVar
if (isConservativelyCompatible(restpe.instantiateTypeParams(tparams, tvars), pt))
map2(tparams, tvars)((tparam, tvar) =>
- instantiateToBound(tvar, varianceInTypes(formals)(tparam)))
+ try instantiateToBound(tvar, varianceInTypes(formals)(tparam))
+ catch { case ex: NoInstance => WildcardType }
+ )
else
- tvars map (tvar => WildcardType)
+ tvars map (_ => WildcardType)
}
/** [Martin] Can someone comment this please? I have no idea what it's for
* and the code is not exactly readable.
*/
object AdjustedTypeArgs {
- val Result = scala.collection.mutable.LinkedHashMap
- type Result = scala.collection.mutable.LinkedHashMap[Symbol, Option[Type]]
+ val Result = mutable.LinkedHashMap
+ type Result = mutable.LinkedHashMap[Symbol, Option[Type]]
def unapply(m: Result): Some[(List[Symbol], List[Type])] = Some(toLists(
(m collect {case (p, Some(a)) => (p, a)}).unzip ))
@@ -586,7 +531,7 @@ trait Infer extends Checkable {
def unapply(m: Result): Some[(List[Symbol], List[Type], List[Type], List[Symbol])] = Some(toLists{
val (ok, nok) = m.map{case (p, a) => (p, a.getOrElse(null))}.partition(_._2 ne null)
val (okArgs, okTparams) = ok.unzip
- (okArgs, okTparams, m.values.map(_.getOrElse(NothingClass.tpe)), nok.keys)
+ (okArgs, okTparams, m.values.map(_.getOrElse(NothingTpe)), nok.keys)
})
}
@@ -608,7 +553,7 @@ trait Infer extends Checkable {
*
* Rewrite for repeated param types: Map T* entries to Seq[T].
* @return map from tparams to inferred arg, if inference was successful, tparams that map to None are considered left undetermined
- * type parameters that are inferred as `scala.Nothing` and that are not covariant in <code>restpe</code> are taken to be undetermined
+ * type parameters that are inferred as `scala.Nothing` and that are not covariant in `restpe` are taken to be undetermined
*/
def adjustTypeArgs(tparams: List[Symbol], tvars: List[TypeVar], targs: List[Type], restpe: Type = WildcardType): AdjustedTypeArgs.Result = {
val buf = AdjustedTypeArgs.Result.newBuilder[Symbol, Option[Type]]
@@ -616,33 +561,32 @@ trait Infer extends Checkable {
foreach3(tparams, tvars, targs) { (tparam, tvar, targ) =>
val retract = (
targ.typeSymbol == NothingClass // only retract Nothings
- && (restpe.isWildcard || (varianceInType(restpe)(tparam) & COVARIANT) == 0) // don't retract covariant occurrences
+ && (restpe.isWildcard || !varianceInType(restpe)(tparam).isPositive) // don't retract covariant occurrences
)
- // checks opt.virtPatmat directly so one need not run under -Xexperimental to use virtpatmat
buf += ((tparam,
if (retract) None
else Some(
if (targ.typeSymbol == RepeatedParamClass) targ.baseType(SeqClass)
else if (targ.typeSymbol == JavaRepeatedParamClass) targ.baseType(ArrayClass)
// this infers Foo.type instead of "object Foo" (see also widenIfNecessary)
- else if (targ.typeSymbol.isModuleClass || ((opt.experimental || opt.virtPatmat) && tvar.constr.avoidWiden)) targ
+ else if (targ.typeSymbol.isModuleClass || tvar.constr.avoidWiden) targ
else targ.widen
)
))
}
- buf.result
+ buf.result()
}
/** Return inferred type arguments, given type parameters, formal parameters,
* argument types, result type and expected result type.
- * If this is not possible, throw a <code>NoInstance</code> exception.
- * Undetermined type arguments are represented by `definitions.NothingClass.tpe`.
+ * If this is not possible, throw a `NoInstance` exception.
+ * Undetermined type arguments are represented by `definitions.NothingTpe`.
* No check that inferred parameters conform to their bounds is made here.
*
* @param tparams the type parameters of the method
* @param formals the value parameter types of the method
- * @param restp the result type of the method
+ * @param restpe the result type of the method
* @param argtpes the argument types of the application
* @param pt the expected return type of the application
* @return @see adjustTypeArgs
@@ -691,33 +635,71 @@ trait Infer extends Checkable {
}
val targs = solvedTypes(
tvars, tparams, tparams map varianceInTypes(formals),
- false, lubDepth(formals) max lubDepth(argtpes)
+ upper = false, lubDepth(formals) max lubDepth(argtpes)
)
+ // Can warn about inferring Any/AnyVal as long as they don't appear
+ // explicitly anywhere amongst the formal, argument, result, or expected type.
+ def canWarnAboutAny = !(pt :: restpe :: formals ::: argtpes exists (t => (t contains AnyClass) || (t contains AnyValClass)))
+ def argumentPosition(idx: Int): Position = context.tree match {
+ case x: ValOrDefDef => x.rhs match {
+ case Apply(fn, args) if idx < args.size => args(idx).pos
+ case _ => context.tree.pos
+ }
+ case _ => context.tree.pos
+ }
+ if (settings.warnInferAny.value && context.reportErrors && canWarnAboutAny) {
+ foreachWithIndex(targs) ((targ, idx) =>
+ targ.typeSymbol match {
+ case sym @ (AnyClass | AnyValClass) =>
+ context.unit.warning(argumentPosition(idx), s"a type was inferred to be `${sym.name}`; this may indicate a programming error.")
+ case _ =>
+ }
+ )
+ }
adjustTypeArgs(tparams, tvars, targs, restpe)
}
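// Illustrative aside (hypothetical user code, not from the patch): the inference the new
// warning targets, under the corresponding warn-infer-any setting.
def same[A](x: A, y: A): Boolean = x == y
same(1, "one")   // A is inferred as Any; flagged unless Any/AnyVal already appears explicitly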
+ /** One must step carefully when assessing applicability due to
+ * complications from varargs, tuple-conversion, named arguments.
+ * This method is used to filter out inapplicable methods,
+ * its behavior slightly configurable based on what stage of
+ * overloading resolution we're at.
+ *
+ * This method has boolean parameters, which is usually suboptimal
+ * but I didn't work out a better way. They don't have defaults,
+ * and the method's scope is limited.
+ */
+ private[typechecker] def isApplicableBasedOnArity(tpe: Type, argsCount: Int, varargsStar: Boolean, tuplingAllowed: Boolean): Boolean = followApply(tpe) match {
+ case OverloadedType(pre, alts) =>
+ alts exists (alt => isApplicableBasedOnArity(pre memberType alt, argsCount, varargsStar, tuplingAllowed))
+ case _ =>
+ val paramsCount = tpe.params.length
+ val simpleMatch = paramsCount == argsCount
+ val varargsTarget = isVarArgsList(tpe.params)
+ def varargsMatch = varargsTarget && (paramsCount - 1) <= argsCount
+ def tuplingMatch = tuplingAllowed && eligibleForTupleConversion(paramsCount, argsCount, varargsTarget)
+
+ // A varargs star call, e.g. (x, y:_*) can only match a varargs method
+ // with the same number of parameters. See SI-5859 for an example of what
+ // would fail were this not enforced before we arrived at isApplicable.
+ if (varargsStar)
+ varargsTarget && simpleMatch
+ else
+ simpleMatch || varargsMatch || tuplingMatch
+ }
+
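// Illustrative aside (hypothetical user code, not from the patch): the arity cases
// distinguished above.
def sum(xs: Int*): Int = xs.sum
sum(1, 2, 3)            // varargs match: paramsCount - 1 <= argsCount
sum(Seq(1, 2, 3): _*)   // varargs-star call: only a varargs method of matching arity applies (SI-5859)
def dist(p: (Int, Int)): Int = p._1 + p._2
dist(3, 4)              // tupling match: two arguments adapted to the single tuple parameter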
private[typechecker] def followApply(tp: Type): Type = tp match {
case NullaryMethodType(restp) =>
val restp1 = followApply(restp)
if (restp1 eq restp) tp else restp1
case _ =>
- val appmeth = {
- //OPT cut down on #closures by special casing non-overloaded case
- // was: tp.nonPrivateMember(nme.apply) filter (_.isPublic)
- val result = tp.nonPrivateMember(nme.apply)
- if ((result eq NoSymbol) || !result.isOverloaded && result.isPublic) result
- else result filter (_.isPublic)
+ //OPT cut down on #closures by special casing non-overloaded case
+ // was: tp.nonPrivateMember(nme.apply) filter (_.isPublic)
+ tp nonPrivateMember nme.apply match {
+ case NoSymbol => tp
+ case sym if !sym.isOverloaded && sym.isPublic => OverloadedType(tp, sym.alternatives)
+ case sym => OverloadedType(tp, sym filter (_.isPublic) alternatives)
}
- if (appmeth == NoSymbol) tp
- else OverloadedType(tp, appmeth.alternatives)
- }
-
- def hasExactlyNumParams(tp: Type, n: Int): Boolean = tp match {
- case OverloadedType(pre, alts) =>
- alts exists (alt => hasExactlyNumParams(pre.memberType(alt), n))
- case _ =>
- val len = tp.params.length
- len == n || isVarArgsList(tp.params) && len <= n + 1
}
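// Illustrative aside (hypothetical user code, not from the patch): applicability follows
// `apply`, so an object with a public apply member is applicable like a method.
object Adder { def apply(x: Int, y: Int): Int = x + y }
val three = Adder(1, 2)   // resolved through the apply alternative exposed above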
/**
@@ -731,7 +713,7 @@ trait Infer extends Checkable {
* to the corresponding position in params
* - namesOK is false when there's an invalid use of named arguments
*/
- private def checkNames(argtpes: List[Type], params: List[Symbol]) = {
+ private def checkNames(argtpes: List[Type], params: List[Symbol]): (List[Type], Array[Int], Boolean) = {
val argPos = Array.fill(argtpes.length)(-1)
var positionalAllowed, namesOK = true
var index = 0
@@ -743,7 +725,7 @@ trait Infer extends Checkable {
if (pos == -1) {
if (positionalAllowed) { // treat assignment as positional argument
argPos(index) = index
- res = UnitClass.tpe
+ res = UnitTpe
} else // unknown parameter name
namesOK = false
} else if (argPos.contains(pos)) { // parameter specified twice
@@ -765,207 +747,190 @@ trait Infer extends Checkable {
(argtpes1, argPos, namesOK)
}
- /** don't do a () to (()) conversion for methods whose second parameter
- * is a varargs. This is a fairly kludgey way to address #3224.
- * We'll probably find a better way to do this by identifying
- * tupled and n-ary methods, but this is something for a future major revision.
+ /** True if the given parameter list can accept a tupled argument list,
+ * and the argument list can be tupled (based on its length.)
+ */
+ def eligibleForTupleConversion(paramsCount: Int, argsCount: Int, varargsTarget: Boolean): Boolean = {
+ def canSendTuple = argsCount match {
+ case 0 => !varargsTarget // avoid () to (()) conversion - SI-3224
+ case 1 => false // can't tuple a single argument
+ case n => n <= MaxTupleArity // <= 22 arguments
+ }
+ def canReceiveTuple = paramsCount match {
+ case 1 => true
+ case 2 => varargsTarget
+ case _ => false
+ }
+ canSendTuple && canReceiveTuple
+ }
+ def eligibleForTupleConversion(formals: List[Type], argsCount: Int): Boolean = formals match {
+ case p :: Nil => eligibleForTupleConversion(1, argsCount, varargsTarget = isScalaRepeatedParamType(p))
+ case _ :: p :: Nil if isScalaRepeatedParamType(p) => eligibleForTupleConversion(2, argsCount, varargsTarget = true)
+ case _ => false
+ }
+
+ /** The type of an argument list after being coerced to a tuple.
+ * @pre: the argument list is eligible for tuple conversion.
*/
- def isUnitForVarArgs(args: List[AnyRef], params: List[Symbol]): Boolean =
- args.isEmpty && hasLength(params, 2) && isVarArgsList(params)
+ private def typeAfterTupleConversion(argtpes: List[Type]): Type = (
+ if (argtpes.isEmpty) UnitTpe // aka "Tuple0"
+ else tupleType(argtpes map {
+ case NamedType(name, tp) => UnitTpe // not a named arg - only assignments here
+ case RepeatedType(tp) => tp // but probably shouldn't be tupling a call containing :_*
+ case tp => tp
+ })
+ )
+
+ /** If the argument list needs to be tupled for the parameter list,
+ * a list containing the type of the tuple. Otherwise, the original
+ * argument list.
+ */
+ def tupleIfNecessary(formals: List[Type], argtpes: List[Type]): List[Type] = {
+ if (eligibleForTupleConversion(formals, argtpes.size))
+ typeAfterTupleConversion(argtpes) :: Nil
+ else
+ argtpes
+ }
+
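// Illustrative aside (hypothetical user code, not from the patch): the corner cases
// encoded in eligibleForTupleConversion.
def one(x: Any): Any = x
one()             // zero args, non-varargs target: adapted to one(()), i.e. Unit is passed
def many(xs: Any*): Int = xs.length
many()            // zero args, varargs target: a plain empty varargs call, no ()-to-(()) adaptation (SI-3224)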
+ private def isApplicableToMethod(undetparams: List[Symbol], mt: MethodType, argtpes0: List[Type], pt: Type): Boolean = {
+ val formals = formalTypes(mt.paramTypes, argtpes0.length, removeByName = false)
+ def missingArgs = missingParams[Type](argtpes0, mt.params, x => Some(x) collect { case NamedType(n, _) => n })
+ def argsTupled = tupleIfNecessary(mt.paramTypes, argtpes0)
+ def argsPlusDefaults = missingArgs match {
+ case (args, _) if args forall (_.hasDefault) => argtpes0 ::: makeNamedTypes(args)
+ case _ => argsTupled
+ }
+ // If args eq the incoming arg types, fail; otherwise recurse with these args.
+ def tryWithArgs(args: List[Type]) = (
+ (args ne argtpes0)
+ && isApplicable(undetparams, mt, args, pt)
+ )
+ def tryInstantiating(args: List[Type]) = falseIfNoInstance {
+ val restpe = mt resultType args
+ val AdjustedTypeArgs.Undets(okparams, okargs, leftUndet) = methTypeArgs(undetparams, formals, restpe, args, pt)
+ val restpeInst = restpe.instantiateTypeParams(okparams, okargs)
+ // #2665: must use weak conformance, not regular one (follow the monomorphic case above)
+ exprTypeArgs(leftUndet, restpeInst, pt, useWeaklyCompatible = true) match {
+ case null => false
+ case _ => isWithinBounds(NoPrefix, NoSymbol, okparams, okargs)
+ }
+ }
+ def typesCompatible(args: List[Type]) = undetparams match {
+ case Nil => isCompatibleArgs(args, formals) && isWeaklyCompatible(mt resultType args, pt)
+ case _ => tryInstantiating(args)
+ }
+
+ // when using named application, the vararg param has to be specified exactly once
+ def reorderedTypesCompatible = checkNames(argtpes0, mt.params) match {
+ case (_, _, false) => false // names are not ok
+ case (_, pos, _) if !allArgsArePositional(pos) && !sameLength(formals, mt.params) => false // different length lists and all args not positional
+ case (args, pos, _) => typesCompatible(reorderArgs(args, pos))
+ }
+ compareLengths(argtpes0, formals) match {
+ case 0 if containsNamedType(argtpes0) => reorderedTypesCompatible // right number of args, wrong order
+ case 0 => typesCompatible(argtpes0) // fast track if no named arguments are used
+ case x if x > 0 => tryWithArgs(argsTupled) // too many args, try tupling
+ case _ => tryWithArgs(argsPlusDefaults) // too few args, try adding defaults or tupling
+ }
+ }
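// Illustrative aside (hypothetical user code, not from the patch): the "too few args" and
// named-argument paths taken above.
def mk(x: Int, y: Int = 0, z: String = ""): String = s"$x-$y-$z"
mk(1)                // too few arguments, but the missing params all have defaults
mk(z = "a", x = 1)   // named arguments are reordered to parameter positions, then checked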
- /** Is there an instantiation of free type variables <code>undetparams</code>
- * such that function type <code>ftpe</code> is applicable to
- * <code>argtpes</code> and its result conform to <code>pt</code>?
+ /** Is there an instantiation of free type variables `undetparams` such that
+ * function type `ftpe` is applicable to `argtpes0` and its result conform to `pt`?
*
- * @param undetparams ...
* @param ftpe the type of the function (often a MethodType)
- * @param argtpes the argument types; a NamedType(name, tp) for named
+ * @param argtpes0 the argument types; a NamedType(name, tp) for named
* arguments. For each NamedType, if `name` does not exist in `ftpe`, that
* type is set to `Unit`, i.e. the corresponding argument is treated as
* an assignment expression (@see checkNames).
- * @param pt ...
- * @return ...
*/
- private def isApplicable(undetparams: List[Symbol], ftpe: Type,
- argtpes0: List[Type], pt: Type): Boolean =
+ private def isApplicable(undetparams: List[Symbol], ftpe: Type, argtpes0: List[Type], pt: Type): Boolean = (
ftpe match {
- case OverloadedType(pre, alts) =>
- alts exists (alt => isApplicable(undetparams, pre.memberType(alt), argtpes0, pt))
- case ExistentialType(tparams, qtpe) =>
- isApplicable(undetparams, qtpe, argtpes0, pt)
- case mt @ MethodType(params, _) =>
- val formals = formalTypes(mt.paramTypes, argtpes0.length, removeByName = false)
-
- def tryTupleApply: Boolean = {
- // if 1 formal, 1 argtpe (a tuple), otherwise unmodified argtpes0
- val tupleArgTpes = actualTypes(argtpes0 map {
- // no assignment is treated as named argument here
- case NamedType(name, tp) => UnitClass.tpe
- case tp => tp
- }, formals.length)
-
- !sameLength(argtpes0, tupleArgTpes) &&
- !isUnitForVarArgs(argtpes0, params) &&
- isApplicable(undetparams, ftpe, tupleArgTpes, pt)
- }
- def typesCompatible(argtpes: List[Type]) = {
- val restpe = ftpe.resultType(argtpes)
- if (undetparams.isEmpty) {
- isCompatibleArgs(argtpes, formals) && isWeaklyCompatible(restpe, pt)
- } else {
- try {
- val AdjustedTypeArgs.Undets(okparams, okargs, leftUndet) = methTypeArgs(undetparams, formals, restpe, argtpes, pt)
- // #2665: must use weak conformance, not regular one (follow the monomorphic case above)
- (exprTypeArgs(leftUndet, restpe.instantiateTypeParams(okparams, okargs), pt, useWeaklyCompatible = true)._1 ne null) &&
- isWithinBounds(NoPrefix, NoSymbol, okparams, okargs)
- } catch {
- case ex: NoInstance => false
- }
- }
- }
-
- // very similar logic to doTypedApply in typechecker
- val lencmp = compareLengths(argtpes0, formals)
- if (lencmp > 0) tryTupleApply
- else if (lencmp == 0) {
- if (!argtpes0.exists(_.isInstanceOf[NamedType])) {
- // fast track if no named arguments are used
- typesCompatible(argtpes0)
- }
- else {
- // named arguments are used
- val (argtpes1, argPos, namesOK) = checkNames(argtpes0, params)
- // when using named application, the vararg param has to be specified exactly once
- ( namesOK && (isIdentity(argPos) || sameLength(formals, params)) &&
- // nb. arguments and names are OK, check if types are compatible
- typesCompatible(reorderArgs(argtpes1, argPos))
- )
- }
- }
- else {
- // not enough arguments, check if applicable using defaults
- val missing = missingParams[Type](argtpes0, params, {
- case NamedType(name, _) => Some(name)
- case _ => None
- })._1
- if (missing forall (_.hasDefault)) {
- // add defaults as named arguments
- val argtpes1 = argtpes0 ::: (missing map (p => NamedType(p.name, p.tpe)))
- isApplicable(undetparams, ftpe, argtpes1, pt)
- }
- else tryTupleApply
- }
-
- case NullaryMethodType(restpe) => // strip nullary method type, which used to be done by the polytype case below
- isApplicable(undetparams, restpe, argtpes0, pt)
- case PolyType(tparams, restpe) =>
- createFromClonedSymbols(tparams, restpe)((tps1, restpe1) => isApplicable(tps1 ::: undetparams, restpe1, argtpes0, pt))
- case ErrorType =>
- true
- case _ =>
- false
+ case OverloadedType(pre, alts) => alts exists (alt => isApplicable(undetparams, pre memberType alt, argtpes0, pt))
+ case ExistentialType(_, qtpe) => isApplicable(undetparams, qtpe, argtpes0, pt)
+ case mt @ MethodType(_, _) => isApplicableToMethod(undetparams, mt, argtpes0, pt)
+ case NullaryMethodType(restpe) => isApplicable(undetparams, restpe, argtpes0, pt)
+ case PolyType(tparams, restpe) => createFromClonedSymbols(tparams, restpe)((tps1, res1) => isApplicable(tps1 ::: undetparams, res1, argtpes0, pt))
+ case ErrorType => true
+ case _ => false
}
+ )
/**
- * Todo: Try to make isApplicable always safe (i.e. not cause TypeErrors).
- * The chance of TypeErrors should be reduced through context errors
+ * Are arguments of the given types applicable to `ftpe`? Type argument inference
+ * is tried twice: firstly with the given expected type, and secondly with `WildcardType`.
*/
- private[typechecker] def isApplicableSafe(undetparams: List[Symbol], ftpe: Type,
- argtpes0: List[Type], pt: Type): Boolean = {
- val silentContext = context.makeSilent(false)
- val typer0 = newTyper(silentContext)
- val res1 = typer0.infer.isApplicable(undetparams, ftpe, argtpes0, pt)
- if (pt != WildcardType && silentContext.hasErrors) {
- silentContext.flushBuffer()
- val res2 = typer0.infer.isApplicable(undetparams, ftpe, argtpes0, WildcardType)
- if (silentContext.hasErrors) false else res2
- } else res1
+ // Todo: Try to make isApplicable always safe (i.e. not cause TypeErrors).
+ // The chance of TypeErrors should be reduced through context errors
+ private[typechecker] def isApplicableSafe(undetparams: List[Symbol], ftpe: Type, argtpes0: List[Type], pt: Type): Boolean = {
+ def applicableExpectingPt(pt: Type): Boolean = {
+ val silent = context.makeSilent(reportAmbiguousErrors = false)
+ val result = newTyper(silent).infer.isApplicable(undetparams, ftpe, argtpes0, pt)
+ if (silent.hasErrors && !pt.isWildcard)
+ applicableExpectingPt(WildcardType) // second try
+ else
+ result
+ }
+ applicableExpectingPt(pt)
}
- /** Is type <code>ftpe1</code> strictly more specific than type <code>ftpe2</code>
+ /** Is type `ftpe1` strictly more specific than type `ftpe2`
* when both are alternatives in an overloaded function?
* @see SLS (sec:overloading-resolution)
- *
- * @param ftpe1 ...
- * @param ftpe2 ...
- * @return ...
*/
- def isAsSpecific(ftpe1: Type, ftpe2: Type): Boolean = ftpe1 match {
- case OverloadedType(pre, alts) =>
- alts exists (alt => isAsSpecific(pre.memberType(alt), ftpe2))
- case et: ExistentialType =>
- isAsSpecific(ftpe1.skolemizeExistential, ftpe2)
- //et.withTypeVars(isAsSpecific(_, ftpe2))
- case NullaryMethodType(res) =>
- isAsSpecific(res, ftpe2)
- case mt: MethodType if mt.isImplicit =>
- isAsSpecific(ftpe1.resultType, ftpe2)
- case mt @ MethodType(params, _) if params.nonEmpty =>
- var argtpes = mt.paramTypes
- if (isVarArgsList(params) && isVarArgsList(ftpe2.params))
- argtpes = argtpes map (argtpe =>
- if (isRepeatedParamType(argtpe)) argtpe.typeArgs.head else argtpe)
- isApplicable(List(), ftpe2, argtpes, WildcardType)
- case PolyType(tparams, NullaryMethodType(res)) =>
- isAsSpecific(PolyType(tparams, res), ftpe2)
- case PolyType(tparams, mt: MethodType) if mt.isImplicit =>
- isAsSpecific(PolyType(tparams, mt.resultType), ftpe2)
- case PolyType(_, (mt @ MethodType(params, _))) if params.nonEmpty =>
- isApplicable(List(), ftpe2, mt.paramTypes, WildcardType)
- // case NullaryMethodType(res) =>
- // isAsSpecific(res, ftpe2)
- case ErrorType =>
- true
- case _ =>
- ftpe2 match {
- case OverloadedType(pre, alts) =>
- alts forall (alt => isAsSpecific(ftpe1, pre.memberType(alt)))
- case et: ExistentialType =>
- et.withTypeVars(isAsSpecific(ftpe1, _))
- case mt: MethodType =>
- !mt.isImplicit || isAsSpecific(ftpe1, mt.resultType)
- case NullaryMethodType(res) =>
- isAsSpecific(ftpe1, res)
- case PolyType(tparams, NullaryMethodType(res)) =>
- isAsSpecific(ftpe1, PolyType(tparams, res))
- case PolyType(tparams, mt: MethodType) =>
- !mt.isImplicit || isAsSpecific(ftpe1, PolyType(tparams, mt.resultType))
- case _ =>
- isAsSpecificValueType(ftpe1, ftpe2, List(), List())
- }
+ def isAsSpecific(ftpe1: Type, ftpe2: Type): Boolean = {
+ def checkIsApplicable(argtpes: List[Type]) = isApplicable(Nil, ftpe2, argtpes, WildcardType)
+ def bothAreVarargs = isVarArgsList(ftpe1.params) && isVarArgsList(ftpe2.params)
+ def onRight = ftpe2 match {
+ case OverloadedType(pre, alts) => alts forall (alt => isAsSpecific(ftpe1, pre memberType alt))
+ case et: ExistentialType => et.withTypeVars(isAsSpecific(ftpe1, _))
+ case mt @ MethodType(_, restpe) => !mt.isImplicit || isAsSpecific(ftpe1, restpe)
+ case NullaryMethodType(res) => isAsSpecific(ftpe1, res)
+ case PolyType(tparams, NullaryMethodType(restpe)) => isAsSpecific(ftpe1, PolyType(tparams, restpe))
+ case PolyType(tparams, mt @ MethodType(_, restpe)) => !mt.isImplicit || isAsSpecific(ftpe1, PolyType(tparams, restpe))
+ case _ => isAsSpecificValueType(ftpe1, ftpe2, Nil, Nil)
+ }
+ ftpe1 match {
+ case OverloadedType(pre, alts) => alts exists (alt => isAsSpecific(pre memberType alt, ftpe2))
+ case et: ExistentialType => isAsSpecific(et.skolemizeExistential, ftpe2)
+ case NullaryMethodType(restpe) => isAsSpecific(restpe, ftpe2)
+ case mt @ MethodType(_, restpe) if mt.isImplicit => isAsSpecific(restpe, ftpe2)
+ case mt @ MethodType(_, _) if bothAreVarargs => checkIsApplicable(mt.paramTypes mapConserve repeatedToSingle)
+ case mt @ MethodType(params, _) if params.nonEmpty => checkIsApplicable(mt.paramTypes)
+ case PolyType(tparams, NullaryMethodType(restpe)) => isAsSpecific(PolyType(tparams, restpe), ftpe2)
+ case PolyType(tparams, mt @ MethodType(_, restpe)) if mt.isImplicit => isAsSpecific(PolyType(tparams, restpe), ftpe2)
+ case PolyType(_, mt @ MethodType(params, _)) if params.nonEmpty => checkIsApplicable(mt.paramTypes)
+ case ErrorType => true
+ case _ => onRight
+ }
}
- private def isAsSpecificValueType(tpe1: Type, tpe2: Type, undef1: List[Symbol], undef2: List[Symbol]): Boolean = (tpe1, tpe2) match {
- case (PolyType(tparams1, rtpe1), _) =>
+ private def isAsSpecificValueType(tpe1: Type, tpe2: Type, undef1: List[Symbol], undef2: List[Symbol]): Boolean = tpe1 match {
+ case PolyType(tparams1, rtpe1) =>
isAsSpecificValueType(rtpe1, tpe2, undef1 ::: tparams1, undef2)
- case (_, PolyType(tparams2, rtpe2)) =>
- isAsSpecificValueType(tpe1, rtpe2, undef1, undef2 ::: tparams2)
- case _ =>
- existentialAbstraction(undef1, tpe1) <:< existentialAbstraction(undef2, tpe2)
+ case _ =>
+ tpe2 match {
+ case PolyType(tparams2, rtpe2) => isAsSpecificValueType(tpe1, rtpe2, undef1, undef2 ::: tparams2)
+ case _ => existentialAbstraction(undef1, tpe1) <:< existentialAbstraction(undef2, tpe2)
+ }
}
-
-/*
- def isStrictlyMoreSpecific(ftpe1: Type, ftpe2: Type): Boolean =
- ftpe1.isError || isAsSpecific(ftpe1, ftpe2) &&
- (!isAsSpecific(ftpe2, ftpe1) ||
- !ftpe1.isInstanceOf[OverloadedType] && ftpe2.isInstanceOf[OverloadedType] ||
- phase.erasedTypes && covariantReturnOverride(ftpe1, ftpe2))
-*/
/** Is sym1 (or its companion class in case it is a module) a subclass of
* sym2 (or its companion class in case it is a module)?
*/
def isProperSubClassOrObject(sym1: Symbol, sym2: Symbol): Boolean = (
- (sym1 != sym2) && (sym1 != NoSymbol) && (
- (sym1 isSubClass sym2)
- || (sym1.isModuleClass && isProperSubClassOrObject(sym1.linkedClassOfClass, sym2))
- || (sym2.isModuleClass && isProperSubClassOrObject(sym1, sym2.linkedClassOfClass))
- )
+ (sym1 ne sym2)
+ && (sym1 ne NoSymbol)
+ && ( (sym1 isSubClass sym2)
+ || (sym1.isModuleClass && isProperSubClassOrObject(sym1.linkedClassOfClass, sym2))
+ || (sym2.isModuleClass && isProperSubClassOrObject(sym1, sym2.linkedClassOfClass))
+ )
)
/** is symbol `sym1` defined in a proper subclass of symbol `sym2`?
*/
- def isInProperSubClassOrObject(sym1: Symbol, sym2: Symbol) =
- sym2 == NoSymbol || isProperSubClassOrObject(sym1.owner, sym2.owner)
+ def isInProperSubClassOrObject(sym1: Symbol, sym2: Symbol) = (
+ (sym2 eq NoSymbol)
+ || isProperSubClassOrObject(sym1.safeOwner, sym2.owner)
+ )
def isStrictlyMoreSpecific(ftpe1: Type, ftpe2: Type, sym1: Symbol, sym2: Symbol): Boolean = {
// ftpe1 / ftpe2 are OverloadedTypes (possibly with one single alternative) if they
@@ -978,92 +943,36 @@ trait Infer extends Checkable {
(!phase.erasedTypes || covariantReturnOverride(ftpe1, ftpe2))) 1 else 0)
val subClassCount = (if (isInProperSubClassOrObject(sym1, sym2)) 1 else 0) -
(if (isInProperSubClassOrObject(sym2, sym1)) 1 else 0)
-// println("is more specific? "+sym1+":"+ftpe1+sym1.locationString+"/"+sym2+":"+ftpe2+sym2.locationString+":"+
-// specificCount+"/"+subClassCount)
specificCount + subClassCount > 0
}
}
-/*
- ftpe1.isError || {
- if (isAsSpecific(ftpe1, ftpe2))
- (!isAsSpecific(ftpe2, ftpe1) ||
- isProperSubClassOrObject(sym1.owner, sym2.owner) ||
- !ftpe1.isInstanceOf[OverloadedType] && ftpe2.isInstanceOf[OverloadedType] ||
- phase.erasedTypes && covariantReturnOverride(ftpe1, ftpe2))
- else
- !isAsSpecific(ftpe2, ftpe1) &&
- isProperSubClassOrObject(sym1.owner, sym2.owner)
- }
-*/
- private def covariantReturnOverride(ftpe1: Type, ftpe2: Type): Boolean = (ftpe1, ftpe2) match {
- case (MethodType(_, rtpe1), MethodType(_, rtpe2)) =>
- rtpe1 <:< rtpe2 || rtpe2.typeSymbol == ObjectClass
- case _ =>
- false
- }
-/*
- /** Is type `tpe1` a strictly better expression alternative than type `tpe2`?
- */
- def isStrictlyBetterExpr(tpe1: Type, tpe2: Type) = {
- isMethod(tpe2) && !isMethod(tpe1) ||
- isNullary(tpe1) && !isNullary(tpe2) ||
- isStrictlyBetter(tpe1, tpe2)
- }
-
- /** Is type `tpe1` a strictly better alternative than type `tpe2`?
- * non-methods are always strictly better than methods
- * nullary methods are always strictly better than non-nullary
- * if both are non-nullary methods, then tpe1 is strictly better than tpe2 if
- * - tpe1 specializes tpe2 and tpe2 does not specialize tpe1
- * - tpe1 and tpe2 specialize each other and tpe1 has a strictly better resulttype than
- * tpe2
- */
- def isStrictlyBetter(tpe1: Type, tpe2: Type) = {
- def isNullary(tpe: Type): Boolean = tpe match {
- case tp: RewrappingTypeProxy => isNullary(tp.underlying)
- case _ => tpe.paramSectionCount == 0 || tpe.params.isEmpty
- }
- def isMethod(tpe: Type): Boolean = tpe match {
- case tp: RewrappingTypeProxy => isMethod(tp.underlying)
- case MethodType(_, _) | PolyType(_, _) => true
- case _ => false
- }
- def hasStrictlyBetterResult =
- resultIsBetter(tpe1, tpe2, List(), List()) && !resultIsBetter(tpe2, tpe1, List(), List())
- if (!isMethod(tpe1))
- isMethod(tpe2) || hasStrictlyBetterResult
- isNullary(tpe1) && !isNullary(tpe2) ||
- is
-
- else if (isNullary(tpe1))
- isMethod(tpe2) && (!isNullary(tpe2) || hasStrictlyBetterResult)
- else
- specializes(tpe1, tpe2) && (!specializes(tpe2, tpe1) || hasStrictlyBetterResult)
+ private def covariantReturnOverride(ftpe1: Type, ftpe2: Type): Boolean = ftpe1 match {
+ case MethodType(_, rtpe1) =>
+ ftpe2 match {
+ case MethodType(_, rtpe2) => rtpe1 <:< rtpe2 || rtpe2.typeSymbol == ObjectClass
+ case _ => false
+ }
+ case _ => false
}
-*/
/** error if arguments not within bounds. */
- def checkBounds(tree: Tree, pre: Type, owner: Symbol,
- tparams: List[Symbol], targs: List[Type], prefix: String): Boolean =
- if ((targs exists (_.isErroneous)) || (tparams exists (_.isErroneous))) true
- else {
- //@M validate variances & bounds of targs wrt variances & bounds of tparams
- //@M TODO: better place to check this?
- //@M TODO: errors for getters & setters are reported separately
- val kindErrors = checkKindBounds(tparams, targs, pre, owner)
- kindErrors match {
- case Nil =>
- def notWithinBounds() = NotWithinBounds(tree, prefix, targs, tparams, Nil)
- isWithinBounds(pre, owner, tparams, targs) || {notWithinBounds(); false}
- case errors =>
- def kindBoundErrors() = KindBoundErrors(tree, prefix, targs, tparams, errors)
- (targs contains WildcardType) || {kindBoundErrors(); false}
- }
+ def checkBounds(tree: Tree, pre: Type, owner: Symbol, tparams: List[Symbol], targs: List[Type], prefix: String): Boolean = {
+ def issueBoundsError() = { NotWithinBounds(tree, prefix, targs, tparams, Nil) ; false }
+ def issueKindBoundErrors(errs: List[String]) = { KindBoundErrors(tree, prefix, targs, tparams, errs) ; false }
+ //@M validate variances & bounds of targs wrt variances & bounds of tparams
+ //@M TODO: better place to check this?
+ //@M TODO: errors for getters & setters are reported separately
+ def check() = checkKindBounds(tparams, targs, pre, owner) match {
+ case Nil => isWithinBounds(pre, owner, tparams, targs) || issueBoundsError()
+ case errs => (targs contains WildcardType) || issueKindBoundErrors(errs)
}
+ targs.exists(_.isErroneous) || tparams.exists(_.isErroneous) || check()
+ }
+
def checkKindBounds(tparams: List[Symbol], targs: List[Type], pre: Type, owner: Symbol): List[String] = {
- checkKindBounds0(tparams, targs, pre, owner, true) map {
+ checkKindBounds0(tparams, targs, pre, owner, explainErrors = true) map {
case (targ, tparam, kindErrors) =>
kindErrors.errorMessage(targ, tparam)
}
@@ -1087,9 +996,9 @@ trait Infer extends Checkable {
"lenientPt" -> lenientPt
)
)
- var targs = exprTypeArgs(undetparams, tree.tpe, strictPt)._1
+ var targs = exprTypeArgs(undetparams, tree.tpe, strictPt, useWeaklyCompatible = false)
if ((targs eq null) || !(tree.tpe.subst(undetparams, targs) <:< strictPt))
- targs = exprTypeArgs(undetparams, tree.tpe, lenientPt)._1
+ targs = exprTypeArgs(undetparams, tree.tpe, lenientPt, useWeaklyCompatible = false)
substExpr(tree, undetparams, targs, lenientPt)
printInference("[inferArgumentInstance] finished, targs = " + targs)
@@ -1101,8 +1010,9 @@ trait Infer extends Checkable {
* If passed, infers against specified type `treeTp` instead of `tree.tp`.
*/
def inferExprInstance(tree: Tree, tparams: List[Symbol], pt: Type = WildcardType, treeTp0: Type = null, keepNothings: Boolean = true, useWeaklyCompatible: Boolean = false): List[Symbol] = {
- val treeTp = if(treeTp0 eq null) tree.tpe else treeTp0 // can't refer to tree in default for treeTp0
- val (targs, tvars) = exprTypeArgs(tparams, treeTp, pt, useWeaklyCompatible)
+ val treeTp = if (treeTp0 eq null) tree.tpe else treeTp0 // can't refer to tree in default for treeTp0
+ val tvars = tparams map freshVar
+ val targs = exprTypeArgs(tvars, tparams, treeTp, pt, useWeaklyCompatible)
printInference(
ptBlock("inferExprInstance",
"tree" -> tree,
@@ -1133,30 +1043,25 @@ trait Infer extends Checkable {
/** Substitute free type variables `undetparams` of polymorphic argument
* expression `tree` to `targs`, Error if `targs` is null.
- *
- * @param tree ...
- * @param undetparams ...
- * @param targs ...
- * @param pt ...
*/
- private def substExpr(tree: Tree, undetparams: List[Symbol],
- targs: List[Type], pt: Type) {
+ private def substExpr(tree: Tree, undetparams: List[Symbol], targs: List[Type], pt: Type) {
if (targs eq null) {
if (!tree.tpe.isErroneous && !pt.isErroneous)
PolymorphicExpressionInstantiationError(tree, undetparams, pt)
- } else {
+ }
+ else {
new TreeTypeSubstituter(undetparams, targs).traverse(tree)
notifyUndetparamsInferred(undetparams, targs)
}
}
- /** Substitute free type variables <code>undetparams</code> of application
- * <code>fn(args)</code>, given prototype <code>pt</code>.
+ /** Substitute free type variables `undetparams` of application
+ * `fn(args)`, given prototype `pt`.
*
* @param fn fn: the function that needs to be instantiated.
* @param undetparams the parameters that need to be determined
* @param args the actual arguments supplied in the call.
- * @param pt the expected type of the function application
+ * @param pt0 the expected type of the function application
* @return The type parameters that remain uninstantiated,
* and that thus have not been substituted.
*/
@@ -1166,7 +1071,7 @@ trait Infer extends Checkable {
try {
val pt = if (pt0.typeSymbol == UnitClass) WildcardType else pt0
val formals = formalTypes(mt.paramTypes, args.length)
- val argtpes = actualTypes(args map (x => elimAnonymousClass(x.tpe.deconst)), formals.length)
+ val argtpes = tupleIfNecessary(formals, args map (x => elimAnonymousClass(x.tpe.deconst)))
val restpe = fn.tpe.resultType(argtpes)
val AdjustedTypeArgs.AllArgsAndUndets(okparams, okargs, allargs, leftUndet) =
@@ -1202,25 +1107,22 @@ trait Infer extends Checkable {
}
}
- def widen(tp: Type): Type = abstractTypesToBounds(tp)
-
- /** Substitute free type variables <code>undetparams</code> of type constructor
- * <code>tree</code> in pattern, given prototype <code>pt</code>.
+ /** Substitute free type variables `undetparams` of type constructor
+ * `tree` in pattern, given prototype `pt`.
*
* @param tree the constructor that needs to be instantiated
* @param undetparams the undetermined type parameters
- * @param pt the expected result type of the instance
+ * @param pt0 the expected result type of the instance
*/
def inferConstructorInstance(tree: Tree, undetparams: List[Symbol], pt0: Type) {
- val pt = widen(pt0)
+ val pt = abstractTypesToBounds(pt0)
val ptparams = freeTypeParamsOfTerms(pt)
val ctorTp = tree.tpe
val resTp = ctorTp.finalResultType
debuglog("infer constr inst "+ tree +"/"+ undetparams +"/ pt= "+ pt +" pt0= "+ pt0 +" resTp: "+ resTp)
- /** Compute type arguments for undetermined params
- */
+ /* Compute type arguments for undetermined params */
def inferFor(pt: Type): Option[List[Type]] = {
val tvars = undetparams map freshVar
val resTpV = resTp.instantiateTypeParams(undetparams, tvars)
@@ -1232,13 +1134,13 @@ trait Infer extends Checkable {
val variances =
if (ctorTp.paramTypes.isEmpty) undetparams map varianceInType(ctorTp)
else undetparams map varianceInTypes(ctorTp.paramTypes)
- val targs = solvedTypes(tvars, undetparams, variances, true, lubDepth(List(resTp, pt)))
+ val targs = solvedTypes(tvars, undetparams, variances, upper = true, lubDepth(List(resTp, pt)))
// checkBounds(tree, NoPrefix, NoSymbol, undetparams, targs, "inferred ")
// no checkBounds here. If we enable it, test bug602 fails.
// TODO: reinstate checkBounds, return params that fail to meet their bounds to undetparams
Some(targs)
} catch ifNoInstance { msg =>
- debuglog("NO INST "+ (tvars, tvars map (_.constr)))
+ debuglog("NO INST "+ ((tvars, tvars map (_.constr))))
NoConstructorInstanceError(tree, resTp, pt, msg)
None
}
@@ -1272,109 +1174,59 @@ trait Infer extends Checkable {
}
} else None
- (inferFor(pt) orElse inferForApproxPt) map { targs =>
- new TreeTypeSubstituter(undetparams, targs).traverse(tree)
- notifyUndetparamsInferred(undetparams, targs)
- } getOrElse {
- debugwarn("failed inferConstructorInstance for "+ tree +" : "+ tree.tpe +" under "+ undetparams +" pt = "+ pt +(if(isFullyDefined(pt)) " (fully defined)" else " (not fully defined)"))
- // if (settings.explaintypes.value) explainTypes(resTp.instantiateTypeParams(undetparams, tvars), pt)
- ConstrInstantiationError(tree, resTp, pt)
+ inferFor(pt) orElse inferForApproxPt match {
+ case Some(targs) =>
+ new TreeTypeSubstituter(undetparams, targs).traverse(tree)
+ notifyUndetparamsInferred(undetparams, targs)
+ case _ =>
+ def not = if (isFullyDefined(pt)) "" else "not "
+ devWarning(s"failed inferConstructorInstance for $tree: ${tree.tpe} undet=$undetparams, pt=$pt (${not}fully defined)")
+ ConstrInstantiationError(tree, resTp, pt)
}
}
-
- def instBounds(tvar: TypeVar): (Type, Type) = {
- val tparam = tvar.origin.typeSymbol
- val instType = toOrigin(tvar.constr.inst)
+ def instBounds(tvar: TypeVar): TypeBounds = {
+ val tparam = tvar.origin.typeSymbol
+ val instType = toOrigin(tvar.constr.inst)
+ val TypeBounds(lo, hi) = tparam.info.bounds
val (loBounds, hiBounds) =
- if (instType != NoType && isFullyDefined(instType)) (List(instType), List(instType))
+ if (isFullyDefined(instType)) (List(instType), List(instType))
else (tvar.constr.loBounds, tvar.constr.hiBounds)
- val lo = lub(tparam.info.bounds.lo :: loBounds map toOrigin)
- val hi = glb(tparam.info.bounds.hi :: hiBounds map toOrigin)
- (lo, hi)
+
+ TypeBounds(
+ lub(lo :: loBounds map toOrigin),
+ glb(hi :: hiBounds map toOrigin)
+ )
}
def isInstantiatable(tvars: List[TypeVar]) = {
val tvars1 = tvars map (_.cloneInternal)
// Note: right now it's not clear that solving is complete, or how it can be made complete!
// So we should come back to this and investigate.
- solve(tvars1, tvars1 map (_.origin.typeSymbol), tvars1 map (x => COVARIANT), false)
+ solve(tvars1, tvars1 map (_.origin.typeSymbol), tvars1 map (_ => Variance.Covariant), upper = false)
}
- // this is quite nasty: it destructively changes the info of the syms of e.g., method type params (see #3692, where the type param T's bounds were set to >: T <: T, so that parts looped)
+ // this is quite nasty: it destructively changes the info of the syms of e.g., method type params
+ // (see #3692, where the type param T's bounds were set to >: T <: T, so that parts looped)
+ // the changes are rolled back by restoreTypeBounds, but might be unintentionally observed in the meantime
def instantiateTypeVar(tvar: TypeVar) {
- val tparam = tvar.origin.typeSymbol
- if (false &&
- tvar.constr.inst != NoType &&
- isFullyDefined(tvar.constr.inst) &&
- (tparam.info.bounds containsType tvar.constr.inst)) {
- context.nextEnclosing(_.tree.isInstanceOf[CaseDef]).pushTypeBounds(tparam)
- tparam setInfo tvar.constr.inst
- tparam resetFlag DEFERRED
- debuglog("new alias of " + tparam + " = " + tparam.info)
- } else {
- val (lo, hi) = instBounds(tvar)
- if (lo <:< hi) {
- if (!((lo <:< tparam.info.bounds.lo) && (tparam.info.bounds.hi <:< hi)) // bounds were improved
- && tparam != lo.typeSymbolDirect && tparam != hi.typeSymbolDirect) { // don't create illegal cycles
- context.nextEnclosing(_.tree.isInstanceOf[CaseDef]).pushTypeBounds(tparam)
- tparam setInfo TypeBounds(lo, hi)
- debuglog("new bounds of " + tparam + " = " + tparam.info)
- } else {
- debuglog("redundant: "+tparam+" "+tparam.info+"/"+lo+" "+hi)
- }
- } else {
- debuglog("inconsistent: "+tparam+" "+lo+" "+hi)
- }
- }
- }
-
- /** Does `tp` contain any types that cannot be checked at run-time (i.e., after erasure, will isInstanceOf[erased(tp)] imply conceptualIsInstanceOf[tp]?)
- * we should find a way to ask erasure: hey, is `tp` going to make it through you with all of its isInstanceOf resolving powers intact?
- * TODO: at the very least, reduce duplication wrt checkCheckable
- */
- def containsUnchecked(tp: Type): Boolean = {
- def check(tp: Type, bound: List[Symbol]): Boolean = {
- def isSurroundingTypeParam(sym: Symbol) = {
- val e = context.scope.lookupEntry(sym.name)
- ( (e ne null)
- && (e.sym == sym )
- && !e.sym.isTypeParameterOrSkolem
- && (e.owner == context.scope)
- )
- }
- def isLocalBinding(sym: Symbol) = (
- sym.isAbstractType && (
- (bound contains sym)
- || (sym.name == tpnme.WILDCARD)
- || isSurroundingTypeParam(sym)
- )
- )
- tp.normalize match {
- case SingleType(pre, _) =>
- check(pre, bound)
- case TypeRef(_, ArrayClass, arg :: _) =>
- check(arg, bound)
- case tp @ TypeRef(pre, sym, args) =>
- ( (sym.isAbstractType && !isLocalBinding(sym))
- || (args exists (x => !isLocalBinding(x.typeSymbol)))
- || check(pre, bound)
- )
- // case RefinedType(_, decls) if decls.nonEmpty =>
- // patternWarning(tp, "refinement ")
- case RefinedType(parents, _) =>
- parents exists (p => check(p, bound))
- case ExistentialType(quantified, tp1) =>
- check(tp1, bound ::: quantified)
- case _ =>
- false
+ val tparam = tvar.origin.typeSymbol
+ val TypeBounds(lo0, hi0) = tparam.info.bounds
+ val tb @ TypeBounds(lo1, hi1) = instBounds(tvar)
+
+ if (lo1 <:< hi1) {
+ if (lo1 <:< lo0 && hi0 <:< hi1) // bounds unimproved
+ log(s"redundant bounds: discarding TypeBounds($lo1, $hi1) for $tparam, no improvement on TypeBounds($lo0, $hi0)")
+ else if (tparam == lo1.typeSymbolDirect || tparam == hi1.typeSymbolDirect)
+ log(s"cyclical bounds: discarding TypeBounds($lo1, $hi1) for $tparam because $tparam appears as bounds")
+ else {
+ context.enclosingCaseDef pushTypeBounds tparam
+ tparam setInfo logResult(s"updated bounds: $tparam from ${tparam.info} to")(tb)
}
}
- check(tp, Nil)
+ else log(s"inconsistent bounds: discarding TypeBounds($lo1, $hi1)")
}
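A rough user-level sketch of where this bounds refinement becomes observable; the GADT-style names below are illustrative, not taken from the compiler sources. Matching on the case narrows the bounds of the type parameter, which is what lets the Int result typecheck as an A:

    object GadtBoundsSketch {
      sealed trait Term[A]
      final case class IntLit(value: Int) extends Term[Int]

      def eval[A](t: Term[A]): A = t match {
        case IntLit(i) => i   // inside this case, A's bounds are narrowed towards Int
      }
    }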
-
/** Type intersection of simple type tp1 with general type tp2.
* The result eliminates some redundancies.
*/
@@ -1393,16 +1245,16 @@ trait Infer extends Checkable {
}
def inferTypedPattern(tree0: Tree, pattp: Type, pt0: Type, canRemedy: Boolean): Type = {
- val pt = widen(pt0)
+ val pt = abstractTypesToBounds(pt0)
val ptparams = freeTypeParamsOfTerms(pt)
val tpparams = freeTypeParamsOfTerms(pattp)
def ptMatchesPattp = pt matchesPattern pattp.widen
def pattpMatchesPt = pattp matchesPattern pt
- /** If we can absolutely rule out a match we can fail early.
- * This is the case if the scrutinee has no unresolved type arguments
- * and is a "final type", meaning final + invariant in all type parameters.
+ /* If we can absolutely rule out a match we can fail early.
+ * This is the case if the scrutinee has no unresolved type arguments
+ * and is a "final type", meaning final + invariant in all type parameters.
*/
if (pt.isFinalType && ptparams.isEmpty && !ptMatchesPattp) {
IncompatibleScrutineeTypeError(tree0, pattp, pt)
@@ -1438,9 +1290,9 @@ trait Infer extends Checkable {
}
tvars foreach instantiateTypeVar
}
- /** If the scrutinee has free type parameters but the pattern does not,
- * we have to flip the arguments so the expected type is treated as more
- * general when calculating the intersection. See run/bug2755.scala.
+ /* If the scrutinee has free type parameters but the pattern does not,
+ * we have to flip the arguments so the expected type is treated as more
+ * general when calculating the intersection. See run/bug2755.scala.
*/
if (tpparams.isEmpty && ptparams.nonEmpty) intersect(pattp, pt)
else intersect(pt, pattp)
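A hypothetical user-level shape of the situation the comment above describes (this is not the actual run/bug2755.scala test): the scrutinee's type mentions a free type parameter while the type pattern's does not, so the intersection is computed with the arguments flipped.

    object TypedPatternSketch {
      def describe[T](x: T): String = x match {
        case s: String => "a string of length " + s.length   // the pattern type has no free type parameters
        case _         => "something else"                    // ...while the scrutinee type T does
      }
    }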
@@ -1500,193 +1352,139 @@ trait Infer extends Checkable {
/* -- Overload Resolution ---------------------------------------------- */
-/*
- def checkNotShadowed(pos: Position, pre: Type, best: Symbol, eligible: List[Symbol]) =
- if (!phase.erasedTypes)
- for (alt <- eligible) {
- if (isProperSubClassOrObject(alt.owner, best.owner))
- error(pos,
- "erroneous reference to overloaded definition,\n"+
- "most specific definition is: "+best+best.locationString+" of type "+pre.memberType(best)+
- ",\nyet alternative definition "+alt+alt.locationString+" of type "+pre.memberType(alt)+
- "\nis defined in a subclass")
- }
-*/
-
- /** Assign <code>tree</code> the symbol and type of the alternative which
- * matches prototype <code>pt</code>, if it exists.
+ /** Assign `tree` the symbol and type of the alternative which
+ * matches prototype `pt`, if it exists.
   *  If several alternatives match `pt`, take the parameterless one.
* If no alternative matches `pt`, take the parameterless one anyway.
*/
- def inferExprAlternative(tree: Tree, pt: Type) = tree.tpe match {
- case OverloadedType(pre, alts) => tryTwice { isSecondTry =>
- val alts0 = alts filter (alt => isWeaklyCompatible(pre.memberType(alt), pt))
- val noAlternatives = alts0.isEmpty
- val alts1 = if (noAlternatives) alts else alts0
-
- //println("trying "+alts1+(alts1 map (_.tpe))+(alts1 map (_.locationString))+" for "+pt)
- def improves(sym1: Symbol, sym2: Symbol): Boolean =
- sym2 == NoSymbol || sym2.hasAnnotation(BridgeClass) ||
- { val tp1 = pre.memberType(sym1)
- val tp2 = pre.memberType(sym2)
- (tp2 == ErrorType ||
- !global.typer.infer.isWeaklyCompatible(tp2, pt) && global.typer.infer.isWeaklyCompatible(tp1, pt) ||
- isStrictlyMoreSpecific(tp1, tp2, sym1, sym2)) }
-
- val best = ((NoSymbol: Symbol) /: alts1) ((best, alt) =>
- if (improves(alt, best)) alt else best)
-
- val competing = alts1 dropWhile (alt => best == alt || improves(best, alt))
-
- if (best == NoSymbol) {
- if (settings.debug.value) {
- tree match {
- case Select(qual, _) =>
- Console.println("qual: " + qual + ":" + qual.tpe +
- " with decls " + qual.tpe.decls +
- " with members " + qual.tpe.members +
- " with members " + qual.tpe.member(newTermName("$minus")))
- case _ =>
- }
- }
- // todo: missing test case
- NoBestExprAlternativeError(tree, pt, isSecondTry)
- } else if (!competing.isEmpty) {
- if (noAlternatives) NoBestExprAlternativeError(tree, pt, isSecondTry)
- else if (!pt.isErroneous) AmbiguousExprAlternativeError(tree, pre, best, competing.head, pt, isSecondTry)
- else {
+ def inferExprAlternative(tree: Tree, pt: Type): Tree = {
+ def tryOurBests(pre: Type, alts: List[Symbol], isSecondTry: Boolean): Unit = {
+ val alts0 = alts filter (alt => isWeaklyCompatible(pre memberType alt, pt))
+ val alts1 = if (alts0.isEmpty) alts else alts0
+ val bests = bestAlternatives(alts1) { (sym1, sym2) =>
+ val tp1 = pre memberType sym1
+ val tp2 = pre memberType sym2
+
+ ( (tp2 eq ErrorType)
+ || isWeaklyCompatible(tp1, pt) && !isWeaklyCompatible(tp2, pt)
+ || isStrictlyMoreSpecific(tp1, tp2, sym1, sym2)
+ )
+ }
+ // todo: missing test case for bests.isEmpty
+ bests match {
+ case best :: Nil => tree setSymbol best setType (pre memberType best)
+ case best :: competing :: _ if alts0.nonEmpty =>
// SI-6912 Don't give up and leave an OverloadedType on the tree.
// Originally I wrote this as `if (secondTry) ... `, but `tryTwice` won't attempt the second try
          // unless an error is issued. We're not issuing an error, on the assumption that it would be
          // spurious in light of the erroneous expected type.
- setError(tree)
- }
- } else {
-// val applicable = alts1 filter (alt =>
-// global.typer.infer.isWeaklyCompatible(pre.memberType(alt), pt))
-// checkNotShadowed(tree.pos, pre, best, applicable)
- tree.setSymbol(best).setType(pre.memberType(best))
+ if (pt.isErroneous) setError(tree)
+ else AmbiguousExprAlternativeError(tree, pre, best, competing, pt, isSecondTry)
+ case _ => if (bests.isEmpty || alts0.isEmpty) NoBestExprAlternativeError(tree, pt, isSecondTry)
}
}
- }
-
- @inline private def inSilentMode(context: Context)(expr: => Boolean): Boolean = {
- val oldState = context.state
- context.setBufferErrors()
- val res = expr
- val contextWithErrors = context.hasErrors
- context.flushBuffer()
- context.restoreState(oldState)
- res && !contextWithErrors
+ tree.tpe match {
+ case OverloadedType(pre, alts) => tryTwice(tryOurBests(pre, alts, _)) ; tree
+ case _ => tree
+ }
}
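A small hypothetical example of the selection performed here when an overloaded identifier is used as an expression; all names are made up.

    object ExprAlternativeSketch {
      def f: Int = 42
      def f(x: Int): Int = x

      val n: Int = f    // expected type Int: the parameterless alternative is selected
      val m = f(5)      // an application, by contrast, goes through method overload resolution
    }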
// Checks against the name of the parameter and also any @deprecatedName.
private def paramMatchesName(param: Symbol, name: Name) =
param.name == name || param.deprecatedParamName.exists(_ == name)
- // Check the first parameter list the same way.
- private def methodMatchesName(method: Symbol, name: Name) = method.paramss match {
- case ps :: _ => ps exists (p => paramMatchesName(p, name))
- case _ => false
+ private def containsNamedType(argtpes: List[Type]): Boolean = argtpes match {
+ case Nil => false
+ case NamedType(_, _) :: _ => true
+ case _ :: rest => containsNamedType(rest)
}
-
- private def resolveOverloadedMethod(argtpes: List[Type], eligible: List[Symbol]) = {
+ private def namesOfNamedArguments(argtpes: List[Type]) =
+ argtpes collect { case NamedType(name, _) => name }
+
+ /** Given a list of argument types and eligible method overloads, whittle the
+ * list down to the methods which should be considered for specificity
+   * testing, taking into account:
+   * - named arguments at the call site (keep only methods with name-matching parameters)
+   * - if multiple methods are eligible, drop any methods which take default arguments
+   * - drop any whose arity cannot match under any conditions (allowing for
+   * overloaded applies, varargs, and tupling conversions)
+   * This method is conservative; it can tolerate some varieties of false positives,
+   * but no false negatives.
+ *
+ * @param eligible the overloaded method symbols
+ * @param argtpes the argument types at the call site
+ * @param varargsStar true if the call site has a `: _*` attached to the last argument
+ */
+ private def overloadsToConsiderBySpecificity(eligible: List[Symbol], argtpes: List[Type], varargsStar: Boolean): List[Symbol] = {
// If there are any foo=bar style arguments, and any of the overloaded
// methods has a parameter named `foo`, then only those methods are considered.
- val namesOfArgs = argtpes collect { case NamedType(name, _) => name }
- val namesMatch = (
- if (namesOfArgs.isEmpty) Nil
- else eligible filter { m =>
- namesOfArgs forall { name =>
- methodMatchesName(m, name)
- }
- }
- )
-
- if (namesMatch.nonEmpty) namesMatch
- else if (eligible.isEmpty || eligible.tail.isEmpty) eligible
- else eligible filter { alt =>
- // for functional values, the `apply` method might be overloaded
- val mtypes = followApply(alt.tpe) match {
- case OverloadedType(_, alts) => alts map (_.tpe)
- case t => t :: Nil
- }
- // Drop those that use a default; keep those that use vararg/tupling conversion.
- mtypes exists (t =>
- !t.typeSymbol.hasDefaultFlag && (
- compareLengths(t.params, argtpes) < 0 // tupling (*)
- || hasExactlyNumParams(t, argtpes.length) // same nb or vararg
- )
- )
- // (*) more arguments than parameters, but still applicable: tupling conversion works.
- // todo: should not return "false" when paramTypes = (Unit) no argument is given
- // (tupling would work)
+ val namesMatch = namesOfNamedArguments(argtpes) match {
+ case Nil => Nil
+ case names => eligible filter (m => names forall (name => m.info.params exists (p => paramMatchesName(p, name))))
}
+ if (namesMatch.nonEmpty)
+ namesMatch
+ else if (eligible.isEmpty || eligible.tail.isEmpty)
+ eligible
+ else
+ eligible filter (alt =>
+ !alt.hasDefault && isApplicableBasedOnArity(alt.tpe, argtpes.length, varargsStar, tuplingAllowed = true)
+ )
}
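A hypothetical call site for the named-argument filter described above: the name `msg` only matches a parameter of the second overload, so only that overload survives the whittling.

    object NamedArgSketch {
      def log(message: String): Unit         = println(message)
      def log(level: Int, msg: String): Unit = println(s"[$level] $msg")

      def main(args: Array[String]): Unit =
        log(level = 1, msg = "starting up")   // the name `msg` rules out log(message: String)
    }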
- /** Assign <code>tree</code> the type of an alternative which is applicable
- * to <code>argtpes</code>, and whose result type is compatible with `pt`.
+ /** Assign `tree` the type of an alternative which is applicable
+ * to `argtpes`, and whose result type is compatible with `pt`.
* If several applicable alternatives exist, drop the alternatives which use
* default arguments, then select the most specialized one.
* If no applicable alternative exists, and pt != WildcardType, try again
* with pt = WildcardType.
* Otherwise, if there is no best alternative, error.
*
- * @param argtpes contains the argument types. If an argument is named, as
+ * @param argtpes0 contains the argument types. If an argument is named, as
   *  "a = 3", the corresponding type is `NamedType("a", Int)`. If the name
   *  of some NamedType does not exist in an alternative's parameter names,
   *  the type is replaced by `Unit`, i.e. the argument is treated as an
* assignment expression.
+ *
+ * @pre tree.tpe is an OverloadedType.
*/
- def inferMethodAlternative(tree: Tree, undetparams: List[Symbol],
- argtpes: List[Type], pt0: Type, varArgsOnly: Boolean = false, lastInferAttempt: Boolean = true): Unit = tree.tpe match {
- case OverloadedType(pre, alts) =>
- val pt = if (pt0.typeSymbol == UnitClass) WildcardType else pt0
- tryTwice { isSecondTry =>
- debuglog(s"infer method alt ${tree.symbol} with alternatives ${alts map pre.memberType} argtpes=$argtpes pt=$pt")
-
- def varargsApplicableCheck(alt: Symbol) = !varArgsOnly || (
- isVarArgsList(alt.tpe.params)
- && (argtpes.size >= alt.tpe.params.size) // must be checked now due to SI-5859
- )
- val applicable = resolveOverloadedMethod(argtpes,
- alts filter (alt =>
- varargsApplicableCheck(alt)
- && inSilentMode(context)(isApplicable(undetparams, followApply(pre memberType alt), argtpes, pt))
- )
- )
-
- def improves(sym1: Symbol, sym2: Symbol) = {
- // util.trace("improve "+sym1+sym1.locationString+" on "+sym2+sym2.locationString)
- sym2 == NoSymbol || sym2.isError || sym2.hasAnnotation(BridgeClass) ||
- isStrictlyMoreSpecific(followApply(pre.memberType(sym1)),
- followApply(pre.memberType(sym2)), sym1, sym2)
- }
-
- val best = ((NoSymbol: Symbol) /: applicable) ((best, alt) =>
- if (improves(alt, best)) alt else best)
- val competing = applicable.dropWhile(alt => best == alt || improves(best, alt))
- if (best == NoSymbol) {
- if (pt == WildcardType) NoBestMethodAlternativeError(tree, argtpes, pt, isSecondTry && lastInferAttempt)
- else inferMethodAlternative(tree, undetparams, argtpes, WildcardType, lastInferAttempt = isSecondTry)
- } else if (!competing.isEmpty) {
- AmbiguousMethodAlternativeError(tree, pre, best, competing.head, argtpes, pt, isSecondTry && lastInferAttempt)
- } else {
-// checkNotShadowed(tree.pos, pre, best, applicable)
- tree.setSymbol(best).setType(pre.memberType(best))
- }
+ def inferMethodAlternative(tree: Tree, undetparams: List[Symbol], argtpes0: List[Type], pt0: Type): Unit = {
+ val OverloadedType(pre, alts) = tree.tpe
+ var varargsStar = false
+ val argtpes = argtpes0 mapConserve {
+ case RepeatedType(tp) => varargsStar = true ; tp
+ case tp => tp
+ }
+ def followType(sym: Symbol) = followApply(pre memberType sym)
+ def bestForExpectedType(pt: Type, isLastTry: Boolean): Unit = {
+ val applicable0 = alts filter (alt => context inSilentMode isApplicable(undetparams, followType(alt), argtpes, pt))
+ val applicable = overloadsToConsiderBySpecificity(applicable0, argtpes, varargsStar)
+ val ranked = bestAlternatives(applicable)((sym1, sym2) =>
+ isStrictlyMoreSpecific(followType(sym1), followType(sym2), sym1, sym2)
+ )
+ ranked match {
+ case best :: competing :: _ => AmbiguousMethodAlternativeError(tree, pre, best, competing, argtpes, pt, isLastTry) // ambiguous
+ case best :: Nil => tree setSymbol best setType (pre memberType best) // success
+ case Nil if pt.isWildcard => NoBestMethodAlternativeError(tree, argtpes, pt, isLastTry) // failed
+ case Nil => bestForExpectedType(WildcardType, isLastTry) // failed, but retry with WildcardType
}
- case _ =>
+ }
+ // This potentially makes up to four attempts: tryTwice may execute
+ // with and without views enabled, and bestForExpectedType will try again
+ // with pt = WildcardType if it fails with pt != WildcardType.
+ tryTwice { isLastTry =>
+ val pt = if (pt0.typeSymbol == UnitClass) WildcardType else pt0
+ debuglog(s"infer method alt ${tree.symbol} with alternatives ${alts map pre.memberType} argtpes=$argtpes pt=$pt")
+ bestForExpectedType(pt, isLastTry)
+ }
}
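A hypothetical illustration of two things handled here: a `: _*` argument (tracked via `varargsStar`) can only apply to the varargs alternative, while a plain two-argument call prefers the strictly more specific fixed-arity alternative.

    object MethodAlternativeSketch {
      def sum(xs: Int*): Int       = xs.sum
      def sum(x: Int, y: Int): Int = x + y

      val a = sum(Seq(1, 2, 3): _*)   // only the varargs alternative can accept `: _*`
      val b = sum(1, 2)               // both apply, but the fixed-arity alternative is more specific
    }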
/** Try inference twice, once without views and once with views,
* unless views are already disabled.
- *
- * @param infer ...
*/
def tryTwice(infer: Boolean => Unit): Unit = {
if (context.implicitsEnabled) {
- val saved = context.state
+ val savedContextMode = context.contextMode
var fallback = false
context.setBufferErrors()
// We cache the current buffer because it is impossible to
@@ -1700,65 +1498,59 @@ trait Infer extends Checkable {
context.withImplicitsDisabled(infer(false))
if (context.hasErrors) {
fallback = true
- context.restoreState(saved)
+ context.contextMode = savedContextMode
context.flushBuffer()
infer(true)
}
} catch {
case ex: CyclicReference => throw ex
case ex: TypeError => // recoverable cyclic references
- context.restoreState(saved)
+ context.contextMode = savedContextMode
if (!fallback) infer(true) else ()
} finally {
- context.restoreState(saved)
+ context.contextMode = savedContextMode
context.updateBuffer(errorsToRestore)
}
}
else infer(true)
}
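A hypothetical sketch of the two-pass behaviour (all names made up): no alternative accepts a `Name` while views are disabled, so the first attempt fails and the retry with implicit views enabled resolves the call through the conversion.

    import scala.language.implicitConversions

    object TryTwiceSketch {
      final class Name(val value: String)
      implicit def nameToString(n: Name): String = n.value

      def show(s: String): String = "str: " + s
      def show(d: Double): String = "dbl: " + d

      val r = show(new Name("x"))   // fails without views, succeeds on the retry via nameToString
    }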
- /** Assign <code>tree</code> the type of all polymorphic alternatives
- * with <code>nparams</code> as the number of type parameters, if it exists.
+ /** Assign `tree` the type of all polymorphic alternatives
+   * which have the same number of type parameters as there are `argtypes`,
+   * and for which all `argtypes` are within the corresponding type parameter bounds.
    * If no such polymorphic alternative exists, error.
- *
- * @param tree ...
- * @param nparams ...
*/
def inferPolyAlternatives(tree: Tree, argtypes: List[Type]): Unit = {
val OverloadedType(pre, alts) = tree.tpe
- val sym0 = tree.symbol filter (alt => sameLength(alt.typeParams, argtypes))
- def fail(kind: PolyAlternativeErrorKind.ErrorType) =
- PolyAlternativeError(tree, argtypes, sym0, kind)
-
- if (sym0 == NoSymbol) return (
- if (alts exists (_.typeParams.nonEmpty))
- fail(PolyAlternativeErrorKind.WrongNumber)
- else fail(PolyAlternativeErrorKind.NoParams))
-
- val (resSym, resTpe) = {
- if (!sym0.isOverloaded)
- (sym0, pre.memberType(sym0))
- else {
- val sym = sym0 filter (alt => isWithinBounds(pre, alt.owner, alt.typeParams, argtypes))
- if (sym == NoSymbol) {
- if (argtypes forall (x => !x.isErroneous))
- fail(PolyAlternativeErrorKind.ArgsDoNotConform)
- return
- }
- else if (sym.isOverloaded) {
- val xs = sym.alternatives
- val tparams = new AsSeenFromMap(pre, xs.head.owner) mapOver xs.head.typeParams
- val bounds = tparams map (_.tpeHK) // see e.g., #1236
- val tpe = PolyType(tparams, OverloadedType(AntiPolyType(pre, bounds), xs))
-
- (sym setInfo tpe, tpe)
- }
- else (sym, pre.memberType(sym))
- }
+ // Alternatives with a matching length type parameter list
+ val matchingLength = tree.symbol filter (alt => sameLength(alt.typeParams, argtypes))
+ def allMonoAlts = alts forall (_.typeParams.isEmpty)
+ def errorKind = matchingLength match {
+ case NoSymbol if allMonoAlts => PolyAlternativeErrorKind.NoParams // no polymorphic method alternative
+ case NoSymbol => PolyAlternativeErrorKind.WrongNumber // wrong number of tparams
+ case _ => PolyAlternativeErrorKind.ArgsDoNotConform // didn't conform to bounds
+ }
+ def fail() = PolyAlternativeError(tree, argtypes, matchingLength, errorKind)
+ def finish(sym: Symbol, tpe: Type) = tree setSymbol sym setType tpe
+ // Alternatives which conform to bounds
+ def checkWithinBounds(sym: Symbol) = sym.alternatives match {
+ case Nil if argtypes.exists(_.isErroneous) =>
+ case Nil => fail()
+ case alt :: Nil => finish(alt, pre memberType alt)
+ case alts @ (hd :: _) =>
+ log(s"Attaching AntiPolyType-carrying overloaded type to $sym")
+ // Multiple alternatives which are within bounds; spin up an
+ // overloaded type which carries an "AntiPolyType" as a prefix.
+ val tparams = newAsSeenFromMap(pre, hd.owner) mapOver hd.typeParams
+ val bounds = tparams map (_.tpeHK) // see e.g., #1236
+ val tpe = PolyType(tparams, OverloadedType(AntiPolyType(pre, bounds), alts))
+ finish(sym setInfo tpe, tpe)
+ }
+ matchingLength.alternatives match {
+ case Nil => fail()
+ case alt :: Nil => finish(alt, pre memberType alt)
+ case _ => checkWithinBounds(matchingLength filter (alt => isWithinBounds(pre, alt.owner, alt.typeParams, argtypes)))
}
- // Side effects tree with symbol and type
- tree setSymbol resSym setType resTpe
}
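A hypothetical example of the filtering performed here for explicit type applications of an overloaded polymorphic method: the number of supplied type arguments decides which alternatives remain in play.

    object PolyAlternativeSketch {
      def pair[A](a: A): (A, A)          = (a, a)
      def pair[A, B](a: A, b: B): (A, B) = (a, b)

      val p = pair[Int](1)              // only the one-type-parameter alternative has a matching length
      val q = pair[Int, String](1, "x") // only the two-type-parameter alternative does
    }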
}
}
-
diff --git a/src/compiler/scala/tools/nsc/typechecker/Macros.scala b/src/compiler/scala/tools/nsc/typechecker/Macros.scala
index 816f977890..6c4d1e20aa 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Macros.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Macros.scala
@@ -1,19 +1,21 @@
package scala.tools.nsc
package typechecker
+import java.lang.Math.min
import symtab.Flags._
import scala.tools.nsc.util._
-import scala.tools.nsc.util.ClassPath._
import scala.reflect.runtime.ReflectionUtils
import scala.collection.mutable.ListBuffer
-import scala.compat.Platform.EOL
+import scala.reflect.ClassTag
import scala.reflect.internal.util.Statistics
import scala.reflect.macros.util._
-import java.lang.{Class => jClass}
-import java.lang.reflect.{Array => jArray, Method => jMethod}
-import scala.reflect.internal.util.Collections._
import scala.util.control.ControlThrowable
-import scala.reflect.macros.runtime.AbortMacroException
+import scala.reflect.macros.runtime.{AbortMacroException, MacroRuntimes}
+import scala.reflect.runtime.{universe => ru}
+import scala.reflect.macros.compiler.DefaultMacroCompiler
+import scala.tools.reflect.FastTrack
+import scala.runtime.ScalaRunTime
+import Fingerprint._
/**
* Code to deal with macros, namely with:
@@ -40,15 +42,22 @@ import scala.reflect.macros.runtime.AbortMacroException
* (Expr(elems))
* (TypeTag(Int))
*/
-trait Macros extends scala.tools.reflect.FastTrack with Traces {
+trait Macros extends FastTrack with MacroRuntimes with Traces with Helpers {
self: Analyzer =>
import global._
import definitions._
import treeInfo.{isRepeatedParamType => _, _}
import MacrosStats._
+
def globalSettings = global.settings
+ protected def findMacroClassLoader(): ClassLoader = {
+ val classpath = global.classPath.asURLs
+ macroLogVerbose("macro classloader: initializing from -cp: %s".format(classpath))
+ ScalaClassLoader.fromURLs(classpath, self.getClass.getClassLoader)
+ }
+
/** `MacroImplBinding` and its companion module are responsible for
* serialization/deserialization of macro def -> impl bindings.
*
@@ -71,22 +80,31 @@ trait Macros extends scala.tools.reflect.FastTrack with Traces {
* Includes a path to load the implementation via Java reflection,
* and various accounting information necessary when composing an argument list for the reflective invocation.
*/
- private case class MacroImplBinding(
+ case class MacroImplBinding(
+ // Is this macro impl a bundle (a trait extending Macro) or a vanilla def?
+ val isBundle: Boolean,
// Java class name of the class that contains the macro implementation
// is used to load the corresponding object with Java reflection
- val className: String,
+ className: String,
// method name of the macro implementation
// `className` and `methName` are all we need to reflectively invoke a macro implementation
// because macro implementations cannot be overloaded
- val methName: String,
- // flattens the macro impl's parameter lists having symbols replaced with metadata
- // currently metadata is an index of the type parameter corresponding to that type tag (if applicable)
- // f.ex. for: def impl[T: WeakTypeTag, U: WeakTypeTag, V](c: Context)(x: c.Expr[T]): (U, V) = ???
- // `signature` will be equal to List(-1, -1, 0, 1)
- val signature: List[Int],
+ methName: String,
+ // flattens the macro impl's parameter lists having symbols replaced with their fingerprints
+ // currently fingerprints are calculated solely from types of the symbols:
+    // * c.Expr[T] => Lifted
+    // * c.WeakTypeTag[T] => Tagged(index of the type parameter corresponding to that type tag)
+    // * everything else (e.g. scala.reflect.macros.Context) => Other
+ // f.ex. for: def impl[T: WeakTypeTag, U, V: WeakTypeTag](c: Context)(x: c.Expr[T], y: c.Tree): (U, V) = ???
+ // `signature` will be equal to List(List(Other), List(Lifted, Other), List(Tagged(0), Tagged(2)))
+ signature: List[List[Fingerprint]],
// type arguments part of a macro impl ref (the right-hand side of a macro definition)
// these trees don't refer to a macro impl, so we can pickle them as is
- val targs: List[Tree])
+ targs: List[Tree]) {
+
+ // Was this binding derived from a `def ... = macro ???` definition?
+ def is_??? = className == Predef_???.owner.javaClassName && methName == Predef_???.name.encoded
+ }
/** Macro def -> macro impl bindings are serialized into a `macroImpl` annotation
* with synthetic content that carries the payload described in `MacroImplBinding`.
@@ -99,36 +117,40 @@ trait Macros extends scala.tools.reflect.FastTrack with Traces {
*
* @scala.reflect.macros.internal.macroImpl(
* `macro`(
- * "signature" = List(-1),
+ * "isBundle" = false,
+ * "signature" = List(Other),
* "methodName" = "impl",
- * "versionFormat" = 1,
+ * "versionFormat" = <current version format>,
* "className" = "Macros$"))
*/
- private object MacroImplBinding {
- val versionFormat = 1
+ object MacroImplBinding {
+ val versionFormat = 4.0
def pickleAtom(obj: Any): Tree =
obj match {
case list: List[_] => Apply(Ident(ListModule), list map pickleAtom)
case s: String => Literal(Constant(s))
- case i: Int => Literal(Constant(i))
+ case d: Double => Literal(Constant(d))
+ case b: Boolean => Literal(Constant(b))
+ case f: Fingerprint => Literal(Constant(f.value))
}
def unpickleAtom(tree: Tree): Any =
tree match {
case Apply(list @ Ident(_), args) if list.symbol == ListModule => args map unpickleAtom
case Literal(Constant(s: String)) => s
- case Literal(Constant(i: Int)) => i
+ case Literal(Constant(d: Double)) => d
+ case Literal(Constant(b: Boolean)) => b
+ case Literal(Constant(i: Int)) => new Fingerprint(i)
}
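A standalone sketch of the round trip implemented by `pickleAtom`/`unpickleAtom` above, using a toy `Atom` algebra instead of compiler `Tree`s; everything below is illustrative only.

    object AtomPickleSketch {
      sealed trait Atom
      final case class Lst(elems: List[Atom]) extends Atom
      final case class Str(s: String)         extends Atom
      final case class Dbl(d: Double)         extends Atom
      final case class Bool(b: Boolean)       extends Atom

      def pickle(obj: Any): Atom = obj match {
        case list: List[_] => Lst(list map pickle)
        case s: String     => Str(s)
        case d: Double     => Dbl(d)
        case b: Boolean    => Bool(b)
      }

      def unpickle(atom: Atom): Any = atom match {
        case Lst(elems) => elems map unpickle
        case Str(s)     => s
        case Dbl(d)     => d
        case Bool(b)    => b
      }

      assert(unpickle(pickle(List("impl", 4.0, false))) == List("impl", 4.0, false))
    }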
def pickle(macroImplRef: Tree): Tree = {
- val MacroImplReference(owner, macroImpl, targs) = macroImplRef
- val paramss = macroImpl.paramss
+ val MacroImplReference(isBundle, owner, macroImpl, targs) = macroImplRef
// todo. refactor when fixing SI-5498
def className: String = {
def loop(sym: Symbol): String = sym match {
- case sym if sym.owner.isPackageClass =>
+ case sym if sym.isTopLevel =>
val suffix = if (sym.isModuleClass) "$" else ""
sym.fullName + suffix
case sym =>
@@ -139,13 +161,20 @@ trait Macros extends scala.tools.reflect.FastTrack with Traces {
loop(owner)
}
- def signature: List[Int] = {
- val transformed = transformTypeTagEvidenceParams(paramss, (param, tparam) => tparam)
- transformed.flatten map (p => if (p.isTerm) -1 else p.paramPos)
+ def signature: List[List[Fingerprint]] = {
+ def fingerprint(tpe: Type): Fingerprint = tpe.dealiasWiden match {
+ case TypeRef(_, RepeatedParamClass, underlying :: Nil) => fingerprint(underlying)
+ case ExprClassOf(_) => Lifted
+ case _ => Other
+ }
+
+ val transformed = transformTypeTagEvidenceParams(macroImplRef, (param, tparam) => tparam)
+ mmap(transformed)(p => if (p.isTerm) fingerprint(p.info) else Tagged(p.paramPos))
}
val payload = List[(String, Any)](
"versionFormat" -> versionFormat,
+ "isBundle" -> isBundle,
"className" -> className,
"methodName" -> macroImpl.name.toString,
"signature" -> signature
@@ -185,472 +214,240 @@ trait Macros extends scala.tools.reflect.FastTrack with Traces {
val Apply(_, pickledPayload) = wrapped
val payload = pickledPayload.map{ case Assign(k, v) => (unpickleAtom(k), unpickleAtom(v)) }.toMap
- val pickleVersionFormat = payload("versionFormat").asInstanceOf[Int]
- if (versionFormat != pickleVersionFormat) throw new Error("macro impl binding format mismatch: expected $versionFormat, actual $pickleVersionFormat")
+ def fail(msg: String) = abort(s"bad macro impl binding: $msg")
+ def unpickle[T](field: String, clazz: Class[T]): T = {
+ def failField(msg: String) = fail(s"$field $msg")
+ if (!payload.contains(field)) failField("is supposed to be there")
+ val raw: Any = payload(field)
+ if (raw == null) failField(s"is not supposed to be null")
+ val expected = ScalaRunTime.box(clazz)
+ val actual = raw.getClass
+ if (!expected.isAssignableFrom(actual)) failField(s"has wrong type: expected $expected, actual $actual")
+ raw.asInstanceOf[T]
+ }
+
+ val pickleVersionFormat = unpickle("versionFormat", classOf[Double])
+ if (versionFormat != pickleVersionFormat) fail(s"expected version format $versionFormat, actual $pickleVersionFormat")
- val className = payload("className").asInstanceOf[String]
- val methodName = payload("methodName").asInstanceOf[String]
- val signature = payload("signature").asInstanceOf[List[Int]]
- MacroImplBinding(className, methodName, signature, targs)
+ val isBundle = unpickle("isBundle", classOf[Boolean])
+ val className = unpickle("className", classOf[String])
+ val methodName = unpickle("methodName", classOf[String])
+ val signature = unpickle("signature", classOf[List[List[Fingerprint]]])
+ MacroImplBinding(isBundle, className, methodName, signature, targs)
}
}
- private def bindMacroImpl(macroDef: Symbol, macroImplRef: Tree): Unit = {
+ def bindMacroImpl(macroDef: Symbol, macroImplRef: Tree): Unit = {
val pickle = MacroImplBinding.pickle(macroImplRef)
macroDef withAnnotation AnnotationInfo(MacroImplAnnotation.tpe, List(pickle), Nil)
}
- private def loadMacroImplBinding(macroDef: Symbol): MacroImplBinding = {
+ def loadMacroImplBinding(macroDef: Symbol): MacroImplBinding = {
val Some(AnnotationInfo(_, List(pickle), _)) = macroDef.getAnnotation(MacroImplAnnotation)
MacroImplBinding.unpickle(pickle)
}
- /** Transforms parameters lists of a macro impl.
- * The `transform` function is invoked only for WeakTypeTag evidence parameters.
- *
- * The transformer takes two arguments: a value parameter from the parameter list
- * and a type parameter that is witnesses by the value parameter.
- *
- * If the transformer returns a NoSymbol, the value parameter is not included from the result.
- * If the transformer returns something else, this something else is included in the result instead of the value parameter.
- *
- * Despite of being highly esoteric, this function significantly simplifies signature analysis.
- * For example, it can be used to strip macroImpl.paramss from the evidences (necessary when checking def <-> impl correspondence)
- * or to streamline creation of the list of macro arguments.
- */
- private def transformTypeTagEvidenceParams(paramss: List[List[Symbol]], transform: (Symbol, Symbol) => Symbol): List[List[Symbol]] = {
- if (paramss.isEmpty || paramss.last.isEmpty) return paramss // no implicit parameters in the signature => nothing to do
- if (paramss.head.isEmpty || !(paramss.head.head.tpe <:< MacroContextClass.tpe)) return paramss // no context parameter in the signature => nothing to do
- def transformTag(param: Symbol): Symbol = param.tpe.dealias match {
- case TypeRef(SingleType(SingleType(NoPrefix, c), universe), WeakTypeTagClass, targ :: Nil)
- if c == paramss.head.head && universe == MacroContextUniverse =>
- transform(param, targ.typeSymbol)
- case _ =>
- param
- }
- val transformed = paramss.last map transformTag filter (_ ne NoSymbol)
- if (transformed.isEmpty) paramss.init else paramss.init :+ transformed
- }
-
- def computeMacroDefTypeFromMacroImpl(macroDdef: DefDef, macroImpl: Symbol): Type = {
- // Step I. Transform c.Expr[T] to T
- var runtimeType = macroImpl.tpe.finalResultType.dealias match {
- case TypeRef(_, ExprClass, runtimeType :: Nil) => runtimeType
- case _ => AnyTpe // so that macro impls with rhs = ??? don't screw up our inference
- }
-
- // Step II. Transform type parameters of a macro implementation into type arguments in a macro definition's body
- runtimeType = runtimeType.substituteTypes(macroImpl.typeParams, loadMacroImplBinding(macroDdef.symbol).targs.map(_.tpe))
-
- // Step III. Transform c.prefix.value.XXX to this.XXX and implParam.value.YYY to defParam.YYY
- def unsigma(tpe: Type): Type =
- transformTypeTagEvidenceParams(macroImpl.paramss, (param, tparam) => NoSymbol) match {
- case (implCtxParam :: Nil) :: implParamss =>
- val implToDef = flatMap2(implParamss, macroDdef.vparamss)(map2(_, _)((_, _))).toMap
- object UnsigmaTypeMap extends TypeMap {
- def apply(tp: Type): Type = tp match {
- case TypeRef(pre, sym, args) =>
- val pre1 = pre match {
- case SingleType(SingleType(SingleType(NoPrefix, c), prefix), value) if c == implCtxParam && prefix == MacroContextPrefix && value == ExprValue =>
- ThisType(macroDdef.symbol.owner)
- case SingleType(SingleType(NoPrefix, implParam), value) if value == ExprValue =>
- implToDef get implParam map (defParam => SingleType(NoPrefix, defParam.symbol)) getOrElse pre
+ def computeMacroDefTypeFromMacroImplRef(macroDdef: DefDef, macroImplRef: Tree): Type = {
+ macroImplRef match {
+ case MacroImplReference(_, _, macroImpl, targs) =>
+ // Step I. Transform c.Expr[T] to T and everything else to Any
+ var runtimeType = decreaseMetalevel(macroImpl.info.finalResultType)
+
+ // Step II. Transform type parameters of a macro implementation into type arguments in a macro definition's body
+ runtimeType = runtimeType.substituteTypes(macroImpl.typeParams, targs map (_.tpe))
+
+ // Step III. Transform c.prefix.value.XXX to this.XXX and implParam.value.YYY to defParam.YYY
+ def unsigma(tpe: Type): Type =
+ transformTypeTagEvidenceParams(macroImplRef, (param, tparam) => NoSymbol) match {
+ case (implCtxParam :: Nil) :: implParamss =>
+ val implToDef = flatMap2(implParamss, macroDdef.vparamss)(map2(_, _)((_, _))).toMap
+ object UnsigmaTypeMap extends TypeMap {
+ def apply(tp: Type): Type = tp match {
+ case TypeRef(pre, sym, args) =>
+ val pre1 = pre match {
+ case SingleType(SingleType(SingleType(NoPrefix, c), prefix), value) if c == implCtxParam && prefix == MacroContextPrefix && value == ExprValue =>
+ ThisType(macroDdef.symbol.owner)
+ case SingleType(SingleType(NoPrefix, implParam), value) if value == ExprValue =>
+ implToDef get implParam map (defParam => SingleType(NoPrefix, defParam.symbol)) getOrElse pre
+ case _ =>
+ pre
+ }
+ val args1 = args map mapOver
+ TypeRef(pre1, sym, args1)
case _ =>
- pre
+ mapOver(tp)
}
- val args1 = args map mapOver
- TypeRef(pre1, sym, args1)
- case _ =>
- mapOver(tp)
- }
- }
-
- UnsigmaTypeMap(tpe)
- case _ =>
- tpe
- }
-
- unsigma(runtimeType)
- }
-
- /** A reference macro implementation signature compatible with a given macro definition.
- *
- * In the example above for the following macro def:
- * def foo[T](xs: List[T]): T = macro fooBar
- *
- * This function will return:
- * (c: scala.reflect.macros.Context)(xs: c.Expr[List[T]]): c.Expr[T]
- *
- * Note that type tag evidence parameters are not included into the result.
- * Type tag context bounds for macro impl tparams are optional.
- * Therefore compatibility checks ignore such parameters, and we don't need to bother about them here.
- *
- * @param macroDef The macro definition symbol
- * @param tparams The type parameters of the macro definition
- * @param vparamss The value parameters of the macro definition
- * @param retTpe The return type of the macro definition
- */
- private def macroImplSig(macroDef: Symbol, tparams: List[TypeDef], vparamss: List[List[ValDef]], retTpe: Type): (List[List[Symbol]], Type) = {
- // had to move method's body to an object because of the recursive dependencies between sigma and param
- object SigGenerator {
- def sigma(tpe: Type): Type = {
- class SigmaTypeMap extends TypeMap {
- def apply(tp: Type): Type = tp match {
- case TypeRef(pre, sym, args) =>
- val pre1 = pre match {
- case ThisType(sym) if sym == macroDef.owner =>
- SingleType(SingleType(SingleType(NoPrefix, ctxParam), MacroContextPrefix), ExprValue)
- case SingleType(NoPrefix, sym) =>
- mfind(vparamss)(_.symbol == sym) match {
- case Some(macroDefParam) => SingleType(SingleType(NoPrefix, param(macroDefParam)), ExprValue)
- case _ => pre
- }
- case _ =>
- pre
}
- TypeRef(pre1, sym, args map mapOver)
+
+ UnsigmaTypeMap(tpe)
case _ =>
- mapOver(tp)
+ tpe
}
- }
-
- new SigmaTypeMap() apply tpe
- }
- def makeParam(name: Name, pos: Position, tpe: Type, flags: Long = 0L) =
- macroDef.newValueParameter(name, pos, flags) setInfo tpe
- val ctxParam = makeParam(nme.macroContext, macroDef.pos, MacroContextClass.tpe, SYNTHETIC)
- def implType(isType: Boolean, origTpe: Type): Type =
- if (isRepeatedParamType(origTpe))
- appliedType(
- RepeatedParamClass.typeConstructor,
- List(implType(isType, sigma(origTpe.typeArgs.head))))
- else {
- val tsym = getMember(MacroContextClass, if (isType) tpnme.WeakTypeTag else tpnme.Expr)
- typeRef(singleType(NoPrefix, ctxParam), tsym, List(sigma(origTpe)))
- }
- val paramCache = scala.collection.mutable.Map[Symbol, Symbol]()
- def param(tree: Tree): Symbol =
- paramCache.getOrElseUpdate(tree.symbol, {
- val sym = tree.symbol
- makeParam(sym.name, sym.pos, implType(sym.isType, sym.tpe), sym.flags)
- })
-
- val paramss = List(ctxParam) :: mmap(vparamss)(param)
- val implRetTpe = typeRef(singleType(NoPrefix, ctxParam), getMember(MacroContextClass, tpnme.Expr), List(sigma(retTpe)))
+ unsigma(runtimeType)
+ case _ =>
+ ErrorType
}
-
- import SigGenerator._
- macroLogVerbose(sm"""
- |generating macroImplSigs for: $macroDef
- |tparams are: $tparams
- |vparamss are: $vparamss
- |retTpe is: $retTpe
- |macroImplSig is: $paramss, $implRetTpe
- """.trim)
- (paramss, implRetTpe)
}
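A hypothetical macro pair showing what this computation yields: when the macro def omits its result type, the impl's `c.Expr[Int]` is lowered one metalevel to `Int`. The 2.10-era `Context` API and all names below are assumptions made for illustration only.

    import scala.language.experimental.macros
    import scala.reflect.macros.Context

    object AnswerMacro {
      def impl(c: Context): c.Expr[Int] =
        c.Expr[Int](c.universe.Literal(c.universe.Constant(42)))

      def answer = macro impl   // the result type Int is computed from the impl's c.Expr[Int]
    }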
- /** Verifies that the body of a macro def typechecks to a reference to a static public non-overloaded method,
+ /** Verifies that the body of a macro def typechecks to a reference to a static public non-overloaded method or a top-level macro bundle,
* and that that method is signature-wise compatible with the given macro definition.
*
- * @return Typechecked rhs of the given macro definition if everything is okay.
+ * @return Macro impl reference for the given macro definition if everything is okay.
* EmptyTree if an error occurs.
*/
- def typedMacroBody(typer: Typer, macroDdef: DefDef): Tree =
- try new MacroTyper(typer, macroDdef).typed
- catch { case MacroBodyTypecheckException => EmptyTree }
-
- class MacroTyper(val typer: Typer, val macroDdef: DefDef) extends MacroErrors {
- // Phase I: sanity checks
+ def typedMacroBody(typer: Typer, macroDdef: DefDef): Tree = {
val macroDef = macroDdef.symbol
- macroLogVerbose("typechecking macro def %s at %s".format(macroDef, macroDdef.pos))
- assert(macroDef.isTermMacro, macroDdef)
- if (fastTrack contains macroDef) MacroDefIsFastTrack()
- if (!typer.checkFeature(macroDdef.pos, MacrosFeature, immediate = true)) MacroFeatureNotEnabled()
-
- // we use typed1 instead of typed, because otherwise adapt is going to mess us up
- // if adapt sees <qualifier>.<method>, it will want to perform eta-expansion and will fail
- // unfortunately, this means that we have to manually trigger macro expansion
- // because it's adapt which is responsible for automatic expansion during typechecking
- def typecheckRhs(rhs: Tree): Tree = {
- try {
- // interestingly enough, just checking isErroneous doesn't cut it
- // e.g. a "type arguments [U] do not conform to method foo's type parameter bounds" error
- // doesn't manifest itself as an error in the resulting tree
- val prevNumErrors = reporter.ERROR.count
- var rhs1 = typer.typed1(rhs, EXPRmode, WildcardType)
- def rhsNeedsMacroExpansion = rhs1.symbol != null && rhs1.symbol.isTermMacro && !rhs1.symbol.isErroneous
- while (rhsNeedsMacroExpansion) {
- rhs1 = macroExpand1(typer, rhs1) match {
- case Success(expanded) =>
- try {
- val typechecked = typer.typed1(expanded, EXPRmode, WildcardType)
- macroLogVerbose("typechecked1:%n%s%n%s".format(typechecked, showRaw(typechecked)))
- typechecked
- } finally {
- popMacroContext()
- }
- case Delay(delayed) =>
- typer.instantiate(delayed, EXPRmode, WildcardType)
- case Fallback(fallback) =>
- typer.typed1(fallback, EXPRmode, WildcardType)
- case Other(result) =>
- result
- }
- }
- val typecheckedWithErrors = (rhs1 exists (_.isErroneous)) || reporter.ERROR.count != prevNumErrors
- if (typecheckedWithErrors) MacroDefUntypeableBodyError()
- rhs1
- } catch {
- case ex: TypeError =>
- typer.reportTypeError(context, rhs.pos, ex)
- MacroDefUntypeableBodyError()
- }
- }
-
- // Phase II: typecheck the right-hand side of the macro def
- val typed = typecheckRhs(macroDdef.rhs)
- typed match {
- case MacroImplReference(_, meth, _) if meth == Predef_??? =>
- bindMacroImpl(macroDef, typed)
- MacroDefIsQmarkQmarkQmark()
- case MacroImplReference(owner, meth, targs) =>
- if (!meth.isMethod) MacroDefInvalidBodyError()
- if (!meth.isPublic) MacroImplNotPublicError()
- if (meth.isOverloaded) MacroImplOverloadedError()
- if (!owner.isStaticOwner && !owner.moduleClass.isStaticOwner) MacroImplNotStaticError()
- if (meth.typeParams.length != targs.length) MacroImplWrongNumberOfTypeArgumentsError(typed)
- bindMacroImpl(macroDef, typed)
- case _ =>
- MacroDefInvalidBodyError()
- }
-
- // Phase III: check compatibility between the macro def and its macro impl
- // this check ignores type tag evidence parameters, because type tag context bounds are optional
- // aXXX (e.g. aparamss) => characteristics of the macro impl ("a" stands for "actual")
- // rXXX (e.g. rparamss) => characteristics of a reference macro impl signature synthesized from the macro def ("r" stands for "reference")
- val macroImpl = typed.symbol
- val aparamss = transformTypeTagEvidenceParams(macroImpl.paramss, (param, tparam) => NoSymbol)
- val aret = macroImpl.tpe.finalResultType
- val macroDefRet =
- if (!macroDdef.tpt.isEmpty) typer.typedType(macroDdef.tpt).tpe
- else computeMacroDefTypeFromMacroImpl(macroDdef, macroImpl)
- val (rparamss, rret) = macroImplSig(macroDef, macroDdef.tparams, macroDdef.vparamss, macroDefRet)
-
- val implicitParams = aparamss.flatten filter (_.isImplicit)
- if (implicitParams.nonEmpty) MacroImplNonTagImplicitParameters(implicitParams)
- if (aparamss.length != rparamss.length) MacroImplParamssMismatchError()
-
- val atparams = macroImpl.typeParams
- val atvars = atparams map freshVar
- def atpeToRtpe(atpe: Type) = atpe.substSym(aparamss.flatten, rparamss.flatten).instantiateTypeParams(atparams, atvars)
-
- try {
- map2(aparamss, rparamss)((aparams, rparams) => {
- if (aparams.length < rparams.length) MacroImplMissingParamsError(aparams, rparams)
- if (rparams.length < aparams.length) MacroImplExtraParamsError(aparams, rparams)
- })
+ assert(macroDef.isMacro, macroDdef)
- // cannot fuse these loops because if aparamss.flatten != rparamss.flatten
- // then `atpeToRtpe` is going to fail with an unsound substitution
- map2(aparamss.flatten, rparamss.flatten)((aparam, rparam) => {
- if (aparam.name != rparam.name && !rparam.isSynthetic) MacroImplParamNameMismatchError(aparam, rparam)
- if (isRepeated(aparam) ^ isRepeated(rparam)) MacroImplVarargMismatchError(aparam, rparam)
- val aparamtpe = aparam.tpe.dealias match {
- case RefinedType(List(tpe), Scope(sym)) if tpe =:= MacroContextClass.tpe && sym.allOverriddenSymbols.contains(MacroContextPrefixType) => tpe
- case tpe => tpe
- }
- checkMacroImplParamTypeMismatch(atpeToRtpe(aparamtpe), rparam)
- })
-
- checkMacroImplResultTypeMismatch(atpeToRtpe(aret), rret)
-
- val maxLubDepth = lubDepth(aparamss.flatten map (_.tpe)) max lubDepth(rparamss.flatten map (_.tpe))
- val atargs = solvedTypes(atvars, atparams, atparams map varianceInType(aret), upper = false, depth = maxLubDepth)
- val boundsOk = typer.silent(_.infer.checkBounds(macroDdef, NoPrefix, NoSymbol, atparams, atargs, ""))
- boundsOk match {
- case SilentResultValue(true) => // do nothing, success
- case SilentResultValue(false) | SilentTypeError(_) => MacroImplTargMismatchError(atargs, atparams)
- }
- } catch {
- case ex: NoInstance => MacroImplTparamInstantiationError(atparams, ex)
- }
- }
-
- /** Macro classloader that is used to resolve and run macro implementations.
- * Loads classes from from -cp (aka the library classpath).
- * Is also capable of detecting REPL and reusing its classloader.
- */
- lazy val macroClassloader: ClassLoader = {
- if (global.forMSIL)
- throw new UnsupportedOperationException("Scala reflection not available on this platform")
-
- val classpath = global.classPath.asURLs
- macroLogVerbose("macro classloader: initializing from -cp: %s".format(classpath))
- val loader = ScalaClassLoader.fromURLs(classpath, self.getClass.getClassLoader)
-
- // a heuristic to detect the REPL
- if (global.settings.exposeEmptyPackage.value) {
- macroLogVerbose("macro classloader: initializing from a REPL classloader".format(global.classPath.asURLs))
- import scala.tools.nsc.interpreter._
- val virtualDirectory = global.settings.outputDirs.getSingleOutput.get
- new AbstractFileClassLoader(virtualDirectory, loader) {}
- } else {
- loader
- }
- }
-
- /** Produces a function that can be used to invoke macro implementation for a given macro definition:
- * 1) Looks up macro implementation symbol in this universe.
- * 2) Loads its enclosing class from the macro classloader.
- * 3) Loads the companion of that enclosing class from the macro classloader.
- * 4) Resolves macro implementation within the loaded companion.
- *
- * @return Requested runtime if macro implementation can be loaded successfully from either of the mirrors,
- * `null` otherwise.
- */
- type MacroRuntime = MacroArgs => Any
- private val macroRuntimesCache = perRunCaches.newWeakMap[Symbol, MacroRuntime]
- private def macroRuntime(macroDef: Symbol): MacroRuntime = {
- macroLogVerbose(s"looking for macro implementation: $macroDef")
+ macroLogVerbose("typechecking macro def %s at %s".format(macroDef, macroDdef.pos))
if (fastTrack contains macroDef) {
- macroLogVerbose("macro expansion is serviced by a fast track")
- fastTrack(macroDef)
+ macroLogVerbose("typecheck terminated unexpectedly: macro is fast track")
+ assert(!macroDdef.tpt.isEmpty, "fast track macros must provide result type")
+ EmptyTree
} else {
- macroRuntimesCache.getOrElseUpdate(macroDef, {
- val binding = loadMacroImplBinding(macroDef)
- val className = binding.className
- val methName = binding.methName
- macroLogVerbose(s"resolved implementation as $className.$methName")
+ def fail() = { if (macroDef != null) macroDef setFlag IS_ERROR; macroDdef setType ErrorType; EmptyTree }
+ def success(macroImplRef: Tree) = { bindMacroImpl(macroDef, macroImplRef); macroImplRef }
- if (binding.className == Predef_???.owner.fullName.toString && binding.methName == Predef_???.name.encoded) {
- args => throw new AbortMacroException(args.c.enclosingPosition, "macro implementation is missing")
- } else {
- // I don't use Scala reflection here, because it seems to interfere with JIT magic
- // whenever you instantiate a mirror (and not do anything with in, just instantiate), performance drops by 15-20%
- // I'm not sure what's the reason - for me it's pure voodoo
- // upd. my latest experiments show that everything's okay
- // it seems that in 2.10.1 we can easily switch to Scala reflection
- try {
- macroLogVerbose(s"loading implementation class: $className")
- macroLogVerbose(s"classloader is: ${ReflectionUtils.show(macroClassloader)}")
- val implObj = ReflectionUtils.staticSingletonInstance(macroClassloader, className)
- // relies on the fact that macro impls cannot be overloaded
- // so every methName can resolve to at maximum one method
- val implMeths = implObj.getClass.getDeclaredMethods.find(_.getName == methName)
- val implMeth = implMeths getOrElse { throw new NoSuchMethodException(s"$className.$methName") }
- macroLogVerbose(s"successfully loaded macro impl as ($implObj, $implMeth)")
- args => implMeth.invoke(implObj, ((args.c +: args.others) map (_.asInstanceOf[AnyRef])): _*)
- } catch {
- case ex: Exception =>
- macroLogVerbose(s"macro runtime failed to load: ${ex.toString}")
- macroDef setFlag IS_ERROR
- null
- }
- }
- })
+ if (!typer.checkFeature(macroDdef.pos, MacrosFeature, immediate = true)) {
+ macroLogVerbose("typecheck terminated unexpectedly: language.experimental.macros feature is not enabled")
+ fail()
+ } else {
+ val macroDdef1: macroDdef.type = macroDdef
+ val typer1: typer.type = typer
+ val macroCompiler = new {
+ val global: self.global.type = self.global
+ val typer: self.global.analyzer.Typer = typer1.asInstanceOf[self.global.analyzer.Typer]
+ val macroDdef: self.global.DefDef = macroDdef1
+ } with DefaultMacroCompiler
+ val macroImplRef = macroCompiler.resolveMacroImpl
+ if (macroImplRef.isEmpty) fail() else success(macroImplRef)
+ }
}
}
- private def macroContext(typer: Typer, prefixTree: Tree, expandeeTree: Tree): MacroContext =
+ def macroContext(typer: Typer, prefixTree: Tree, expandeeTree: Tree): MacroContext = {
new {
val universe: self.global.type = self.global
val callsiteTyper: universe.analyzer.Typer = typer.asInstanceOf[global.analyzer.Typer]
- val expandee = expandeeTree
+ val expandee = universe.analyzer.macroExpanderAttachment(expandeeTree).original orElse expandeeTree
+ val macroRole = universe.analyzer.macroExpanderAttachment(expandeeTree).role
} with UnaffiliatedMacroContext {
val prefix = Expr[Nothing](prefixTree)(TypeTag.Nothing)
override def toString = "MacroContext(%s@%s +%d)".format(expandee.symbol.name, expandee.pos, enclosingMacros.length - 1 /* exclude myself */)
}
+ }
/** Calculate the arguments to pass to a macro implementation when expanding the provided tree.
*/
case class MacroArgs(c: MacroContext, others: List[Any])
+
private def macroArgs(typer: Typer, expandee: Tree): MacroArgs = {
- val macroDef = expandee.symbol
- val prefixTree = expandee.collect{ case Select(qual, name) => qual }.headOption.getOrElse(EmptyTree)
- val context = expandee.attachments.get[MacroRuntimeAttachment].flatMap(_.macroContext).getOrElse(macroContext(typer, prefixTree, expandee))
- var typeArgs = List[Tree]()
- val exprArgs = ListBuffer[List[Expr[_]]]()
- def collectMacroArgs(tree: Tree): Unit = tree match {
- case Apply(fn, args) =>
- // todo. infer precise typetag for this Expr, namely the declared type of the corresponding macro impl argument
- exprArgs.prepend(args map (arg => context.Expr[Nothing](arg)(TypeTag.Nothing)))
- collectMacroArgs(fn)
- case TypeApply(fn, args) =>
- typeArgs = args
- collectMacroArgs(fn)
- case _ =>
- }
- collectMacroArgs(expandee)
+ val macroDef = expandee.symbol
+ val paramss = macroDef.paramss
+ val treeInfo.Applied(core, targs, argss) = expandee
+ val prefix = core match { case Select(qual, _) => qual; case _ => EmptyTree }
+ val context = expandee.attachments.get[MacroRuntimeAttachment].flatMap(_.macroContext).getOrElse(macroContext(typer, prefix, expandee))
- val argcDoesntMatch = macroDef.paramss.length != exprArgs.length
- val nullaryArgsEmptyParams = exprArgs.isEmpty && macroDef.paramss == ListOfNil
- if (argcDoesntMatch && !nullaryArgsEmptyParams) { typer.TyperErrorGen.MacroPartialApplicationError(expandee) }
+ macroLogVerbose(sm"""
+ |context: $context
+ |prefix: $prefix
+ |targs: $targs
+ |argss: $argss
+ |paramss: $paramss
+ """.trim)
- val argss: List[List[Any]] = exprArgs.toList
- macroLogVerbose(s"context: $context")
- macroLogVerbose(s"argss: $argss")
+ import typer.TyperErrorGen._
+ val isNullaryArgsEmptyParams = argss.isEmpty && paramss == ListOfNil
+ if (paramss.length < argss.length) MacroTooManyArgumentListsError(expandee)
+ if (paramss.length > argss.length && !isNullaryArgsEmptyParams) MacroTooFewArgumentListsError(expandee)
- val preparedArgss: List[List[Any]] =
+ val macroImplArgs: List[Any] =
if (fastTrack contains macroDef) {
- if (fastTrack(macroDef) validate context) argss
- else typer.TyperErrorGen.MacroPartialApplicationError(expandee)
- } else {
- // if paramss have typetag context bounds, add an arglist to argss if necessary and instantiate the corresponding evidences
- // consider the following example:
- //
- // class D[T] {
- // class C[U] {
- // def foo[V] = macro Impls.foo[T, U, V]
- // }
- // }
- //
- // val outer1 = new D[Int]
- // val outer2 = new outer1.C[String]
- // outer2.foo[Boolean]
- //
- // then T and U need to be inferred from the lexical scope of the call using `asSeenFrom`
- // whereas V won't be resolved by asSeenFrom and need to be loaded directly from `expandee` which needs to contain a TypeApply node
- // also, macro implementation reference may contain a regular type as a type argument, then we pass it verbatim
- val binding = loadMacroImplBinding(macroDef)
- macroLogVerbose(s"binding: $binding")
- val tags = binding.signature filter (_ != -1) map (paramPos => {
- val targ = binding.targs(paramPos).tpe.typeSymbol
- val tpe = if (targ.isTypeParameterOrSkolem) {
- if (targ.owner == macroDef) {
- // doesn't work when macro def is compiled separately from its usages
- // then targ is not a skolem and isn't equal to any of macroDef.typeParams
- // val argPos = targ.deSkolemize.paramPos
- val argPos = macroDef.typeParams.indexWhere(_.name == targ.name)
- typeArgs(argPos).tpe
+        // Do a dry run of the fast track implementation
+ if (fastTrack(macroDef) validate expandee) argss.flatten
+ else MacroTooFewArgumentListsError(expandee)
+ }
+ else {
+ def calculateMacroArgs(binding: MacroImplBinding) = {
+ val signature = if (binding.isBundle) binding.signature else binding.signature.tail
+ macroLogVerbose(s"binding: $binding")
+
+ // STEP I: prepare value arguments of the macro expansion
+ // wrap argss in c.Expr if necessary (i.e. if corresponding macro impl param is of type c.Expr[T])
+ // expand varargs (nb! varargs can apply to any parameter section, not necessarily to the last one)
+ val trees = map3(argss, paramss, signature)((args, defParams, implParams) => {
+ val isVarargs = isVarArgsList(defParams)
+ if (isVarargs) {
+ if (defParams.length > args.length + 1) MacroTooFewArgumentsError(expandee)
+ } else {
+ if (defParams.length < args.length) MacroTooManyArgumentsError(expandee)
+ if (defParams.length > args.length) MacroTooFewArgumentsError(expandee)
+ }
+
+ val wrappedArgs = mapWithIndex(args)((arg, j) => {
+ val fingerprint = implParams(min(j, implParams.length - 1))
+ fingerprint match {
+ case Lifted => context.Expr[Nothing](arg)(TypeTag.Nothing) // TODO: SI-5752
+ case _ => abort(s"unexpected fingerprint $fingerprint in $binding with paramss being $paramss " +
+ s"corresponding to arg $arg in $argss")
+ }
+ })
+
+ if (isVarargs) {
+ val (normal, varargs) = wrappedArgs splitAt (defParams.length - 1)
+ normal :+ varargs // pack all varargs into a single Seq argument (varargs Scala style)
+ } else wrappedArgs
+ })
+ macroLogVerbose(s"trees: $trees")
+
+ // STEP II: prepare type arguments of the macro expansion
+ // if paramss have typetag context bounds, add an arglist to argss if necessary and instantiate the corresponding evidences
+ // consider the following example:
+ //
+ // class D[T] {
+ // class C[U] {
+ // def foo[V] = macro Impls.foo[T, U, V]
+ // }
+ // }
+ //
+ // val outer1 = new D[Int]
+ // val outer2 = new outer1.C[String]
+ // outer2.foo[Boolean]
+ //
+ // then T and U need to be inferred from the lexical scope of the call using `asSeenFrom`
+          // whereas V won't be resolved by asSeenFrom and needs to be loaded directly from `expandee`, which needs to contain a TypeApply node
+ // also, macro implementation reference may contain a regular type as a type argument, then we pass it verbatim
+ val tags = signature.flatten collect { case f if f.isTag => f.paramPos } map (paramPos => {
+ val targ = binding.targs(paramPos).tpe.typeSymbol
+ val tpe = if (targ.isTypeParameterOrSkolem) {
+ if (targ.owner == macroDef) {
+ // doesn't work when macro def is compiled separately from its usages
+ // then targ is not a skolem and isn't equal to any of macroDef.typeParams
+ // val argPos = targ.deSkolemize.paramPos
+ val argPos = macroDef.typeParams.indexWhere(_.name == targ.name)
+ targs(argPos).tpe
+ } else
+ targ.tpe.asSeenFrom(
+ if (prefix == EmptyTree) macroDef.owner.tpe else prefix.tpe,
+ macroDef.owner)
} else
- targ.tpe.asSeenFrom(
- if (prefixTree == EmptyTree) macroDef.owner.tpe else prefixTree.tpe,
- macroDef.owner)
- } else
- targ.tpe
- context.WeakTypeTag(tpe)
- })
- macroLogVerbose(s"tags: $tags")
-
- // transforms argss taking into account varargness of paramss
- // note that typetag context bounds are only declared on macroImpls
- // so this optional arglist might not match macroDef's paramlist
- // nb! varargs can apply to any parameter section, not necessarily to the last one
- mapWithIndex(argss :+ tags)((as, i) => {
- val mapsToParamss = macroDef.paramss.indices contains i
- if (mapsToParamss) {
- val ps = macroDef.paramss(i)
- if (isVarArgsList(ps)) {
- val (normal, varargs) = as splitAt (ps.length - 1)
- normal :+ varargs // pack all varargs into a single List argument
- } else as
- } else as
- })
+ targ.tpe
+ context.WeakTypeTag(tpe)
+ })
+ macroLogVerbose(s"tags: $tags")
+
+ // if present, tags always come in a separate parameter/argument list
+ // that's because macro impls can't have implicit parameters other than c.WeakTypeTag[T]
+ (trees :+ tags).flatten
+ }
+
+ val binding = loadMacroImplBinding(macroDef)
+ if (binding.is_???) Nil
+ else calculateMacroArgs(binding)
}
- macroLogVerbose(s"preparedArgss: $preparedArgss")
- MacroArgs(context, preparedArgss.flatten)
+ macroLogVerbose(s"macroImplArgs: $macroImplArgs")
+ MacroArgs(context, macroImplArgs)
}
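// Orientation only, not part of this patch: a minimal sketch (assuming the 2.10
// scala.reflect.macros.Context API, with made-up names FirstOf/firstOf) of the def/impl
// pairing that macroArgs assumes above -- plain parameters arrive as lifted c.Expr values,
// a varargs section is packed into a single Seq, and the type tags requested via
// `[T: c.WeakTypeTag]` arrive as the extra, trailing argument list of evidence.
import scala.language.experimental.macros
import scala.reflect.macros.Context

object FirstOf {
  def firstOf[T](xs: T*): T = macro firstOfImpl[T]

  // `[T: c.WeakTypeTag]` desugars to a trailing implicit parameter list of tag evidence,
  // which corresponds to the separate tags argument list appended above
  def firstOfImpl[T: c.WeakTypeTag](c: Context)(xs: c.Expr[T]*): c.Expr[T] = {
    import c.universe._
    if (xs.isEmpty) c.abort(c.enclosingPosition, "firstOf needs at least one argument")
    else c.Expr[T](xs.head.tree)
  }
}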
/** Keeps track of macros in-flight.
@@ -662,21 +459,37 @@ trait Macros extends scala.tools.reflect.FastTrack with Traces {
private def popMacroContext() = _openMacros = _openMacros.tail
def enclosingMacroPosition = openMacros map (_.macroApplication.pos) find (_ ne NoPosition) getOrElse NoPosition
- private sealed abstract class MacroExpansionResult
- private case class Success(expanded: Tree) extends MacroExpansionResult
- private case class Delay(delayed: Tree) extends MacroExpansionResult
- private case class Fallback(fallback: Tree) extends MacroExpansionResult { currentRun.seenMacroExpansionsFallingBack = true }
- private case class Other(result: Tree) extends MacroExpansionResult
- private def Skip(expanded: Tree) = Other(expanded)
- private def Cancel(expandee: Tree) = Other(expandee)
- private def Failure(expandee: Tree) = Other(expandee)
+ /** Describes the role that the macro expandee is performing.
+ */
+ type MacroRole = String
+ final def APPLY_ROLE: MacroRole = "APPLY_ROLE"
+ private val roleNames = Map(APPLY_ROLE -> "apply")
/** Performs macro expansion:
- * 1) Checks whether the expansion needs to be delayed (see `mustDelayMacroExpansion`)
- * 2) Loads macro implementation using `macroMirror`
- * 3) Synthesizes invocation arguments for the macro implementation
- * 4) Checks that the result is a tree bound to this universe
- * 5) Typechecks the result against the return type of the macro definition
+ *
+ * ========= Expandable trees =========
+ *
+ * A term of one of the following shapes:
+ *
+ * Ident(<term macro>)
+ * Select(<any qualifier>, <term macro>)
+ * TypeApply(<any of the above>, <targs>)
+ * Apply(...Apply(<any of the above>, <args1>)...<argsN>)
+ *
+ * ========= Macro expansion =========
+ *
+ * First of all `macroExpandXXX`:
+ * 1) If necessary desugars the `expandee` to fit into `macroExpand1`
+ *
+ * Then `macroExpand1`:
+ * 2) Checks whether the expansion needs to be delayed
+ * 3) Loads macro implementation using `macroMirror`
+ * 4) Synthesizes invocation arguments for the macro implementation
+ * 5) Checks that the result is a tree or an expr bound to this universe
+ *
+ * Finally `macroExpandXXX`:
+ * 6) Validates the expansion against the white list of supported tree shapes
+ * 7) Typechecks the result as required by the circumstances of the macro application
*
* If -Ymacro-debug-lite is enabled, you will get basic notifications about macro expansion
* along with macro expansions logged in the form that can be copy/pasted verbatim into REPL.
@@ -687,109 +500,149 @@ trait Macros extends scala.tools.reflect.FastTrack with Traces {
*
* @return
* the expansion result if the expansion has been successful,
- * the fallback method invocation if the expansion has been unsuccessful, but there is a fallback,
+ * the fallback tree if the expansion has been unsuccessful, but there is a fallback,
* the expandee unchanged if the expansion has been delayed,
* the expandee fully expanded if the expansion has been delayed before and has been expanded now,
* the expandee with an error marker set if the expansion has been cancelled due to malformed arguments or implementation
* the expandee with an error marker set if there has been an error
*/
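// Aside, not part of this patch: a self-contained sketch (runtime-universe trees, made-up
// names obj/m) of the nested "expandable" shape listed in the comment above, i.e. how an
// application like `obj.m[Int](1)(2)` decomposes into Select/TypeApply/Apply nodes.
import scala.reflect.runtime.universe._

object ExpandeeShapes {
  val expandee: Tree =
    Apply(
      Apply(
        TypeApply(
          Select(Ident(newTermName("obj")), newTermName("m")),
          List(Ident(newTypeName("Int")))),
        List(Literal(Constant(1)))),
      List(Literal(Constant(2))))

  def main(args: Array[String]): Unit =
    println(show(expandee))  // prints roughly: obj.m[Int](1)(2)
}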
- def macroExpand(typer: Typer, expandee: Tree, mode: Int = EXPRmode, pt: Type = WildcardType): Tree = {
- if (settings.Ymacronoexpand.value) return expandee // SI-6812
- val start = if (Statistics.canEnable) Statistics.startTimer(macroExpandNanos) else null
- if (Statistics.canEnable) Statistics.incCounter(macroExpandCount)
- try {
- macroExpand1(typer, expandee) match {
- case Success(expanded) =>
- try {
- def typecheck(phase: String, tree: Tree, pt: Type): Tree = {
- if (tree.isErroneous) return tree
- macroLogVerbose(s"typechecking against $phase $pt: $expanded")
- val numErrors = reporter.ERROR.count
- def hasNewErrors = reporter.ERROR.count > numErrors
- val result = typer.context.withImplicitsEnabled(typer.typed(tree, EXPRmode, pt))
- macroLogVerbose(s"""${if (hasNewErrors) "failed to typecheck" else "successfully typechecked"} against $phase $pt:\n$result""")
- result
+ private abstract class MacroExpander[Result: ClassTag](val role: MacroRole, val typer: Typer, val expandee: Tree) {
+ def allowExpandee(expandee: Tree): Boolean = true
+ def allowExpanded(expanded: Tree): Boolean = true
+ def allowedExpansions: String = "anything"
+ def allowResult(result: Result): Boolean = true
+
+ def onSuccess(expanded: Tree): Result
+ def onFallback(expanded: Tree): Result
+ def onSuppressed(expandee: Tree): Result = expandee match { case expandee: Result => expandee }
+ def onDelayed(expanded: Tree): Result = expanded match { case expanded: Result => expanded }
+ def onSkipped(expanded: Tree): Result = expanded match { case expanded: Result => expanded }
+ def onFailure(expanded: Tree): Result = { typer.infer.setError(expandee); expandee match { case expandee: Result => expandee } }
+
+ def apply(desugared: Tree): Result = {
+ if (isMacroExpansionSuppressed(desugared)) onSuppressed(expandee)
+ else expand(desugared)
+ }
+
+ protected def expand(desugared: Tree): Result = {
+ def showDetailed(tree: Tree) = showRaw(tree, printIds = true, printTypes = true)
+ def summary() = s"expander = $this, expandee = ${showDetailed(expandee)}, desugared = ${if (expandee == desugared) () else showDetailed(desugared)}"
+ if (macroDebugVerbose) println(s"macroExpand: ${summary()}")
+ assert(allowExpandee(expandee), summary())
+
+ val start = if (Statistics.canEnable) Statistics.startTimer(macroExpandNanos) else null
+ if (Statistics.canEnable) Statistics.incCounter(macroExpandCount)
+ try {
+ linkExpandeeAndDesugared(expandee, desugared, role)
+ macroExpand1(typer, desugared) match {
+ case Success(expanded) =>
+ if (allowExpanded(expanded)) {
+ // also see http://groups.google.com/group/scala-internals/browse_thread/thread/492560d941b315cc
+ val expanded1 = try onSuccess(duplicateAndKeepPositions(expanded)) finally popMacroContext()
+ if (!hasMacroExpansionAttachment(expanded1)) linkExpandeeAndExpanded(expandee, expanded1)
+ if (allowResult(expanded1)) expanded1 else onFailure(expanded)
+ } else {
+ typer.TyperErrorGen.MacroInvalidExpansionError(expandee, roleNames(role), allowedExpansions)
+ onFailure(expanded)
}
+ case Fallback(fallback) => onFallback(fallback)
+ case Delayed(delayed) => onDelayed(delayed)
+ case Skipped(skipped) => onSkipped(skipped)
+ case Failure(failure) => onFailure(failure)
+ }
+ } finally {
+ if (Statistics.canEnable) Statistics.stopTimer(macroExpandNanos, start)
+ }
+ }
+ }
- var expectedTpe = expandee.tpe
- if (isNullaryInvocation(expandee)) expectedTpe = expectedTpe.finalResultType
- var typechecked = typecheck("macro def return type", expanded, expectedTpe)
- typechecked = typecheck("expected type", typechecked, pt)
- typechecked
- } finally {
- popMacroContext()
- }
- case Delay(delayed) =>
- // =========== THE SITUATION ===========
- //
- // If we've been delayed (i.e. bailed out of the expansion because of undetermined type params present in the expandee),
- // then there are two possible situations we're in:
- //
- // 1) We're in POLYmode, when the typer tests the waters wrt type inference
- // (e.g. as in typedArgToPoly in doTypedApply).
- //
- // 2) We're out of POLYmode, which means that the typer is out of tricks to infer our type
- // (e.g. if we're an argument to a function call, then this means that no previous argument lists
- // can determine our type variables for us).
- //
- // Situation #1 is okay for us, since there's no pressure. In POLYmode we're just verifying that
- // there's nothing outrageously wrong with our undetermined type params (from what I understand!).
- //
- // Situation #2 requires measures to be taken. If we're in it, then noone's going to help us infer
- // the undetermined type params. Therefore we need to do something ourselves or otherwise this
- // expandee will forever remaing not expanded (see SI-5692).
- //
- // A traditional way out of this conundrum is to call `instantiate` and let the inferencer
- // try to find the way out. It works for simple cases, but sometimes, if the inferencer lacks
- // information, it will be forced to approximate.
- //
- // =========== THE PROBLEM ===========
- //
- // Consider the following example (thanks, Miles!):
- //
- // // Iso represents an isomorphism between two datatypes:
- // // 1) An arbitrary one (e.g. a random case class)
- // // 2) A uniform representation for all datatypes (e.g. an HList)
- // trait Iso[T, U] {
- // def to(t : T) : U
- // def from(u : U) : T
- // }
- // implicit def materializeIso[T, U]: Iso[T, U] = macro ???
- //
- // case class Foo(i: Int, s: String, b: Boolean)
- // def foo[C, L](c: C)(implicit iso: Iso[C, L]): L = iso.to(c)
- // foo(Foo(23, "foo", true))
- //
- // In the snippet above, even though we know that there's a fundep going from T to U
- // (in a sense that a datatype's uniform representation is unambiguously determined by the datatype,
- // e.g. for Foo it will be Int :: String :: Boolean :: HNil), there's no way to convey this information
- // to the typechecker. Therefore the typechecker will infer Nothing for L, which is hardly what we want.
- val shouldInstantiate = typer.context.undetparams.nonEmpty && !inPolyMode(mode)
- if (shouldInstantiate) typer.instantiatePossiblyExpectingUnit(delayed, mode, pt)
- else delayed
- case Fallback(fallback) =>
- typer.context.withImplicitsEnabled(typer.typed(fallback, EXPRmode, pt))
- case Other(result) =>
- result
+ /** Expands a tree that carries a term, which happens to be a term macro.
+ * @see MacroExpander
+ */
+ private abstract class TermMacroExpander(role: MacroRole, typer: Typer, expandee: Tree, mode: Mode, pt: Type)
+ extends MacroExpander[Tree](role, typer, expandee) {
+ override def allowedExpansions: String = "term trees"
+ override def allowExpandee(expandee: Tree) = expandee.isTerm
+ override def onSuccess(expanded: Tree) = typer.typed(expanded, mode, pt)
+ override def onFallback(fallback: Tree) = typer.typed(fallback, mode, pt)
+ }
+
+ /** Expands a term macro used in apply role as `M(2)(3)` in `val x = M(2)(3)`.
+ * @see MacroExpander
+ */
+ def macroExpandApply(typer: Typer, expandee: Tree, mode: Mode, pt: Type) = {
+ object expander extends TermMacroExpander(APPLY_ROLE, typer, expandee, mode, pt) {
+ override def onSuccess(expanded: Tree) = {
+ // prematurely annotate the tree with a macro expansion attachment
+ // so that adapt called indirectly by typer.typed knows that it needs to apply the existential fixup
+ linkExpandeeAndExpanded(expandee, expanded)
+ var expectedTpe = expandee.tpe
+ if (isNullaryInvocation(expandee)) expectedTpe = expectedTpe.finalResultType
+ // `macroExpandApply` is called from `adapt`, where implicit conversions are disabled
+ // therefore we need to temporarily re-enable them
+ if (macroDebugVerbose) println(s"typecheck #1 (against expectedTpe = $expectedTpe): $expanded")
+ val expanded1 = typer.context.withImplicitsEnabled(typer.typed(expanded, mode, expectedTpe))
+ if (expanded1.isErrorTyped) {
+ if (macroDebugVerbose) println(s"typecheck #1 has failed: ${typer.context.reportBuffer.errors}")
+ expanded1
+ } else {
+ if (macroDebugVerbose) println(s"typecheck #2 (against pt = $pt): $expanded1")
+ val expanded2 = typer.context.withImplicitsEnabled(super.onSuccess(expanded1))
+ if (macroDebugVerbose && expanded2.isErrorTyped) println(s"typecheck #2 has failed: ${typer.context.reportBuffer.errors}")
+ expanded2
+ }
+ }
+ override def onDelayed(delayed: Tree) = {
+ // If we've been delayed (i.e. bailed out of the expansion because of undetermined type params present in the expandee),
+ // then there are two possible situations we're in:
+ // 1) We're in POLYmode, when the typer tests the waters wrt type inference
+ // (e.g. as in typedArgToPoly in doTypedApply).
+ // 2) We're out of POLYmode, which means that the typer is out of tricks to infer our type
+ // (e.g. if we're an argument to a function call, then this means that no previous argument lists
+ // can determine our type variables for us).
+ //
+ // Situation #1 is okay for us, since there's no pressure. In POLYmode we're just verifying that
+ // there's nothing outrageously wrong with our undetermined type params (from what I understand!).
+ //
+ // Situation #2 requires measures to be taken. If we're in it, then no one's going to help us infer
+ // the undetermined type params. Therefore we need to do something ourselves, or otherwise this
+ // expandee will forever remain unexpanded (see SI-5692). A traditional way out of this conundrum
+ // is to call `instantiate` and let the inferencer try to find the way out. It works for simple cases,
+ // but sometimes, if the inferencer lacks information, it will be forced to approximate. This prevents
+ // an important class of macros, fundep materializers, from working, which I perceive is a problem we need to solve.
+ // For details see SI-7470.
+ val shouldInstantiate = typer.context.undetparams.nonEmpty && !mode.inPolyMode
+ if (shouldInstantiate) typer.instantiatePossiblyExpectingUnit(delayed, mode, pt)
+ else delayed
}
- } finally {
- if (Statistics.canEnable) Statistics.stopTimer(macroExpandNanos, start)
}
+ expander(expandee)
}
+ /** Captures statuses of macro expansions performed by `macroExpand1`.
+ */
+ private sealed abstract class MacroStatus(val result: Tree)
+ private case class Success(expanded: Tree) extends MacroStatus(expanded)
+ private case class Fallback(fallback: Tree) extends MacroStatus(fallback) { currentRun.seenMacroExpansionsFallingBack = true }
+ private case class Delayed(delayed: Tree) extends MacroStatus(delayed)
+ private case class Skipped(skipped: Tree) extends MacroStatus(skipped)
+ private case class Failure(failure: Tree) extends MacroStatus(failure)
+ private def Delay(expanded: Tree) = Delayed(expanded)
+ private def Skip(expanded: Tree) = Skipped(expanded)
+ private def Cancel(expandee: Tree) = Failure(expandee)
+
/** Does the same as `macroExpand`, but without typechecking the expansion
* Meant for internal use within the macro infrastructure, don't use it elsewhere.
*/
- private def macroExpand1(typer: Typer, expandee: Tree): MacroExpansionResult =
+ private def macroExpand1(typer: Typer, expandee: Tree): MacroStatus = {
// verbose printing might cause recursive macro expansions, so I'm shutting it down here
withInfoLevel(nodePrinters.InfoLevel.Quiet) {
if (expandee.symbol.isErroneous || (expandee exists (_.isErroneous))) {
val reason = if (expandee.symbol.isErroneous) "not found or incompatible macro implementation" else "erroneous arguments"
macroLogVerbose(s"cancelled macro expansion because of $reason: $expandee")
- return Cancel(typer.infer.setError(expandee))
+ Cancel(typer.infer.setError(expandee))
}
-
- try {
+ else try {
val runtime = macroRuntime(expandee.symbol)
if (runtime != null) macroExpandWithRuntime(typer, expandee, runtime)
else macroExpandWithoutRuntime(typer, expandee)
@@ -797,11 +650,12 @@ trait Macros extends scala.tools.reflect.FastTrack with Traces {
case typer.TyperErrorGen.MacroExpansionException => Failure(expandee)
}
}
+ }
/** Expands a macro when a runtime (i.e. the macro implementation) can be successfully loaded
* Meant for internal use within the macro infrastructure, don't use it elsewhere.
*/
- private def macroExpandWithRuntime(typer: Typer, expandee: Tree, runtime: MacroRuntime): MacroExpansionResult = {
+ private def macroExpandWithRuntime(typer: Typer, expandee: Tree, runtime: MacroRuntime): MacroStatus = {
val wasDelayed = isDelayed(expandee)
val undetparams = calculateUndetparams(expandee)
val nowDelayed = !typer.context.macrosEnabled || undetparams.nonEmpty
@@ -827,15 +681,16 @@ trait Macros extends scala.tools.reflect.FastTrack with Traces {
def hasNewErrors = reporter.ERROR.count > numErrors
val expanded = { pushMacroContext(args.c); runtime(args) }
if (hasNewErrors) MacroGeneratedTypeError(expandee)
+ def validateResultingTree(expanded: Tree) = {
+ macroLogVerbose("original:")
+ macroLogLite("" + expanded + "\n" + showRaw(expanded))
+ val freeSyms = expanded.freeTerms ++ expanded.freeTypes
+ freeSyms foreach (sym => MacroFreeSymbolError(expandee, sym))
+ Success(atPos(enclosingMacroPosition.focus)(expanded))
+ }
expanded match {
- case expanded: Expr[_] =>
- macroLogVerbose("original:")
- macroLogLite("" + expanded.tree + "\n" + showRaw(expanded.tree))
- val freeSyms = expanded.tree.freeTerms ++ expanded.tree.freeTypes
- freeSyms foreach (sym => MacroFreeSymbolError(expandee, sym))
- Success(atPos(enclosingMacroPosition.focus)(expanded.tree updateAttachment MacroExpansionAttachment(expandee)))
- case _ =>
- MacroExpansionIsNotExprError(expandee, expanded)
+ case expanded: Expr[_] if expandee.symbol.isTermMacro => validateResultingTree(expanded.tree)
+ case _ => MacroExpansionHasInvalidTypeError(expandee, expanded)
}
} catch {
case ex: Throwable =>
@@ -856,7 +711,7 @@ trait Macros extends scala.tools.reflect.FastTrack with Traces {
/** Expands a macro when a runtime (i.e. the macro implementation) cannot be loaded
* Meant for internal use within the macro infrastructure, don't use it elsewhere.
*/
- private def macroExpandWithoutRuntime(typer: Typer, expandee: Tree): MacroExpansionResult = {
+ private def macroExpandWithoutRuntime(typer: Typer, expandee: Tree): MacroStatus = {
import typer.TyperErrorGen._
val fallbackSym = expandee.symbol.nextOverriddenSymbol orElse MacroImplementationNotFoundError(expandee)
macroLogLite(s"falling back to: $fallbackSym")
@@ -884,7 +739,7 @@ trait Macros extends scala.tools.reflect.FastTrack with Traces {
* 2) undetparams (sym.isTypeParameter && !sym.isSkolem)
*/
var hasPendingMacroExpansions = false
- private val delayed = perRunCaches.newWeakMap[Tree, scala.collection.mutable.Set[Int]]
+ private val delayed = perRunCaches.newWeakMap[Tree, scala.collection.mutable.Set[Int]]()
private def isDelayed(expandee: Tree) = delayed contains expandee
private def calculateUndetparams(expandee: Tree): scala.collection.mutable.Set[Int] =
delayed.get(expandee).getOrElse {
@@ -897,7 +752,7 @@ trait Macros extends scala.tools.reflect.FastTrack with Traces {
macroLogVerbose("calculateUndetparams: %s".format(calculated))
calculated map (_.id)
}
- private val undetparams = perRunCaches.newSet[Int]
+ private val undetparams = perRunCaches.newSet[Int]()
def notifyUndetparamsAdded(newUndets: List[Symbol]): Unit = {
undetparams ++= newUndets map (_.id)
if (macroDebugVerbose) newUndets foreach (sym => println("undetParam added: %s".format(sym)))
@@ -926,13 +781,13 @@ trait Macros extends scala.tools.reflect.FastTrack with Traces {
new Transformer {
override def transform(tree: Tree) = super.transform(tree match {
// todo. expansion should work from the inside out
- case tree if (delayed contains tree) && calculateUndetparams(tree).isEmpty =>
+ case tree if (delayed contains tree) && calculateUndetparams(tree).isEmpty && !tree.isErroneous =>
val context = tree.attachments.get[MacroRuntimeAttachment].get.typerContext
delayed -= tree
context.implicitsEnabled = typer.context.implicitsEnabled
context.enrichmentEnabled = typer.context.enrichmentEnabled
context.macrosEnabled = typer.context.macrosEnabled
- macroExpand(newTyper(context), tree, EXPRmode, WildcardType)
+ macroExpandApply(newTyper(context), tree, EXPRmode, WildcardType)
case _ =>
tree
})
@@ -944,3 +799,21 @@ object MacrosStats {
val macroExpandCount = Statistics.newCounter ("#macro expansions", "typer")
val macroExpandNanos = Statistics.newSubTimer("time spent in macroExpand", typerNanos)
}
+
+class Fingerprint(val value: Int) extends AnyVal {
+ def paramPos = { assert(isTag, this); value }
+ def isTag = value >= 0
+ def isOther = this == Other
+ def isExpr = this == Lifted
+ override def toString = this match {
+ case Other => "Other"
+ case Lifted => "Expr"
+ case _ => s"Tag($value)"
+ }
+}
+
+object Fingerprint {
+ def Tagged(tparamPos: Int) = new Fingerprint(tparamPos)
+ val Other = new Fingerprint(-1)
+ val Lifted = new Fingerprint(-2)
+}
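// Quick orientation, not part of this patch, assuming it compiles next to the definitions
// above: how the Fingerprint encoding is read. A non-negative value gives the position of
// the type argument whose tag the parameter carries (Tag), while the two negative sentinels
// mark non-tag parameters. `FingerprintExamples` is a made-up name for this sketch.
object FingerprintExamples {
  import Fingerprint._
  val tagOfSecondTarg = Tagged(1)
  assert(tagOfSecondTarg.isTag && tagOfSecondTarg.paramPos == 1)
  assert(Lifted.isExpr && !Lifted.isTag)   // a parameter received as a lifted c.Expr
  assert(Other.isOther)                    // any parameter that is neither a tag nor a lifted expr
  assert(tagOfSecondTarg.toString == "Tag(1)")
}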
diff --git a/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala b/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala
index 99557d1527..646bf3a153 100644
--- a/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala
@@ -6,7 +6,6 @@ package scala.tools.nsc
package typechecker
import symtab.Flags._
-import scala.collection.{ mutable, immutable }
import scala.reflect.internal.util.StringOps.{ ojoin }
import scala.reflect.ClassTag
import scala.reflect.runtime.{ universe => ru }
@@ -30,66 +29,30 @@ trait MethodSynthesis {
if (sym.isLazy) ValDef(sym, body)
else DefDef(sym, body)
- def applyTypeInternal(tags: List[TT[_]]): Type = {
- val symbols = tags map compilerSymbolFromTag
- val container :: args = symbols
- val tparams = container.typeConstructor.typeParams
-
- // Conservative at present - if manifests were more usable this could do a lot more.
- // [Eugene to Paul] all right, they are now. what do you have in mind?
- require(symbols forall (_ ne NoSymbol), "Must find all tags: " + symbols)
- require(container.owner.isPackageClass, "Container must be a top-level class in a package: " + container)
- require(tparams.size == args.size, "Arguments must match type constructor arity: " + tparams + ", " + args)
-
- appliedType(container, args map (_.tpe): _*)
- }
-
- def companionType[T](implicit ct: CT[T]) =
- rootMirror.getRequiredModule(ct.runtimeClass.getName).tpe
-
- // Use these like `applyType[List, Int]` or `applyType[Map, Int, String]`
- def applyType[CC](implicit t1: TT[CC]): Type =
- applyTypeInternal(List(t1))
-
- def applyType[CC[X1], X1](implicit t1: TT[CC[_]], t2: TT[X1]): Type =
- applyTypeInternal(List(t1, t2))
-
- def applyType[CC[X1, X2], X1, X2](implicit t1: TT[CC[_,_]], t2: TT[X1], t3: TT[X2]): Type =
- applyTypeInternal(List(t1, t2, t3))
-
- def applyType[CC[X1, X2, X3], X1, X2, X3](implicit t1: TT[CC[_,_,_]], t2: TT[X1], t3: TT[X2], t4: TT[X3]): Type =
- applyTypeInternal(List(t1, t2, t3, t4))
-
- def newMethodType[F](owner: Symbol)(implicit t: TT[F]): Type = {
- val fnSymbol = compilerSymbolFromTag(t)
- val formals = compilerTypeFromTag(t).typeArguments
- assert(fnSymbol isSubClass FunctionClass(formals.size - 1), (owner, t))
- val params = owner newSyntheticValueParams formals
- MethodType(params, formals.last)
- }
-
- /** The annotations amongst those found on the original symbol which
- * should be propagated to this kind of accessor.
- */
- def deriveAnnotations(initial: List[AnnotationInfo], category: Symbol, keepClean: Boolean): List[AnnotationInfo] = {
- initial filter { ann =>
- // There are no meta-annotation arguments attached to `ann`
- if (ann.metaAnnotations.isEmpty) {
- // A meta-annotation matching `annotKind` exists on `ann`'s definition.
- (ann.defaultTargets contains category) ||
- // `ann`'s definition has no meta-annotations, and `keepClean` is true.
- (ann.defaultTargets.isEmpty && keepClean)
- }
- // There are meta-annotation arguments, and one of them matches `annotKind`
- else ann.metaAnnotations exists (_ matches category)
+ /** The annotations amongst those found on the original symbol which
+ * should be propagated to this kind of accessor.
+ */
+ def deriveAnnotations(initial: List[AnnotationInfo], category: Symbol, keepClean: Boolean): List[AnnotationInfo] = {
+ initial filter { ann =>
+ // There are no meta-annotation arguments attached to `ann`
+ if (ann.metaAnnotations.isEmpty) {
+ // A meta-annotation matching `annotKind` exists on `ann`'s definition.
+ (ann.defaultTargets contains category) ||
+ // `ann`'s definition has no meta-annotations, and `keepClean` is true.
+ (ann.defaultTargets.isEmpty && keepClean)
}
+ // There are meta-annotation arguments, and one of them matches `annotKind`
+ else ann.metaAnnotations exists (_ matches category)
}
- }
+ }
+ }
import synthesisUtil._
class ClassMethodSynthesis(val clazz: Symbol, localTyper: Typer) {
def mkThis = This(clazz) setPos clazz.pos.focus
- def mkThisSelect(sym: Symbol) = atPos(clazz.pos.focus)(Select(mkThis, sym))
+ def mkThisSelect(sym: Symbol) = atPos(clazz.pos.focus)(
+ if (clazz.isClass) Select(This(clazz), sym) else Ident(sym)
+ )
private def isOverride(name: TermName) =
clazzMember(name).alternatives exists (sym => !sym.isDeferred && (sym.owner != clazz))
@@ -99,7 +62,7 @@ trait MethodSynthesis {
overrideFlag | SYNTHETIC
}
def newMethodFlags(method: Symbol) = {
- val overrideFlag = if (isOverride(method.name)) OVERRIDE else 0L
+ val overrideFlag = if (isOverride(method.name.toTermName)) OVERRIDE else 0L
(method.flags | overrideFlag | SYNTHETIC) & ~DEFERRED
}
@@ -107,11 +70,13 @@ trait MethodSynthesis {
localTyper typed ValOrDefDef(method, f(method))
private def createInternal(name: Name, f: Symbol => Tree, info: Type): Tree = {
- val m = clazz.newMethod(name.toTermName, clazz.pos.focus, newMethodFlags(name))
+ val name1 = name.toTermName
+ val m = clazz.newMethod(name1, clazz.pos.focus, newMethodFlags(name1))
finishMethod(m setInfoAndEnter info, f)
}
private def createInternal(name: Name, f: Symbol => Tree, infoFn: Symbol => Type): Tree = {
- val m = clazz.newMethod(name.toTermName, clazz.pos.focus, newMethodFlags(name))
+ val name1 = name.toTermName
+ val m = clazz.newMethod(name1, clazz.pos.focus, newMethodFlags(name1))
finishMethod(m setInfoAndEnter infoFn(m), f)
}
private def cloneInternal(original: Symbol, f: Symbol => Tree, name: Name): Tree = {
@@ -119,22 +84,9 @@ trait MethodSynthesis {
finishMethod(clazz.info.decls enter m, f)
}
- private def cloneInternal(original: Symbol, f: Symbol => Tree): Tree =
- cloneInternal(original, f, original.name)
-
def clazzMember(name: Name) = clazz.info nonPrivateMember name
def typeInClazz(sym: Symbol) = clazz.thisType memberType sym
- /** Function argument takes the newly created method symbol of
- * the same type as `name` in clazz, and returns the tree to be
- * added to the template.
- */
- def overrideMethod(name: Name)(f: Symbol => Tree): Tree =
- overrideMethod(clazzMember(name))(f)
-
- def overrideMethod(original: Symbol)(f: Symbol => Tree): Tree =
- cloneInternal(original, sym => f(sym setFlag OVERRIDE))
-
def deriveMethod(original: Symbol, nameFn: Name => Name)(f: Symbol => Tree): Tree =
cloneInternal(original, f, nameFn(original.name))
@@ -151,7 +103,7 @@ trait MethodSynthesis {
createMethod(original)(m => gen.mkMethodCall(newMethod, transformArgs(m.paramss.head map Ident)))
def createSwitchMethod(name: Name, range: Seq[Int], returnType: Type)(f: Int => Tree) = {
- createMethod(name, List(IntClass.tpe), returnType) { m =>
+ createMethod(name, List(IntTpe), returnType) { m =>
val arg0 = Ident(m.firstParam)
val default = DEFAULT ==> THROW(IndexOutOfBoundsExceptionClass, arg0)
val cases = range.map(num => CASE(LIT(num)) ==> f(num)).toList :+ default
@@ -174,7 +126,7 @@ trait MethodSynthesis {
/** There are two key methods in here.
*
- * 1) Enter methods such as enterGetterSetterare called
+ * 1) Enter methods such as enterGetterSetter are called
* from Namer with a tree which may generate further trees such as accessors or
* implicit wrappers. Some setup is performed. In general this creates symbols
* and enters them into the scope of the owner.
@@ -219,14 +171,46 @@ trait MethodSynthesis {
enterBeans(tree)
}
+ /** This is called for those ValDefs which addDerivedTrees ignores, but
+ * which might have a warnable annotation situation.
+ */
+ private def warnForDroppedAnnotations(tree: Tree) {
+ val annotations = tree.symbol.initialize.annotations
+ val targetClass = defaultAnnotationTarget(tree)
+ val retained = deriveAnnotations(annotations, targetClass, keepClean = true)
+
+ annotations filterNot (retained contains _) foreach (ann => issueAnnotationWarning(tree, ann, targetClass))
+ }
+ private def issueAnnotationWarning(tree: Tree, ann: AnnotationInfo, defaultTarget: Symbol) {
+ global.reporter.warning(ann.pos,
+ s"no valid targets for annotation on ${tree.symbol} - it is discarded unused. " +
+ s"You may specify targets with meta-annotations, e.g. @($ann @${defaultTarget.name})")
+ }
+
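// Aside, not part of this patch: the kind of fix the warning above suggests. Which
// synthesized member (field, getter, setter, ...) receives an annotation is driven by
// meta-annotations from scala.annotation.meta; spelling them out pins the target.
// `Sensitive` and `Account` are made-up names for this sketch.
import scala.annotation.meta.{field, getter}

class Sensitive extends scala.annotation.StaticAnnotation

class Account {
  @(Sensitive @getter) val owner: String = "n/a"   // annotation goes to the getter
  @(Sensitive @field)  val secret: String = "n/a"  // annotation goes to the underlying field
}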
def addDerivedTrees(typer: Typer, stat: Tree): List[Tree] = stat match {
case vd @ ValDef(mods, name, tpt, rhs) if !noFinishGetterSetter(vd) =>
// If we don't save the annotations, they seem to wander off.
val annotations = stat.symbol.initialize.annotations
- ( allValDefDerived(vd)
+ val trees = (
+ allValDefDerived(vd)
map (acc => atPos(vd.pos.focus)(acc derive annotations))
filterNot (_ eq EmptyTree)
)
+ // Verify each annotation landed safely somewhere, else warn.
+ // Skipping this check when isParamAccessor is set is a necessary simplification
+ // because there's a bunch of unwritten annotation code involving
+ // the propagation of annotations - constructor parameter annotations
+ // may need to make their way to parameters of the constructor as
+ // well as fields of the class, etc.
+ if (!mods.isParamAccessor) annotations foreach (ann =>
+ if (!trees.exists(_.symbol hasAnnotation ann.symbol))
+ issueAnnotationWarning(vd, ann, GetterTargetClass)
+ )
+
+ trees
+ case vd: ValDef =>
+ warnForDroppedAnnotations(vd)
+ vd :: Nil
case cd @ ClassDef(mods, _, _, _) if mods.isImplicit =>
val annotations = stat.symbol.initialize.annotations
// TODO: need to shuffle annotations between wrapper and class.
@@ -253,8 +237,7 @@ trait MethodSynthesis {
)
def beanAccessors(vd: ValDef): List[DerivedFromValDef] = {
val setter = if (vd.mods.isMutable) List(BeanSetter(vd)) else Nil
- if (forMSIL) Nil
- else if (vd.symbol hasAnnotation BeanPropertyAttr)
+ if (vd.symbol hasAnnotation BeanPropertyAttr)
BeanGetter(vd) :: setter
else if (vd.symbol hasAnnotation BooleanBeanPropertyAttr)
BooleanBeanGetter(vd) :: setter
@@ -312,7 +295,6 @@ trait MethodSynthesis {
// Final methods to make the rest easier to reason about.
final def mods = tree.mods
final def basisSym = tree.symbol
- final def derivedFlags: Long = basisSym.flags & flagsMask | flagsExtra
}
trait DerivedFromClassDef extends DerivedFromMemberDef {
@@ -400,7 +382,7 @@ trait MethodSynthesis {
def name = tree.name
def category = GetterTargetClass
def flagsMask = GetterFlags
- def flagsExtra = ACCESSOR | ( if (tree.mods.isMutable) 0 else STABLE )
+ def flagsExtra = ACCESSOR.toLong | ( if (tree.mods.isMutable) 0 else STABLE )
override def validate() {
assert(derivedSym != NoSymbol, tree)
@@ -444,7 +426,7 @@ trait MethodSynthesis {
Nil,
Nil,
tpt,
- if (mods.isDeferred) EmptyTree else gen.mkCheckInit(fieldSelection)
+ if (mods.isDeferred) EmptyTree else fieldSelection
) setSymbol derivedSym
}
}
@@ -458,7 +440,7 @@ trait MethodSynthesis {
case class LazyValGetter(tree: ValDef) extends BaseGetter(tree) {
class ChangeOwnerAndModuleClassTraverser(oldowner: Symbol, newowner: Symbol)
extends ChangeOwnerTraverser(oldowner, newowner) {
-
+
override def traverse(tree: Tree) {
tree match {
case _: DefTree => change(tree.symbol.moduleClass)
@@ -489,7 +471,7 @@ trait MethodSynthesis {
}
}
case class Setter(tree: ValDef) extends DerivedSetter {
- def name = nme.getterToSetter(tree.name)
+ def name = tree.setterName
def category = SetterTargetClass
def flagsMask = SetterFlags
def flagsExtra = ACCESSOR
@@ -497,7 +479,7 @@ trait MethodSynthesis {
override def derivedSym = basisSym.setter(enclClass)
}
case class Field(tree: ValDef) extends DerivedFromValDef {
- def name = nme.getterToLocal(tree.name)
+ def name = tree.localName
def category = FieldTargetClass
def flagsMask = FieldFlags
def flagsExtra = PrivateLocal
@@ -558,7 +540,7 @@ trait MethodSynthesis {
// No Symbols available.
private def beanAccessorsFromNames(tree: ValDef) = {
- val ValDef(mods, name, tpt, _) = tree
+ val ValDef(mods, _, _, _) = tree
val hasBP = mods hasAnnotationNamed tpnme.BeanPropertyAnnot
val hasBoolBP = mods hasAnnotationNamed tpnme.BooleanBeanPropertyAnnot
@@ -575,9 +557,6 @@ trait MethodSynthesis {
}
protected def enterBeans(tree: ValDef) {
- if (forMSIL)
- return
-
val ValDef(mods, name, _, _) = tree
val beans = beanAccessorsFromNames(tree)
if (beans.nonEmpty) {
diff --git a/src/compiler/scala/tools/nsc/typechecker/Modes.scala b/src/compiler/scala/tools/nsc/typechecker/Modes.scala
deleted file mode 100644
index d650762ac1..0000000000
--- a/src/compiler/scala/tools/nsc/typechecker/Modes.scala
+++ /dev/null
@@ -1,140 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Martin Odersky
- */
-
-package scala.tools.nsc
-package typechecker
-
-/** Mode constants.
- */
-trait Modes {
- /** NOmode, EXPRmode and PATTERNmode are mutually exclusive.
- */
- final val NOmode = 0x000
- final val EXPRmode = 0x001
- final val PATTERNmode = 0x002
-
- /** TYPEmode needs a comment. <-- XXX.
- */
- final val TYPEmode = 0x004
-
- /** SCCmode is orthogonal to above. When set we are
- * in the this or super constructor call of a constructor.
- */
- final val SCCmode = 0x008
-
- /** FUNmode is orthogonal to above.
- * When set we are looking for a method or constructor.
- */
- final val FUNmode = 0x010
-
- /** POLYmode is orthogonal to above.
- * When set expression types can be polymorphic.
- */
- final val POLYmode = 0x020
-
- /** QUALmode is orthogonal to above. When set
- * expressions may be packages and Java statics modules.
- */
- final val QUALmode = 0x040
-
- /** TAPPmode is set for the function/type constructor
- * part of a type application. When set we do not decompose PolyTypes.
- */
- final val TAPPmode = 0x080
-
- /** SUPERCONSTRmode is set for the super
- * in a superclass constructor call super.<init>.
- */
- final val SUPERCONSTRmode = 0x100
-
- /** SNDTRYmode indicates that an application is typed for the 2nd time.
- * In that case functions may no longer be coerced with implicit views.
- */
- final val SNDTRYmode = 0x200
-
- /** LHSmode is set for the left-hand side of an assignment.
- */
- final val LHSmode = 0x400
-
- /** STARmode is set when star patterns are allowed.
- * (This was formerly called REGPATmode.)
- */
- final val STARmode = 0x1000
-
- /** ALTmode is set when we are under a pattern alternative.
- */
- final val ALTmode = 0x2000
-
- /** HKmode is set when we are typing a higher-kinded type.
- * adapt should then check kind-arity based on the prototypical type's
- * kind arity. Type arguments should not be inferred.
- */
- final val HKmode = 0x4000 // @M: could also use POLYmode | TAPPmode
-
- /** BYVALmode is set when we are typing an expression
- * that occurs in a by-value position. An expression e1 is in by-value
- * position within expression e2 iff it will be reduced to a value at that
- * position during the evaluation of e2. Examples are by-value function
- * arguments or the conditional of an if-then-else clause.
- * This mode has been added to support continuations.
- */
- final val BYVALmode = 0x8000
-
- /** TYPEPATmode is set when we are typing a type in a pattern.
- */
- final val TYPEPATmode = 0x10000
-
- /** RETmode is set when we are typing a return expression.
- */
- final val RETmode = 0x20000
-
- final private val StickyModes = EXPRmode | PATTERNmode | TYPEmode | ALTmode
-
- final def onlyStickyModes(mode: Int) =
- mode & StickyModes
-
- final def forFunMode(mode: Int) =
- mode & (StickyModes | SCCmode) | FUNmode | POLYmode | BYVALmode
-
- final def forTypeMode(mode: Int) =
- if (inAnyMode(mode, PATTERNmode | TYPEPATmode)) TYPEmode | TYPEPATmode
- else TYPEmode
-
- final def inAllModes(mode: Int, required: Int) = (mode & required) == required
- final def inAnyMode(mode: Int, required: Int) = (mode & required) != 0
- final def inNoModes(mode: Int, prohibited: Int) = (mode & prohibited) == 0
- final def inHKMode(mode: Int) = (mode & HKmode) != 0
- final def inFunMode(mode: Int) = (mode & FUNmode) != 0
- final def inPolyMode(mode: Int) = (mode & POLYmode) != 0
- final def inPatternMode(mode: Int) = (mode & PATTERNmode) != 0
- final def inExprModeOr(mode: Int, others: Int) = (mode & (EXPRmode | others)) != 0
- final def inExprModeButNot(mode: Int, prohibited: Int) =
- (mode & (EXPRmode | prohibited)) == EXPRmode
-
- /** Translates a mask of mode flags into something readable.
- */
- private val modeNameMap = Map[Int, String](
- (1 << 0) -> "EXPRmode",
- (1 << 1) -> "PATTERNmode",
- (1 << 2) -> "TYPEmode",
- (1 << 3) -> "SCCmode",
- (1 << 4) -> "FUNmode",
- (1 << 5) -> "POLYmode",
- (1 << 6) -> "QUALmode",
- (1 << 7) -> "TAPPmode",
- (1 << 8) -> "SUPERCONSTRmode",
- (1 << 9) -> "SNDTRYmode",
- (1 << 10) -> "LHSmode",
- (1 << 11) -> "<DOES NOT EXIST mode>",
- (1 << 12) -> "STARmode",
- (1 << 13) -> "ALTmode",
- (1 << 14) -> "HKmode",
- (1 << 15) -> "BYVALmode",
- (1 << 16) -> "TYPEPATmode"
- )
- def modeString(mode: Int): String =
- if (mode == 0) "NOmode"
- else (modeNameMap filterKeys (bit => inAllModes(mode, bit))).values mkString " "
-}
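// For reference, not part of this patch: the deleted trait encoded typing modes as Int bit
// flags, and its helpers are plain mask arithmetic. A self-contained sketch of that pattern,
// reusing a few of the constants and helpers documented above (ModeBitsSketch is made up):
object ModeBitsSketch {
  final val EXPRmode = 0x001
  final val FUNmode  = 0x010
  final val POLYmode = 0x020

  def inAllModes(mode: Int, required: Int)  = (mode & required) == required
  def inAnyMode(mode: Int, required: Int)   = (mode & required) != 0
  def inNoModes(mode: Int, prohibited: Int) = (mode & prohibited) == 0

  def main(args: Array[String]): Unit = {
    val mode = EXPRmode | FUNmode
    assert(inAllModes(mode, EXPRmode | FUNmode))  // both bits set
    assert(inAnyMode(mode, POLYmode | FUNmode))   // at least one bit set
    assert(inNoModes(mode, POLYmode))             // POLYmode bit absent
  }
}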
diff --git a/src/compiler/scala/tools/nsc/typechecker/Namers.scala b/src/compiler/scala/tools/nsc/typechecker/Namers.scala
index 379f56521b..4683fca192 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Namers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Namers.scala
@@ -8,9 +8,8 @@ package typechecker
import scala.collection.mutable
import scala.annotation.tailrec
-import scala.ref.WeakReference
import symtab.Flags._
-import scala.tools.nsc.io.AbstractFile
+import scala.language.postfixOps
/** This trait declares methods to create symbols and to enter them into scopes.
*
@@ -49,10 +48,10 @@ trait Namers extends MethodSynthesis {
private class NormalNamer(context: Context) extends Namer(context)
def newNamer(context: Context): Namer = new NormalNamer(context)
- def newNamerFor(context: Context, tree: Tree): Namer =
- newNamer(context.makeNewScope(tree, tree.symbol))
abstract class Namer(val context: Context) extends MethodSynth with NamerContextErrors { thisNamer =>
+ // overridden by the presentation compiler
+ def saveDefaultGetter(meth: Symbol, default: Symbol) { }
import NamerErrorGen._
val typer = newTyper(context)
@@ -150,7 +149,7 @@ trait Namers extends MethodSynthesis {
sym reset NoType setFlag newFlags setPos pos
sym.moduleClass andAlso (updatePosFlags(_, pos, moduleClassFlags(flags)))
- if (sym.owner.isPackageClass) {
+ if (sym.isTopLevel) {
companionSymbolOf(sym, context) andAlso { companion =>
val assignNoType = companion.rawInfo match {
case _: SymLoader => true
@@ -173,21 +172,24 @@ trait Namers extends MethodSynthesis {
else innerNamer
}
+ // FIXME - this logic needs to be thoroughly explained
+ // and justified. I know it's wrong with respect to package
+ // objects, but I think it's also wrong in other ways.
protected def conflict(newS: Symbol, oldS: Symbol) = (
( !oldS.isSourceMethod
|| nme.isSetterName(newS.name)
- || newS.owner.isPackageClass
+ || newS.isTopLevel
) &&
!( // @M: allow repeated use of `_` for higher-order type params
(newS.owner.isTypeParameter || newS.owner.isAbstractType)
// FIXME: name comparisons not successful, are these underscores
// sometimes nme.WILDCARD and sometimes tpnme.WILDCARD?
- && (newS.name.toString == nme.WILDCARD.toString)
+ && (newS.name string_== nme.WILDCARD)
)
)
private def allowsOverload(sym: Symbol) = (
- sym.isSourceMethod && sym.owner.isClass && !sym.owner.isPackageClass
+ sym.isSourceMethod && sym.owner.isClass && !sym.isTopLevel
)
private def inCurrentScope(m: Symbol): Boolean = {
@@ -200,6 +202,19 @@ trait Namers extends MethodSynthesis {
/** Enter symbol into given scope and return symbol itself */
def enterInScope(sym: Symbol, scope: Scope): Symbol = {
+ // FIXME - this is broken in a number of ways.
+ //
+ // 1) If "sym" allows overloading, that is not itself sufficient to skip
+ // the check, because "prev.sym" also must allow overloading.
+ //
+ // 2) There is nothing which reconciles a package's scope with
+ // the package object's scope. This is the source of many bugs
+ // with e.g. defining a case class in a package object. When
+ // compiling against classes, the class symbol is created in the
+ // package and in the package object, and the conflict is undetected.
+ // There is also a non-deterministic outcome for situations like
+ // an object with the same name as a method in the package object.
+
// allow for overloaded methods
if (!allowsOverload(sym)) {
val prev = scope.lookupEntry(sym.name)
@@ -239,7 +254,7 @@ trait Namers extends MethodSynthesis {
case DocDef(_, defn) => enterSym(defn)
case tree @ Import(_, _) =>
assignSymbol(tree)
- returnContext = context.makeNewImport(tree)
+ returnContext = context.make(tree)
case _ =>
}
returnContext
@@ -300,15 +315,15 @@ trait Namers extends MethodSynthesis {
case DefDef(_, nme.CONSTRUCTOR, _, _, _, _) => owner.newConstructor(pos, flags)
case DefDef(_, _, _, _, _, _) => owner.newMethod(name.toTermName, pos, flags)
case ClassDef(_, _, _, _) => owner.newClassSymbol(name.toTypeName, pos, flags)
- case ModuleDef(_, _, _) => owner.newModule(name, pos, flags)
+ case ModuleDef(_, _, _) => owner.newModule(name.toTermName, pos, flags)
case PackageDef(pid, _) => createPackageSymbol(pos, pid)
case ValDef(_, _, _, _) =>
- if (isParameter) owner.newValueParameter(name, pos, flags)
- else owner.newValue(name, pos, flags)
+ if (isParameter) owner.newValueParameter(name.toTermName, pos, flags)
+ else owner.newValue(name.toTermName, pos, flags)
}
}
private def createFieldSymbol(tree: ValDef): TermSymbol =
- owner.newValue(nme.getterToLocal(tree.name), tree.pos, tree.mods.flags & FieldFlags | PrivateLocal)
+ owner.newValue(tree.localName, tree.pos, tree.mods.flags & FieldFlags | PrivateLocal)
private def createImportSymbol(tree: Tree) =
NoSymbol.newImport(tree.pos) setInfo completerOf(tree)
@@ -335,11 +350,10 @@ trait Namers extends MethodSynthesis {
}
private def enterClassSymbol(tree: ClassDef, clazz: ClassSymbol): Symbol = {
- val file = contextFile
if (clazz.sourceFile != null && clazz.sourceFile != contextFile)
- debugwarn("!!! Source mismatch in " + clazz + ": " + clazz.sourceFile + " vs. " + contextFile)
+ devWarning(s"Source file mismatch in $clazz: ${clazz.sourceFile} vs. $contextFile")
- clazz.sourceFile = contextFile
+ clazz.associatedFile = contextFile
if (clazz.sourceFile != null) {
assert(currentRun.canRedefine(clazz) || clazz.sourceFile == currentRun.symSource(clazz), clazz.sourceFile)
currentRun.symSource(clazz) = clazz.sourceFile
@@ -353,7 +367,7 @@ trait Namers extends MethodSynthesis {
val existing = context.scope.lookup(tree.name)
val isRedefinition = (
existing.isType
- && existing.owner.isPackageClass
+ && existing.isTopLevel
&& context.scope == existing.owner.info.decls
&& currentRun.canRedefine(existing)
)
@@ -366,8 +380,8 @@ trait Namers extends MethodSynthesis {
else assignAndEnterSymbol(tree) setFlag inConstructorFlag
}
clazz match {
- case csym: ClassSymbol if csym.owner.isPackageClass => enterClassSymbol(tree, csym)
- case _ => clazz
+ case csym: ClassSymbol if csym.isTopLevel => enterClassSymbol(tree, csym)
+ case _ => clazz
}
}
@@ -379,8 +393,8 @@ trait Namers extends MethodSynthesis {
if (sym eq NoSymbol) return
val ctx = if (context.owner.isPackageObjectClass) context.outer else context
- val module = if (sym.isModule) sym else ctx.scope lookup tree.name.toTermName
- val clazz = if (sym.isClass) sym else ctx.scope lookup tree.name.toTypeName
+ val module = if (sym.isModule) sym else ctx.scope lookupModule tree.name
+ val clazz = if (sym.isClass) sym else ctx.scope lookupClass tree.name
val fails = (
module.isModule
&& clazz.isClass
@@ -426,8 +440,8 @@ trait Namers extends MethodSynthesis {
m.moduleClass setFlag moduleClassFlags(moduleFlags)
setPrivateWithin(tree, m.moduleClass)
}
- if (m.owner.isPackageClass && !m.isPackage) {
- m.moduleClass.sourceFile = contextFile
+ if (m.isTopLevel && !m.isPackage) {
+ m.moduleClass.associatedFile = contextFile
currentRun.symSource(m) = m.moduleClass.sourceFile
registerTopLevelSym(m)
}
@@ -489,7 +503,7 @@ trait Namers extends MethodSynthesis {
typer.permanentlyHiddenWarning(pos, to0, e.sym)
else if (context ne context.enclClass) {
val defSym = context.prefix.member(to) filter (
- sym => sym.exists && context.isAccessible(sym, context.prefix, false))
+ sym => sym.exists && context.isAccessible(sym, context.prefix, superAccess = false))
defSym andAlso (typer.permanentlyHiddenWarning(pos, to0, _))
}
@@ -509,7 +523,7 @@ trait Namers extends MethodSynthesis {
if (from != nme.WILDCARD && base != ErrorType) {
if (isValid(from)) {
// for Java code importing Scala objects
- if (!nme.isModuleName(from) || isValid(nme.stripModuleSuffix(from))) {
+ if (!nme.isModuleName(from) || isValid(from.dropModule)) {
typer.TyperErrorGen.NotAMemberError(tree, expr, from)
}
}
@@ -546,8 +560,8 @@ trait Namers extends MethodSynthesis {
val sym = copyDef.symbol
val lazyType = completerOf(copyDef)
- /** Assign the types of the class parameters to the parameters of the
- * copy method. See comment in `Unapplies.caseClassCopyMeth` */
+ /* Assign the types of the class parameters to the parameters of the
+ * copy method. See comment in `Unapplies.caseClassCopyMeth` */
def assignParamTypes() {
val clazz = sym.owner
val constructorType = clazz.primaryConstructor.tpe
@@ -587,17 +601,6 @@ trait Namers extends MethodSynthesis {
}
}
- def enterIfNotThere(sym: Symbol) {
- val scope = context.scope
- @tailrec def search(e: ScopeEntry) {
- if ((e eq null) || (e.owner ne scope))
- scope enter sym
- else if (e.sym ne sym) // otherwise, aborts since we found sym
- search(e.tail)
- }
- search(scope lookupEntry sym.name)
- }
-
def enterValDef(tree: ValDef) {
if (noEnterGetterSetter(tree))
assignAndEnterFinishedSymbol(tree)
@@ -620,7 +623,7 @@ trait Namers extends MethodSynthesis {
// via "x$lzy" as can be seen in test #3927.
val sym = (
if (owner.isClass) createFieldSymbol(tree)
- else owner.newValue(tree.name append nme.LAZY_LOCAL, tree.pos, tree.mods.flags & ~IMPLICIT)
+ else owner.newValue(tree.name append nme.LAZY_LOCAL, tree.pos, (tree.mods.flags | ARTIFACT) & ~IMPLICIT)
)
enterValSymbol(tree, sym setFlag MUTABLE setLazyAccessor lazyAccessor)
}
@@ -641,7 +644,7 @@ trait Namers extends MethodSynthesis {
case DefDef(_, nme.CONSTRUCTOR, _, _, _, _) =>
assignAndEnterFinishedSymbol(tree)
case DefDef(mods, name, tparams, _, _, _) =>
- val bridgeFlag = if (mods hasAnnotationNamed tpnme.bridgeAnnot) BRIDGE else 0
+ val bridgeFlag = if (mods hasAnnotationNamed tpnme.bridgeAnnot) BRIDGE | ARTIFACT else 0
val sym = assignAndEnterSymbol(tree) setFlag bridgeFlag
if (name == nme.copy && sym.isSynthetic)
@@ -651,15 +654,12 @@ trait Namers extends MethodSynthesis {
}
def enterClassDef(tree: ClassDef) {
- val ClassDef(mods, name, tparams, impl) = tree
+ val ClassDef(mods, _, _, impl) = tree
val primaryConstructorArity = treeInfo.firstConstructorArgs(impl.body).size
tree.symbol = enterClassSymbol(tree)
tree.symbol setInfo completerOf(tree)
if (mods.isCase) {
- if (primaryConstructorArity > MaxFunctionArity)
- MaxParametersCaseClassError(tree)
-
val m = ensureCompanionObject(tree, caseModuleDef)
m.moduleClass.updateAttachment(new ClassForCaseCompanionAttachment(tree))
}
@@ -672,7 +672,7 @@ trait Namers extends MethodSynthesis {
m.updateAttachment(new ConstructorDefaultsAttachment(tree, null))
}
val owner = tree.symbol.owner
- if (settings.lint.value && owner.isPackageObjectClass && !mods.isImplicit) {
+ if (settings.lint && owner.isPackageObjectClass && !mods.isImplicit) {
context.unit.warning(tree.pos,
"it is not recommended to define classes/objects inside of package objects.\n" +
"If possible, define " + tree.symbol + " in " + owner.skipPackageObject + " instead."
@@ -690,22 +690,9 @@ trait Namers extends MethodSynthesis {
validateCompanionDefs(tree)
}
- // this logic is needed in case typer was interrupted half
- // way through and then comes back to do the tree again. In
- // that case the definitions that were already attributed as
- // well as any default parameters of such methods need to be
- // re-entered in the current scope.
- protected def enterExistingSym(sym: Symbol): Context = {
- if (forInteractive && sym != null && sym.owner.isTerm) {
- enterIfNotThere(sym)
- if (sym.isLazy)
- sym.lazyAccessor andAlso enterIfNotThere
-
- for (defAtt <- sym.attachments.get[DefaultsOfLocalMethodAttachment])
- defAtt.defaultGetters foreach enterIfNotThere
- }
- this.context
- }
+ // Hooks which are overridden in the presentation compiler
+ def enterExistingSym(sym: Symbol): Context = this.context
+ def enterIfNotThere(sym: Symbol) { }
def enterSyntheticSym(tree: Tree): Symbol = {
enterSym(tree)
@@ -715,41 +702,55 @@ trait Namers extends MethodSynthesis {
// --- Lazy Type Assignment --------------------------------------------------
- def initializeLowerBounds(tp: Type): Type = {
+ def findCyclicalLowerBound(tp: Type): Symbol = {
tp match {
case TypeBounds(lo, _) =>
// check that lower bound is not an F-bound
- for (TypeRef(_, sym, _) <- lo)
- sym.initialize
+ // but carefully: class Foo[T <: Bar[_ >: T]] should be allowed
+ for (tp1 @ TypeRef(_, sym, _) <- lo) {
+ if (settings.breakCycles) {
+ if (!sym.maybeInitialize) {
+ log(s"Cycle inspecting $lo for possible f-bounds: ${sym.fullLocationString}")
+ return sym
+ }
+ }
+ else sym.initialize
+ }
case _ =>
}
- tp
+ NoSymbol
}
def monoTypeCompleter(tree: Tree) = mkTypeCompleter(tree) { sym =>
+ // this early test is there to avoid infinite baseTypes when
+ // adding setters and getters --> bug798
+ // It is a def in an attempt to provide some insulation against
+ // uninitialized symbols misleading us. It is not a certainty
+ // this accomplishes anything, but performance is a non-consideration
+ // on these flag checks so it can't hurt.
+ def needsCycleCheck = sym.isNonClassType && !sym.isParameter && !sym.isExistential
logAndValidate(sym) {
- val tp = initializeLowerBounds(typeSig(tree))
+ val tp = typeSig(tree)
+
+ findCyclicalLowerBound(tp) andAlso { sym =>
+ if (needsCycleCheck) {
+ // neg/t1224: trait C[T] ; trait A { type T >: C[T] <: C[C[T]] }
+ // To avoid an infinite loop on the above, we cannot break all cycles
+ log(s"Reinitializing info of $sym to catch any genuine cycles")
+ sym reset sym.info
+ sym.initialize
+ }
+ }
sym setInfo {
if (sym.isJavaDefined) RestrictJavaArraysMap(tp)
else tp
}
- // this early test is there to avoid infinite baseTypes when
- // adding setters and getters --> bug798
- val needsCycleCheck = (sym.isAliasType || sym.isAbstractType) && !sym.isParameter
- if (needsCycleCheck && !typer.checkNonCyclic(tree.pos, tp))
- sym setInfo ErrorType
+ if (needsCycleCheck) {
+ log(s"Needs cycle check: ${sym.debugLocationString}")
+ if (!typer.checkNonCyclic(tree.pos, tp))
+ sym setInfo ErrorType
+ }
}
- // tree match {
- // case ClassDef(_, _, _, impl) =>
- // val parentsOK = (
- // treeInfo.isInterface(sym, impl.body)
- // || (sym eq ArrayClass)
- // || (sym isSubClass AnyValClass)
- // )
- // if (!parentsOK)
- // ensureParent(sym, AnyRefClass)
- // case _ => ()
- // }
}
def moduleClassTypeCompleter(tree: ModuleDef) = {
@@ -764,7 +765,7 @@ trait Namers extends MethodSynthesis {
def accessorTypeCompleter(tree: ValDef, isSetter: Boolean) = mkTypeCompleter(tree) { sym =>
logAndValidate(sym) {
sym setInfo {
- val tp = if (isSetter) MethodType(List(sym.newSyntheticValueParam(typeSig(tree))), UnitClass.tpe)
+ val tp = if (isSetter) MethodType(List(sym.newSyntheticValueParam(typeSig(tree))), UnitTpe)
else NullaryMethodType(typeSig(tree))
pluginsTypeSigAccessor(tp, typer, tree, sym)
}
@@ -807,23 +808,19 @@ trait Namers extends MethodSynthesis {
case _ =>
false
}
-
- val tpe1 = dropRepeatedParamType(tpe.deconst)
- val tpe2 = tpe1.widen
-
- // This infers Foo.type instead of "object Foo"
- // See Infer#adjustTypeArgs for the polymorphic case.
- if (tpe.typeSymbolDirect.isModuleClass) tpe1
- else if (sym.isVariable || sym.isMethod && !sym.hasAccessorFlag)
- if (tpe2 <:< pt) tpe2 else tpe1
- else if (isHidden(tpe)) tpe2
- // In an attempt to make pattern matches involving method local vals
- // compilable into switches, for a time I had a more generous condition:
- // `if (sym.isFinal || sym.isLocal) tpe else tpe1`
- // This led to issues with expressions like classOf[List[_]] which apparently
- // depend on being deconst-ed here, so this is again the original:
- else if (!sym.isFinal) tpe1
- else tpe
+ val shouldWiden = (
+ !tpe.typeSymbolDirect.isModuleClass // Infer Foo.type instead of "object Foo"
+ && (tpe.widen <:< pt) // Don't widen our way out of conforming to pt
+ && ( sym.isVariable
+ || sym.isMethod && !sym.hasAccessorFlag
+ || isHidden(tpe)
+ )
+ )
+ dropIllegalStarTypes(
+ if (shouldWiden) tpe.widen
+ else if (sym.isFinal) tpe // "final val" allowed to retain constant type
+ else tpe.deconst
+ )
}
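// Illustration only, not part of this patch: how the widening rules encoded above show up
// in user code for vals/vars without explicit types (names below are made up).
object WideningSketch {
  object Registry
  val r = Registry          // module reference: keeps the singleton type Registry.type, not widened

  val limit = 10            // plain val: deconst gives Int, not the constant type Int(10)
  final val MaxRetries = 3  // final val: keeps its constant type, usable where a literal is required

  var counter = 0           // var: always widened, inferred as Int
}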
/** Computes the type of the body in a ValDef or DefDef, and
* assigns the type to the tpt's node. Returns the type.
@@ -851,7 +848,7 @@ trait Namers extends MethodSynthesis {
val sym = (
if (hasType || hasName) {
- owner.typeOfThis = if (hasType) selfTypeCompleter(tpt) else owner.tpe
+ owner.typeOfThis = if (hasType) selfTypeCompleter(tpt) else owner.tpe_*
val selfSym = owner.thisSym setPos self.pos
if (hasName) selfSym setName name else selfSym
}
@@ -866,16 +863,11 @@ trait Namers extends MethodSynthesis {
private def templateSig(templ: Template): Type = {
val clazz = context.owner
def checkParent(tpt: Tree): Type = {
- val tp = tpt.tpe
- val inheritsSelf = tp.typeSymbol == owner
- if (inheritsSelf)
- InheritsItselfError(tpt)
-
- if (inheritsSelf || tp.isError) AnyRefClass.tpe
- else tp
+ if (tpt.tpe.isError) AnyRefTpe
+ else tpt.tpe
}
- val parents = typer.parentTypes(templ) map checkParent
+ val parents = typer.typedParentTypes(templ) map checkParent
enterSelf(templ.self)
@@ -901,11 +893,10 @@ trait Namers extends MethodSynthesis {
val modClass = companionSymbolOf(clazz, context).moduleClass
modClass.attachments.get[ClassForCaseCompanionAttachment] foreach { cma =>
val cdef = cma.caseClass
- def hasCopy(decls: Scope) = (decls lookup nme.copy) != NoSymbol
+ def hasCopy = (decls containsName nme.copy) || parents.exists(_ member nme.copy exists)
+
// SI-5956 needs (cdef.symbol == clazz): there can be multiple class symbols with the same name
- if (cdef.symbol == clazz && !hasCopy(decls) &&
- !parents.exists(p => hasCopy(p.typeSymbol.info.decls)) &&
- !parents.flatMap(_.baseClasses).distinct.exists(bc => hasCopy(bc.info.decls)))
+ if (cdef.symbol == clazz && !hasCopy)
addCopyMethod(cdef, templateNamer)
}
}
@@ -951,9 +942,9 @@ trait Namers extends MethodSynthesis {
// Assign the moduleClass info (templateSig returns a ClassInfoType)
val clazz = moduleSym.moduleClass
clazz setInfo pluginsTp
- // clazz.tpe returns a `ModuleTypeRef(clazz)`, a typeRef that links to the module class `clazz`
+ // clazz.tpe_* returns a `ModuleTypeRef(clazz)`, a typeRef that links to the module class `clazz`
// (clazz.info would the ClassInfoType, which is not what should be assigned to the module symbol)
- clazz.tpe
+ clazz.tpe_*
}
/**
@@ -997,7 +988,7 @@ trait Namers extends MethodSynthesis {
var vparamSymss = enterValueParams(vparamss)
- /**
+ /*
* Creates a method type using tparamSyms and vparamsSymss as argument symbols and `respte` as result type.
* All typeRefs to type skolems are replaced by references to the corresponding non-skolem type parameter,
* so the resulting type is a valid external method type, it does not contain (references to) skolems.
@@ -1031,7 +1022,7 @@ trait Namers extends MethodSynthesis {
res.substSym(tparamSkolems, tparamSyms)
}
- /**
+ /*
* Creates a schematic method type which has WildcardTypes for non specified
* return or parameter types. For instance, in `def f[T](a: T, b) = ...`, the
* type schema is
@@ -1055,7 +1046,7 @@ trait Namers extends MethodSynthesis {
// def overriddenSymbol = meth.nextOverriddenSymbol
- /**
+ /*
* If `meth` doesn't have an explicit return type, extracts the return type from the method
* overridden by `meth` (if there's an unique one). This type is lateron used as the expected
* type for computing the type of the rhs. The resulting type references type skolems for
@@ -1111,7 +1102,7 @@ trait Namers extends MethodSynthesis {
}
if (tpt.isEmpty && meth.name == nme.CONSTRUCTOR) {
- tpt defineType context.enclClass.owner.tpe
+ tpt defineType context.enclClass.owner.tpe_*
tpt setPos meth.pos.focus
}
@@ -1147,7 +1138,7 @@ trait Namers extends MethodSynthesis {
// because @macroImpl annotation only gets assigned during typechecking
// otherwise macro defs wouldn't be able to robustly coexist with their clients
// because a client could be typechecked before a macro def that it uses
- if (meth.isTermMacro) {
+ if (meth.isMacro) {
typer.computeMacroDefType(ddef, resTpFromOverride)
}
@@ -1276,23 +1267,16 @@ trait Namers extends MethodSynthesis {
val defaultTree = atPos(vparam.pos.focus) {
DefDef(
- Modifiers(meth.flags & DefaultGetterFlags) | SYNTHETIC | DEFAULTPARAM | oflag,
+ Modifiers(meth.flags & DefaultGetterFlags) | (SYNTHETIC | DEFAULTPARAM | oflag).toLong,
name, deftParams, defvParamss, defTpt, defRhs)
}
if (!isConstr)
methOwner.resetFlag(INTERFACE) // there's a concrete member now
val default = parentNamer.enterSyntheticSym(defaultTree)
- if (forInteractive && default.owner.isTerm) {
- // save the default getters as attachments in the method symbol. if compiling the
- // same local block several times (which can happen in interactive mode) we might
- // otherwise not find the default symbol, because the second time it the method
- // symbol will be re-entered in the scope but the default parameter will not.
- val att = meth.attachments.get[DefaultsOfLocalMethodAttachment] match {
- case Some(att) => att.defaultGetters += default
- case None => meth.updateAttachment(new DefaultsOfLocalMethodAttachment(default))
- }
- }
- } else if (baseHasDefault) {
+ if (default.owner.isTerm)
+ saveDefaultGetter(meth, default)
+ }
+ else if (baseHasDefault) {
// the parameter does not have a default itself, but the
// corresponding parameter in the base class does.
sym.setFlag(DEFAULTPARAM)
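A minimal, runnable sketch of the user-visible behaviour the default-getter logic above implements. The class names and the `greet$default$1` encoding are illustrative only, not part of this patch:

    class Base {
      // the namer synthesizes a getter for the default, roughly:
      //   def greet$default$1: String = "world"
      def greet(name: String = "world") = s"hello, $name"
    }
    class Sub extends Base {
      // no default of its own, but the corresponding parameter in Base has one,
      // so this parameter gets the DEFAULTPARAM flag and Base's getter is reused
      override def greet(name: String) = s"hi, $name"
    }
    object DefaultGetterDemo extends App {
      println(new Sub().greet())   // prints "hi, world": the default comes from Base
    }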
@@ -1358,20 +1342,22 @@ trait Namers extends MethodSynthesis {
private def importSig(imp: Import) = {
val Import(expr, selectors) = imp
val expr1 = typer.typedQualifier(expr)
- typer checkStable expr1
+
if (expr1.symbol != null && expr1.symbol.isRootPackage)
RootImportError(imp)
if (expr1.isErrorTyped)
ErrorType
else {
+ if (!treeInfo.isStableIdentifierPattern(expr1))
+ typer.TyperErrorGen.UnstableTreeError(expr1)
+
val newImport = treeCopy.Import(imp, expr1, selectors).asInstanceOf[Import]
checkSelectors(newImport)
transformed(imp) = newImport
// copy symbol and type attributes back into old expression
// so that the structure builder will find it.
- expr.symbol = expr1.symbol
- expr.tpe = expr1.tpe
+ expr setSymbol expr1.symbol setType expr1.tpe
ImportType(expr1)
}
}
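To make the stable-identifier requirement introduced above concrete, here is a small sketch; the commented-out import is the kind of qualifier the `UnstableTreeError` path now rejects (names are illustrative, diagnostic wording paraphrased):

    object Outer { object Inner { val answer = 42 } }

    object ImportStabilityDemo extends App {
      import Outer.Inner.answer      // ok: every prefix is a stable identifier
      var movingTarget = Outer
      // import movingTarget.Inner   // rejected: stable identifier required,
      //                             // but movingTarget found here
      println(answer)
    }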
@@ -1393,7 +1379,9 @@ trait Namers extends MethodSynthesis {
if (!cdef.symbol.hasAbstractFlag)
namer.enterSyntheticSym(caseModuleApplyMeth(cdef))
- namer.enterSyntheticSym(caseModuleUnapplyMeth(cdef))
+ val primaryConstructorArity = treeInfo.firstConstructorArgs(cdef.impl.body).size
+ if (primaryConstructorArity <= MaxTupleArity)
+ namer.enterSyntheticSym(caseModuleUnapplyMeth(cdef))
}
def addCopyMethod(cdef: ClassDef, namer: Namer) {
@@ -1407,12 +1395,12 @@ trait Namers extends MethodSynthesis {
*/
def typeSig(tree: Tree): Type = {
// log("typeSig " + tree)
- /** For definitions, transform Annotation trees to AnnotationInfos, assign
- * them to the sym's annotations. Type annotations: see Typer.typedAnnotated
- * We have to parse definition annotations here (not in the typer when traversing
- * the MemberDef tree): the typer looks at annotations of certain symbols; if
- * they were added only in typer, depending on the compilation order, they may
- * or may not be visible.
+ /* For definitions, transform Annotation trees to AnnotationInfos, assign
+ * them to the sym's annotations. Type annotations: see Typer.typedAnnotated
+ * We have to parse definition annotations here (not in the typer when traversing
+ * the MemberDef tree): the typer looks at annotations of certain symbols; if
+ * they were added only in typer, depending on the compilation order, they may
+ * or may not be visible.
*/
def annotate(annotated: Symbol) = {
// typeSig might be called multiple times, e.g. on a ValDef: val, getter, setter
@@ -1424,7 +1412,7 @@ trait Namers extends MethodSynthesis {
AnnotationInfo lazily {
val context1 = typer.context.make(ann)
context1.setReportErrors()
- beforeTyper(newTyper(context1) typedAnnotation ann)
+ enteringTyper(newTyper(context1) typedAnnotation ann)
}
}
if (ainfos.nonEmpty) {
@@ -1476,12 +1464,6 @@ trait Namers extends MethodSynthesis {
tpe
}
- def ensureParent(clazz: Symbol, parent: Symbol) = {
- val info0 = clazz.info
- val info1 = includeParent(info0, parent)
- if (info0 ne info1) clazz setInfo info1
- }
-
class LogTransitions[S](onEnter: S => String, onExit: S => String) {
val enabled = settings.debug.value
@inline final def apply[T](entity: S)(body: => T): T = {
@@ -1511,8 +1493,8 @@ trait Namers extends MethodSynthesis {
private object RestrictJavaArraysMap extends TypeMap {
def apply(tp: Type): Type = tp match {
case TypeRef(pre, ArrayClass, List(elemtp))
- if elemtp.typeSymbol.isAbstractType && !(elemtp <:< ObjectClass.tpe) =>
- TypeRef(pre, ArrayClass, List(intersectionType(List(elemtp, ObjectClass.tpe))))
+ if elemtp.typeSymbol.isAbstractType && !(elemtp <:< ObjectTpe) =>
+ TypeRef(pre, ArrayClass, List(intersectionType(List(elemtp, ObjectTpe))))
case _ =>
mapOver(tp)
}
@@ -1534,7 +1516,7 @@ trait Namers extends MethodSynthesis {
AbstractMemberWithModiferError(sym, flag)
}
def checkNoConflict(flag1: Int, flag2: Int) {
- if (sym hasAllFlags flag1 | flag2)
+ if (sym hasAllFlags flag1.toLong | flag2)
IllegalModifierCombination(sym, flag1, flag2)
}
if (sym.isImplicit) {
@@ -1542,7 +1524,7 @@ trait Namers extends MethodSynthesis {
fail(ImplicitConstr)
if (!(sym.isTerm || (sym.isClass && !sym.isTrait)))
fail(ImplicitNotTermOrClass)
- if (sym.owner.isPackageClass)
+ if (sym.isTopLevel)
fail(ImplicitAtToplevel)
}
if (sym.isClass) {
@@ -1650,7 +1632,7 @@ trait Namers extends MethodSynthesis {
// @M an abstract type's type parameters are entered.
// TODO: change to isTypeMember ?
if (defnSym.isAbstractType)
- newNamerFor(ctx, tree) enterSyms tparams //@M
+ newNamer(ctx.makeNewScope(tree, tree.symbol)) enterSyms tparams //@M
restp complete sym
}
}
diff --git a/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala b/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala
index d54f894c62..8e9933f734 100644
--- a/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala
@@ -8,7 +8,6 @@ package typechecker
import symtab.Flags._
import scala.collection.mutable
-import scala.ref.WeakReference
import scala.reflect.ClassTag
/**
@@ -20,6 +19,7 @@ trait NamesDefaults { self: Analyzer =>
import global._
import definitions._
import NamesDefaultsErrorsGen._
+ import treeInfo.WildcardStarArg
// Default getters of constructors are added to the companion object in the
// typeCompleter of the constructor (methodSig). To compute the signature,
@@ -42,13 +42,11 @@ trait NamesDefaults { self: Analyzer =>
blockTyper: Typer
) { }
- val noApplyInfo = NamedApplyInfo(None, Nil, Nil, null)
-
- def nameOf(arg: Tree) = arg match {
- case AssignOrNamedArg(Ident(name), rhs) => Some(name)
- case _ => None
+ private def nameOfNamedArg(arg: Tree) = Some(arg) collect { case AssignOrNamedArg(Ident(name), _) => name }
+ def isNamedArg(arg: Tree) = arg match {
+ case AssignOrNamedArg(Ident(_), _) => true
+ case _ => false
}
- def isNamed(arg: Tree) = nameOf(arg).isDefined
/** @param pos maps indices from old to new */
def reorderArgs[T: ClassTag](args: List[T], pos: Int => Int): List[T] = {
@@ -58,13 +56,13 @@ trait NamesDefaults { self: Analyzer =>
}
/** @param pos maps indices from new to old (!) */
- def reorderArgsInv[T: ClassTag](args: List[T], pos: Int => Int): List[T] = {
+ private def reorderArgsInv[T: ClassTag](args: List[T], pos: Int => Int): List[T] = {
val argsArray = args.toArray
(argsArray.indices map (i => argsArray(pos(i)))).toList
}
/** returns `true` if every element is equal to its index */
- def isIdentity(a: Array[Int]) = (0 until a.length).forall(i => a(i) == i)
+ def allArgsArePositional(a: Array[Int]) = (0 until a.length).forall(i => a(i) == i)
/**
* Transform a function application into a Block, and assigns typer.context
@@ -107,14 +105,14 @@ trait NamesDefaults { self: Analyzer =>
* @return the transformed application (a Block) together with the NamedApplyInfo.
* if isNamedApplyBlock(tree), returns the existing context.namedApplyBlockInfo
*/
- def transformNamedApplication(typer: Typer, mode: Int, pt: Type)
+ def transformNamedApplication(typer: Typer, mode: Mode, pt: Type)
(tree: Tree, argPos: Int => Int): Tree = {
import typer._
import typer.infer._
val context = typer.context
import context.unit
- /**
+ /*
* Transform a function into a block, passing context.namedApplyBlockInfo to
* the new block as a side effect.
*
@@ -164,14 +162,14 @@ trait NamesDefaults { self: Analyzer =>
// never used for constructor calls, they always have a stable qualifier
def blockWithQualifier(qual: Tree, selected: Name) = {
- val sym = blockTyper.context.owner.newValue(unit.freshTermName("qual$"), qual.pos) setInfo qual.tpe
+ val sym = blockTyper.context.owner.newValue(unit.freshTermName("qual$"), qual.pos, newFlags = ARTIFACT) setInfo qual.tpe
blockTyper.context.scope enter sym
val vd = atPos(sym.pos)(ValDef(sym, qual) setType NoType)
// it stays in Vegas: SI-5720, SI-5727
qual changeOwner (blockTyper.context.owner -> sym)
val newQual = atPos(qual.pos.focus)(blockTyper.typedQualifier(Ident(sym.name)))
- var baseFunTransformed = atPos(baseFun.pos.makeTransparent) {
+ val baseFunTransformed = atPos(baseFun.pos.makeTransparent) {
// setSymbol below is important because the 'selected' function might be overloaded. by
// assigning the correct method symbol, typedSelect will just assign the type. the reason
// to still call 'typed' is to correctly infer singleton types, SI-5259.
@@ -204,7 +202,7 @@ trait NamesDefaults { self: Analyzer =>
if (module == NoSymbol) None
else {
val ref = atPos(pos.focus)(gen.mkAttributedRef(pre, module))
- if (module.isStable && pre.isStable) // fixes #4524. the type checker does the same for
+ if (treeInfo.admitsTypeSelection(ref)) // fixes #4524. the type checker does the same for
ref.setType(singleType(pre, module)) // typedSelect, it calls "stabilize" on the result.
Some(ref)
}
@@ -259,7 +257,7 @@ trait NamesDefaults { self: Analyzer =>
}
}
- /**
+ /*
* For each argument (arg: T), create a local value
* x$n: T = arg
*
@@ -281,8 +279,8 @@ trait NamesDefaults { self: Analyzer =>
val repeated = isScalaRepeatedParamType(paramTpe)
val argTpe = (
if (repeated) arg match {
- case Typed(expr, Ident(tpnme.WILDCARD_STAR)) => expr.tpe
- case _ => seqType(arg.tpe)
+ case WildcardStarArg(expr) => expr.tpe
+ case _ => seqType(arg.tpe)
}
else {
// TODO In 83c9c764b, we tried to use a stable type here to fix SI-7234. But the resulting TypeTree over a
@@ -291,7 +289,7 @@ trait NamesDefaults { self: Analyzer =>
arg.tpe
}
).widen // have to widen or types inferred from literal defaults will be singletons
- val s = context.owner.newValue(unit.freshTermName("x$"), arg.pos) setInfo (
+ val s = context.owner.newValue(unit.freshTermName("x$"), arg.pos, newFlags = ARTIFACT) setInfo (
if (byName) functionType(Nil, argTpe) else argTpe
)
Some((context.scope.enter(s), byName, repeated))
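The `x$n` temporaries described above are easiest to see on a small example. The block below is a hand-written approximation of what the named-application transform produces; the temporary names and the exact shape are illustrative:

    object NamedApplyDemo extends App {
      def resize(width: Int, height: Int) = (width, height)

      // resize(height = 10, width = 3) is typed roughly as:
      val result = {
        val x$1 = 10      // arguments are evaluated in call-site order...
        val x$2 = 3
        resize(x$2, x$1)  // ...then passed in definition-site order
      }
      println(result)     // prints (3,10)
    }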
@@ -307,11 +305,8 @@ trait NamesDefaults { self: Analyzer =>
} else {
new ChangeOwnerTraverser(context.owner, sym) traverse arg // fixes #4502
if (repeated) arg match {
- case Typed(expr, Ident(tpnme.WILDCARD_STAR)) =>
- expr
- case _ =>
- val factory = Select(gen.mkAttributedRef(SeqModule), nme.apply)
- blockTyper.typed(Apply(factory, List(resetLocalAttrs(arg))))
+ case WildcardStarArg(expr) => expr
+ case _ => blockTyper typed gen.mkSeqApply(resetLocalAttrs(arg))
} else arg
}
Some(atPos(body.pos)(ValDef(sym, body).setType(NoType)))
@@ -330,7 +325,7 @@ trait NamesDefaults { self: Analyzer =>
assert(isNamedApplyBlock(transformedFun), transformedFun)
val NamedApplyInfo(qual, targs, vargss, blockTyper) =
context.namedApplyBlockInfo.get._2
- val existingBlock @ Block(stats, funOnly) = transformedFun
+ val Block(stats, funOnly) = transformedFun
// type the application without names; put the arguments in definition-site order
val typedApp = doTypedApply(tree, funOnly, reorderArgs(namelessArgs, argPos), mode, pt)
@@ -378,7 +373,9 @@ trait NamesDefaults { self: Analyzer =>
}
}
- def missingParams[T](args: List[T], params: List[Symbol], argName: T => Option[Name] = nameOf _): (List[Symbol], Boolean) = {
+ def makeNamedTypes(syms: List[Symbol]) = syms map (sym => NamedType(sym.name, sym.tpe))
+
+ def missingParams[T](args: List[T], params: List[Symbol], argName: T => Option[Name] = nameOfNamedArg _): (List[Symbol], Boolean) = {
val namedArgs = args.dropWhile(arg => {
val n = argName(arg)
n.isEmpty || params.forall(p => p.name != n.get)
@@ -413,7 +410,7 @@ trait NamesDefaults { self: Analyzer =>
// TODO #3649 can create spurious errors when companion object is gone (because it becomes unlinked from scope)
if (defGetter == NoSymbol) None // prevent crash in erroneous trees, #3649
else {
- var default1 = qual match {
+ var default1: Tree = qual match {
case Some(q) => gen.mkAttributedSelect(q.duplicate, defGetter)
case None => gen.mkAttributedRef(defGetter)
@@ -459,20 +456,6 @@ trait NamesDefaults { self: Analyzer =>
} else NoSymbol
}
- private def savingUndeterminedTParams[T](context: Context)(fn: List[Symbol] => T): T = {
- val savedParams = context.extractUndetparams()
- val savedReporting = context.ambiguousErrors
-
- context.setAmbiguousErrors(false)
- try fn(savedParams)
- finally {
- context.setAmbiguousErrors(savedReporting)
- //@M note that we don't get here when an ambiguity was detected (during the computation of res),
- // as errorTree throws an exception
- context.undetparams = savedParams
- }
- }
-
/** A full type check is very expensive; let's make sure there's a name
* somewhere which could potentially be ambiguous before we go that route.
*/
@@ -487,12 +470,10 @@ trait NamesDefaults { self: Analyzer =>
// def f[T](x: T) = x
// var x = 0
// f(x = 1) << "x = 1" typechecks with expected type WildcardType
- savingUndeterminedTParams(context) { udp =>
+ val udp = context.undetparams
+ context.savingUndeterminedTypeParams(reportAmbiguous = false) {
val subst = new SubstTypeMap(udp, udp map (_ => WildcardType)) {
- override def apply(tp: Type): Type = super.apply(tp match {
- case TypeRef(_, ByNameParamClass, x :: Nil) => x
- case _ => tp
- })
+ override def apply(tp: Type): Type = super.apply(dropByName(tp))
}
// This throws an exception which is caught in `tryTypedApply` (as it
// uses `silent`) - unfortunately, tryTypedApply recovers from the
diff --git a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
index 03ce710700..c9849eebb5 100644
--- a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
@@ -59,7 +59,22 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
override def changesBaseClasses = false
override def transformInfo(sym: Symbol, tp: Type): Type = {
- if (sym.isModule && !sym.isStatic) sym setFlag (lateMETHOD | STABLE)
+ // !!! This is a sketchy way to do things.
+ // It would be better to replace the module symbol with a method symbol
+ // rather than creating this module/method hybrid which must be special
+ // cased all over the place. Look for the call sites which use(d) some
+ // variation of "isMethod && !isModule", which to an observer looks like
+ // a nonsensical condition. (It is now "isModuleNotMethod".)
+ if (sym.isModule && !sym.isStatic) {
+ sym setFlag lateMETHOD | STABLE
+ // Note that, as far as we can see, it works equally well
+ // to set the METHOD flag here and dump lateMETHOD, but it does
+ // mean that under separate compilation the typer will see
+ // modules as methods (albeit stable ones with singleton types).
+ // So for now lateMETHOD lives while we try to convince ourselves
+ // we can live without it or deliver that info some other way.
+ log(s"Stabilizing module method for ${sym.fullLocationString}")
+ }
super.transformInfo(sym, tp)
}
@@ -71,7 +86,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
if (sym.hasAccessBoundary) "" + sym.privateWithin.name else ""
)
- def overridesTypeInPrefix(tp1: Type, tp2: Type, prefix: Type): Boolean = (tp1.normalize, tp2.normalize) match {
+ def overridesTypeInPrefix(tp1: Type, tp2: Type, prefix: Type): Boolean = (tp1.dealiasWiden, tp2.dealiasWiden) match {
case (MethodType(List(), rtp1), NullaryMethodType(rtp2)) =>
rtp1 <:< rtp2
case (NullaryMethodType(rtp1), MethodType(List(), rtp2)) =>
@@ -95,7 +110,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
class RefCheckTransformer(unit: CompilationUnit) extends Transformer {
- var localTyper: analyzer.Typer = typer;
+ var localTyper: analyzer.Typer = typer
var currentApplication: Tree = EmptyTree
var inPattern: Boolean = false
var checkedCombinations = Set[List[Type]]()
@@ -111,7 +126,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
val defaultMethodNames = defaultGetters map (sym => nme.defaultGetterToMethod(sym.name))
defaultMethodNames.toList.distinct foreach { name =>
- val methods = clazz.info.findMember(name, 0L, METHOD, false).alternatives
+ val methods = clazz.info.findMember(name, 0L, METHOD, stableOnly = false).alternatives
def hasDefaultParam(tpe: Type): Boolean = tpe match {
case MethodType(params, restpe) => (params exists (_.hasDefault)) || hasDefaultParam(restpe)
case _ => false
@@ -133,7 +148,16 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
}
}
}
- if (settings.lint.value) {
+
+ // Check for doomed attempt to overload applyDynamic
+ if (clazz isSubClass DynamicClass) {
+ for ((_, m1 :: m2 :: _) <- (clazz.info member nme.applyDynamic).alternatives groupBy (_.typeParams.length)) {
+ unit.error(m1.pos, "implementation restriction: applyDynamic cannot be overloaded except by methods with different numbers of type parameters, e.g. applyDynamic[T1](method: String)(arg: T1) and applyDynamic[T1, T2](method: String)(arg1: T1, arg2: T2)")
+ }
+ }
+
+ // This has become noisy with implicit classes.
+ if (settings.lint && settings.developer) {
clazz.info.decls filter (x => x.isImplicit && x.typeParams.nonEmpty) foreach { sym =>
// implicit classes leave both a module symbol and a method symbol as residue
val alts = clazz.info.decl(sym.name).alternatives filterNot (_.isModule)
@@ -241,7 +265,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
case class MixinOverrideError(member: Symbol, msg: String)
- var mixinOverrideErrors = new ListBuffer[MixinOverrideError]()
+ val mixinOverrideErrors = new ListBuffer[MixinOverrideError]()
def printMixinOverrideErrors() {
mixinOverrideErrors.toList match {
@@ -273,8 +297,8 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
else "")
}
- /** Check that all conditions for overriding `other` by `member`
- * of class `clazz` are met.
+ /* Check that all conditions for overriding `other` by `member`
+ * of class `clazz` are met.
*/
def checkOverride(member: Symbol, other: Symbol) {
debuglog("Checking validity of %s overriding %s".format(member.fullLocationString, other.fullLocationString))
@@ -299,7 +323,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
infoStringWithLocation(other),
infoStringWithLocation(member)
)
- else if (settings.debug.value)
+ else if (settings.debug)
analyzer.foundReqMsg(member.tpe, other.tpe)
else ""
@@ -353,8 +377,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
}
}
- /** Is the intersection between given two lists of overridden symbols empty?
- */
+ /* Is the intersection between given two lists of overridden symbols empty? */
def intersectionIsEmpty(syms1: List[Symbol], syms2: List[Symbol]) =
!(syms1 exists (syms2 contains _))
@@ -378,11 +401,11 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
if (!isOverrideAccessOK) {
overrideAccessError()
} else if (other.isClass) {
- overrideError("cannot be used here - class definitions cannot be overridden");
+ overrideError("cannot be used here - class definitions cannot be overridden")
} else if (!other.isDeferred && member.isClass) {
- overrideError("cannot be used here - classes can only override abstract types");
+ overrideError("cannot be used here - classes can only override abstract types")
} else if (other.isEffectivelyFinal) { // (1.2)
- overrideError("cannot override final member");
+ overrideError("cannot override final member")
} else if (!other.isDeferred && !other.hasFlag(DEFAULTMETHOD) && !member.isAnyOverride && !member.isSynthetic) { // (*)
// (*) Synthetic exclusion for (at least) default getters, fixes SI-5178. We cannot assign the OVERRIDE flag to
// the default getter: one default getter might sometimes override, sometimes not. Example in comment on ticket.
@@ -400,7 +423,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
else if (member.isAnyOverride && (other hasFlag ACCESSOR) && other.accessed.isVariable && !other.accessed.isLazy) {
// !?! this is not covered by the spec. We need to resolve this either by changing the spec or removing the test here.
// !!! is there a !?! convention? I'm !!!ing this to make sure it turns up on my searches.
- if (!settings.overrideVars.value)
+ if (!settings.overrideVars)
overrideError("cannot override a mutable variable")
}
else if (member.isAnyOverride &&
@@ -418,13 +441,13 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
member.isValue && !member.isLazy) {
overrideError("must be declared lazy to override a concrete lazy value")
} else if (other.isDeferred && member.isTermMacro) { // (1.9)
- overrideError("cannot override an abstract method")
+ overrideError("cannot be used here - term macros cannot override abstract methods")
} else if (other.isTermMacro && !member.isTermMacro) { // (1.10)
- overrideError("cannot override a macro")
+ overrideError("cannot be used here - only term macros can override term macros")
} else {
checkOverrideTypes()
checkOverrideDeprecated()
- if (settings.warnNullaryOverride.value) {
+ if (settings.warnNullaryOverride) {
if (other.paramss.isEmpty && !member.paramss.isEmpty) {
unit.warning(member.pos, "non-nullary method overrides nullary method")
}
@@ -441,7 +464,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
// @M: substSym
if( !(sameLength(member.typeParams, other.typeParams) && (memberTp.substSym(member.typeParams, other.typeParams) =:= otherTp)) ) // (1.6)
- overrideTypeError();
+ overrideTypeError()
}
else if (other.isAbstractType) {
//if (!member.typeParams.isEmpty) // (1.7) @MAT
@@ -466,12 +489,12 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
// check a type alias's RHS corresponds to its declaration
// this overlaps somewhat with validateVariance
if(member.isAliasType) {
- // println("checkKindBounds" + ((List(member), List(memberTp.normalize), self, member.owner)))
- val kindErrors = typer.infer.checkKindBounds(List(member), List(memberTp.normalize), self, member.owner)
+ // println("checkKindBounds" + ((List(member), List(memberTp.dealiasWiden), self, member.owner)))
+ val kindErrors = typer.infer.checkKindBounds(List(member), List(memberTp.dealiasWiden), self, member.owner)
if(!kindErrors.isEmpty)
unit.error(member.pos,
- "The kind of the right-hand side "+memberTp.normalize+" of "+member.keyString+" "+
+ "The kind of the right-hand side "+memberTp.dealiasWiden+" of "+member.keyString+" "+
member.varianceString + member.nameString+ " does not conform to its expected kind."+
kindErrors.toList.mkString("\n", ", ", ""))
} else if (member.isAbstractType) {
@@ -488,13 +511,16 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
}
if (member.isStable && !otherTp.isVolatile) {
- if (memberTp.isVolatile)
+ // (1.4), pt 2 -- member.isStable && memberTp.isVolatile started being possible after SI-6815
+ // (before SI-6815, !symbol.tpe.isVolatile was implied by symbol.isStable)
+ // TODO: allow overriding when @uncheckedStable?
+ if (memberTp.isVolatile)
overrideError("has a volatile type; cannot override a member with non-volatile type")
- else memberTp.normalize.resultType match {
+ else memberTp.dealiasWiden.resultType match {
case rt: RefinedType if !(rt =:= otherTp) && !(checkedCombinations contains rt.parents) =>
// might mask some inconsistencies -- check overrides
checkedCombinations += rt.parents
- val tsym = rt.typeSymbol;
+ val tsym = rt.typeSymbol
if (tsym.pos == NoPosition) tsym setPos member.pos
checkAllOverrides(tsym, typesOnly = true)
case _ =>
@@ -515,9 +541,9 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
val opc = new overridingPairs.Cursor(clazz)
while (opc.hasNext) {
//Console.println(opc.overriding/* + ":" + opc.overriding.tpe*/ + " in "+opc.overriding.fullName + " overrides " + opc.overridden/* + ":" + opc.overridden.tpe*/ + " in "+opc.overridden.fullName + "/"+ opc.overridden.hasFlag(DEFERRED));//debug
- if (!opc.overridden.isClass) checkOverride(opc.overriding, opc.overridden);
+ if (!opc.overridden.isClass) checkOverride(opc.overriding, opc.overridden)
- opc.next
+ opc.next()
}
printMixinOverrideErrors()
@@ -549,13 +575,13 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
def uncurryAndErase(tp: Type) = erasure.erasure(sym)(uncurry.transformInfo(sym, tp))
val tp1 = uncurryAndErase(clazz.thisType.memberType(sym))
val tp2 = uncurryAndErase(clazz.thisType.memberType(other))
- afterErasure(tp1 matches tp2)
+ exitingErasure(tp1 matches tp2)
})
def ignoreDeferred(member: Symbol) = (
(member.isAbstractType && !member.isFBounded) || (
member.isJavaDefined &&
- // the test requires afterErasure so shouldn't be
+ // the test requires exitingErasure so shouldn't be
// done if the compiler has no erasure phase available
(currentRun.erasurePhase == NoPhase || javaErasedOverridingSym(member) != NoSymbol)
)
@@ -578,8 +604,10 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
def stubImplementations: List[String] = {
// Grouping missing methods by the declaring class
val regrouped = missingMethods.groupBy(_.owner).toList
- def membersStrings(members: List[Symbol]) =
- members.sortBy("" + _.name) map (m => m.defStringSeenAs(clazz.tpe memberType m) + " = ???")
+ def membersStrings(members: List[Symbol]) = {
+ members foreach fullyInitializeSymbol
+ members.sortBy(_.name) map (m => m.defStringSeenAs(clazz.tpe_* memberType m) + " = ???")
+ }
if (regrouped.tail.isEmpty)
membersStrings(regrouped.head._2)
@@ -718,16 +746,19 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
else if (clazz.isTrait && !(clazz isSubClass AnyValClass)) {
// For non-AnyVal classes, prevent abstract methods in interfaces that override
// final members in Object; see #4431
- for (decl <- clazz.info.decls.iterator) {
- val overridden = decl.overriddenSymbol(ObjectClass)
+ for (decl <- clazz.info.decls) {
+ // Have to use matchingSymbol, not a method involving overridden symbols,
+ // because the scala type system understands that an abstract method here does not
+ // override a concrete method in Object. The jvm, however, does not.
+ val overridden = decl.matchingSymbol(ObjectClass, ObjectTpe)
if (overridden.isFinal)
unit.error(decl.pos, "trait cannot redefine final method from class AnyRef")
}
}
- /** Returns whether there is a symbol declared in class `inclazz`
- * (which must be different from `clazz`) whose name and type
- * seen as a member of `class.thisType` matches `member`'s.
+ /* Returns whether there is a symbol declared in class `inclazz`
+ * (which must be different from `clazz`) whose name and type
+ * seen as a member of `class.thisType` matches `member`'s.
*/
def hasMatchingSym(inclazz: Symbol, member: Symbol): Boolean = {
val isVarargs = hasRepeatedParam(member.tpe)
@@ -739,22 +770,22 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
matches(member.tpe) || (isVarargs && matches(varargsType))
}
- /** The rules for accessing members which have an access boundary are more
- * restrictive in java than scala. Since java has no concept of package nesting,
- * a member with "default" (package-level) access can only be accessed by members
- * in the exact same package. Example:
+ /* The rules for accessing members which have an access boundary are more
+ * restrictive in java than scala. Since java has no concept of package nesting,
+ * a member with "default" (package-level) access can only be accessed by members
+ * in the exact same package. Example:
*
- * package a.b;
- * public class JavaClass { void foo() { } }
+ * package a.b;
+ * public class JavaClass { void foo() { } }
*
- * The member foo() can be accessed only from members of package a.b, and not
- * nested packages like a.b.c. In the analogous scala class:
+ * The member foo() can be accessed only from members of package a.b, and not
+ * nested packages like a.b.c. In the analogous scala class:
*
- * package a.b
- * class ScalaClass { private[b] def foo() = () }
+ * package a.b
+ * class ScalaClass { private[b] def foo() = () }
*
- * The member IS accessible to classes in package a.b.c. The javaAccessCheck logic
- * is restricting the set of matching signatures according to the above semantics.
+ * The member IS accessible to classes in package a.b.c. The javaAccessCheck logic
+ * is restricting the set of matching signatures according to the above semantics.
*/
def javaAccessCheck(sym: Symbol) = (
!inclazz.isJavaDefined // not a java defined member
@@ -774,7 +805,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
// for (bc <- clazz.info.baseClasses.tail) Console.println("" + bc + " has " + bc.info.decl(member.name) + ":" + bc.info.decl(member.name).tpe);//DEBUG
val nonMatching: List[Symbol] = clazz.info.member(member.name).alternatives.filterNot(_.owner == clazz).filterNot(_.isFinal)
- def issueError(suffix: String) = unit.error(member.pos, member.toString() + " overrides nothing" + suffix);
+ def issueError(suffix: String) = unit.error(member.pos, member.toString() + " overrides nothing" + suffix)
nonMatching match {
case Nil =>
issueError("")
@@ -801,7 +832,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
for (i <- 0 until seenTypes.length)
seenTypes(i) = Nil
- /** validate all base types of a class in reverse linear order. */
+ /* validate all base types of a class in reverse linear order. */
def register(tp: Type): Unit = {
// if (clazz.fullName.endsWith("Collection.Projection"))
// println("validate base type "+tp)
@@ -829,7 +860,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
case tp1 :: tp2 :: _ =>
unit.error(clazz.pos, "illegal inheritance;\n " + clazz +
" inherits different type instances of " + baseClass +
- ":\n" + tp1 + " and " + tp2);
+ ":\n" + tp1 + " and " + tp2)
explainTypes(tp1, tp2)
explainTypes(tp2, tp1)
}
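For reference, the "different type instances" error reported here fires on code like the following minimal sketch (the failing case is commented out; the message is abbreviated from the string built above):

    trait Box[T]
    class Good extends Box[Int]
    // class Bad extends Box[Int] with Box[String]
    //   error: illegal inheritance;
    //   class Bad inherits different type instances of trait Box:
    //   Box[Int] and Box[String]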
@@ -838,161 +869,14 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
// Variance Checking --------------------------------------------------------
- private val ContraVariance = -1
- private val NoVariance = 0
- private val CoVariance = 1
- private val AnyVariance = 2
-
- private val escapedPrivateLocals = new mutable.HashSet[Symbol]
-
- val varianceValidator = new Traverser {
-
- /** Validate variance of info of symbol `base` */
- private def validateVariance(base: Symbol) {
- // A flag for when we're in a refinement, meaning method parameter types
- // need to be checked.
- var inRefinement = false
-
- def varianceString(variance: Int): String =
- if (variance == 1) "covariant"
- else if (variance == -1) "contravariant"
- else "invariant";
-
- /** The variance of a symbol occurrence of `tvar`
- * seen at the level of the definition of `base`.
- * The search proceeds from `base` to the owner of `tvar`.
- * Initially the state is covariant, but it might change along the search.
- */
- def relativeVariance(tvar: Symbol): Int = {
- val clazz = tvar.owner
- var sym = base
- var state = CoVariance
- while (sym != clazz && state != AnyVariance) {
- //Console.println("flip: " + sym + " " + sym.isParameter());//DEBUG
- // Flip occurrences of type parameters and parameters, unless
- // - it's a constructor, or case class factory or extractor
- // - it's a type parameter of tvar's owner.
- if (sym.isParameter && !sym.owner.isConstructor && !sym.owner.isCaseApplyOrUnapply &&
- !(tvar.isTypeParameterOrSkolem && sym.isTypeParameterOrSkolem &&
- tvar.owner == sym.owner)) state = -state;
- else if (!sym.owner.isClass ||
- sym.isTerm && ((sym.isPrivateLocal || sym.isProtectedLocal || sym.isSuperAccessor /* super accessors are implicitly local #4345*/) && !(escapedPrivateLocals contains sym))) {
- // return AnyVariance if `sym` is local to a term
- // or is private[this] or protected[this]
- state = AnyVariance
- } else if (sym.isAliasType) {
- // return AnyVariance if `sym` is an alias type
- // that does not override anything. This is OK, because we always
- // expand aliases for variance checking.
- // However, if `sym` does override a type in a base class
- // we have to assume NoVariance, as there might then be
- // references to the type parameter that are not variance checked.
- state = if (sym.isOverridingSymbol) NoVariance else AnyVariance
- }
- sym = sym.owner
- }
- state
- }
-
- /** Validate that the type `tp` is variance-correct, assuming
- * the type occurs itself at variance position given by `variance`
- */
- def validateVariance(tp: Type, variance: Int): Unit = tp match {
- case ErrorType =>
- case WildcardType =>
- case BoundedWildcardType(bounds) =>
- validateVariance(bounds, variance)
- case NoType =>
- case NoPrefix =>
- case ThisType(_) =>
- case ConstantType(_) =>
- // case DeBruijnIndex(_, _) =>
- case SingleType(pre, sym) =>
- validateVariance(pre, variance)
- case TypeRef(pre, sym, args) =>
-// println("validate "+sym+" at "+relativeVariance(sym))
- if (sym.isAliasType/* && relativeVariance(sym) == AnyVariance*/)
- validateVariance(tp.normalize, variance)
- else if (sym.variance != NoVariance) {
- val v = relativeVariance(sym)
- if (v != AnyVariance && sym.variance != v * variance) {
- //Console.println("relativeVariance(" + base + "," + sym + ") = " + v);//DEBUG
- def tpString(tp: Type) = tp match {
- case ClassInfoType(parents, _, clazz) => "supertype "+intersectionType(parents, clazz.owner)
- case _ => "type "+tp
- }
- unit.error(base.pos,
- varianceString(sym.variance) + " " + sym +
- " occurs in " + varianceString(v * variance) +
- " position in " + tpString(base.info) + " of " + base);
- }
- }
- validateVariance(pre, variance)
- // @M for higher-kinded typeref, args.isEmpty
- // However, these args respect variances by construction anyway
- // -- the interesting case is in type application, see checkKindBounds in Infer
- if (args.nonEmpty)
- validateVarianceArgs(args, variance, sym.typeParams)
- case ClassInfoType(parents, decls, symbol) =>
- validateVariances(parents, variance)
- case RefinedType(parents, decls) =>
- validateVariances(parents, variance)
- val saved = inRefinement
- inRefinement = true
- for (sym <- decls)
- validateVariance(sym.info, if (sym.isAliasType) NoVariance else variance)
- inRefinement = saved
- case TypeBounds(lo, hi) =>
- validateVariance(lo, -variance)
- validateVariance(hi, variance)
- case mt @ MethodType(formals, result) =>
- if (inRefinement)
- validateVariances(mt.paramTypes, -variance)
- validateVariance(result, variance)
- case NullaryMethodType(result) =>
- validateVariance(result, variance)
- case PolyType(tparams, result) =>
- // type parameters will be validated separately, because they are defined explicitly.
- validateVariance(result, variance)
- case ExistentialType(tparams, result) =>
- validateVariances(tparams map (_.info), variance)
- validateVariance(result, variance)
- case AnnotatedType(annots, tp, selfsym) =>
- if (!annots.exists(_ matches uncheckedVarianceClass))
- validateVariance(tp, variance)
- }
-
- def validateVariances(tps: List[Type], variance: Int) {
- tps foreach (tp => validateVariance(tp, variance))
- }
-
- def validateVarianceArgs(tps: List[Type], variance: Int, tparams: List[Symbol]) {
- foreach2(tps, tparams)((tp, tparam) => validateVariance(tp, variance * tparam.variance))
- }
-
- validateVariance(base.info, CoVariance)
+ object varianceValidator extends VarianceValidator {
+ private def tpString(tp: Type) = tp match {
+ case ClassInfoType(parents, _, clazz) => "supertype "+intersectionType(parents, clazz.owner)
+ case _ => "type "+tp
}
-
- override def traverse(tree: Tree) {
- tree match {
- case ClassDef(_, _, _, _) | TypeDef(_, _, _, _) =>
- validateVariance(tree.symbol)
- super.traverse(tree)
- // ModuleDefs need not be considered because they have been eliminated already
- case ValDef(_, _, _, _) =>
- if (!tree.symbol.hasLocalFlag)
- validateVariance(tree.symbol)
- case DefDef(_, _, tparams, vparamss, _, _) =>
- // No variance check for object-private/protected methods/values.
- if (!tree.symbol.hasLocalFlag) {
- validateVariance(tree.symbol)
- traverseTrees(tparams)
- traverseTreess(vparamss)
- }
- case Template(_, _, _) =>
- super.traverse(tree)
- case _ =>
- }
+ override def issueVarianceError(base: Symbol, sym: Symbol, required: Variance) {
+ currentRun.currentUnit.error(base.pos,
+ s"${sym.variance} $sym occurs in $required position in ${tpString(base.info)} of $base")
}
}
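A minimal sketch of the kind of definition `issueVarianceError` reports, with the usual workaround; the error text in the comment is paraphrased from scalac's standard wording:

    class Cell[+A](private val initial: A) {
      def get: A = initial                    // A in covariant (result) position: ok
      // def set(a: A): Unit = ()             // rejected: covariant type A occurs in
      //                                      // contravariant position in type A of value a
      def replaced[B >: A](b: B): Cell[B] =   // usual workaround: a lower-bounded
        new Cell[B](b)                        // type parameter
    }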
@@ -1041,7 +925,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
val e = currentLevel.scope.lookupEntry(sym.name)
if ((e ne null) && sym == e.sym) {
var l = currentLevel
- while (l.scope != e.owner) l = l.outer;
+ while (l.scope != e.owner) l = l.outer
val symindex = symIndex(sym)
if (l.maxindex < symindex) {
l.refpos = pos
@@ -1057,7 +941,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
def apply(tp: Type) = mapOver(tp).normalize
}
- def checkImplicitViewOptionApply(pos: Position, fn: Tree, args: List[Tree]): Unit = if (settings.lint.value) (fn, args) match {
+ def checkImplicitViewOptionApply(pos: Position, fn: Tree, args: List[Tree]): Unit = if (settings.lint) (fn, args) match {
case (tap@TypeApply(fun, targs), List(view: ApplyImplicitView)) if fun.symbol == Option_apply =>
unit.warning(pos, s"Suspicious application of an implicit view (${view.fun}) in the argument to Option.apply.") // SI-6567
case _ =>
@@ -1069,6 +953,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
}
def checkSensible(pos: Position, fn: Tree, args: List[Tree]) = fn match {
case Select(qual, name @ (nme.EQ | nme.NE | nme.eq | nme.ne)) if args.length == 1 && isObjectOrAnyComparisonMethod(fn.symbol) =>
+ // Make sure the 'eq' or 'ne' method is the one in AnyRef.
def isReferenceOp = fn.symbol == Object_eq || fn.symbol == Object_ne
def isNew(tree: Tree) = tree match {
case Function(_, _)
@@ -1088,7 +973,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
// @MAT normalize for consistency in error message, otherwise only part is normalized due to use of `typeSymbol`
def typesString = normalizeAll(qual.tpe.widen)+" and "+normalizeAll(args.head.tpe.widen)
- /** Symbols which limit the warnings we can issue since they may be value types */
+ /* Symbols which limit the warnings we can issue since they may be value types */
val isMaybeValue = Set[Symbol](AnyClass, AnyRefClass, AnyValClass, ObjectClass, ComparableClass, JavaSerializableClass)
// Whether def equals(other: Any) has known behavior: it is the default
@@ -1113,21 +998,18 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
// equals method inherited from Object or a case class synthetic equals (for
// which we know the logic.)
def isWarnable = isReferenceOp || (isUsingDefaultScalaOp && isUsingWarnableEquals)
- def isEitherNullable = (NullClass.tpe <:< receiver.info) || (NullClass.tpe <:< actual.info)
+ def isEitherNullable = (NullTpe <:< receiver.info) || (NullTpe <:< actual.info)
def isEitherValueClass = actual.isDerivedValueClass || receiver.isDerivedValueClass
def isBoolean(s: Symbol) = unboxedValueClass(s) == BooleanClass
def isUnit(s: Symbol) = unboxedValueClass(s) == UnitClass
def isNumeric(s: Symbol) = isNumericValueClass(unboxedValueClass(s)) || isAnyNumber(s)
def isScalaNumber(s: Symbol) = s isSubClass ScalaNumberClass
- // test is behind a platform guard
- def isJavaNumber(s: Symbol) = !forMSIL && (s isSubClass JavaNumberClass)
+ def isJavaNumber(s: Symbol) = s isSubClass JavaNumberClass
// includes java.lang.Number if appropriate [SI-5779]
def isAnyNumber(s: Symbol) = isScalaNumber(s) || isJavaNumber(s)
def isMaybeAnyValue(s: Symbol) = isPrimitiveValueClass(unboxedValueClass(s)) || isMaybeValue(s)
// used to short-circuit unrelatedTypes check if both sides are special
def isSpecial(s: Symbol) = isMaybeAnyValue(s) || isAnyNumber(s)
- // unused
- def possibleNumericCount = onSyms(_ filter (x => isNumeric(x) || isMaybeValue(x)) size)
val nullCount = onSyms(_ filter (_ == NullClass) size)
def isNonsenseValueClassCompare = (
!haveSubclassRelationship
@@ -1174,7 +1056,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
nonSensiblyNeq()
}
else if (isNumeric(receiver)) {
- if (!isNumeric(actual) && !forMSIL)
+ if (!isNumeric(actual))
if (isUnit(actual) || isBoolean(actual) || !isMaybeValue(actual)) // 5 == "abc"
nonSensiblyNeq()
}
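A concrete instance of the `5 == "abc"` case mentioned in the comment above: the comparison typechecks (`==` is defined on `Any`), but checkSensible warns because it can only ever be false. The warning text in the comment is paraphrased:

    object SensibleCompareDemo extends App {
      val n = 5
      println(n == "abc")   // warning: comparing values of types Int and String
                            // using `==' will always yield false
      println(n == 5)       // fine: both sides are numeric
    }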
@@ -1201,7 +1083,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
// better to have lubbed and lost
def warnIfLubless(): Unit = {
val common = global.lub(List(actual.tpe, receiver.tpe))
- if (ObjectClass.tpe <:< common)
+ if (ObjectTpe <:< common)
unrelatedTypes()
}
// warn if actual has a case parent that is not same as receiver's;
@@ -1249,8 +1131,8 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
/* Convert a reference to a case factory of type `tpe` to a new of the class it produces. */
def toConstructor(pos: Position, tpe: Type): Tree = {
- var rtpe = tpe.finalResultType
- assert(rtpe.typeSymbol hasFlag CASE, tpe);
+ val rtpe = tpe.finalResultType
+ assert(rtpe.typeSymbol hasFlag CASE, tpe)
localTyper.typedOperator {
atPos(pos) {
Select(New(TypeTree(rtpe)), rtpe.typeSymbol.primaryConstructor)
@@ -1268,57 +1150,61 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
finally popLevel()
}
- /** Eliminate ModuleDefs.
- * - A top level object is replaced with their module class.
- * - An inner object is transformed into a module var, created on first access.
+ /** Eliminate ModuleDefs. In all cases the ModuleDef (carrying a module symbol) is
+ * replaced with a ClassDef (carrying the corresponding module class symbol) with additional
+ * trees created as follows:
*
- * In both cases, this transformation returns the list of replacement trees:
- * - Top level: the module class accessor definition
- * - Inner: a class definition, declaration of module var, and module var accessor
+ * 1) A statically reachable object (either top-level or nested only in objects) receives
+ * no additional trees.
+ * 2) An inner object which matches an existing member (e.g. implements an interface)
+ * receives an accessor DefDef to implement the interface.
+ * 3) An inner object otherwise receives a private ValDef which declares a module var
+ * (the field which holds the module class - it has a name like Foo$module) and an
+ * accessor for that field. The instance is created lazily, on first access.
*/
- private def eliminateModuleDefs(tree: Tree): List[Tree] = {
- val ModuleDef(mods, name, impl) = tree
- val sym = tree.symbol
- val classSym = sym.moduleClass
- val cdef = ClassDef(mods | MODULE, name.toTypeName, Nil, impl) setSymbol classSym setType NoType
-
- def findOrCreateModuleVar() = localTyper.typedPos(tree.pos) {
- // See SI-5012, SI-6712.
+ private def eliminateModuleDefs(moduleDef: Tree): List[Tree] = exitingRefchecks {
+ val ModuleDef(_, _, impl) = moduleDef
+ val module = moduleDef.symbol
+ val site = module.owner
+ val moduleName = module.name.toTermName
+ // The typer doesn't take kindly to seeing this ClassDef; we have to
+ // set NoType so it will be ignored.
+ val cdef = ClassDef(module.moduleClass, impl) setType NoType
+
+ // Create the module var unless the immediate owner is a class and
+ // the module var already exists there. See SI-5012, SI-6712.
+ def findOrCreateModuleVar() = {
val vsym = (
- if (sym.owner.isTerm) NoSymbol
- else sym.enclClass.info.decl(nme.moduleVarName(sym.name.toTermName))
+ if (site.isTerm) NoSymbol
+ else site.info decl nme.moduleVarName(moduleName)
)
- // In case we are dealing with local symbol then we already have
- // to correct error with forward reference
- if (vsym == NoSymbol) gen.mkModuleVarDef(sym)
- else ValDef(vsym)
+ vsym orElse (site newModuleVarSymbol module)
}
- def createStaticModuleAccessor() = afterRefchecks {
- val method = (
- sym.owner.newMethod(sym.name.toTermName, sym.pos, (sym.flags | STABLE) & ~MODULE)
- setInfoAndEnter NullaryMethodType(sym.moduleClass.tpe)
- )
- localTyper.typedPos(tree.pos)(gen.mkModuleAccessDef(method, sym))
+ def newInnerObject() = {
+ // Create the module var unless it is already in the module owner's scope.
+ // The lookup is on module.enclClass and not module.owner lest there be a
+ // nullary method between us and the class; see SI-5012.
+ val moduleVar = findOrCreateModuleVar()
+ val rhs = gen.newModule(module, moduleVar.tpe)
+ val body = if (site.isTrait) rhs else gen.mkAssignAndReturn(moduleVar, rhs)
+ val accessor = DefDef(module, body.changeOwner(moduleVar -> module))
+
+ ValDef(moduleVar) :: accessor :: Nil
}
- def createInnerModuleAccessor(vdef: Tree) = List(
- vdef,
- localTyper.typedPos(tree.pos) {
- val vsym = vdef.symbol
- afterRefchecks {
- val rhs = gen.newModule(sym, vsym.tpe)
- val body = if (sym.owner.isTrait) rhs else gen.mkAssignAndReturn(vsym, rhs)
- DefDef(sym, body.changeOwner(vsym -> sym))
- }
- }
- )
- transformTrees(cdef :: {
- if (!sym.isStatic)
- createInnerModuleAccessor(findOrCreateModuleVar)
- else if (sym.isOverridingSymbol)
- List(createStaticModuleAccessor())
+ def matchingInnerObject() = {
+ val newFlags = (module.flags | STABLE) & ~MODULE
+ val newInfo = NullaryMethodType(module.moduleClass.tpe)
+ val accessor = site.newMethod(moduleName, module.pos, newFlags) setInfoAndEnter newInfo
+
+ DefDef(accessor, Select(This(site), module)) :: Nil
+ }
+ val newTrees = cdef :: (
+ if (module.isStatic)
+ if (module.isOverridingSymbol) matchingInnerObject() else Nil
else
- Nil
- })
+ newInnerObject()
+ )
+ transformTrees(newTrees map localTyper.typedPos(moduleDef.pos))
}
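A hand-written analogue of the inner-object lowering described in the comment block above. The `Config$module` name mirrors the `Foo$module` convention mentioned there; class names are illustrative and details such as owner fix-ups are omitted:

    class Host {
      object Config { val retries = 3 }
    }

    // ... is rewritten by refchecks into, roughly:
    class HostLowered {
      final class Config$ { val retries = 3 }           // the module class (ClassDef is kept)
      private[this] var Config$module: Config$ = null   // the module var
      def Config: Config$ = {                           // accessor: instance created lazily,
        if (Config$module eq null)                      // on first access
          Config$module = new Config$
        Config$module
      }
    }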
def transformStat(tree: Tree, index: Int): List[Tree] = tree match {
@@ -1332,7 +1218,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
}
case ModuleDef(_, _, _) => eliminateModuleDefs(tree)
case ValDef(_, _, _, _) =>
- val tree1 @ ValDef(_, _, _, rhs) = transform(tree) // important to do before forward reference check
+ val tree1 = transform(tree) // important to do before forward reference check
if (tree1.symbol.isLazy) tree1 :: Nil
else {
val lazySym = tree.symbol.lazyAccessorOrSelf
@@ -1353,7 +1239,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
catch {
case ex: TypeError =>
unit.error(tree0.pos, ex.getMessage())
- if (settings.explaintypes.value) {
+ if (settings.explaintypes) {
val bounds = tparams map (tp => tp.info.instantiateTypeParams(tparams, argtps).bounds)
(argtps, bounds).zipped map ((targ, bound) => explainTypes(bound.lo, targ))
(argtps, bounds).zipped map ((targ, bound) => explainTypes(targ, bound.hi))
@@ -1474,7 +1360,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
// if the unnormalized type is accessible, that's good enough
if (inaccessible.isEmpty) ()
// or if the normalized type is, that's good too
- else if ((tpe ne tpe.normalize) && lessAccessibleSymsInType(tpe.normalize, member).isEmpty) ()
+ else if ((tpe ne tpe.normalize) && lessAccessibleSymsInType(tpe.dealiasWiden, member).isEmpty) ()
// otherwise warn about the inaccessible syms in the unnormalized type
else inaccessible foreach (sym => warnLessAccessible(sym, member))
}
@@ -1565,9 +1451,13 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
case TypeApply(fun, targs) =>
isClassTypeAccessible(fun)
case Select(module, apply) =>
- // Fixes SI-5626. Classes in refinement types cannot be constructed with `new`. In this case,
- // the companion class is actually not a ClassSymbol, but a reference to an abstract type.
- module.symbol.companionClass.isClass
+ ( // SI-4859 `CaseClass1().InnerCaseClass2()` must not be rewritten to `new InnerCaseClass2()`;
+ // {expr; Outer}.Inner() must not be rewritten to `new Outer.Inner()`.
+ treeInfo.isQualifierSafeToElide(module) &&
+ // SI-5626 Classes in refinement types cannot be constructed with `new`. In this case,
+ // the companion class is actually not a ClassSymbol, but a reference to an abstract type.
+ module.symbol.companionClass.isClass
+ )
}
val doTransform =
@@ -1611,14 +1501,14 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
tree
}
private def transformSelect(tree: Select): Tree = {
- val Select(qual, name) = tree
+ val Select(qual, _) = tree
val sym = tree.symbol
- /** Note: if a symbol has both @deprecated and @migration annotations and both
- * warnings are enabled, only the first one checked here will be emitted.
- * I assume that's a consequence of some code trying to avoid noise by suppressing
- * warnings after the first, but I think it'd be better if we didn't have to
- * arbitrarily choose one as more important than the other.
+ /* Note: if a symbol has both @deprecated and @migration annotations and both
+ * warnings are enabled, only the first one checked here will be emitted.
+ * I assume that's a consequence of some code trying to avoid noise by suppressing
+ * warnings after the first, but I think it'd be better if we didn't have to
+ * arbitrarily choose one as more important than the other.
*/
checkDeprecated(sym, tree.pos)
if(settings.Xmigration.value != NoScalaVersion)
@@ -1626,18 +1516,10 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
checkCompileTimeOnly(sym, tree.pos)
checkDelayedInitSelect(qual, sym, tree.pos)
- if (sym eq NoSymbol) {
- unit.warning(tree.pos, "Select node has NoSymbol! " + tree + " / " + tree.tpe)
- }
- else if (currentClass != sym.owner && sym.hasLocalFlag) {
- var o = currentClass
- var hidden = false
- while (!hidden && o != sym.owner && o != sym.owner.moduleClass && !o.isPackage) {
- hidden = o.isTerm || o.isPrivateLocal
- o = o.owner
- }
- if (!hidden) escapedPrivateLocals += sym
- }
+ if (sym eq NoSymbol)
+ devWarning("Select node has NoSymbol! " + tree + " / " + tree.tpe)
+ else if (sym.hasLocalFlag)
+ varianceValidator.checkForEscape(sym, currentClass)
def checkSuper(mix: Name) =
// term should have been eliminated by super accessors
@@ -1653,7 +1535,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
private def transformIf(tree: If): Tree = {
val If(cond, thenpart, elsepart) = tree
def unitIfEmpty(t: Tree): Tree =
- if (t == EmptyTree) Literal(Constant()).setPos(tree.pos).setType(UnitClass.tpe) else t
+ if (t == EmptyTree) Literal(Constant(())).setPos(tree.pos).setType(UnitTpe) else t
cond.tpe match {
case ConstantType(value) =>
@@ -1670,8 +1552,8 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
// on Unit, in which case we had better let it slide.
val isOk = (
sym.isGetter
- || sym.allOverriddenSymbols.exists(over => !(over.tpe.resultType =:= sym.tpe.resultType))
|| (sym.name containsName nme.DEFAULT_GETTER_STRING)
+ || sym.allOverriddenSymbols.exists(over => !(over.tpe.resultType =:= sym.tpe.resultType))
)
if (!isOk)
unit.warning(sym.pos, s"side-effecting nullary methods are discouraged: suggest defining as `def ${sym.name.decode}()` instead")
@@ -1680,10 +1562,10 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
// Verify classes extending AnyVal meet the requirements
private def checkAnyValSubclass(clazz: Symbol) = {
- if ((clazz isSubClass AnyValClass) && !isPrimitiveValueClass(clazz)) {
+ if (clazz.isDerivedValueClass) {
if (clazz.isTrait)
unit.error(clazz.pos, "Only classes (not traits) are allowed to extend AnyVal")
- else if ((clazz != AnyValClass) && clazz.hasFlag(ABSTRACT))
+ else if (clazz.hasAbstractFlag)
unit.error(clazz.pos, "`abstract' modifier cannot be used with value classes")
}
}
@@ -1706,9 +1588,9 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
case ValDef(_, _, _, _) | DefDef(_, _, _, _, _, _) =>
checkDeprecatedOvers(tree)
checkInfiniteLoop(tree.asInstanceOf[ValOrDefDef])
- if (settings.warnNullaryUnit.value)
+ if (settings.warnNullaryUnit)
checkNullaryMethodReturnType(sym)
- if (settings.warnInaccessible.value) {
+ if (settings.warnInaccessible) {
if (!sym.isConstructor && !sym.isEffectivelyFinal && !sym.isSynthetic)
checkAccessibilityOfReferencedTypes(tree)
}
@@ -1721,6 +1603,8 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
val bridges = addVarargBridges(currentOwner)
checkAllOverrides(currentOwner)
checkAnyValSubclass(currentOwner)
+ if (currentOwner.isDerivedValueClass)
+ currentOwner.primaryConstructor makeNotPrivate NoSymbol // SI-6601, must be done *after* pickler!
if (bridges.nonEmpty) deriveTemplate(tree)(_ ::: bridges) else tree
case dc@TypeTreeWithDeferredRefCheck() => abort("adapt should have turned dc: TypeTreeWithDeferredRefCheck into tpt: TypeTree, with tpt.original == dc")
@@ -1758,7 +1642,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
enterReference(tree.pos, tpt.tpe.typeSymbol)
tree
- case Typed(_, Ident(tpnme.WILDCARD_STAR)) if !isRepeatedParamArg(tree) =>
+ case treeInfo.WildcardStarArg(_) if !isRepeatedParamArg(tree) =>
unit.error(tree.pos, "no `: _*' annotation allowed here\n"+
"(such annotations are only allowed in arguments to *-parameters)")
tree
@@ -1802,14 +1686,14 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
result match {
case ClassDef(_, _, _, _)
| TypeDef(_, _, _, _) =>
- if (result.symbol.isLocal || result.symbol.owner.isPackageClass)
+ if (result.symbol.isLocal || result.symbol.isTopLevel)
varianceValidator.traverse(result)
case _ =>
}
result
} catch {
case ex: TypeError =>
- if (settings.debug.value) ex.printStackTrace()
+ if (settings.debug) ex.printStackTrace()
unit.error(tree.pos, ex.getMessage())
tree
} finally {
diff --git a/src/compiler/scala/tools/nsc/typechecker/StdAttachments.scala b/src/compiler/scala/tools/nsc/typechecker/StdAttachments.scala
index 64c5b41638..c616ded704 100644
--- a/src/compiler/scala/tools/nsc/typechecker/StdAttachments.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/StdAttachments.scala
@@ -4,7 +4,154 @@ package typechecker
trait StdAttachments {
self: Analyzer =>
- type UnaffiliatedMacroContext = scala.reflect.macros.runtime.Context
+ import global._
+
+ /** Carries information necessary to expand the host tree.
+ * At times we need to store this info, because macro expansion can be delayed until its targs are inferred.
+ * After a macro application has been successfully expanded, this attachment is destroyed.
+ */
+ type UnaffiliatedMacroContext = scala.reflect.macros.contexts.Context
type MacroContext = UnaffiliatedMacroContext { val universe: self.global.type }
case class MacroRuntimeAttachment(delayed: Boolean, typerContext: Context, macroContext: Option[MacroContext])
+
+ /** Scratchpad for the macro expander, which is used to store all intermediate data except the details about the runtime.
+ */
+ case class MacroExpanderAttachment(original: Tree, desugared: Tree, role: MacroRole)
+
+ /** Loads underlying MacroExpanderAttachment from a macro expandee or returns a default value for that attachment.
+ */
+ def macroExpanderAttachment(tree: Tree): MacroExpanderAttachment =
+ tree.attachments.get[MacroExpanderAttachment] getOrElse {
+ tree match {
+ case Apply(fn, _) if tree.isInstanceOf[ApplyToImplicitArgs] => macroExpanderAttachment(fn)
+ case _ => MacroExpanderAttachment(tree, EmptyTree, APPLY_ROLE)
+ }
+ }
+
+ /** After macro expansion is completed, links the expandee and the expansion result
+ * by annotating them both with a `MacroExpansionAttachment`.
+ */
+ def linkExpandeeAndDesugared(expandee: Tree, desugared: Tree, role: MacroRole): Unit = {
+ val metadata = MacroExpanderAttachment(expandee, desugared, role)
+ expandee updateAttachment metadata
+ desugared updateAttachment metadata
+ }
+
+ /** Is added by the macro engine to originals and results of macro expansions.
+ * Stores the original expandee as it entered the `macroExpand` function.
+ */
+ case class MacroExpansionAttachment(expandee: Tree, expanded: Any)
+
+ /** Determines whether the target is either an original or a result of a macro expansion.
+ * The parameter is of type `Any`, because macros can expand both into trees and into annotations.
+ */
+ def hasMacroExpansionAttachment(any: Any): Boolean = any match {
+ case tree: Tree => tree.attachments.get[MacroExpansionAttachment].isDefined
+ case _ => false
+ }
+
+ /** After macro expansion is completed, links the expandee and the expansion result by annotating them both with a `MacroExpansionAttachment`.
+ * The `expanded` parameter is of type `Any`, because macros can expand both into trees and into annotations.
+ */
+ def linkExpandeeAndExpanded(expandee: Tree, expanded: Any): Unit = {
+ val metadata = MacroExpansionAttachment(expandee, expanded)
+ expandee updateAttachment metadata
+ expanded match {
+ case expanded: Tree => expanded updateAttachment metadata
+ case _ => // do nothing
+ }
+ }
+
+ /** Checks whether there is any tree resulting from a macro expansion and associated with the current tree.
+ */
+ object ExpandedIntoTree {
+ def unapply(tree: Tree): Option[Tree] = tree.attachments.get[MacroExpansionAttachment] match {
+ case Some(MacroExpansionAttachment(_, tree: Tree)) => Some(tree)
+ case _ => None
+ }
+ }
+
+ /** When present, suppresses macro expansion for the host.
+ * This is occasionally necessary, e.g. to prohibit eta-expansion of macros.
+ *
+ * Does not affect expandability of child nodes, there's context.withMacrosDisabled for that
+ * (but think thrice before using that API - see the discussion at https://github.com/scala/scala/pull/1639).
+ */
+ case object SuppressMacroExpansionAttachment
+
+ /** Suppresses macro expansion of the tree by putting SuppressMacroExpansionAttachment on it.
+ */
+ def suppressMacroExpansion(tree: Tree) = tree.updateAttachment(SuppressMacroExpansionAttachment)
+
+ /** Unsuppresses macro expansion of the tree by removing SuppressMacroExpansionAttachment from it and its children.
+ */
+ def unsuppressMacroExpansion(tree: Tree): Tree = {
+ tree.removeAttachment[SuppressMacroExpansionAttachment.type]
+ tree match {
+ // see the comment to `isMacroExpansionSuppressed` to learn why we need
+ // a special traversal strategy here
+ case Apply(fn, _) => unsuppressMacroExpansion(fn)
+ case TypeApply(fn, _) => unsuppressMacroExpansion(fn)
+ case _ => // do nothing
+ }
+ tree
+ }
+
+ /** Determines whether a tree should not be expanded, because someone has put SuppressMacroExpansionAttachment on it or one of its children.
+ */
+ def isMacroExpansionSuppressed(tree: Tree): Boolean =
+ if (tree.attachments.get[SuppressMacroExpansionAttachment.type].isDefined) true
+ else tree match {
+ // we have to account for the fact that during typechecking an expandee might become wrapped,
+ // i.e. surrounded by an inferred implicit argument application or by an inferred type argument application.
+ // in that case the expandee itself will no longer be suppressed and we need to look at the core
+ case Apply(fn, _) => isMacroExpansionSuppressed(fn)
+ case TypeApply(fn, _) => isMacroExpansionSuppressed(fn)
+ case _ => false
+ }
+
+ /** After being synthesized by the parser, primary constructors aren't fully baked yet.
+ * A call to super in such constructors is just a fill-me-in-later dummy resolved later
+ * by `parentTypes`. This attachment coordinates `parentTypes` and `typedTemplate` and
+ * allows them to complete the synthesis.
+ */
+ case class SuperArgsAttachment(argss: List[List[Tree]])
+
+ /** Convenience method for `SuperArgsAttachment`.
+ * Compared with `MacroRuntimeAttachment` this attachment has a different usage pattern,
+ * so it really benefits from a dedicated extractor.
+ */
+ def superArgs(tree: Tree): Option[List[List[Tree]]] =
+ tree.attachments.get[SuperArgsAttachment] collect { case SuperArgsAttachment(argss) => argss }
+
+ /** Determines whether the given tree has an associated SuperArgsAttachment.
+ */
+ def hasSuperArgs(tree: Tree): Boolean = superArgs(tree).nonEmpty
+
+ /** @see markMacroImplRef
+ */
+ case object MacroImplRefAttachment
+
+ /** Marks the tree as a macro impl reference, which is a naked reference to a method.
+ *
+ * This is necessary for typechecking macro impl references (see `DefaultMacroCompiler.defaultResolveMacroImpl`),
+ * because otherwise typing a naked reference will result in the "follow this method with `_' if you want to
+ * treat it as a partially applied function" errors.
+ *
+ * This mark suppresses adapt except for when the annottee is a macro application.
+ */
+ def markMacroImplRef(tree: Tree): Tree = tree.updateAttachment(MacroImplRefAttachment)
+
+ /** Unmarks the tree as a macro impl reference (see `markMacroImplRef` for more information).
+ *
+ * This is necessary when a tree that was previously deemed to be a macro impl reference,
+ * typechecks to be a macro application. Then we need to unmark it, expand it and try to treat
+ * its expansion as a macro impl reference.
+ */
+ def unmarkMacroImplRef(tree: Tree): Tree = tree.removeAttachment[MacroImplRefAttachment.type]
+
+ /** Determines whether a tree should or should not be adapted,
+ * because someone has put MacroImplRefAttachment on it.
+ */
+ def isMacroImplRef(tree: Tree): Boolean = tree.attachments.get[MacroImplRefAttachment.type].isDefined
} \ No newline at end of file
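
The StdAttachments additions above all follow one pattern: a marker or payload object is attached to a tree with `updateAttachment`, queried with `attachments.get[T]`, and removed with `removeAttachment`; the suppression check additionally unwraps `Apply`/`TypeApply` because typechecking may wrap the expandee. The following standalone Scala sketch models that pattern with hypothetical stand-in types (`Tree`, `Ident`, `Apply`, `TypeApply`, `Suppressed` are simplified inventions, not scalac's classes):

import scala.reflect.ClassTag

// Minimal, standalone model of attachment-based bookkeeping (hypothetical
// types; not scalac's Tree/Attachments). Each tree carries a type-indexed
// map, and suppression is just the presence of a marker attachment.
object SuppressDemo extends App {
  sealed trait Tree {
    private var atts = Map.empty[Class[_], Any]
    def updateAttachment[A: ClassTag](a: A): this.type = {
      atts += (implicitly[ClassTag[A]].runtimeClass -> a); this
    }
    def attachment[A: ClassTag]: Option[A] =
      atts.get(implicitly[ClassTag[A]].runtimeClass).map(_.asInstanceOf[A])
  }
  case class Ident(name: String) extends Tree
  case class Apply(fn: Tree, args: List[Tree]) extends Tree
  case class TypeApply(fn: Tree, targs: List[Tree]) extends Tree

  case class Suppressed()  // plays the role of SuppressMacroExpansionAttachment

  def suppress(t: Tree): Tree = t.updateAttachment(Suppressed())

  // Mirrors isMacroExpansionSuppressed: the marker may sit on the core of a
  // wrapped application, so the check unwraps Apply/TypeApply.
  def isSuppressed(t: Tree): Boolean =
    t.attachment[Suppressed].isDefined || (t match {
      case Apply(fn, _)     => isSuppressed(fn)
      case TypeApply(fn, _) => isSuppressed(fn)
      case _                => false
    })

  val callee = suppress(Ident("someMacro"))
  println(isSuppressed(Apply(TypeApply(callee, Nil), Nil)))  // true
}
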
diff --git a/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala b/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala
index bad49385aa..6933b10a0a 100644
--- a/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala
@@ -1,9 +1,11 @@
+
/* NSC -- new Scala compiler
* Copyright 2005-2013 LAMP/EPFL
* @author Martin Odersky
*/
-package scala.tools.nsc
+package scala
+package tools.nsc
package typechecker
import scala.collection.{ mutable, immutable }
@@ -28,7 +30,7 @@ import symtab.Flags._
*/
abstract class SuperAccessors extends transform.Transform with transform.TypingTransformers {
import global._
- import definitions.{ UnitClass, ObjectClass, isRepeatedParamType, isByNameParamType, Any_asInstanceOf }
+ import definitions._
import analyzer.{ restrictionError }
/** the following two members override abstract members in Transform */
@@ -60,11 +62,11 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
val clazz = qual.symbol
val supername = nme.superName(name)
val superAcc = clazz.info.decl(supername).suchThat(_.alias == sym) orElse {
- debuglog("add super acc " + sym + sym.locationString + " to `" + clazz);//debug
- val acc = clazz.newMethod(supername, sel.pos, SUPERACCESSOR | PRIVATE) setAlias sym
+ debuglog(s"add super acc ${sym.fullLocationString} to $clazz")
+ val acc = clazz.newMethod(supername, sel.pos, SUPERACCESSOR | PRIVATE | ARTIFACT) setAlias sym
val tpe = clazz.thisType memberType sym match {
- case t if sym.isModule && !sym.isMethod => NullaryMethodType(t)
- case t => t
+ case t if sym.isModuleNotMethod => NullaryMethodType(t)
+ case t => t
}
acc setInfoAndEnter (tpe cloneInfo acc)
// Diagnostic for SI-7091
@@ -108,11 +110,11 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
val clazz = sup.symbol
if (sym.isDeferred) {
- val member = sym.overridingSymbol(clazz);
+ val member = sym.overridingSymbol(clazz)
if (mix != tpnme.EMPTY || member == NoSymbol ||
!(member.isAbstractOverride && member.isIncompleteIn(clazz)))
unit.error(sel.pos, ""+sym.fullLocationString+" is accessed from super. It may not be abstract "+
- "unless it is overridden by a member declared `abstract' and `override'");
+ "unless it is overridden by a member declared `abstract' and `override'")
} else if (mix == tpnme.EMPTY && !sym.owner.isTrait){
// SI-4989 Check if an intermediate class between `clazz` and `sym.owner` redeclares the method as abstract.
val intermediateClasses = clazz.info.baseClasses.tail.takeWhile(_ != sym.owner)
@@ -165,18 +167,6 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
log("Expanded '%s' to '%s' in %s".format(savedName, s.name, sym))
}
}
- if (settings.verbose.value && forScaladoc && !sym.isAnonymousClass) {
- println("========== scaladoc of "+sym+" =============================")
- println(toJavaDoc(expandedDocComment(sym)))
- for (member <- sym.info.members) {
- println(member+":"+sym.thisType.memberInfo(member)+"\n"+
- toJavaDoc(expandedDocComment(member, sym)))
- for ((useCase, comment, pos) <- useCases(member, sym)) {
- println("usecase "+useCase+":"+useCase.info)
- println(toJavaDoc(comment))
- }
- }
- }
super.transform(tree)
}
transformClassDef
@@ -203,7 +193,7 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
transformTemplate
case TypeApply(sel @ Select(This(_), name), args) =>
- mayNeedProtectedAccessor(sel, args, false)
+ mayNeedProtectedAccessor(sel, args, goToSuper = false)
// set a flag for all type parameters with `@specialized` annotation so it can be pickled
case typeDef: TypeDef if typeDef.symbol.deSkolemize.hasAnnotation(definitions.SpecializedClass) =>
@@ -231,7 +221,7 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
// also exists in a superclass, because they may be surprised
// to find out that a constructor parameter will shadow a
// field. See SI-4762.
- if (settings.lint.value) {
+ if (settings.lint) {
if (sym.isPrivateLocal && sym.paramss.isEmpty) {
qual.symbol.ancestors foreach { parent =>
parent.info.decls filterNot (x => x.isPrivate || x.hasLocalFlag) foreach { m2 =>
@@ -260,9 +250,9 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
Select(Super(qual, tpnme.EMPTY) setPos qual.pos, sym.alias)
}).asInstanceOf[Select]
debuglog("alias replacement: " + tree + " ==> " + result); //debug
- localTyper.typed(gen.maybeMkAsInstanceOf(transformSuperSelect(result), sym.tpe, sym.alias.tpe, true))
+ localTyper.typed(gen.maybeMkAsInstanceOf(transformSuperSelect(result), sym.tpe, sym.alias.tpe, beforeRefChecks = true))
} else {
- /**
+ /*
* A trait which extends a class and accesses a protected member
* of that class cannot implement the necessary accessor method
* because its implementation is in an implementation class (e.g.
@@ -281,18 +271,19 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
&& !sym.owner.isTrait
&& (sym.owner.enclosingPackageClass != currentClass.enclosingPackageClass)
&& (qual.symbol.info.member(sym.name) ne NoSymbol)
- && !needsProtectedAccessor(sym, tree.pos))
+ && !needsProtectedAccessor(sym, tree.pos)
+ )
if (shouldEnsureAccessor) {
log("Ensuring accessor for call to protected " + sym.fullLocationString + " from " + currentClass)
ensureAccessor(sel)
}
else
- mayNeedProtectedAccessor(sel, EmptyTree.asList, false)
+ mayNeedProtectedAccessor(sel, EmptyTree.asList, goToSuper = false)
}
case Super(_, mix) =>
if (sym.isValue && !sym.isMethod || sym.hasAccessorFlag) {
- if (!settings.overrideVars.value)
+ if (!settings.overrideVars)
unit.error(tree.pos, "super may be not be used on " + sym.accessedOrSelf)
} else if (isDisallowed(sym)) {
unit.error(tree.pos, "super not allowed here: use this." + name.decode + " instead")
@@ -300,7 +291,7 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
transformSuperSelect(sel)
case _ =>
- mayNeedProtectedAccessor(sel, EmptyTree.asList, true)
+ mayNeedProtectedAccessor(sel, EmptyTree.asList, goToSuper = true)
}
}
transformSelect
@@ -309,7 +300,7 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
treeCopy.DefDef(tree, mods, name, tparams, vparamss, tpt, withInvalidOwner(transform(rhs)))
case TypeApply(sel @ Select(qual, name), args) =>
- mayNeedProtectedAccessor(sel, args, true)
+ mayNeedProtectedAccessor(sel, args, goToSuper = true)
case Assign(lhs @ Select(qual, name), rhs) =>
def transformAssign = {
@@ -317,8 +308,8 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
lhs.symbol.isJavaDefined &&
needsProtectedAccessor(lhs.symbol, tree.pos)) {
debuglog("Adding protected setter for " + tree)
- val setter = makeSetter(lhs);
- debuglog("Replaced " + tree + " with " + setter);
+ val setter = makeSetter(lhs)
+ debuglog("Replaced " + tree + " with " + setter)
transform(localTyper.typed(Apply(setter, List(qual, rhs))))
} else
super.transform(tree)
@@ -377,14 +368,14 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
* typed.
*/
private def makeAccessor(tree: Select, targs: List[Tree]): Tree = {
- val Select(qual, name) = tree
+ val Select(qual, _) = tree
val sym = tree.symbol
val clazz = hostForAccessorOf(sym, currentClass)
assert(clazz != NoSymbol, sym)
debuglog("Decided for host class: " + clazz)
- val accName = nme.protName(sym.originalName)
+ val accName = nme.protName(sym.unexpandedName)
val hasArgs = sym.tpe.paramSectionCount > 0
val memberType = refChecks.toScalaRepeatedParam(sym.tpe) // fix for #2413
@@ -402,7 +393,7 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
}
val protAcc = clazz.info.decl(accName).suchThat(s => s == NoSymbol || s.tpe =:= accType(s)) orElse {
- val newAcc = clazz.newMethod(nme.protName(sym.originalName), tree.pos)
+ val newAcc = clazz.newMethod(nme.protName(sym.unexpandedName), tree.pos, newFlags = ARTIFACT)
newAcc setInfoAndEnter accType(newAcc)
val code = DefDef(newAcc, {
@@ -413,7 +404,7 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
args.foldLeft(base)(Apply(_, _))
})
- debuglog("" + code)
+ debuglog("created protected accessor: " + code)
storeAccessorDefinition(clazz, code)
newAcc
}
@@ -425,7 +416,7 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
case _ => mkApply(TypeApply(selection, targs))
}
}
- debuglog("Replaced " + tree + " with " + res)
+ debuglog(s"Replaced $tree with $res")
if (hasArgs) localTyper.typedOperator(res) else localTyper.typed(res)
}
@@ -462,12 +453,12 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
assert(clazz != NoSymbol, field)
debuglog("Decided for host class: " + clazz)
- val accName = nme.protSetterName(field.originalName)
+ val accName = nme.protSetterName(field.unexpandedName)
val protectedAccessor = clazz.info decl accName orElse {
- val protAcc = clazz.newMethod(accName, field.pos)
+ val protAcc = clazz.newMethod(accName, field.pos, newFlags = ARTIFACT)
val paramTypes = List(clazz.typeOfThis, field.tpe)
val params = protAcc newSyntheticValueParams paramTypes
- val accessorType = MethodType(params, UnitClass.tpe)
+ val accessorType = MethodType(params, UnitTpe)
protAcc setInfoAndEnter accessorType
val obj :: value :: Nil = params
@@ -496,9 +487,6 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
def accessibleThroughSubclassing =
validCurrentOwner && clazz.thisSym.isSubClass(sym.owner) && !clazz.isTrait
- def packageAccessBoundry(sym: Symbol) =
- sym.accessBoundary(sym.enclosingPackageClass)
-
val isCandidate = (
sym.isProtected
&& sym.isJavaDefined
diff --git a/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala b/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala
index 242eb9c9fe..61295b5abd 100644
--- a/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala
@@ -6,10 +6,10 @@
package scala.tools.nsc
package typechecker
-import symtab.Flags
+import scala.collection.{ mutable, immutable }
import symtab.Flags._
-import scala.collection.mutable
import scala.collection.mutable.ListBuffer
+import scala.language.postfixOps
/** Synthetic method implementations for case classes and case objects.
*
@@ -94,13 +94,13 @@ trait SyntheticMethods extends ast.TreeDSL {
// like Tags and Arrays which are not robust and infer things
// which they shouldn't.
val accessorLub = (
- if (opt.experimental) {
- global.weakLub(accessors map (_.tpe.finalResultType))._1 match {
+ if (settings.Xexperimental) {
+ global.weakLub(accessors map (_.tpe.finalResultType)) match {
case RefinedType(parents, decls) if !decls.isEmpty => intersectionType(parents)
case tp => tp
}
}
- else AnyClass.tpe
+ else AnyTpe
)
def forwardToRuntime(method: Symbol): Tree =
@@ -121,46 +121,36 @@ trait SyntheticMethods extends ast.TreeDSL {
(m0 ne meth) && !m0.isDeferred && !m0.isSynthetic && (m0.owner != AnyValClass) && (typeInClazz(m0) matches typeInClazz(meth))
}
}
- def readConstantValue[T](name: String, default: T = null.asInstanceOf[T]): T = {
- clazzMember(newTermName(name)).info match {
- case NullaryMethodType(ConstantType(Constant(value))) => value.asInstanceOf[T]
- case _ => default
- }
- }
def productIteratorMethod = {
createMethod(nme.productIterator, iteratorOfType(accessorLub))(_ =>
gen.mkMethodCall(ScalaRunTimeModule, nme.typedProductIterator, List(accessorLub), List(mkThis))
)
}
- def projectionMethod(accessor: Symbol, num: Int) = {
- createMethod(nme.productAccessorName(num), accessor.tpe.resultType)(_ => REF(accessor))
- }
- /** Common code for productElement and (currently disabled) productElementName
- */
+ /* Common code for productElement and (currently disabled) productElementName */
def perElementMethod(name: Name, returnType: Type)(caseFn: Symbol => Tree): Tree =
createSwitchMethod(name, accessors.indices, returnType)(idx => caseFn(accessors(idx)))
- // def productElementNameMethod = perElementMethod(nme.productElementName, StringClass.tpe)(x => LIT(x.name.toString))
+ // def productElementNameMethod = perElementMethod(nme.productElementName, StringTpe)(x => LIT(x.name.toString))
var syntheticCanEqual = false
- /** The canEqual method for case classes.
- * def canEqual(that: Any) = that.isInstanceOf[This]
+ /* The canEqual method for case classes.
+ * def canEqual(that: Any) = that.isInstanceOf[This]
*/
def canEqualMethod: Tree = {
syntheticCanEqual = true
- createMethod(nme.canEqual_, List(AnyClass.tpe), BooleanClass.tpe)(m =>
+ createMethod(nme.canEqual_, List(AnyTpe), BooleanTpe)(m =>
Ident(m.firstParam) IS_OBJ classExistentialType(clazz))
}
- /** that match { case _: this.C => true ; case _ => false }
- * where `that` is the given method's first parameter.
+ /* that match { case _: this.C => true ; case _ => false }
+ * where `that` is the given method's first parameter.
*
- * An isInstanceOf test is insufficient because it has weaker
- * requirements than a pattern match. Given an inner class Foo and
- * two different instantiations of the container, an x.Foo and and a y.Foo
- * are both .isInstanceOf[Foo], but the one does not match as the other.
+ * An isInstanceOf test is insufficient because it has weaker
+ * requirements than a pattern match. Given an inner class Foo and
+ * two different instantiations of the container, an x.Foo and a y.Foo
+ * are both .isInstanceOf[Foo], but the one does not match as the other.
*/
def thatTest(eqmeth: Symbol): Tree = {
Match(
@@ -172,19 +162,19 @@ trait SyntheticMethods extends ast.TreeDSL {
)
}
- /** (that.asInstanceOf[this.C])
- * where that is the given methods first parameter.
+ /* (that.asInstanceOf[this.C])
+ * where that is the given method's first parameter.
*/
def thatCast(eqmeth: Symbol): Tree =
gen.mkCast(Ident(eqmeth.firstParam), clazz.tpe)
- /** The equality method core for case classes and inline clases.
- * 1+ args:
- * (that.isInstanceOf[this.C]) && {
- * val x$1 = that.asInstanceOf[this.C]
- * (this.arg_1 == x$1.arg_1) && (this.arg_2 == x$1.arg_2) && ... && (x$1 canEqual this)
- * }
- * Drop canBuildFrom part if class is final and canBuildFrom is synthesized
+ /* The equality method core for case classes and inline classes.
+ * 1+ args:
+ * (that.isInstanceOf[this.C]) && {
+ * val x$1 = that.asInstanceOf[this.C]
+ * (this.arg_1 == x$1.arg_1) && (this.arg_2 == x$1.arg_2) && ... && (x$1 canEqual this)
+ * }
+ * Drop the canEqual part if the class is final and canEqual is synthesized
*/
def equalsCore(eqmeth: Symbol, accessors: List[Symbol]) = {
val otherName = context.unit.freshTermName(clazz.name + "$")
@@ -199,18 +189,18 @@ trait SyntheticMethods extends ast.TreeDSL {
)
}
- /** The equality method for case classes.
- * 0 args:
- * def equals(that: Any) = that.isInstanceOf[this.C] && that.asInstanceOf[this.C].canEqual(this)
- * 1+ args:
- * def equals(that: Any) = (this eq that.asInstanceOf[AnyRef]) || {
- * (that.isInstanceOf[this.C]) && {
- * val x$1 = that.asInstanceOf[this.C]
- * (this.arg_1 == x$1.arg_1) && (this.arg_2 == x$1.arg_2) && ... && (x$1 canEqual this)
- * }
- * }
+ /* The equality method for case classes.
+ * 0 args:
+ * def equals(that: Any) = that.isInstanceOf[this.C] && that.asInstanceOf[this.C].canEqual(this)
+ * 1+ args:
+ * def equals(that: Any) = (this eq that.asInstanceOf[AnyRef]) || {
+ * (that.isInstanceOf[this.C]) && {
+ * val x$1 = that.asInstanceOf[this.C]
+ * (this.arg_1 == x$1.arg_1) && (this.arg_2 == x$1.arg_2) && ... && (x$1 canEqual this)
+ * }
+ * }
*/
- def equalsCaseClassMethod: Tree = createMethod(nme.equals_, List(AnyClass.tpe), BooleanClass.tpe) { m =>
+ def equalsCaseClassMethod: Tree = createMethod(nme.equals_, List(AnyTpe), BooleanTpe) { m =>
if (accessors.isEmpty)
if (clazz.isFinal) thatTest(m)
else thatTest(m) AND ((thatCast(m) DOT nme.canEqual_)(mkThis))
@@ -218,30 +208,35 @@ trait SyntheticMethods extends ast.TreeDSL {
(mkThis ANY_EQ Ident(m.firstParam)) OR equalsCore(m, accessors)
}
- /** The equality method for value classes
- * def equals(that: Any) = (this.asInstanceOf[AnyRef]) eq that.asInstanceOf[AnyRef]) || {
- * (that.isInstanceOf[this.C]) && {
- * val x$1 = that.asInstanceOf[this.C]
- * (this.underlying == that.underlying
+ /* The equality method for value classes
+ * def equals(that: Any) = (this.asInstanceOf[AnyRef] eq that.asInstanceOf[AnyRef]) || {
+ * (that.isInstanceOf[this.C]) && {
+ * val x$1 = that.asInstanceOf[this.C]
+ * (this.underlying == that.underlying
*/
- def equalsDerivedValueClassMethod: Tree = createMethod(nme.equals_, List(AnyClass.tpe), BooleanClass.tpe) { m =>
+ def equalsDerivedValueClassMethod: Tree = createMethod(nme.equals_, List(AnyTpe), BooleanTpe) { m =>
equalsCore(m, List(clazz.derivedValueClassUnbox))
}
- /** The hashcode method for value classes
+ /* The hashcode method for value classes
* def hashCode(): Int = this.underlying.hashCode
*/
- def hashCodeDerivedValueClassMethod: Tree = createMethod(nme.hashCode_, Nil, IntClass.tpe) { m =>
+ def hashCodeDerivedValueClassMethod: Tree = createMethod(nme.hashCode_, Nil, IntTpe) { m =>
Select(mkThisSelect(clazz.derivedValueClassUnbox), nme.hashCode_)
}
- /** The _1, _2, etc. methods to implement ProductN, disabled
- * until we figure out how to introduce ProductN without cycles.
+ /* The _1, _2, etc. methods to implement ProductN, disabled
+ * until we figure out how to introduce ProductN without cycles.
*/
- def productNMethods = {
+ /****
+ def productNMethods = {
val accs = accessors.toIndexedSeq
1 to arity map (num => productProj(arity, num) -> (() => projectionMethod(accs(num - 1), num)))
}
+ def projectionMethod(accessor: Symbol, num: Int) = {
+ createMethod(nme.productAccessorName(num), accessor.tpe.resultType)(_ => REF(accessor))
+ }
+ ****/
// methods for both classes and objects
def productMethods = {
@@ -259,19 +254,20 @@ trait SyntheticMethods extends ast.TreeDSL {
def hashcodeImplementation(sym: Symbol): Tree = {
sym.tpe.finalResultType.typeSymbol match {
- case UnitClass | NullClass => Literal(Constant(0))
- case BooleanClass => If(Ident(sym), Literal(Constant(1231)), Literal(Constant(1237)))
- case IntClass | ShortClass | ByteClass | CharClass => Ident(sym)
- case LongClass => callStaticsMethod("longHash")(Ident(sym))
- case DoubleClass => callStaticsMethod("doubleHash")(Ident(sym))
- case FloatClass => callStaticsMethod("floatHash")(Ident(sym))
- case _ => callStaticsMethod("anyHash")(Ident(sym))
+ case UnitClass | NullClass => Literal(Constant(0))
+ case BooleanClass => If(Ident(sym), Literal(Constant(1231)), Literal(Constant(1237)))
+ case IntClass => Ident(sym)
+ case ShortClass | ByteClass | CharClass => Select(Ident(sym), nme.toInt)
+ case LongClass => callStaticsMethod("longHash")(Ident(sym))
+ case DoubleClass => callStaticsMethod("doubleHash")(Ident(sym))
+ case FloatClass => callStaticsMethod("floatHash")(Ident(sym))
+ case _ => callStaticsMethod("anyHash")(Ident(sym))
}
}
def specializedHashcode = {
- createMethod(nme.hashCode_, Nil, IntClass.tpe) { m =>
- val accumulator = m.newVariable(newTermName("acc"), m.pos, SYNTHETIC) setInfo IntClass.tpe
+ createMethod(nme.hashCode_, Nil, IntTpe) { m =>
+ val accumulator = m.newVariable(newTermName("acc"), m.pos, SYNTHETIC) setInfo IntTpe
val valdef = ValDef(accumulator, Literal(Constant(0xcafebabe)))
val mixes = accessors map (acc =>
Assign(
@@ -313,11 +309,11 @@ trait SyntheticMethods extends ast.TreeDSL {
// Object_equals -> (() => createMethod(Object_equals)(m => This(clazz) ANY_EQ Ident(m.firstParam)))
)
- /** If you serialize a singleton and then deserialize it twice,
- * you will have two instances of your singleton unless you implement
- * readResolve. Here it is implemented for all objects which have
- * no implementation and which are marked serializable (which is true
- * for all case objects.)
+ /* If you serialize a singleton and then deserialize it twice,
+ * you will have two instances of your singleton unless you implement
+ * readResolve. Here it is implemented for all objects which have
+ * no implementation and which are marked serializable (which is true
+ * for all case objects.)
*/
def needsReadResolve = (
clazz.isModuleClass
@@ -335,18 +331,20 @@ trait SyntheticMethods extends ast.TreeDSL {
else Nil
)
- /** Always generate overrides for equals and hashCode in value classes,
- * so they can appear in universal traits without breaking value semantics.
+ /* Always generate overrides for equals and hashCode in value classes,
+ * so they can appear in universal traits without breaking value semantics.
*/
def impls = {
def shouldGenerate(m: Symbol) = {
!hasOverridingImplementation(m) || {
clazz.isDerivedValueClass && (m == Any_hashCode || m == Any_equals) && {
- if (settings.lint.value) {
- (clazz.info nonPrivateMember m.name) filter (m => (m.owner != AnyClass) && (m.owner != clazz) && !m.isDeferred) andAlso { m =>
- currentUnit.warning(clazz.pos, s"Implementation of ${m.name} inherited from ${m.owner} overridden in $clazz to enforce value class semantics")
- }
- }
+ // Without a means to suppress this warning, I've thought better of it.
+ //
+ // if (settings.lint) {
+ // (clazz.info nonPrivateMember m.name) filter (m => (m.owner != AnyClass) && (m.owner != clazz) && !m.isDeferred) andAlso { m =>
+ // currentUnit.warning(clazz.pos, s"Implementation of ${m.name} inherited from ${m.owner} overridden in $clazz to enforce value class semantics")
+ // }
+ // }
true
}
}
@@ -359,7 +357,7 @@ trait SyntheticMethods extends ast.TreeDSL {
// This method should be generated as private, but apparently if it is, then
// it is name mangled afterward. (Wonder why that is.) So it's only protected.
// For sure special methods like "readResolve" should not be mangled.
- List(createMethod(nme.readResolve, Nil, ObjectClass.tpe)(m => { m setFlag PRIVATE ; REF(clazz.sourceModule) }))
+ List(createMethod(nme.readResolve, Nil, ObjectTpe)(m => { m setFlag PRIVATE ; REF(clazz.sourceModule) }))
}
else Nil
)
@@ -368,11 +366,11 @@ trait SyntheticMethods extends ast.TreeDSL {
catch { case _: TypeError if reporter.hasErrors => Nil }
}
- /** If this case class has any less than public accessors,
- * adds new accessors at the correct locations to preserve ordering.
- * Note that this must be done before the other method synthesis
- * because synthesized methods need refer to the new symbols.
- * Care must also be taken to preserve the case accessor order.
+ /* If this case class has any less than public accessors,
+ * adds new accessors at the correct locations to preserve ordering.
+ * Note that this must be done before the other method synthesis
+ * because synthesized methods need to refer to the new symbols.
+ * Care must also be taken to preserve the case accessor order.
*/
def caseTemplateBody(): List[Tree] = {
val lb = ListBuffer[Tree]()
diff --git a/src/compiler/scala/tools/nsc/typechecker/Tags.scala b/src/compiler/scala/tools/nsc/typechecker/Tags.scala
index d82fbd7c77..32a66aa4dd 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Tags.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Tags.scala
@@ -10,16 +10,16 @@ trait Tags {
trait Tag {
self: Typer =>
- private def resolveTag(pos: Position, taggedTp: Type, allowMaterialization: Boolean) = beforeTyper {
+ private def resolveTag(pos: Position, taggedTp: Type, allowMaterialization: Boolean) = enteringTyper {
def wrapper (tree: => Tree): Tree = if (allowMaterialization) (context.withMacrosEnabled[Tree](tree)) else (context.withMacrosDisabled[Tree](tree))
wrapper(inferImplicit(
EmptyTree,
taggedTp,
- /*reportAmbiguous =*/ true,
- /*isView =*/ false,
- /*context =*/ context,
- /*saveAmbiguousDivergent =*/ true,
- /*pos =*/ pos
+ reportAmbiguous = true,
+ isView = false,
+ context,
+ saveAmbiguousDivergent = true,
+ pos
).tree)
}
@@ -30,7 +30,7 @@ trait Tags {
* However we found out that we don't really need this concept, so it got removed.
*
* @param pos Position for error reporting. Please, provide meaningful value.
- * @param tp Type we're looking a ClassTag for, e.g. resolveClassTag(pos, IntClass.tpe) will look for ClassTag[Int].
+ * @param tp Type we're looking a ClassTag for, e.g. resolveClassTag(pos, IntTpe) will look for ClassTag[Int].
* @param allowMaterialization If true (default) then the resolver is allowed to launch materialization macros when there's no class tag in scope.
* If false then materialization macros are prohibited from running.
*
@@ -49,7 +49,7 @@ trait Tags {
* @param pre Prefix that represents a universe this type tag will be bound to.
* If `pre` is set to `NoType`, then any type tag in scope will do, regardless of its affiliation.
* If `pre` is set to `NoType`, and tag resolution involves materialization, then `mkRuntimeUniverseRef` will be used.
- * @param tp Type we're looking a TypeTag for, e.g. resolveTypeTag(pos, mkRuntimeUniverseRef, IntClass.tpe, false) will look for scala.reflect.runtime.universe.TypeTag[Int].
+ * @param tp Type we're looking a TypeTag for, e.g. resolveTypeTag(pos, mkRuntimeUniverseRef, IntTpe, false) will look for scala.reflect.runtime.universe.TypeTag[Int].
* @param concrete If true then the result must not contain unresolved (i.e. not spliced) type parameters and abstract type members.
* If false then the function will always succeed (abstract types will be reified as free types).
* @param allowMaterialization If true (default) then the resolver is allowed to launch materialization macros when there's no type tag in scope.
@@ -69,4 +69,4 @@ trait Tags {
resolveTag(pos, taggedTp, allowMaterialization)
}
}
-} \ No newline at end of file
+}
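
resolveClassTag/resolveTypeTag are the compiler-side counterpart of the implicit search a user triggers with a context bound; when no tag is in scope the materialization macro manufactures one (unless allowMaterialization is false). A user-level illustration with the standard reflection APIs:

import scala.reflect.ClassTag
import scala.reflect.runtime.universe._

object TagDemo extends App {
  // A ClassTag context bound triggers the same implicit search that
  // resolveClassTag performs inside the typer; with no tag in scope,
  // the materialization macro manufactures one.
  def newArrayOf[T: ClassTag](n: Int): Array[T] = new Array[T](n)
  println(newArrayOf[Int](3).length)  // 3

  // Likewise for TypeTags (resolveTypeTag): typeOf[T] forces a TypeTag lookup.
  def describe[T: TypeTag]: String = typeOf[T].toString
  println(describe[List[Int]])        // List[Int]
}
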
diff --git a/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala b/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala
index 88d10f1d72..1c8d37ef39 100644
--- a/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala
@@ -6,7 +6,6 @@
package scala.tools.nsc
package typechecker
-import scala.tools.nsc.symtab.Flags._
import scala.collection.mutable
import mutable.ListBuffer
import util.returning
@@ -70,7 +69,7 @@ abstract class TreeCheckers extends Analyzer {
// new symbols
if (newSyms.nonEmpty) {
informFn(newSyms.size + " new symbols.")
- val toPrint = if (settings.debug.value) sortedNewSyms mkString " " else ""
+ val toPrint = if (settings.debug) sortedNewSyms mkString " " else ""
newSyms.clear()
if (toPrint != "")
@@ -121,7 +120,7 @@ abstract class TreeCheckers extends Analyzer {
def errorFn(msg: Any): Unit = {hasError = true; println("[check: %s] %s".format(phase.prev, msg))}
def errorFn(pos: Position, msg: Any): Unit = errorFn(posstr(pos) + ": " + msg)
def informFn(msg: Any) {
- if (settings.verbose.value || settings.debug.value)
+ if (settings.verbose || settings.debug)
println("[check: %s] %s".format(phase.prev, msg))
}
@@ -138,25 +137,18 @@ abstract class TreeCheckers extends Analyzer {
}
def checkTrees() {
- if (settings.verbose.value)
+ if (settings.verbose)
Console.println("[consistency check at the beginning of phase " + phase + "]")
currentRun.units foreach (x => wrap(x)(check(x)))
}
- def printingTypings[T](body: => T): T = {
- val saved = global.printTypings
- global.printTypings = true
- val result = body
- global.printTypings = saved
- result
- }
def runWithUnit[T](unit: CompilationUnit)(body: => Unit): Unit = {
hasError = false
val unit0 = currentUnit
currentRun.currentUnit = unit
body
- currentRun.advanceUnit
+ currentRun.advanceUnit()
assertFn(currentUnit == unit, "currentUnit is " + currentUnit + ", but unit is " + unit)
currentRun.currentUnit = unit0
}
@@ -164,7 +156,7 @@ abstract class TreeCheckers extends Analyzer {
informProgress("checking "+unit)
val context = rootContext(unit)
context.checking = true
- tpeOfTree.clear
+ tpeOfTree.clear()
SymbolTracker.check(phase, unit)
val checker = new TreeChecker(context)
runWithUnit(unit) {
@@ -189,10 +181,6 @@ abstract class TreeCheckers extends Analyzer {
errorFn(t1.pos, "trees differ\n old: " + treestr(t1) + "\n new: " + treestr(t2))
private def typesDiffer(tree: Tree, tp1: Type, tp2: Type) =
errorFn(tree.pos, "types differ\n old: " + tp1 + "\n new: " + tp2 + "\n tree: " + tree)
- private def ownersDiffer(tree: Tree, shouldBe: Symbol) = {
- val sym = tree.symbol
- errorFn(tree.pos, sym + " has wrong owner: " + ownerstr(sym.owner) + ", should be: " + ownerstr(shouldBe))
- }
/** XXX Disabled reporting of position errors until there is less noise. */
private def noPos(t: Tree) =
@@ -204,14 +192,11 @@ abstract class TreeCheckers extends Analyzer {
if (t.symbol == NoSymbol)
errorFn(t.pos, "no symbol: " + treestr(t))
- override def typed(tree: Tree, mode: Int, pt: Type): Tree = returning(tree) {
+ override def typed(tree: Tree, mode: Mode, pt: Type): Tree = returning(tree) {
case EmptyTree | TypeTree() => ()
case _ if tree.tpe != null =>
- tpeOfTree.getOrElseUpdate(tree, {
- val saved = tree.tpe
- tree.tpe = null
- saved
- })
+ tpeOfTree.getOrElseUpdate(tree, try tree.tpe finally tree.clearType())
+
wrap(tree)(super.typed(tree, mode, pt) match {
case _: Literal => ()
case x if x ne tree => treesDiffer(tree, x)
@@ -236,7 +221,7 @@ abstract class TreeCheckers extends Analyzer {
case _: ConstantType => ()
case _ =>
checkSym(tree)
- /** XXX: lots of syms show up here with accessed == NoSymbol. */
+ /* XXX: lots of syms show up here with accessed == NoSymbol. */
if (accessed != NoSymbol) {
val agetter = accessed.getter(sym.owner)
val asetter = accessed.setter(sym.owner)
@@ -263,7 +248,7 @@ abstract class TreeCheckers extends Analyzer {
else if (currentOwner.ownerChain takeWhile (_ != sym) exists (_ == NoSymbol))
return fail("tree symbol "+sym+" does not point to enclosing class; tree = ")
- /** XXX: temporary while Import nodes are arriving untyped. */
+ /* XXX: temporary while Import nodes are arriving untyped. */
case Import(_, _) =>
return
case _ =>
@@ -284,7 +269,7 @@ abstract class TreeCheckers extends Analyzer {
def cond(s: Symbol) = !s.isTerm || s.isMethod || s == sym.owner
if (sym.owner != currentOwner) {
- val expected = currentOwner.ownerChain find (x => cond(x)) getOrElse fail("DefTree can't find owner: ")
+ val expected = currentOwner.ownerChain find (x => cond(x)) getOrElse { fail("DefTree can't find owner: ") ; NoSymbol }
if (sym.owner != expected)
fail(sm"""|
| currentOwner chain: ${currentOwner.ownerChain take 3 mkString " -> "}
@@ -298,7 +283,6 @@ abstract class TreeCheckers extends Analyzer {
private def checkSymbolRefsRespectScope(tree: Tree) {
def symbolOf(t: Tree): Symbol = Option(tree.symbol).getOrElse(NoSymbol)
- def definedSymbolOf(t: Tree): Symbol = if (t.isDef) symbolOf(t) else NoSymbol
val info = Option(symbolOf(tree).info).getOrElse(NoType)
val referencedSymbols: List[Symbol] = {
val directRef = tree match {
@@ -344,7 +328,7 @@ abstract class TreeCheckers extends Analyzer {
if (oldtpe =:= tree.tpe) ()
else typesDiffer(tree, oldtpe, tree.tpe)
- tree.tpe = oldtpe
+ tree setType oldtpe
super.traverse(tree)
}
}
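
The replacement of the saved/null/restore block in TreeCheckers with `tpeOfTree.getOrElseUpdate(tree, try tree.tpe finally tree.clearType())` leans on the fact that `try e finally cleanup` evaluates `e` first, then runs the cleanup, and yields the already-computed value of `e`. A standalone illustration of that idiom (names invented for the example):

object TryFinallyDemo extends App {
  var cell: Option[String] = Some("cached")
  // Returns the value of `cell` as it was before the finally block cleared it.
  def readAndClear(): Option[String] = try cell finally cell = None

  val saved = readAndClear()
  println(saved) // Some(cached) -- the pre-clear value is returned
  println(cell)  // None         -- the finally block ran afterwards
}
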
diff --git a/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala b/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala
index 4950a7efef..a93baabc51 100644
--- a/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala
@@ -8,7 +8,6 @@ package typechecker
import scala.collection.mutable
import scala.collection.mutable.ListBuffer
-import scala.util.control.ControlThrowable
import scala.util.control.Exception.ultimately
import symtab.Flags._
import PartialFunction._
@@ -37,15 +36,6 @@ trait TypeDiagnostics {
import global._
import definitions._
- import global.typer.{ infer, context }
-
- /** The common situation of making sure nothing is erroneous could be
- * nicer if Symbols, Types, and Trees all implemented some common interface
- * in which isErroneous and similar would be placed.
- */
- def noErroneousTypes(tps: Type*) = tps forall (x => !x.isErroneous)
- def noErroneousSyms(syms: Symbol*) = syms forall (x => !x.isErroneous)
- def noErroneousTrees(trees: Tree*) = trees forall (x => !x.isErroneous)
/** For errors which are artifacts of the implementation: such messages
* indicate that the restriction may be lifted in the future.
@@ -58,7 +48,7 @@ trait TypeDiagnostics {
/** A map of Positions to addendums - if an error involves a position in
* the map, the addendum should also be printed.
*/
- private var addendums = perRunCaches.newMap[Position, () => String]()
+ private val addendums = perRunCaches.newMap[Position, () => String]()
private var isTyperInPattern = false
/** Devising new ways of communicating error info out of
@@ -136,7 +126,7 @@ trait TypeDiagnostics {
else if (!member.isDeferred) member.accessed
else {
val getter = if (member.isSetter) member.getter(member.owner) else member
- val flags = if (getter.setter(member.owner) != NoSymbol) DEFERRED | MUTABLE else DEFERRED
+ val flags = if (getter.setter(member.owner) != NoSymbol) DEFERRED.toLong | MUTABLE else DEFERRED
getter.owner.newValue(getter.name.toTermName, getter.pos, flags) setInfo getter.tpe.resultType
}
@@ -174,11 +164,6 @@ trait TypeDiagnostics {
case xs => " where " + (disambiguate(xs map (_.existentialToString)) mkString ", ")
}
- def varianceWord(sym: Symbol): String =
- if (sym.variance == 1) "covariant"
- else if (sym.variance == -1) "contravariant"
- else "invariant"
-
def explainAlias(tp: Type) = {
// Don't automatically normalize standard aliases; they still will be
// expanded if necessary to disambiguate simple identifiers.
@@ -223,12 +208,12 @@ trait TypeDiagnostics {
// force measures than comparing normalized Strings were producing error messages
// like "and java.util.ArrayList[String] <: java.util.ArrayList[String]" but there
// should be a cleaner way to do this.
- if (found.normalize.toString == tp.normalize.toString) ""
+ if (found.dealiasWiden.toString == tp.dealiasWiden.toString) ""
else " (and %s <: %s)".format(found, tp)
)
val explainDef = {
val prepend = if (isJava) "Java-defined " else ""
- "%s%s is %s in %s.".format(prepend, reqsym, varianceWord(param), param)
+ "%s%s is %s in %s.".format(prepend, reqsym, param.variance, param)
}
// Don't suggest they change the class declaration if it's somewhere
// under scala.* or defined in a java class, because attempting either
@@ -248,11 +233,11 @@ trait TypeDiagnostics {
|| ((arg <:< reqArg) && param.isCovariant)
|| ((reqArg <:< arg) && param.isContravariant)
)
- val invariant = param.variance == 0
+ val invariant = param.variance.isInvariant
if (conforms) Some("")
- else if ((arg <:< reqArg) && invariant) mkMsg(true) // covariant relationship
- else if ((reqArg <:< arg) && invariant) mkMsg(false) // contravariant relationship
+ else if ((arg <:< reqArg) && invariant) mkMsg(isSubtype = true) // covariant relationship
+ else if ((reqArg <:< arg) && invariant) mkMsg(isSubtype = false) // contravariant relationship
else None // we assume in other cases our ham-fisted advice will merely serve to confuse
}
val messages = relationships.flatten
@@ -268,7 +253,7 @@ trait TypeDiagnostics {
// For found/required errors where AnyRef would have sufficed:
// explain in greater detail.
def explainAnyVsAnyRef(found: Type, req: Type): String = {
- if (AnyRefClass.tpe <:< req) notAnyRefMessage(found) else ""
+ if (AnyRefTpe <:< req) notAnyRefMessage(found) else ""
}
// TODO - figure out how to avoid doing any work at all
@@ -309,7 +294,6 @@ trait TypeDiagnostics {
// distinguished from the other types in the same error message
private val savedName = sym.name
def restoreName() = sym.name = savedName
- def isAltered = sym.name != savedName
def modifyName(f: String => String) = sym setName newTypeName(f(sym.name.toString))
/** Prepend java.lang, scala., or Predef. if this type originated
@@ -439,6 +423,122 @@ trait TypeDiagnostics {
def permanentlyHiddenWarning(pos: Position, hidden: Name, defn: Symbol) =
contextWarning(pos, "imported `%s' is permanently hidden by definition of %s".format(hidden, defn.fullLocationString))
+ object checkUnused {
+ val ignoreNames = Set[TermName]("readResolve", "readObject", "writeObject", "writeReplace")
+
+ class UnusedPrivates extends Traverser {
+ val defnTrees = ListBuffer[MemberDef]()
+ val targets = mutable.Set[Symbol]()
+ val setVars = mutable.Set[Symbol]()
+ val treeTypes = mutable.Set[Type]()
+
+ def defnSymbols = defnTrees.toList map (_.symbol)
+ def localVars = defnSymbols filter (t => t.isLocal && t.isVar)
+
+ def qualifiesTerm(sym: Symbol) = (
+ (sym.isModule || sym.isMethod || sym.isPrivateLocal || sym.isLocal)
+ && !nme.isLocalName(sym.name)
+ && !sym.isParameter
+ && !sym.isParamAccessor // could improve this, but it's a pain
+ && !sym.isEarlyInitialized // lots of false positives in the way these are encoded
+ && !(sym.isGetter && sym.accessed.isEarlyInitialized)
+ )
+ def qualifiesType(sym: Symbol) = !sym.isDefinedInPackage
+ def qualifies(sym: Symbol) = (
+ (sym ne null)
+ && (sym.isTerm && qualifiesTerm(sym) || sym.isType && qualifiesType(sym))
+ )
+
+ override def traverse(t: Tree): Unit = {
+ t match {
+ case t: MemberDef if qualifies(t.symbol) => defnTrees += t
+ case t: RefTree if t.symbol ne null => targets += t.symbol
+ case Assign(lhs, _) if lhs.symbol != null => setVars += lhs.symbol
+ case _ =>
+ }
+ // Only record type references which don't originate within the
+ // definition of the class being referenced.
+ if (t.tpe ne null) {
+ for (tp <- t.tpe ; if !treeTypes(tp) && !currentOwner.ownerChain.contains(tp.typeSymbol)) {
+ tp match {
+ case NoType | NoPrefix =>
+ case NullaryMethodType(_) =>
+ case MethodType(_, _) =>
+ case _ =>
+ log(s"$tp referenced from $currentOwner")
+ treeTypes += tp
+ }
+ }
+ // e.g. val a = new Foo ; new a.Bar ; don't let a be reported as unused.
+ t.tpe.prefix foreach {
+ case SingleType(_, sym) => targets += sym
+ case _ =>
+ }
+ }
+ super.traverse(t)
+ }
+ def isUnusedType(m: Symbol): Boolean = (
+ m.isType
+ && !m.isTypeParameterOrSkolem // would be nice to improve this
+ && (m.isPrivate || m.isLocal)
+ && !(treeTypes.exists(tp => tp exists (t => t.typeSymbolDirect == m)))
+ )
+ def isUnusedTerm(m: Symbol): Boolean = (
+ (m.isTerm)
+ && (m.isPrivate || m.isLocal)
+ && !targets(m)
+ && !(m.name == nme.WILDCARD) // e.g. val _ = foo
+ && !ignoreNames(m.name.toTermName) // serialization methods
+ && !isConstantType(m.info.resultType) // subject to constant inlining
+ && !treeTypes.exists(_ contains m) // e.g. val a = new Foo ; new a.Bar
+ )
+ def unusedTypes = defnTrees.toList filter (t => isUnusedType(t.symbol))
+ def unusedTerms = defnTrees.toList filter (v => isUnusedTerm(v.symbol))
+ // local vars which are never set, except those already returned in unused
+ def unsetVars = localVars filter (v => !setVars(v) && !isUnusedTerm(v))
+ }
+
+ def apply(unit: CompilationUnit) = {
+ warnUnusedImports(unit)
+
+ val p = new UnusedPrivates
+ p traverse unit.body
+ val unused = p.unusedTerms
+ unused foreach { defn: DefTree =>
+ val sym = defn.symbol
+ val isDefaultGetter = sym.name containsName nme.DEFAULT_GETTER_STRING
+ val pos = (
+ if (defn.pos.isDefined) defn.pos
+ else if (sym.pos.isDefined) sym.pos
+ else sym match {
+ case sym: TermSymbol => sym.referenced.pos
+ case _ => NoPosition
+ }
+ )
+ val why = if (sym.isPrivate) "private" else "local"
+ val what = (
+ if (isDefaultGetter) "default argument"
+ else if (sym.isConstructor) "constructor"
+ else if (sym.isVar || sym.isGetter && sym.accessed.isVar) "var"
+ else if (sym.isVal || sym.isGetter && sym.accessed.isVal) "val"
+ else if (sym.isSetter) "setter"
+ else if (sym.isMethod) "method"
+ else if (sym.isModule) "object"
+ else "term"
+ )
+ unit.warning(pos, s"$why $what in ${sym.owner} is never used")
+ }
+ p.unsetVars foreach { v =>
+ unit.warning(v.pos, s"local var ${v.name} in ${v.owner} is never set - it could be a val")
+ }
+ p.unusedTypes foreach { t =>
+ val sym = t.symbol
+ val why = if (sym.isPrivate) "private" else "local"
+ unit.warning(t.pos, s"$why ${sym.fullLocationString} is never used")
+ }
+ }
+ }
+
object checkDead {
private val exprStack: mutable.Stack[Symbol] = mutable.Stack(NoSymbol)
// The method being applied to `tree` when `apply` is called.
@@ -463,17 +563,13 @@ trait TypeDiagnostics {
// Error suppression will squash some of these warnings unless we circumvent it.
// It is presumed if you are using a -Y option you would really like to hear
// the warnings you've requested.
- if (settings.warnDeadCode.value && context.unit.exists && treeOK(tree) && exprOK)
- context.warning(tree.pos, "dead code following this construct", true)
+ if (settings.warnDeadCode && context.unit.exists && treeOK(tree) && exprOK)
+ context.warning(tree.pos, "dead code following this construct", force = true)
tree
}
// The checkDead call from typedArg is more selective.
- def inMode(mode: Int, tree: Tree): Tree = {
- val modeOK = (mode & (EXPRmode | BYVALmode | POLYmode)) == (EXPRmode | BYVALmode)
- if (modeOK) apply(tree)
- else tree
- }
+ def inMode(mode: Mode, tree: Tree): Tree = if (mode.typingMonoExprByValue) apply(tree) else tree
}
private def symWasOverloaded(sym: Symbol) = sym.owner.isClass && sym.owner.info.member(sym.name).isOverloaded
@@ -494,7 +590,7 @@ trait TypeDiagnostics {
/** Report a type error.
*
- * @param pos0 The position where to report the error
+ * @param pos The position where to report the error
* @param ex The exception that caused the error
*/
def reportTypeError(context0: Context, pos: Position, ex: TypeError) {
@@ -503,7 +599,7 @@ trait TypeDiagnostics {
// but it seems that throwErrors excludes some of the errors that should actually be
// buffered, causing TypeErrors to fly around again. This needs some more investigation.
if (!context0.reportErrors) throw ex
- if (settings.debug.value) ex.printStackTrace()
+ if (settings.debug) ex.printStackTrace()
ex match {
case CyclicReference(sym, info: TypeCompleter) =>
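
The new `checkUnused` traversal above warns about private or local definitions that are never referenced and about local vars that are read but never reassigned. The snippet below shows the kind of code it targets; the comments paraphrase the warning strings built in the hunk, and no command-line flag is claimed here since that wiring is outside this hunk.

// Illustrative code of the kind the checkUnused traversal reports.
class Example {
  private def helper(x: Int): Int = x + 1   // "private method in class Example is never used"

  def run(): Int = {
    val unusedLocal = 42                    // "local val in method run is never used"
    var total = 0                           // read but never reassigned:
                                            // "local var total in method run is never set - it could be a val"
    total + 1
  }
}
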
diff --git a/src/compiler/scala/tools/nsc/interpreter/TypeStrings.scala b/src/compiler/scala/tools/nsc/typechecker/TypeStrings.scala
index 60399f53bf..afe6875218 100644
--- a/src/compiler/scala/tools/nsc/interpreter/TypeStrings.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/TypeStrings.scala
@@ -4,7 +4,7 @@
*/
package scala.tools.nsc
-package interpreter
+package typechecker
import java.lang.{ reflect => r }
import r.TypeVariable
@@ -12,16 +12,12 @@ import scala.reflect.NameTransformer
import NameTransformer._
import scala.reflect.runtime.{universe => ru}
import scala.reflect.{ClassTag, classTag}
-import typechecker.DestructureTypes
-import scala.reflect.internal.util.StringOps.ojoin
-import scala.language.implicitConversions
/** A more principled system for turning types into strings.
*/
trait StructuredTypeStrings extends DestructureTypes {
val global: Global
import global._
- import definitions._
case class LabelAndType(label: String, typeName: String) { }
object LabelAndType {
@@ -33,13 +29,11 @@ trait StructuredTypeStrings extends DestructureTypes {
else elems.mkString(ldelim, mdelim, rdelim)
)
}
- val NoGrouping = Grouping("", "", "", false)
- val ListGrouping = Grouping("(", ", ", ")", false)
- val ProductGrouping = Grouping("(", ", ", ")", true)
- val ParamGrouping = Grouping("(", ", ", ")", true)
- val BlockGrouping = Grouping(" { ", "; ", "}", false)
+ val NoGrouping = Grouping("", "", "", labels = false)
+ val ListGrouping = Grouping("(", ", ", ")", labels = false)
+ val ProductGrouping = Grouping("(", ", ", ")", labels = true)
+ val BlockGrouping = Grouping(" { ", "; ", "}", labels = false)
- private implicit def lowerName(n: Name): String = "" + n
private def str(level: Int)(body: => String): String = " " * level + body
private def block(level: Int, grouping: Grouping)(name: String, nodes: List[TypeNode]): String = {
val l1 = str(level)(name + grouping.ldelim)
@@ -49,7 +43,6 @@ trait StructuredTypeStrings extends DestructureTypes {
l1 +: l2 :+ l3 mkString "\n"
}
private def maybeBlock(level: Int, grouping: Grouping)(name: String, nodes: List[TypeNode]): String = {
- import grouping._
val threshold = 70
val try1 = str(level)(name + grouping.join(nodes map (_.show(0, grouping.labels)): _*))
@@ -57,10 +50,9 @@ trait StructuredTypeStrings extends DestructureTypes {
else block(level, grouping)(name, nodes)
}
private def shortClass(x: Any) = {
- if (opt.debug) {
+ if (settings.debug) {
val name = (x.getClass.getName split '.').last
- val isAnon = name.reverse takeWhile (_ != '$') forall (_.isDigit)
- val str = if (isAnon) name else (name split '$').last
+ val str = if (TypeStrings.isAnonClass(x.getClass)) name else (name split '$').last
" // " + str
}
@@ -72,7 +64,7 @@ trait StructuredTypeStrings extends DestructureTypes {
def nodes: List[TypeNode]
def show(indent: Int, showLabel: Boolean): String = maybeBlock(indent, grouping)(mkPrefix(showLabel), nodes)
- def show(indent: Int): String = show(indent, true)
+ def show(indent: Int): String = show(indent, showLabel = true)
def show(): String = show(0)
def withLabel(l: String): this.type = modifyNameInfo(_.copy(label = l))
@@ -146,7 +138,7 @@ trait StructuredTypeStrings extends DestructureTypes {
def wrapAtom[U](value: U) = new TypeAtom(value)
}
- def show(tp: Type): String = intoNodes(tp).show
+ def show(tp: Type): String = intoNodes(tp).show()
}
@@ -158,11 +150,11 @@ trait StructuredTypeStrings extends DestructureTypes {
* "definition" is when you want strings like
*/
trait TypeStrings {
+ private type JClass = java.lang.Class[_]
private val ObjectClass = classOf[java.lang.Object]
private val primitives = Set[String]("byte", "char", "short", "int", "long", "float", "double", "boolean", "void")
- private val primitiveMap = primitives.toList map { x =>
+ private val primitiveMap = (primitives.toList map { x =>
val key = x match {
- case "void" => "Void"
case "int" => "Integer"
case "char" => "Character"
case s => s.capitalize
@@ -173,7 +165,12 @@ trait TypeStrings {
}
("java.lang." + key) -> ("scala." + value)
- } toMap
+ }).toMap
+
+ def isAnonClass(cl: Class[_]) = {
+ val xs = cl.getName.reverse takeWhile (_ != '$')
+ xs.nonEmpty && xs.forall(_.isDigit)
+ }
def scalaName(s: String): String = {
if (s endsWith MODULE_SUFFIX_STRING) s.init + ".type"
@@ -184,18 +181,16 @@ trait TypeStrings {
// Trying to put humpty dumpty back together again.
def scalaName(clazz: JClass): String = {
val name = clazz.getName
- val isAnon = clazz.isScalaAnonymous
val enclClass = clazz.getEnclosingClass
def enclPre = enclClass.getName + MODULE_SUFFIX_STRING
def enclMatch = name startsWith enclPre
scalaName(
- if (enclClass == null || isAnon || !enclMatch) name
+ if (enclClass == null || isAnonClass(clazz) || !enclMatch) name
else enclClass.getName + "." + (name stripPrefix enclPre)
)
}
- def scalaName(ct: ClassTag[_]): String = scalaName(ct.runtimeClass)
- def anyClass(x: Any): JClass = if (x == null) null else x.getClass
+ def anyClass(x: Any): JClass = if (x == null) null else x.getClass
private def brackets(tps: String*): String =
if (tps.isEmpty) ""
@@ -212,14 +207,8 @@ trait TypeStrings {
}
private def tparamString[T: ru.TypeTag] : String = {
- def typeArguments: List[ru.Type] = {
- import ru.TypeRefTag // otherwise the pattern match will be unchecked
- // because TypeRef is an abstract type
- ru.typeOf[T] match { case ru.TypeRef(_, _, args) => args; case _ => Nil }
- }
- // [Eugene to Paul] need to use not the `rootMirror`, but a mirror with the REPL's classloader
- // how do I get to it? acquiring context classloader seems unreliable because of multithreading
- def typeVariables: List[java.lang.Class[_]] = typeArguments map (targ => ru.rootMirror.runtimeClass(targ))
+ import ru._ // get TypeRefTag in scope so that pattern match works (TypeRef is an abstract type)
+ def typeArguments: List[ru.Type] = ru.typeOf[T] match { case ru.TypeRef(_, _, args) => args; case _ => Nil }
brackets(typeArguments map (jc => tvarString(List(jc))): _*)
}
@@ -231,7 +220,6 @@ trait TypeStrings {
* practice to rely on toString for correctness) generated the VALID string
* representation of the type.
*/
- def fromTypedValue[T: ru.TypeTag : ClassTag](x: T): String = fromTag[T]
def fromValue(value: Any): String = if (value == null) "Null" else fromClazz(anyClass(value))
def fromClazz(clazz: JClass): String = scalaName(clazz) + tparamString(clazz)
def fromTag[T: ru.TypeTag : ClassTag] : String = scalaName(classTag[T].runtimeClass) + tparamString[T]
@@ -251,13 +239,6 @@ trait TypeStrings {
case (res, (k, v)) => res.replaceAll(k, v)
}
}
-
- val typeTransforms = List(
- "java.lang." -> "",
- "scala.collection.immutable." -> "immutable.",
- "scala.collection.mutable." -> "mutable.",
- "scala.collection.generic." -> "generic."
- )
}
object TypeStrings extends TypeStrings { }
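
`isAnonClass`, added above, decides anonymity purely from the JVM class name: the suffix after the last `$` must be a non-empty run of digits. A standalone restatement with a quick check (the demo object name is invented):

object AnonClassDemo extends App {
  // Same test as isAnonClass in the hunk above.
  def isAnonClass(cl: Class[_]): Boolean = {
    val xs = cl.getName.reverse takeWhile (_ != '$')
    xs.nonEmpty && xs.forall(_.isDigit)
  }

  val anon = new Runnable { def run(): Unit = () }
  println(isAnonClass(anon.getClass))    // true  (e.g. AnonClassDemo$$anon$1)
  println(isAnonClass(classOf[String]))  // false
}
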
diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
index 29cd3d4bfa..ec6b78f2de 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
@@ -9,13 +9,15 @@
// Added: Thu Apr 12 18:23:58 2007
//todo: disallow C#D in superclass
//todo: treat :::= correctly
-package scala.tools.nsc
+package scala
+package tools.nsc
package typechecker
import scala.collection.mutable
-import scala.reflect.internal.util.{ BatchSourceFile, Statistics }
+import scala.reflect.internal.util.{ BatchSourceFile, Statistics, shortClassOfInstance }
import mutable.ListBuffer
import symtab.Flags._
+import Mode._
// Suggestion check whether we can do without priming scopes with symbols of outer scopes,
// like the IDE does.
@@ -24,17 +26,17 @@ import symtab.Flags._
* @author Martin Odersky
* @version 1.0
*/
-trait Typers extends Modes with Adaptations with Tags {
+trait Typers extends Adaptations with Tags {
self: Analyzer =>
import global._
import definitions._
import TypersStats._
- final def forArgMode(fun: Tree, mode: Int) =
- if (treeInfo.isSelfOrSuperConstrCall(fun)) mode | SCCmode
- else mode
+ final def forArgMode(fun: Tree, mode: Mode) =
+ if (treeInfo.isSelfOrSuperConstrCall(fun)) mode | SCCmode else mode
+ // printResult(s"forArgMode($fun, $mode) gets SCCmode")(mode | SCCmode)
// namer calls typer.computeType(rhs) on DefDef / ValDef when tpt is empty. the result
// is cached here and re-used in typedDefDef / typedValDef
// Also used to cache imports type-checked by namer.
@@ -52,34 +54,41 @@ trait Typers extends Modes with Adaptations with Tags {
object UnTyper extends Traverser {
override def traverse(tree: Tree) = {
- if (tree != EmptyTree) tree.tpe = null
- if (tree.hasSymbol) tree.symbol = NoSymbol
+ if (tree.canHaveAttrs) {
+ tree.clearType()
+ if (tree.hasSymbolField) tree.symbol = NoSymbol
+ }
super.traverse(tree)
}
}
-/* needed for experimental version where early types can be type arguments
- class EarlyMap(clazz: Symbol) extends TypeMap {
- def apply(tp: Type): Type = tp match {
- case TypeRef(NoPrefix, sym, List()) if (sym hasFlag PRESUPER) =>
- TypeRef(ThisType(clazz), sym, List())
- case _ =>
- mapOver(tp)
+
+ sealed abstract class SilentResult[+T] {
+ @inline final def map[U](f: T => U): SilentResult[U] = this match {
+ case SilentResultValue(value) => SilentResultValue(f(value))
+ case x: SilentTypeError => x
+ }
+ @inline final def filter(p: T => Boolean): SilentResult[T] = this match {
+ case SilentResultValue(value) if !p(value) => SilentTypeError(TypeErrorWrapper(new TypeError(NoPosition, "!p")))
+ case _ => this
+ }
+ @inline final def orElse[T1 >: T](f: AbsTypeError => T1): T1 = this match {
+ case SilentResultValue(value) => value
+ case SilentTypeError(err) => f(err)
}
}
-*/
-
- sealed abstract class SilentResult[+T]
case class SilentTypeError(err: AbsTypeError) extends SilentResult[Nothing] { }
case class SilentResultValue[+T](value: T) extends SilentResult[T] { }
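
The SilentResult combinators introduced above compose like a small either-style result. A simplified, standalone analogue with the error channel reduced to a String (`Silent`, `Good` and `Bad` are illustrative names, not the compiler's):

    sealed abstract class Silent[+T] {
      def map[U](f: T => U): Silent[U] = this match {
        case Good(value) => Good(f(value))
        case bad: Bad    => bad
      }
      def filter(p: T => Boolean): Silent[T] = this match {
        case Good(value) if !p(value) => Bad("predicate failed")
        case _                        => this
      }
      def orElse[T1 >: T](f: String => T1): T1 = this match {
        case Good(value) => value
        case Bad(err)    => f(err)
      }
    }
    case class Good[+T](value: T) extends Silent[T]
    case class Bad(err: String) extends Silent[Nothing]

    // Good(2).map(_ + 1).orElse(_ => 0)                    == 3
    // (Bad("boom"): Silent[Int]).map(_ + 1).orElse(_ => 0) == 0
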
def newTyper(context: Context): Typer = new NormalTyper(context)
+
private class NormalTyper(context : Context) extends Typer(context)
// A transient flag to mark members of anonymous classes
// that are turned private by typedBlock
private final val SYNTHETIC_PRIVATE = TRANS_FLAG
- private def isPastTyper = phase.id > currentRun.typerPhase.id
+ private final val InterpolatorCodeRegex = """\$\{.*?\}""".r
+ private final val InterpolatorIdentRegex = """\$\w+""".r
// To enable decent error messages when the typer crashes.
// TODO - this only catches trees which go through def typed,
@@ -90,8 +99,8 @@ trait Typers extends Modes with Adaptations with Tags {
// when true:
// - we may virtualize matches (if -Xexperimental and there's a suitable __match in scope)
// - we synthesize PartialFunction implementations for `x => x match {...}` and `match {...}` when the expected type is PartialFunction
- // this is disabled by: -Xoldpatmat or interactive compilation (we run it for scaladoc due to SI-5933)
- private def newPatternMatching = opt.virtPatmat && !forInteractive //&& !forScaladoc && (phase.id < currentRun.uncurryPhase.id)
+ // this is disabled by: interactive compilation (we run it for scaladoc due to SI-5933)
+ protected def newPatternMatching = true // presently overridden in the presentation compiler
abstract class Typer(context0: Context) extends TyperDiagnostics with Adaptation with Tag with TyperContextErrors {
import context0.unit
@@ -99,13 +108,18 @@ trait Typers extends Modes with Adaptations with Tags {
import TyperErrorGen._
val infer = new Inferencer(context0) {
- override def isCoercible(tp: Type, pt: Type): Boolean = undoLog undo { // #3281
- tp.isError || pt.isError ||
- context0.implicitsEnabled && // this condition prevents chains of views
- inferView(EmptyTree, tp, pt, false) != EmptyTree
- }
+ // See SI-3281 re undoLog
+ override def isCoercible(tp: Type, pt: Type) = undoLog undo viewExists(tp, pt)
}
+ /** Overridden to false in scaladoc and/or interactive. */
+ def canAdaptConstantTypeToLiteral = true
+ def canTranslateEmptyListToNil = true
+ def missingSelectErrorTree(tree: Tree, qual: Tree, name: Name): Tree = tree
+
+ def typedDocDef(docDef: DocDef, mode: Mode, pt: Type): Tree =
+ typed(docDef.definition, mode, pt)
+
/** Find implicit arguments and pass them to given tree.
*/
def applyImplicitArgs(fun: Tree): Tree = fun.tpe match {
@@ -115,10 +129,7 @@ trait Typers extends Modes with Adaptations with Tags {
// paramFailed cannot be initialized with params.exists(_.tpe.isError) because that would
// hide some valid errors for params preceding the erroneous one.
var paramFailed = false
-
- def mkPositionalArg(argTree: Tree, paramName: Name) = argTree
- def mkNamedArg(argTree: Tree, paramName: Name) = atPos(argTree.pos)(new AssignOrNamedArg(Ident(paramName), (argTree)))
- var mkArg: (Tree, Name) => Tree = mkPositionalArg
+ var mkArg: (Name, Tree) => Tree = (_, tree) => tree
// DEPMETTODO: instantiate type vars that depend on earlier implicit args (see adapt (4.1))
//
@@ -129,28 +140,27 @@ trait Typers extends Modes with Adaptations with Tags {
for(ar <- argResultsBuff)
paramTp = paramTp.subst(ar.subst.from, ar.subst.to)
- val res = if (paramFailed || (paramTp.isError && {paramFailed = true; true})) SearchFailure else inferImplicit(fun, paramTp, context.reportErrors, false, context)
+ val res = if (paramFailed || (paramTp.isError && {paramFailed = true; true})) SearchFailure else inferImplicit(fun, paramTp, context.reportErrors, isView = false, context)
argResultsBuff += res
if (res.isSuccess) {
- argBuff += mkArg(res.tree, param.name)
+ argBuff += mkArg(param.name, res.tree)
} else {
- mkArg = mkNamedArg // don't pass the default argument (if any) here, but start emitting named arguments for the following args
+ mkArg = gen.mkNamedArg // don't pass the default argument (if any) here, but start emitting named arguments for the following args
if (!param.hasDefault && !paramFailed) {
- context.errBuffer.find(_.kind == ErrorKinds.Divergent) match {
- case Some(divergentImplicit) if !settings.Xdivergence211.value =>
+ context.reportBuffer.errors.collectFirst {
+ case dte: DivergentImplicitTypeError => dte
+ } match {
+ case Some(divergent) =>
// DivergentImplicit error has higher priority than "no implicit found"
// no need to issue the problem again if we are still in silent mode
if (context.reportErrors) {
- context.issue(divergentImplicit)
- context.condBufferFlush(_.kind == ErrorKinds.Divergent)
- }
- case Some(divergentImplicit: DivergentImplicitTypeError) if settings.Xdivergence211.value =>
- if (context.reportErrors) {
- context.issue(divergentImplicit.withPt(paramTp))
- context.condBufferFlush(_.kind == ErrorKinds.Divergent)
+ context.issue(divergent.withPt(paramTp))
+ context.reportBuffer.clearErrors {
+ case dte: DivergentImplicitTypeError => true
+ }
}
- case None =>
+ case _ =>
NoImplicitFoundError(fun, param)
}
paramFailed = true
@@ -176,10 +186,17 @@ trait Typers extends Modes with Adaptations with Tags {
fun
}
+ def viewExists(from: Type, to: Type): Boolean = (
+ !from.isError
+ && !to.isError
+ && context.implicitsEnabled
+ && (inferView(EmptyTree, from, to, reportAmbiguous = false) != EmptyTree)
+ )
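
A "view", at the user level, is just an implicit conversion of shape `from => to` that the typer may insert when the expected type does not match; `viewExists` above asks whether such a conversion can be found. A minimal illustration (all names here are illustrative):

    import scala.language.implicitConversions

    object ViewExample {
      case class Meters(value: Double)
      // The view Double => Meters; typing `val m: Meters = 1.5` leads the typer to insert it.
      implicit def doubleToMeters(d: Double): Meters = Meters(d)
      val m: Meters = 1.5
    }
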
+
def inferView(tree: Tree, from: Type, to: Type, reportAmbiguous: Boolean): Tree =
- inferView(tree, from, to, reportAmbiguous, true)
+ inferView(tree, from, to, reportAmbiguous, saveErrors = true)
- /** Infer an implicit conversion (``view'') between two types.
+ /** Infer an implicit conversion (`view`) between two types.
* @param tree The tree which needs to be converted.
* @param from The source type of the conversion
* @param to The target type of the conversion
@@ -194,12 +211,12 @@ trait Typers extends Modes with Adaptations with Tags {
debuglog("infer view from "+from+" to "+to)//debug
if (isPastTyper) EmptyTree
else from match {
- case MethodType(_, _) => EmptyTree
+ case MethodType(_, _) => EmptyTree
case OverloadedType(_, _) => EmptyTree
- case PolyType(_, _) => EmptyTree
- case _ =>
+ case PolyType(_, _) => EmptyTree
+ case _ =>
def wrapImplicit(from: Type): Tree = {
- val result = inferImplicit(tree, functionType(from :: Nil, to), reportAmbiguous, true, context, saveErrors)
+ val result = inferImplicit(tree, functionType(from.withoutAnnotations :: Nil, to), reportAmbiguous, isView = true, context, saveAmbiguousDivergent = saveErrors)
if (result.subst != EmptyTreeTypeSubstituter) {
result.subst traverse tree
notifyUndetparamsInferred(result.subst.from, result.subst.to)
@@ -237,32 +254,6 @@ trait Typers extends Modes with Adaptations with Tags {
case _ => tp
}
- /** Check that <code>tree</code> is a stable expression.
- *
- * @param tree ...
- * @return ...
- */
- def checkStable(tree: Tree): Tree = (
- if (treeInfo.isExprSafeToInline(tree)) tree
- else if (tree.isErrorTyped) tree
- else UnstableTreeError(tree)
- )
-
- /** Would tree be a stable (i.e. a pure expression) if the type
- * of its symbol was not volatile?
- */
- protected def isStableExceptVolatile(tree: Tree) = {
- tree.hasSymbol && tree.symbol != NoSymbol && tree.tpe.isVolatile &&
- { val savedTpe = tree.symbol.info
- val savedSTABLE = tree.symbol getFlag STABLE
- tree.symbol setInfo AnyRefClass.tpe
- tree.symbol setFlag STABLE
- val result = treeInfo.isExprSafeToInline(tree)
- tree.symbol setInfo savedTpe
- tree.symbol setFlag savedSTABLE
- result
- }
- }
private def errorNotClass(tpt: Tree, found: Type) = { ClassTypeRequiredError(tpt, found); false }
private def errorNotStable(tpt: Tree, found: Type) = { TypeNotAStablePrefixError(tpt, found); false }
@@ -294,16 +285,11 @@ trait Typers extends Modes with Adaptations with Tags {
)
}
- /** Check that type <code>tp</code> is not a subtype of itself.
- *
- * @param pos ...
- * @param tp ...
- * @return <code>true</code> if <code>tp</code> is not a subtype of itself.
+ /** Check that type `tp` is not a subtype of itself.
*/
def checkNonCyclic(pos: Position, tp: Type): Boolean = {
def checkNotLocked(sym: Symbol) = {
- sym.initialize
- sym.lockOK || { CyclicAliasingOrSubtypingError(pos, sym); false }
+ sym.initialize.lockOK || { CyclicAliasingOrSubtypingError(pos, sym); false }
}
tp match {
case TypeRef(pre, sym, args) =>
@@ -314,12 +300,6 @@ trait Typers extends Modes with Adaptations with Tags {
case SingleType(pre, sym) =>
checkNotLocked(sym)
-/*
- case TypeBounds(lo, hi) =>
- var ok = true
- for (t <- lo) ok = ok & checkNonCyclic(pos, t)
- ok
-*/
case st: SubType =>
checkNonCyclic(pos, st.supertype)
case ct: CompoundType =>
@@ -330,19 +310,19 @@ trait Typers extends Modes with Adaptations with Tags {
}
def checkNonCyclic(pos: Position, tp: Type, lockedSym: Symbol): Boolean = try {
- if (!lockedSym.lock(CyclicReferenceError(pos, lockedSym))) false
+ if (!lockedSym.lock(CyclicReferenceError(pos, tp, lockedSym))) false
else checkNonCyclic(pos, tp)
} finally {
lockedSym.unlock()
}
def checkNonCyclic(sym: Symbol) {
- if (!checkNonCyclic(sym.pos, sym.tpe)) sym.setInfo(ErrorType)
+ if (!checkNonCyclic(sym.pos, sym.tpe_*)) sym.setInfo(ErrorType)
}
def checkNonCyclic(defn: Tree, tpt: Tree) {
if (!checkNonCyclic(defn.pos, tpt.tpe, defn.symbol)) {
- tpt.tpe = ErrorType
+ tpt setType ErrorType
defn.symbol.setInfo(ErrorType)
}
}
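
`checkNonCyclic` is what rejects type definitions that expand into themselves. A minimal user-level sketch of the kind of code it flags (kept commented out so the snippet compiles):

    object CyclicExample {
      // Uncommenting the pair below is rejected as cyclic: each alias expands to the other.
      // type A = B
      // type B = A
      type Ok = List[Int]   // an ordinary, non-cyclic alias is accepted
    }
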
@@ -373,28 +353,13 @@ trait Typers extends Modes with Adaptations with Tags {
private var scope: Scope = _
private var hiddenSymbols: List[Symbol] = _
- /** Check that type <code>tree</code> does not refer to private
+ /** Check that type `tree` does not refer to private
* components unless itself is wrapped in something private
- * (<code>owner</code> tells where the type occurs).
- *
- * @param owner ...
- * @param tree ...
- * @return ...
+ * (`owner` tells where the type occurs).
*/
def privates[T <: Tree](owner: Symbol, tree: T): T =
check(owner, EmptyScope, WildcardType, tree)
- /** Check that type <code>tree</code> does not refer to entities
- * defined in scope <code>scope</code>.
- *
- * @param scope ...
- * @param pt ...
- * @param tree ...
- * @return ...
- */
- def locals[T <: Tree](scope: Scope, pt: Type, tree: T): T =
- check(NoSymbol, scope, pt, tree)
-
private def check[T <: Tree](owner: Symbol, scope: Scope, pt: Type, tree: T): T = {
this.owner = owner
this.scope = scope
@@ -470,7 +435,7 @@ trait Typers extends Modes with Adaptations with Tags {
}
/** The qualifying class
- * of a this or super with prefix <code>qual</code>.
+ * of a this or super with prefix `qual`.
 * `packageOK` indicates whether a package class is acceptable as the qualifying class symbol.

*/
def qualifyingClass(tree: Tree, qual: Name, packageOK: Boolean) =
@@ -511,11 +476,6 @@ trait Typers extends Modes with Adaptations with Tags {
}
@inline
- final def typerReportAnyContextErrors[T](c: Context)(f: Typer => T): T = {
- f(newTyper(c))
- }
-
- @inline
final def withSavedContext[T](c: Context)(f: => T) = {
val savedErrors = c.flushAndReturnBuffer()
val res = f
@@ -533,8 +493,6 @@ trait Typers extends Modes with Adaptations with Tags {
typer1
} else this
- final val xtypes = false
-
/** Is symbol defined and not stale?
*/
def reallyExists(sym: Symbol) = {
@@ -553,15 +511,21 @@ trait Typers extends Modes with Adaptations with Tags {
}
}
- /** Does the context of tree <code>tree</code> require a stable type?
+ /** Does the context of tree `tree` require a stable type?
*/
- private def isStableContext(tree: Tree, mode: Int, pt: Type) =
- isNarrowable(tree.tpe) && ((mode & (EXPRmode | LHSmode)) == EXPRmode) &&
- (xtypes ||
- (pt.isStable ||
- (mode & QUALmode) != 0 && !tree.symbol.isConstant ||
- pt.typeSymbol.isAbstractType && pt.bounds.lo.isStable && !(tree.tpe <:< pt)) ||
- pt.typeSymbol.isRefinementClass && !(tree.tpe <:< pt))
+ private def isStableContext(tree: Tree, mode: Mode, pt: Type) = {
+ def ptSym = pt.typeSymbol
+ def expectsStable = (
+ pt.isStable
+ || mode.inQualMode && !tree.symbol.isConstant
+ || !(tree.tpe <:< pt) && (ptSym.isAbstractType && pt.bounds.lo.isStable || ptSym.isRefinementClass)
+ )
+
+ ( isNarrowable(tree.tpe)
+ && mode.typingExprNotLhs
+ && expectsStable
+ )
+ }
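
`isStableContext` decides when an expression's type may be narrowed to a singleton type. At the user level, that narrowing is what makes ascriptions like the following typecheck (illustrative names):

    object StableExample {
      object Config
      val c: Config.type = Config   // stable module reference, narrowed to its singleton type
      val x = "hello"               // a stable val
      val y: x.type = x             // narrowed to the singleton type x.type
    }
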
/** Make symbol accessible. This means:
* If symbol refers to package object, insert `.package` as second to last selector.
@@ -572,11 +536,13 @@ trait Typers extends Modes with Adaptations with Tags {
* @return modified tree and new prefix type
*/
private def makeAccessible(tree: Tree, sym: Symbol, pre: Type, site: Tree): (Tree, Type) =
- if (isInPackageObject(sym, pre.typeSymbol)) {
+ if (context.isInPackageObject(sym, pre.typeSymbol)) {
if (pre.typeSymbol == ScalaPackageClass && sym.isTerm) {
// short cut some aliases. It seems pattern matching needs this
// to notice exhaustiveness and to generate good code when
// List extractors are mixed with :: patterns. See Test5 in lists.scala.
+ //
+ // TODO SI-6609 Eliminate this special case once the old pattern matcher is removed.
def dealias(sym: Symbol) =
(atPos(tree.pos.makeTransparent) {gen.mkAttributedRef(sym)} setPos tree.pos, sym.owner.thisType)
sym.name match {
@@ -605,66 +571,59 @@ trait Typers extends Modes with Adaptations with Tags {
(checkAccessible(tree, sym, pre, site), pre)
}
- /** Is `sym` defined in package object of package `pkg`?
- */
- private def isInPackageObject(sym: Symbol, pkg: Symbol) = {
- def isInPkgObj(sym: Symbol) =
- !sym.owner.isPackage && {
- sym.owner.isPackageObjectClass &&
- sym.owner.owner == pkg ||
- pkg.isInitialized && {
- // need to be careful here to not get a cyclic reference during bootstrap
- val pkgobj = pkg.info.member(nme.PACKAGEkw)
- pkgobj.isInitialized &&
- (pkgobj.info.member(sym.name).alternatives contains sym)
- }
- }
- pkg.isPackageClass && {
- if (sym.isOverloaded) sym.alternatives forall isInPkgObj
- else isInPkgObj(sym)
- }
- }
-
/** Post-process an identifier or selection node, performing the following:
- * 1. Check that non-function pattern expressions are stable
+ * 1. Check that non-function pattern expressions are stable (ignoring volatility concerns -- SI-6815)
+ * (and narrow the type of modules: a module reference in a pattern has type Foo.type, not "object Foo")
* 2. Check that packages and static modules are not used as values
* 3. Turn tree type into stable type if possible and required by context.
* 4. Give getClass calls a more precise type based on the type of the target of the call.
*/
- private def stabilize(tree: Tree, pre: Type, mode: Int, pt: Type): Tree = {
- if (tree.symbol.isOverloaded && !inFunMode(mode))
+ private def stabilize(tree: Tree, pre: Type, mode: Mode, pt: Type): Tree = {
+ // Side effect time! Don't be an idiot like me and think you
+ // can move "val sym = tree.symbol" before this line, because
+ // inferExprAlternative side-effects the tree's symbol.
+ if (tree.symbol.isOverloaded && !mode.inFunMode)
inferExprAlternative(tree, pt)
val sym = tree.symbol
- def fail() = NotAValueError(tree, sym)
+ val isStableIdPattern = mode.typingPatternNotConstructor && tree.isTerm
- if (tree.isErrorTyped) tree
- else if ((mode & (PATTERNmode | FUNmode)) == PATTERNmode && tree.isTerm) { // (1)
- if (sym.isValue) {
- val tree1 = checkStable(tree)
- // A module reference in a pattern has type Foo.type, not "object Foo"
- if (sym.isModule && !sym.isMethod) tree1 setType singleType(pre, sym)
- else tree1
- }
- else fail()
- } else if ((mode & (EXPRmode | QUALmode)) == EXPRmode && !sym.isValue && !phase.erasedTypes) { // (2)
- fail()
- } else {
- if (sym.isStable && pre.isStable && !isByNameParamType(tree.tpe) &&
- (isStableContext(tree, mode, pt) || sym.isModule && !sym.isMethod))
- tree.setType(singleType(pre, sym))
- // To fully benefit from special casing the return type of
- // getClass, we have to catch it immediately so expressions
- // like x.getClass().newInstance() are typed with the type of x.
- else if ( isGetClass(tree.symbol)
- // TODO: If the type of the qualifier is inaccessible, we can cause private types
- // to escape scope here, e.g. pos/t1107. I'm not sure how to properly handle this
- // so for now it requires the type symbol be public.
- && pre.typeSymbol.isPublic)
- tree setType MethodType(Nil, getClassReturnType(pre))
- else
- tree
- }
+ def isModuleTypedExpr = (
+ treeInfo.admitsTypeSelection(tree)
+ && (isStableContext(tree, mode, pt) || sym.isModuleNotMethod)
+ )
+ def isStableValueRequired = (
+ isStableIdPattern
+ || mode.in(all = EXPRmode, none = QUALmode) && !phase.erasedTypes
+ )
+ // To fully benefit from special casing the return type of
+ // getClass, we have to catch it immediately so expressions like
+ // x.getClass().newInstance() are typed with the type of x. TODO: If the
+ // type of the qualifier is inaccessible, we can cause private types to
+ // escape scope here, e.g. pos/t1107. I'm not sure how to properly handle
+ // this so for now it requires the type symbol be public.
+ def isGetClassCall = isGetClass(sym) && pre.typeSymbol.isPublic
+
+ def narrowIf(tree: Tree, condition: Boolean) =
+ if (condition) tree setType singleType(pre, sym) else tree
+
+ def checkStable(tree: Tree): Tree =
+ if (treeInfo.isStableIdentifierPattern(tree)) tree
+ else UnstableTreeError(tree)
+
+ if (tree.isErrorTyped)
+ tree
+ else if (!sym.isValue && isStableValueRequired) // (2)
+ NotAValueError(tree, sym)
+ else if (isStableIdPattern) // (1)
+ // A module reference in a pattern has type Foo.type, not "object Foo"
+ narrowIf(checkStable(tree), sym.isModuleNotMethod)
+ else if (isModuleTypedExpr) // (3)
+ narrowIf(tree, true)
+ else if (isGetClassCall) // (4)
+ tree setType MethodType(Nil, getClassReturnType(pre))
+ else
+ tree
}
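
Step (1) above concerns stable identifier patterns: a module used as a pattern is typed with its singleton type (`Foo.type`), not with the type of `object Foo`'s underlying class. A user-level illustration (names are illustrative):

    object PatternExample {
      case object Red
      case object Green
      def describe(x: Any): String = x match {
        case Red   => "red"     // stable identifier pattern, typed as Red.type
        case Green => "green"
        case _     => "other"
      }
    }
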
private def isNarrowable(tpe: Type): Boolean = unwrapWrapperTypes(tpe) match {
@@ -672,22 +631,21 @@ trait Typers extends Modes with Adaptations with Tags {
case _ => !phase.erasedTypes
}
- /**
- * @param tree ...
- * @param mode ...
- * @param pt ...
- * @return ...
- */
- def stabilizeFun(tree: Tree, mode: Int, pt: Type): Tree = {
+ def stabilizeFun(tree: Tree, mode: Mode, pt: Type): Tree = {
val sym = tree.symbol
val pre = tree match {
case Select(qual, _) => qual.tpe
- case _ => NoPrefix
+ case _ => NoPrefix
+ }
+ def stabilizable = (
+ pre.isStable
+ && sym.tpe.params.isEmpty
+ && (isStableContext(tree, mode, pt) || sym.isModule)
+ )
+ tree.tpe match {
+ case MethodType(_, _) if stabilizable => tree setType MethodType(Nil, singleType(pre, sym)) // TODO: should this be a NullaryMethodType?
+ case _ => tree
}
- if (tree.tpe.isInstanceOf[MethodType] && pre.isStable && sym.tpe.params.isEmpty &&
- (isStableContext(tree, mode, pt) || sym.isModule))
- tree.setType(MethodType(List(), singleType(pre, sym))) // TODO: should this be a NullaryMethodType?
- else tree
}
/** The member with given name of given qualifier tree */
@@ -728,17 +686,14 @@ trait Typers extends Modes with Adaptations with Tags {
context.undetparams = context1.undetparams
context.savedTypeBounds = context1.savedTypeBounds
context.namedApplyBlockInfo = context1.namedApplyBlockInfo
- if (context1.hasErrors) {
- stopStats()
- SilentTypeError(context1.errBuffer.head)
- } else {
- // If we have a successful result, emit any warnings it created.
- if (context1.hasWarnings) {
- context1.flushAndReturnWarningsBuffer() foreach {
- case (pos, msg) => unit.warning(pos, msg)
- }
- }
- SilentResultValue(result)
+ context1.firstError match {
+ case Some(err) =>
+ stopStats()
+ SilentTypeError(err)
+ case None =>
+ // If we have a successful result, emit any warnings it created.
+ context1.flushAndIssueWarnings()
+ SilentResultValue(result)
}
} else {
assert(context.bufferErrors || isPastTyper, "silent mode is not available past typer")
@@ -775,7 +730,7 @@ trait Typers extends Modes with Adaptations with Tags {
featureTrait.owner.ownerChain.takeWhile(_ != languageFeatureModule.moduleClass).reverse
val featureName = (nestedOwners map (_.name + ".")).mkString + featureTrait.name
def action(): Boolean = {
- def hasImport = inferImplicit(EmptyTree: Tree, featureTrait.tpe, true, false, context).isSuccess
+ def hasImport = inferImplicit(EmptyTree: Tree, featureTrait.tpe, reportAmbiguous = true, isView = false, context).isSuccess
def hasOption = settings.language.value exists (s => s == featureName || s == "_")
val OK = hasImport || hasOption
if (!OK) {
@@ -849,10 +804,12 @@ trait Typers extends Modes with Adaptations with Tags {
* (14) When in mode EXPRmode, apply a view
* If all this fails, error
*/
- protected def adapt(tree: Tree, mode: Int, pt: Type, original: Tree = EmptyTree): Tree = {
+ protected def adapt(tree: Tree, mode: Mode, pt: Type, original: Tree = EmptyTree): Tree = {
+ def hasUndets = context.undetparams.nonEmpty
+ def hasUndetsInMonoMode = hasUndets && !mode.inPolyMode
def adaptToImplicitMethod(mt: MethodType): Tree = {
- if (context.undetparams.nonEmpty) { // (9) -- should revisit dropped condition `(mode & POLYmode) == 0`
+ if (hasUndets) { // (9) -- should revisit dropped condition `hasUndetsInMonoMode`
// dropped so that type args of implicit method are inferred even if polymorphic expressions are allowed
// needed for implicits in 2.8 collection library -- maybe once #3346 is fixed, we can reinstate the condition?
context.undetparams = inferExprInstance(tree, context.extractUndetparams(), pt,
@@ -864,28 +821,27 @@ trait Typers extends Modes with Adaptations with Tags {
// avoid throwing spurious DivergentImplicit errors
if (context.hasErrors)
- return setError(tree)
-
- withCondConstrTyper(treeInfo.isSelfOrSuperConstrCall(tree)){ typer1 =>
- if (original != EmptyTree && pt != WildcardType)
- typer1.silent(tpr => {
- val withImplicitArgs = tpr.applyImplicitArgs(tree)
- if (tpr.context.hasErrors) tree // silent will wrap it in SilentTypeError anyway
- else tpr.typed(withImplicitArgs, mode, pt)
- }) match {
- case SilentResultValue(result) =>
- result
- case _ =>
+ setError(tree)
+ else
+ withCondConstrTyper(treeInfo.isSelfOrSuperConstrCall(tree))(typer1 =>
+ if (original != EmptyTree && pt != WildcardType) (
+ typer1 silent { tpr =>
+ val withImplicitArgs = tpr.applyImplicitArgs(tree)
+ if (tpr.context.hasErrors) tree // silent will wrap it in SilentTypeError anyway
+ else tpr.typed(withImplicitArgs, mode, pt)
+ }
+ orElse { _ =>
debuglog("fallback on implicits: " + tree + "/" + resetAllAttrs(original))
- val tree1 = typed(resetAllAttrs(original), mode, WildcardType)
+ val tree1 = typed(resetAllAttrs(original), mode)
// Q: `typed` already calls `pluginsTyped` and `adapt`. the only difference here is that
// we pass `EmptyTree` as the `original`. intended? added in 2009 (53d98e7d42) by martin.
- tree1.tpe = pluginsTyped(tree1.tpe, this, tree1, mode, pt)
+ tree1 setType pluginsTyped(tree1.tpe, this, tree1, mode, pt)
if (tree1.isEmpty) tree1 else adapt(tree1, mode, pt, EmptyTree)
- }
- else
- typer1.typed(typer1.applyImplicitArgs(tree), mode, pt)
- }
+ }
+ )
+ else
+ typer1.typed(typer1.applyImplicitArgs(tree), mode, pt)
+ )
}
def instantiateToMethodType(mt: MethodType): Tree = {
@@ -894,67 +850,74 @@ trait Typers extends Modes with Adaptations with Tags {
case Block(_, tree1) => tree1.symbol
case _ => tree.symbol
}
- if (!meth.isConstructor && !meth.isTermMacro && isFunctionType(pt)) { // (4.2)
- debuglog("eta-expanding " + tree + ":" + tree.tpe + " to " + pt)
+ if (!meth.isConstructor && isFunctionType(pt)) { // (4.2)
+ debuglog(s"eta-expanding $tree: ${tree.tpe} to $pt")
checkParamsConvertible(tree, tree.tpe)
val tree0 = etaExpand(context.unit, tree, this)
- // println("eta "+tree+" ---> "+tree0+":"+tree0.tpe+" undet: "+context.undetparams+ " mode: "+Integer.toHexString(mode))
-
- if (context.undetparams.nonEmpty) {
- // #2624: need to infer type arguments for eta expansion of a polymorphic method
- // context.undetparams contains clones of meth.typeParams (fresh ones were generated in etaExpand)
- // need to run typer on tree0, since etaExpansion sets the tpe's of its subtrees to null
- // can't type with the expected type, as we can't recreate the setup in (3) without calling typed
- // (note that (3) does not call typed to do the polymorphic type instantiation --
- // it is called after the tree has been typed with a polymorphic expected result type)
- instantiate(typed(tree0, mode, WildcardType), mode, pt)
- } else
+
+ // #2624: need to infer type arguments for eta expansion of a polymorphic method
+ // context.undetparams contains clones of meth.typeParams (fresh ones were generated in etaExpand)
+ // need to run typer on tree0, since etaExpansion sets the tpe's of its subtrees to null
+ // can't type with the expected type, as we can't recreate the setup in (3) without calling typed
+ // (note that (3) does not call typed to do the polymorphic type instantiation --
+ // it is called after the tree has been typed with a polymorphic expected result type)
+ if (hasUndets)
+ instantiate(typed(tree0, mode), mode, pt)
+ else
typed(tree0, mode, pt)
- } else if (!meth.isConstructor && mt.params.isEmpty) { // (4.3)
- adapt(typed(Apply(tree, List()) setPos tree.pos), mode, pt, original)
- } else if (context.implicitsEnabled) {
+ }
+ else if (!meth.isConstructor && mt.params.isEmpty) // (4.3)
+ adapt(typed(Apply(tree, Nil) setPos tree.pos), mode, pt, original)
+ else if (context.implicitsEnabled)
MissingArgsForMethodTpeError(tree, meth)
- } else {
+ else
setError(tree)
- }
}
def adaptType(): Tree = {
- if (inFunMode(mode)) {
- // todo. the commented line below makes sense for typechecking, say, TypeApply(Ident(`some abstract type symbol`), List(...))
- // because otherwise Ident will have its tpe set to a TypeRef, not to a PolyType, and `typedTypeApply` will fail
- // but this needs additional investigation, because it crashes t5228, gadts1 and maybe something else
- // tree setType tree.tpe.normalize
+ // @M When not typing a type constructor (!context.inTypeConstructorAllowed)
+ // or raw type (tree.symbol.isJavaDefined && context.unit.isJava), types must be of kind *,
+ // and thus parameterized types must be applied to their type arguments
+ // @M TODO: why do kind-* tree's have symbols, while higher-kinded ones don't?
+ def properTypeRequired = (
+ tree.hasSymbolField
+ && !context.inTypeConstructorAllowed
+ && !(tree.symbol.isJavaDefined && context.unit.isJava)
+ )
+ // @M: don't check tree.tpe.symbol.typeParams. check tree.tpe.typeParams!!!
+ // (e.g., m[Int] --> tree.tpe.symbol.typeParams.length == 1, tree.tpe.typeParams.length == 0!)
+ // @M: removed check for tree.hasSymbolField and replace tree.symbol by tree.tpe.symbol
+ // (TypeTree's must also be checked here, and they don't directly have a symbol)
+ def kindArityMismatch = (
+ context.inTypeConstructorAllowed
+ && !sameLength(tree.tpe.typeParams, pt.typeParams)
+ )
+ // Note that we treat Any and Nothing as kind-polymorphic.
+ // We can't perform this check when typing type arguments to an overloaded method before the overload is resolved
+ // (or in the case of an error type) -- this is indicated by pt == WildcardType (see case TypeApply in typed1).
+ def kindArityMismatchOk = tree.tpe.typeSymbol match {
+ case NothingClass | AnyClass => true
+ case _ => pt == WildcardType
+ }
+
+ // todo. It would make sense when mode.inFunMode to instead use
+ // tree setType tree.tpe.normalize
+ // when typechecking, say, TypeApply(Ident(`some abstract type symbol`), List(...))
+ // because otherwise Ident will have its tpe set to a TypeRef, not to a PolyType, and `typedTypeApply` will fail
+ // but this needs additional investigation, because it crashes t5228, gadts1 and maybe something else
+ if (mode.inFunMode)
tree
- } else if (tree.hasSymbol && !tree.symbol.typeParams.isEmpty && !inHKMode(mode) &&
- !(tree.symbol.isJavaDefined && context.unit.isJava)) { // (7)
- // @M When not typing a higher-kinded type ((mode & HKmode) == 0)
- // or raw type (tree.symbol.isJavaDefined && context.unit.isJava), types must be of kind *,
- // and thus parameterized types must be applied to their type arguments
- // @M TODO: why do kind-* tree's have symbols, while higher-kinded ones don't?
+ else if (properTypeRequired && tree.symbol.typeParams.nonEmpty) // (7)
MissingTypeParametersError(tree)
- } else if ( // (7.1) @M: check kind-arity
- // @M: removed check for tree.hasSymbol and replace tree.symbol by tree.tpe.symbol (TypeTree's must also be checked here, and they don't directly have a symbol)
- (inHKMode(mode)) &&
- // @M: don't check tree.tpe.symbol.typeParams. check tree.tpe.typeParams!!!
- // (e.g., m[Int] --> tree.tpe.symbol.typeParams.length == 1, tree.tpe.typeParams.length == 0!)
- !sameLength(tree.tpe.typeParams, pt.typeParams) &&
- !(tree.tpe.typeSymbol == AnyClass ||
- tree.tpe.typeSymbol == NothingClass ||
- pt == WildcardType)) {
- // Check that the actual kind arity (tree.symbol.typeParams.length) conforms to the expected
- // kind-arity (pt.typeParams.length). Full checks are done in checkKindBounds in Infer.
- // Note that we treat Any and Nothing as kind-polymorphic.
- // We can't perform this check when typing type arguments to an overloaded method before the overload is resolved
- // (or in the case of an error type) -- this is indicated by pt == WildcardType (see case TypeApply in typed1).
+ else if (kindArityMismatch && !kindArityMismatchOk) // (7.1) @M: check kind-arity
KindArityMismatchError(tree, pt)
- } else tree match { // (6)
+ else tree match { // (6)
case TypeTree() => tree
case _ => TypeTree(tree.tpe) setOriginal tree
}
}
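
Checks (7) and (7.1) above compare kinds: a proper type (kind *) is required unless a type constructor is expected, and when one is, the arities must line up. A user-level sketch (toy signature, not from the patch):

    object KindExample {
      def describe[M[_]](m: M[Int]): String = m.toString   // M must have kind * -> *
      describe[List](List(1, 2, 3))   // ok: List has kind * -> *
      // describe[Int](3)             // rejected by the kind check: Int has kind *, not * -> *
    }
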
- /**
+ /*
* To deal with the type slack between actual (run-time) types and statically known types, for each abstract type T,
* reflect its variance as a skolem that is upper-bounded by T (covariant position), or lower-bounded by T (contravariant).
*
@@ -984,10 +947,63 @@ trait Typers extends Modes with Adaptations with Tags {
def adaptConstrPattern(): Tree = { // (5)
def hasUnapplyMember(tp: Type) = reallyExists(unapplyMember(tp))
val overloadedExtractorOfObject = tree.symbol filter (sym => hasUnapplyMember(sym.tpe))
- // if the tree's symbol's type does not define an extractor, maybe the tree's type does
- // this is the case when we encounter an arbitrary tree as the target of an unapply call (rather than something that looks like a constructor call)
- // (for now, this only happens due to wrapClassTagUnapply, but when we support parameterized extractors, it will become more common place)
+ // if the tree's symbol's type does not define an extractor, maybe the tree's type does.
+ // this is the case when we encounter an arbitrary tree as the target of an unapply call
+ // (rather than something that looks like a constructor call.) (for now, this only happens
+ // due to wrapClassTagUnapply, but when we support parameterized extractors, it will become
+ // more commonplace)
val extractor = overloadedExtractorOfObject orElse unapplyMember(tree.tpe)
+ def convertToCaseConstructor(clazz: Symbol): TypeTree = {
+ // convert synthetic unapply of case class to case class constructor
+ val prefix = tree.tpe.prefix
+ val tree1 = TypeTree(clazz.primaryConstructor.tpe.asSeenFrom(prefix, clazz.owner))
+ .setOriginal(tree)
+
+ val skolems = new mutable.ListBuffer[TypeSymbol]
+ object variantToSkolem extends TypeMap(trackVariance = true) {
+ def apply(tp: Type) = mapOver(tp) match {
+ // !!! FIXME - skipping this when variance.isInvariant allows unsoundness, see SI-5189
+ case TypeRef(NoPrefix, tpSym, Nil) if !variance.isInvariant && tpSym.isTypeParameterOrSkolem && tpSym.owner.isTerm =>
+ // must initialize or tpSym.tpe might see random type params!!
+ // without this, we'll get very weird types inferred in test/scaladoc/run/SI-5933.scala
+ // TODO: why is that??
+ tpSym.initialize
+ val bounds = if (variance.isPositive) TypeBounds.upper(tpSym.tpe) else TypeBounds.lower(tpSym.tpe)
+ // origin must be the type param so we can deskolemize
+ val skolem = context.owner.newGADTSkolem(unit.freshTypeName("?"+tpSym.name), tpSym, bounds)
+ // println("mapping "+ tpSym +" to "+ skolem + " : "+ bounds +" -- pt= "+ pt +" in "+ context.owner +" at "+ context.tree )
+ skolems += skolem
+ skolem.tpe
+ case tp1 => tp1
+ }
+ }
+
+ // have to open up the existential and put the skolems in scope
+ // can't simply package up pt in an ExistentialType, because that takes us back to square one (List[_ <: T] == List[T] due to covariance)
+ val ptSafe = variantToSkolem(pt) // TODO: pt.skolemizeExistential(context.owner, tree) ?
+ val freeVars = skolems.toList
+
+ // use "tree" for the context, not context.tree: don't make another CaseDef context,
+ // as instantiateTypeVar's bounds would end up there
+ val ctorContext = context.makeNewScope(tree, context.owner)
+ freeVars foreach ctorContext.scope.enter
+ newTyper(ctorContext).infer.inferConstructorInstance(tree1, clazz.typeParams, ptSafe)
+
+ // simplify types without losing safety,
+ // so that we get rid of unnecessary type slack, and so that error messages don't unnecessarily refer to skolems
+ val extrapolate = new ExistentialExtrapolation(freeVars) extrapolate (_: Type)
+ val extrapolated = tree1.tpe match {
+ case MethodType(ctorArgs, res) => // ctorArgs are actually in a covariant position, since this is the type of the subpatterns of the pattern represented by this Apply node
+ ctorArgs foreach (p => p.info = extrapolate(p.info)) // no need to clone, this is OUR method type
+ copyMethodType(tree1.tpe, ctorArgs, extrapolate(res))
+ case tp => tp
+ }
+
+ // once the containing CaseDef has been type checked (see typedCase),
+ // tree1's remaining type-slack skolems will be deskolemized (to the method type parameter skolems)
+ tree1 setType extrapolated
+ }
+
if (extractor != NoSymbol) {
// if we did some ad-hoc overloading resolution, update the tree's symbol
// do not update the symbol if the tree's symbol's type does not define an unapply member
@@ -996,72 +1012,31 @@ trait Typers extends Modes with Adaptations with Tags {
tree setSymbol overloadedExtractorOfObject
tree.tpe match {
- case OverloadedType(pre, alts) => tree.tpe = overloadedType(pre, alts filter (alt => hasUnapplyMember(alt.tpe)))
+ case OverloadedType(pre, alts) => tree setType overloadedType(pre, alts filter (alt => hasUnapplyMember(alt.tpe)))
case _ =>
}
val unapply = unapplyMember(extractor.tpe)
val clazz = unapplyParameterType(unapply)
- if (unapply.isCase && clazz.isCase && !(clazz.ancestors exists (_.isCase))) {
- // convert synthetic unapply of case class to case class constructor
- val prefix = tree.tpe.prefix
- val tree1 = TypeTree(clazz.primaryConstructor.tpe.asSeenFrom(prefix, clazz.owner))
- .setOriginal(tree)
-
- val skolems = new mutable.ListBuffer[TypeSymbol]
- object variantToSkolem extends VariantTypeMap {
- def apply(tp: Type) = mapOver(tp) match {
- case TypeRef(NoPrefix, tpSym, Nil) if variance != 0 && tpSym.isTypeParameterOrSkolem && tpSym.owner.isTerm =>
- // must initialize or tpSym.tpe might see random type params!!
- // without this, we'll get very weird types inferred in test/scaladoc/run/SI-5933.scala
- // TODO: why is that??
- tpSym.initialize
- val bounds = if (variance == 1) TypeBounds.upper(tpSym.tpe) else TypeBounds.lower(tpSym.tpe)
- // origin must be the type param so we can deskolemize
- val skolem = context.owner.newGADTSkolem(unit.freshTypeName("?"+tpSym.name), tpSym, bounds)
- // println("mapping "+ tpSym +" to "+ skolem + " : "+ bounds +" -- pt= "+ pt +" in "+ context.owner +" at "+ context.tree )
- skolems += skolem
- skolem.tpe
- case tp1 => tp1
- }
- }
-
- // have to open up the existential and put the skolems in scope
- // can't simply package up pt in an ExistentialType, because that takes us back to square one (List[_ <: T] == List[T] due to covariance)
- val ptSafe = variantToSkolem(pt) // TODO: pt.skolemizeExistential(context.owner, tree) ?
- val freeVars = skolems.toList
-
- // use "tree" for the context, not context.tree: don't make another CaseDef context,
- // as instantiateTypeVar's bounds would end up there
- val ctorContext = context.makeNewScope(tree, context.owner)
- freeVars foreach ctorContext.scope.enter
- newTyper(ctorContext).infer.inferConstructorInstance(tree1, clazz.typeParams, ptSafe)
-
- // simplify types without losing safety,
- // so that we get rid of unnecessary type slack, and so that error messages don't unnecessarily refer to skolems
- val extrapolate = new ExistentialExtrapolation(freeVars) extrapolate (_: Type)
- val extrapolated = tree1.tpe match {
- case MethodType(ctorArgs, res) => // ctorArgs are actually in a covariant position, since this is the type of the subpatterns of the pattern represented by this Apply node
- ctorArgs foreach (p => p.info = extrapolate(p.info)) // no need to clone, this is OUR method type
- copyMethodType(tree1.tpe, ctorArgs, extrapolate(res))
- case tp => tp
- }
-
- // once the containing CaseDef has been type checked (see typedCase),
- // tree1's remaining type-slack skolems will be deskolemized (to the method type parameter skolems)
- tree1 setType extrapolated
+ if (unapply.isCase && clazz.isCase) {
+ convertToCaseConstructor(clazz)
} else {
tree
}
} else {
- CaseClassConstructorError(tree)
+ val clazz = tree.tpe.typeSymbol.linkedClassOfClass
+ if (clazz.isCase)
+ convertToCaseConstructor(clazz)
+ else
+ CaseClassConstructorError(tree)
}
}
def insertApply(): Tree = {
- assert(!inHKMode(mode), modeString(mode)) //@M
+ assert(!context.inTypeConstructorAllowed, mode) //@M
val adapted = adaptToName(tree, nme.apply)
- def stabilize0(pre: Type): Tree = stabilize(adapted, pre, EXPRmode | QUALmode, WildcardType)
+ def stabilize0(pre: Type): Tree = stabilize(adapted, pre, MonoQualifierModes, WildcardType)
+
// TODO reconcile the overlap between Typers#stablize and TreeGen.stabilize
val qual = adapted match {
case This(_) =>
@@ -1082,31 +1057,199 @@ trait Typers extends Modes with Adaptations with Tags {
Select(qual setPos tree.pos.makeTransparent, nme.apply)
}
}
+ def adaptConstant(value: Constant): Tree = {
+ val sym = tree.symbol
+ if (sym != null && sym.isDeprecated) {
+ val msg = sym.toString + sym.locationString + " is deprecated: " + sym.deprecationMessage.getOrElse("")
+ unit.deprecationWarning(tree.pos, msg)
+ }
+ treeCopy.Literal(tree, value)
+ }
+
+ // Ignore type errors raised in later phases that are due to mismatching types with existential skolems
+ // We have lift crashing in 2.9 with an adapt failure in the pattern matcher.
+ // Here's my hypothesis as to why this happens. The pattern matcher defines a variable of type
+ //
+ // val x: T = expr
+ //
+ // where T is the type of expr, but T contains existential skolems ts.
+ // In that case, this value definition does not typecheck.
+ // The value definition
+ //
+ // val x: T forSome { ts } = expr
+ //
+ // would typecheck. Or one can simply leave out the type of the `val`:
+ //
+ // val x = expr
+ //
+ // SI-6029 shows another case where we also fail (in uncurry), but this time the expected
+ // type is an existential type.
+ //
+ // The reason for both failures has to do with the way we (don't) transform
+ // skolem types along with the trees that contain them. We'd need a
+ // radically different approach to do it. But before investing a lot of time
+ // to do this (I have already sunk 3 full days into what were, in the end, futile
+ // attempts to consistently transform skolems and fix 6029), I'd like to
+ // investigate ways to avoid skolems completely.
+ //
+ // upd. The same problem happens when we try to typecheck the result of macro expansion against its expected type
+ // (which is the return type of the macro definition instantiated in the context of expandee):
+ //
+ // Test.scala:2: error: type mismatch;
+ // found : $u.Expr[Class[_ <: Object]]
+ // required: reflect.runtime.universe.Expr[Class[?0(in value <local Test>)]] where type ?0(in value <local Test>) <: Object
+ // scala.reflect.runtime.universe.reify(new Object().getClass)
+ // ^
+ // Therefore following Martin's advice I use this logic to recover from skolem errors after macro expansions
+ // (by adding the ` || tree.attachments.get[MacroExpansionAttachment].isDefined` clause to the conditional above).
+ //
+ def adaptMismatchedSkolems() = {
+ def canIgnoreMismatch = (
+ !context.reportErrors && isPastTyper
+ || tree.attachments.get[MacroExpansionAttachment].isDefined
+ )
+ def bound = pt match {
+ case ExistentialType(qs, _) => qs
+ case _ => Nil
+ }
+ def msg = sm"""
+ |Recovering from existential or skolem type error in
+ | $tree
+ |with type: ${tree.tpe}
+ | pt: $pt
+ | context: ${context.tree}
+ | adapted
+ """.trim
+
+ val boundOrSkolems = if (canIgnoreMismatch) bound ++ pt.skolemsExceptMethodTypeParams else Nil
+ boundOrSkolems match {
+ case Nil => AdaptTypeError(tree, tree.tpe, pt) ; setError(tree)
+ case _ => logResult(msg)(adapt(tree, mode, deriveTypeWithWildcards(boundOrSkolems)(pt)))
+ }
+ }
+
+ def fallbackAfterVanillaAdapt(): Tree = {
+ def isPopulatedPattern = {
+ if ((tree.symbol ne null) && tree.symbol.isModule)
+ inferModulePattern(tree, pt)
+
+ isPopulated(tree.tpe, approximateAbstracts(pt))
+ }
+ if (mode.inPatternMode && isPopulatedPattern)
+ return tree
+
+ val tree1 = constfold(tree, pt) // (10) (11)
+ if (tree1.tpe <:< pt)
+ return adapt(tree1, mode, pt, original)
+
+ if (mode.typingExprNotFun) {
+ // The <: Any requirement inhibits attempts to adapt continuation types
+ // to non-continuation types.
+ if (tree.tpe <:< AnyTpe) pt.dealias match {
+ case TypeRef(_, UnitClass, _) => // (12)
+ if (settings.warnValueDiscard)
+ context.unit.warning(tree.pos, "discarded non-Unit value")
+ return typedPos(tree.pos, mode, pt)(Block(List(tree), Literal(Constant(()))))
+ case TypeRef(_, sym, _) if isNumericValueClass(sym) && isNumericSubType(tree.tpe, pt) =>
+ if (settings.warnNumericWiden)
+ context.unit.warning(tree.pos, "implicit numeric widening")
+ return typedPos(tree.pos, mode, pt)(Select(tree, "to" + sym.name))
+ case _ =>
+ }
+ if (pt.dealias.annotations.nonEmpty && canAdaptAnnotations(tree, this, mode, pt)) // (13)
+ return typed(adaptAnnotations(tree, this, mode, pt), mode, pt)
+
+ if (hasUndets)
+ return instantiate(tree, mode, pt)
+
+ if (context.implicitsEnabled && !pt.isError && !tree.isErrorTyped) {
+ // (14); the condition prevents chains of views
+ debuglog("inferring view from " + tree.tpe + " to " + pt)
+ inferView(tree, tree.tpe, pt, reportAmbiguous = true) match {
+ case EmptyTree =>
+ case coercion =>
+ def msg = "inferred view from " + tree.tpe + " to " + pt + " = " + coercion + ":" + coercion.tpe
+ if (settings.logImplicitConv)
+ unit.echo(tree.pos, msg)
+
+ debuglog(msg)
+ val silentContext = context.makeImplicit(context.ambiguousErrors)
+ val res = newTyper(silentContext).typed(
+ new ApplyImplicitView(coercion, List(tree)) setPos tree.pos, mode, pt)
+ silentContext.firstError match {
+ case Some(err) => context.issue(err)
+ case None => return res
+ }
+ }
+ }
+ }
+
+ debuglog("error tree = " + tree)
+ if (settings.debug && settings.explaintypes)
+ explainTypes(tree.tpe, pt)
+
+ if (tree.tpe.isErroneous || pt.isErroneous)
+ setError(tree)
+ else
+ adaptMismatchedSkolems()
+ }
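
Branch (12) and the numeric case in `fallbackAfterVanillaAdapt` above are where the value-discard and numeric-widening adaptations happen (their warnings are gated by the corresponding `-Ywarn` settings). User-level code that exercises each:

    object AdaptExamples {
      def bumpAll(xs: List[Int]): Unit = {
        xs.map(_ + 1)      // (12) non-Unit value discarded; adapted to { xs.map(_ + 1); () }
      }
      val n: Int = "hello".length
      val d: Double = n    // numeric widening: adapted to n.toDouble
    }
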
+
+ def vanillaAdapt(tree: Tree) = {
+ def applyPossible = {
+ def applyMeth = member(adaptToName(tree, nme.apply), nme.apply)
+ def hasPolymorphicApply = applyMeth.alternatives exists (_.tpe.typeParams.nonEmpty)
+ def hasMonomorphicApply = applyMeth.alternatives exists (_.tpe.paramSectionCount > 0)
+
+ dyna.acceptsApplyDynamic(tree.tpe) || (
+ if (mode.inTappMode)
+ tree.tpe.typeParams.isEmpty && hasPolymorphicApply
+ else
+ hasMonomorphicApply
+ )
+ }
+ def shouldInsertApply(tree: Tree) = mode.typingExprFun && {
+ tree.tpe match {
+ case _: MethodType | _: OverloadedType | _: PolyType => false
+ case _ => applyPossible
+ }
+ }
+ if (tree.isType)
+ adaptType()
+ else if (mode.typingExprNotFun && treeInfo.isMacroApplication(tree))
+ macroExpandApply(this, tree, mode, pt)
+ else if (mode.typingConstructorPattern)
+ adaptConstrPattern()
+ else if (shouldInsertApply(tree))
+ insertApply()
+ else if (hasUndetsInMonoMode) { // (9)
+ assert(!context.inTypeConstructorAllowed, context) //@M
+ instantiatePossiblyExpectingUnit(tree, mode, pt)
+ }
+ else if (tree.tpe <:< pt)
+ tree
+ else
+ fallbackAfterVanillaAdapt()
+ }
// begin adapt
- tree.tpe match {
+ if (isMacroImplRef(tree)) {
+ if (treeInfo.isMacroApplication(tree)) adapt(unmarkMacroImplRef(tree), mode, pt, original)
+ else tree
+ } else tree.tpe match {
case atp @ AnnotatedType(_, _, _) if canAdaptAnnotations(tree, this, mode, pt) => // (-1)
adaptAnnotations(tree, this, mode, pt)
- case ct @ ConstantType(value) if inNoModes(mode, TYPEmode | FUNmode) && (ct <:< pt) && !forScaladoc && !forInteractive => // (0)
- val sym = tree.symbol
- if (sym != null && sym.isDeprecated) {
- val msg = sym.toString + sym.locationString + " is deprecated: " + sym.deprecationMessage.getOrElse("")
- unit.deprecationWarning(tree.pos, msg)
- }
- treeCopy.Literal(tree, value)
- case OverloadedType(pre, alts) if !inFunMode(mode) => // (1)
+ case ct @ ConstantType(value) if mode.inNone(TYPEmode | FUNmode) && (ct <:< pt) && canAdaptConstantTypeToLiteral => // (0)
+ adaptConstant(value)
+ case OverloadedType(pre, alts) if !mode.inFunMode => // (1)
inferExprAlternative(tree, pt)
adapt(tree, mode, pt, original)
case NullaryMethodType(restpe) => // (2)
adapt(tree setType restpe, mode, pt, original)
- case TypeRef(_, ByNameParamClass, List(arg)) if ((mode & EXPRmode) != 0) => // (2)
+ case TypeRef(_, ByNameParamClass, arg :: Nil) if mode.inExprMode => // (2)
adapt(tree setType arg, mode, pt, original)
- case tr @ TypeRef(_, sym, _) if sym.isAliasType && tr.dealias.isInstanceOf[ExistentialType] &&
- ((mode & (EXPRmode | LHSmode)) == EXPRmode) =>
- adapt(tree setType tr.dealias.skolemizeExistential(context.owner, tree), mode, pt, original)
- case et @ ExistentialType(_, _) if ((mode & (EXPRmode | LHSmode)) == EXPRmode) =>
- adapt(tree setType et.skolemizeExistential(context.owner, tree), mode, pt, original)
- case PolyType(tparams, restpe) if inNoModes(mode, TAPPmode | PATTERNmode | HKmode) => // (3)
+ case tp if mode.typingExprNotLhs && isExistentialType(tp) =>
+ adapt(tree setType tp.dealias.skolemizeExistential(context.owner, tree), mode, pt, original)
+ case PolyType(tparams, restpe) if mode.inNone(TAPPmode | PATTERNmode) && !context.inTypeConstructorAllowed => // (3)
// assert((mode & HKmode) == 0) //@M a PolyType in HKmode represents an anonymous type function,
// we're in HKmode since a higher-kinded type is expected --> hence, don't implicitly apply it to type params!
// ticket #2197 triggered turning the assert into a guard
@@ -1115,176 +1258,24 @@ trait Typers extends Modes with Adaptations with Tags {
// -- are we sure we want to expand aliases this early?
// -- what caused this change in behaviour??
val tparams1 = cloneSymbols(tparams)
- val tree1 = if (tree.isType) tree
- else TypeApply(tree, tparams1 map (tparam =>
- TypeTree(tparam.tpeHK) setPos tree.pos.focus)) setPos tree.pos
+ val tree1 = (
+ if (tree.isType) tree
+ else TypeApply(tree, tparams1 map (tparam => TypeTree(tparam.tpeHK) setPos tree.pos.focus)) setPos tree.pos
+ )
context.undetparams ++= tparams1
notifyUndetparamsAdded(tparams1)
adapt(tree1 setType restpe.substSym(tparams, tparams1), mode, pt, original)
- case mt: MethodType if mt.isImplicit && ((mode & (EXPRmode | FUNmode | LHSmode)) == EXPRmode) => // (4.1)
- adaptToImplicitMethod(mt)
- case mt: MethodType if (((mode & (EXPRmode | FUNmode | LHSmode)) == EXPRmode) &&
- (context.undetparams.isEmpty || inPolyMode(mode))) && !(tree.symbol != null && tree.symbol.isTermMacro) =>
+ case mt: MethodType if mode.typingExprNotFunNotLhs && mt.isImplicit => // (4.1)
+ adaptToImplicitMethod(mt)
+ case mt: MethodType if mode.typingExprNotFunNotLhs && !hasUndetsInMonoMode && !treeInfo.isMacroApplicationOrBlock(tree) =>
instantiateToMethodType(mt)
-
case _ =>
- def shouldInsertApply(tree: Tree) = inAllModes(mode, EXPRmode | FUNmode) && (tree.tpe match {
- case _: MethodType | _: OverloadedType | _: PolyType => false
- case _ => applyPossible
- })
- def applyPossible = {
- def applyMeth = member(adaptToName(tree, nme.apply), nme.apply)
- dyna.acceptsApplyDynamic(tree.tpe) || (
- if ((mode & TAPPmode) != 0)
- tree.tpe.typeParams.isEmpty && applyMeth.filter(!_.tpe.typeParams.isEmpty) != NoSymbol
- else
- applyMeth.filter(_.tpe.paramSectionCount > 0) != NoSymbol
- )
- }
- if (tree.isType)
- adaptType()
- else if (
- inExprModeButNot(mode, FUNmode) && !tree.isDef && // typechecking application
- tree.symbol != null && tree.symbol.isTermMacro && // of a macro
- !tree.attachments.get[SuppressMacroExpansionAttachment.type].isDefined)
- macroExpand(this, tree, mode, pt)
- else if ((mode & (PATTERNmode | FUNmode)) == (PATTERNmode | FUNmode))
- adaptConstrPattern()
- else if (shouldInsertApply(tree))
- insertApply()
- else if (context.undetparams.nonEmpty && !inPolyMode(mode)) { // (9)
- assert(!inHKMode(mode), modeString(mode)) //@M
- instantiatePossiblyExpectingUnit(tree, mode, pt)
- } else if (tree.tpe <:< pt) {
- tree
- } else {
- def fallBack: Tree = {
- if (inPatternMode(mode)) {
- if ((tree.symbol ne null) && tree.symbol.isModule)
- inferModulePattern(tree, pt)
- if (isPopulated(tree.tpe, approximateAbstracts(pt)))
- return tree
- }
- val tree1 = constfold(tree, pt) // (10) (11)
- if (tree1.tpe <:< pt) adapt(tree1, mode, pt, original)
- else {
- if (inExprModeButNot(mode, FUNmode)) {
- pt.dealias match {
- case TypeRef(_, sym, _) =>
- // note: was if (pt.typeSymbol == UnitClass) but this leads to a potentially
- // infinite expansion if pt is constant type ()
- if (sym == UnitClass && tree.tpe <:< AnyClass.tpe) { // (12)
- if (settings.warnValueDiscard.value)
- context.unit.warning(tree.pos, "discarded non-Unit value")
- return typedPos(tree.pos, mode, pt) {
- Block(List(tree), Literal(Constant()))
- }
- } else if (isNumericValueClass(sym) && isNumericSubType(tree.tpe, pt)) {
- if (settings.warnNumericWiden.value)
- context.unit.warning(tree.pos, "implicit numeric widening")
- return typedPos(tree.pos, mode, pt) {
- Select(tree, "to" + sym.name)
- }
- }
- case AnnotatedType(_, _, _) if canAdaptAnnotations(tree, this, mode, pt) => // (13)
- return typed(adaptAnnotations(tree, this, mode, pt), mode, pt)
- case _ =>
- }
- if (!context.undetparams.isEmpty) {
- return instantiate(tree, mode, pt)
- }
- if (context.implicitsEnabled && !pt.isError && !tree.isErrorTyped) {
- // (14); the condition prevents chains of views
- debuglog("inferring view from " + tree.tpe + " to " + pt)
- val coercion = inferView(tree, tree.tpe, pt, true)
- // convert forward views of delegate types into closures wrapped around
- // the delegate's apply method (the "Invoke" method, which was translated into apply)
- if (forMSIL && coercion != null && isCorrespondingDelegate(tree.tpe, pt)) {
- val meth: Symbol = tree.tpe.member(nme.apply)
- debuglog("replacing forward delegate view with: " + meth + ":" + meth.tpe)
- return typed(Select(tree, meth), mode, pt)
- }
- if (coercion != EmptyTree) {
- def msg = "inferred view from " + tree.tpe + " to " + pt + " = " + coercion + ":" + coercion.tpe
- if (settings.logImplicitConv.value)
- unit.echo(tree.pos, msg)
-
- debuglog(msg)
- val silentContext = context.makeImplicit(context.ambiguousErrors)
- val res = newTyper(silentContext).typed(
- new ApplyImplicitView(coercion, List(tree)) setPos tree.pos, mode, pt)
- if (silentContext.hasErrors) context.issue(silentContext.errBuffer.head) else return res
- }
- }
- }
- if (settings.debug.value) {
- log("error tree = " + tree)
- if (settings.explaintypes.value) explainTypes(tree.tpe, pt)
- }
-
- val found = tree.tpe
- if (!found.isErroneous && !pt.isErroneous) {
- if ((!context.reportErrors && isPastTyper) || tree.attachments.get[MacroExpansionAttachment].isDefined) {
- val (bound, req) = pt match {
- case ExistentialType(qs, tpe) => (qs, tpe)
- case _ => (Nil, pt)
- }
- val boundOrSkolems = bound ++ pt.skolemsExceptMethodTypeParams
- if (boundOrSkolems.nonEmpty) {
- // Ignore type errors raised in later phases that are due to mismatching types with existential skolems
- // We have lift crashing in 2.9 with an adapt failure in the pattern matcher.
- // Here's my hypothsis why this happens. The pattern matcher defines a variable of type
- //
- // val x: T = expr
- //
- // where T is the type of expr, but T contains existential skolems ts.
- // In that case, this value definition does not typecheck.
- // The value definition
- //
- // val x: T forSome { ts } = expr
- //
- // would typecheck. Or one can simply leave out the type of the `val`:
- //
- // val x = expr
- //
- // SI-6029 shows another case where we also fail (in uncurry), but this time the expected
- // type is an existential type.
- //
- // The reason for both failures have to do with the way we (don't) transform
- // skolem types along with the trees that contain them. We'd need a
- // radically different approach to do it. But before investing a lot of time to
- // to do this (I have already sunk 3 full days with in the end futile attempts
- // to consistently transform skolems and fix 6029), I'd like to
- // investigate ways to avoid skolems completely.
- //
- // upd. The same problem happens when we try to typecheck the result of macro expansion against its expected type
- // (which is the return type of the macro definition instantiated in the context of expandee):
- //
- // Test.scala:2: error: type mismatch;
- // found : $u.Expr[Class[_ <: Object]]
- // required: reflect.runtime.universe.Expr[Class[?0(in value <local Test>)]] where type ?0(in value <local Test>) <: Object
- // scala.reflect.runtime.universe.reify(new Object().getClass)
- // ^
- // Therefore following Martin's advice I use this logic to recover from skolem errors after macro expansions
- // (by adding the ` || tree.attachments.get[MacroExpansionAttachment].isDefined` clause to the conditional above).
- //
- log("recovering from existential or skolem type error in tree \n" + tree + "\nwith type " + tree.tpe + "\n expected type = " + pt + "\n context = " + context.tree)
- return adapt(tree, mode, deriveTypeWithWildcards(boundOrSkolems)(pt))
- }
- }
- // create an actual error
- AdaptTypeError(tree, found, pt)
- }
- setError(tree)
- }
- }
- fallBack
- }
+ vanillaAdapt(tree)
}
}
- def instantiate(tree: Tree, mode: Int, pt: Type): Tree = {
+ def instantiate(tree: Tree, mode: Mode, pt: Type): Tree = {
inferExprInstance(tree, context.extractUndetparams(), pt)
adapt(tree, mode, pt)
}
@@ -1292,19 +1283,17 @@ trait Typers extends Modes with Adaptations with Tags {
* with expected type Unit, but if that fails, try again with pt = WildcardType
* and discard the expression.
*/
- def instantiateExpectingUnit(tree: Tree, mode: Int): Tree = {
+ def instantiateExpectingUnit(tree: Tree, mode: Mode): Tree = {
val savedUndetparams = context.undetparams
- silent(_.instantiate(tree, mode, UnitClass.tpe)) match {
- case SilentResultValue(t) => t
- case _ =>
- context.undetparams = savedUndetparams
- val valueDiscard = atPos(tree.pos)(Block(List(instantiate(tree, mode, WildcardType)), Literal(Constant())))
- typed(valueDiscard, mode, UnitClass.tpe)
+ silent(_.instantiate(tree, mode, UnitTpe)) orElse { _ =>
+ context.undetparams = savedUndetparams
+ val valueDiscard = atPos(tree.pos)(Block(List(instantiate(tree, mode, WildcardType)), Literal(Constant(()))))
+ typed(valueDiscard, mode, UnitTpe)
}
}
- def instantiatePossiblyExpectingUnit(tree: Tree, mode: Int, pt: Type): Tree = {
- if (inExprModeButNot(mode, FUNmode) && pt.typeSymbol == UnitClass)
+ def instantiatePossiblyExpectingUnit(tree: Tree, mode: Mode, pt: Type): Tree = {
+ if (mode.typingExprNotFun && pt.typeSymbol == UnitClass)
instantiateExpectingUnit(tree, mode)
else
instantiate(tree, mode, pt)
@@ -1340,7 +1329,7 @@ trait Typers extends Modes with Adaptations with Tags {
inferView(qual, qual.tpe, searchTemplate, reportAmbiguous, saveErrors) match {
case EmptyTree => qual
case coercion =>
- if (settings.logImplicitConv.value)
+ if (settings.logImplicitConv)
unit.echo(qual.pos,
"applied implicit conversion from %s to %s = %s".format(
qual.tpe, searchTemplate, coercion.symbol.defString))
@@ -1363,43 +1352,36 @@ trait Typers extends Modes with Adaptations with Tags {
def doAdapt(restpe: Type) =
//util.trace("adaptToArgs "+qual+", name = "+name+", argtpes = "+(args map (_.tpe))+", pt = "+pt+" = ")
adaptToMember(qual, HasMethodMatching(name, args map (_.tpe), restpe), reportAmbiguous, saveErrors)
- if (pt != WildcardType) {
- silent(_ => doAdapt(pt)) match {
- case SilentResultValue(result) if result != qual =>
- result
- case _ =>
- debuglog("fallback on implicits in adaptToArguments: "+qual+" . "+name)
- doAdapt(WildcardType)
- }
- } else
+
+ if (pt == WildcardType)
doAdapt(pt)
+ else silent(_ => doAdapt(pt)) filter (_ != qual) orElse (_ =>
+ logResult(s"fallback on implicits in adaptToArguments: $qual.$name")(doAdapt(WildcardType))
+ )
}
/** Try to apply an implicit conversion to `qual` so that it contains
* a method `name`. If that's ambiguous try taking arguments into
* account using `adaptToArguments`.
*/
- def adaptToMemberWithArgs(tree: Tree, qual: Tree, name: Name, mode: Int, reportAmbiguous: Boolean, saveErrors: Boolean): Tree = {
- def onError(reportError: => Tree): Tree = {
- context.tree match {
- case Apply(tree1, args) if (tree1 eq tree) && args.nonEmpty =>
- silent(_.typedArgs(args.map(_.duplicate), mode)) match {
- case SilentResultValue(args) =>
- if (args exists (_.isErrorTyped))
- reportError
- else
- adaptToArguments(qual, name, args, WildcardType, reportAmbiguous, saveErrors)
- case _ =>
- reportError
- }
- case _ =>
- reportError
- }
- }
- silent(_.adaptToMember(qual, HasMember(name), false)) match {
- case SilentResultValue(res) => res
- case SilentTypeError(err) => onError({if (reportAmbiguous) { context.issue(err) }; setError(tree)})
+ def adaptToMemberWithArgs(tree: Tree, qual: Tree, name: Name, mode: Mode, reportAmbiguous: Boolean, saveErrors: Boolean): Tree = {
+ def onError(reportError: => Tree): Tree = context.tree match {
+ case Apply(tree1, args) if (tree1 eq tree) && args.nonEmpty =>
+ ( silent (_.typedArgs(args.map(_.duplicate), mode))
+ filter (xs => !(xs exists (_.isErrorTyped)))
+ map (xs => adaptToArguments(qual, name, xs, WildcardType, reportAmbiguous, saveErrors))
+ orElse ( _ => reportError)
+ )
+ case _ =>
+ reportError
}
+
+ silent(_.adaptToMember(qual, HasMember(name), reportAmbiguous = false)) orElse (err =>
+ onError {
+ if (reportAmbiguous) context issue err
+ setError(tree)
+ }
+ )
}
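// Editor-added sketch (not part of this patch): a hedged, user-level example of the adaptation
// performed by adaptToMemberWithArgs/adaptToArguments above. `qual` lacks the requested member,
// so an implicit view is searched, taking the pending application's argument types into account.
object AdaptToMemberSketch {
  class Foo
  class Bar { def bar(i: Int): Int = i + 1 }
  implicit def fooToBar(f: Foo): Bar = new Bar   // the coercion the typer may insert
  val qual = new Foo
  val res  = qual.bar(41)                        // typechecked as fooToBar(qual).bar(41)
}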
  /** Try to apply an implicit conversion to `qual` so that it contains a
@@ -1410,13 +1392,6 @@ trait Typers extends Modes with Adaptations with Tags {
if (member(qual, name) != NoSymbol) qual
else adaptToMember(qual, HasMember(name))
- private def typePrimaryConstrBody(clazz : Symbol, cbody: Tree, tparams: List[Symbol], enclTparams: List[Symbol], vparamss: List[List[ValDef]]): Tree = {
- // XXX: see about using the class's symbol....
- enclTparams foreach (sym => context.scope.enter(sym))
- namer.enterValueParams(vparamss)
- typed(cbody)
- }
-
private def validateNoCaseAncestor(clazz: Symbol) = {
if (!phase.erasedTypes) {
for (ancestor <- clazz.ancestors find (_.isCase)) {
@@ -1518,126 +1493,251 @@ trait Typers extends Modes with Adaptations with Tags {
unit.error(tparam.pos, "type parameter of value class may not be specialized")
}
- def parentTypes(templ: Template): List[Tree] =
- if (templ.parents.isEmpty) List(atPos(templ.pos)(TypeTree(AnyRefClass.tpe)))
- else try {
- val clazz = context.owner
- // Normalize supertype and mixins so that supertype is always a class, not a trait.
- var supertpt = typedTypeConstructor(templ.parents.head)
- val firstParent = supertpt.tpe.typeSymbol
- var mixins = templ.parents.tail map typedType
- // If first parent is a trait, make it first mixin and add its superclass as first parent
- while ((supertpt.tpe.typeSymbol ne null) && supertpt.tpe.typeSymbol.initialize.isTrait) {
- val supertpt1 = typedType(supertpt)
- if (!supertpt1.isErrorTyped) {
- mixins = supertpt1 :: mixins
- supertpt = TypeTree(supertpt1.tpe.firstParent) setPos supertpt.pos.focus
+ /** Typechecks a parent type reference.
+ *
+ * This typecheck is harder than it might look, because it should honor early
+ * definitions and also perform type argument inference with the help of super call
+ * arguments provided in `encodedtpt`.
+ *
+   * The method is called in batches (a batch = one call per parent type referenced),
+ * two batches per definition: once from namer, when entering a ClassDef or a ModuleDef
+ * and once from typer, when typechecking the definition.
+ *
+ * ***Arguments***
+ *
+ * `encodedtpt` represents the parent type reference wrapped in an `Apply` node
+ * which indicates value arguments (i.e. type macro arguments or super constructor call arguments)
+ * If no value arguments are provided by the user, the `Apply` node is still
+ * there, but its `args` will be set to `Nil`.
+ * This argument is synthesized by `tools.nsc.ast.Parsers.templateParents`.
+ *
+ * `templ` is an enclosing template, which contains a primary constructor synthesized by the parser.
+ * Such a constructor is a DefDef which contains early initializers and maybe a super constructor call
+ * (I wrote "maybe" because trait constructors don't call super constructors).
+ * This argument is synthesized by `tools.nsc.ast.Trees.Template`.
+ *
+   * `inMixinPosition` indicates whether the reference appears after the first entry
+   * in the list of parents (and therefore cannot be a class).
+ *
+ * ***Return value and side effects***
+ *
+ * Returns a `TypeTree` representing a resolved parent type.
+   * If the typechecked parent reference implies a non-nullary and non-empty argument list,
+   * this argument list is attached to the returned value in a SuperArgsAttachment.
+   * The attachment is necessary for the subsequent typecheck to fix up a super constructor call
+ * in the body of the primary constructor (see `typedTemplate` for details).
+ *
+ * This method might invoke `typedPrimaryConstrBody`, hence it might cause the side effects
+ * described in the docs of that method. It might also attribute the Super(_, _) reference
+ * (if present) inside the primary constructor of `templ`.
+ *
+ * ***Example***
+ *
+ * For the following definition:
+ *
+ * class D extends {
+ * val x = 2
+ * val y = 4
+ * } with B(x)(3) with C(y) with T
+ *
+ * this method will be called six times:
+ *
+ * (3 times from the namer)
+ * typedParentType(Apply(Apply(Ident(B), List(Ident(x))), List(3)), templ, inMixinPosition = false)
+ * typedParentType(Apply(Ident(C), List(Ident(y))), templ, inMixinPosition = true)
+ * typedParentType(Apply(Ident(T), List()), templ, inMixinPosition = true)
+ *
+ * (3 times from the typer)
+ * <the same three calls>
+ */
+ private def typedParentType(encodedtpt: Tree, templ: Template, inMixinPosition: Boolean): Tree = {
+ val app = treeInfo.dissectApplied(encodedtpt)
+ val (treeInfo.Applied(core, _, argss), decodedtpt) = ((app, app.callee))
+ val argssAreTrivial = argss == Nil || argss == ListOfNil
+
+ // we cannot avoid cyclic references with `initialize` here, because when type macros arrive,
+    // we'll have to check the probe for isTypeMacro anyway.
+ // therefore I think it's reasonable to trade a more specific "inherits itself" error
+ // for a generic, yet understandable "cyclic reference" error
+ var probe = typedTypeConstructor(core.duplicate).tpe.typeSymbol
+ if (probe == null) probe = NoSymbol
+ probe.initialize
+
+ if (probe.isTrait || inMixinPosition) {
+ if (!argssAreTrivial) {
+ if (probe.isTrait) ConstrArgsInParentWhichIsTraitError(encodedtpt, probe)
+ else () // a class in a mixin position - this warrants an error in `validateParentClasses`
+ // therefore here we do nothing, e.g. don't check that the # of ctor arguments
+ // matches the # of ctor parameters or stuff like that
+ }
+ typedType(decodedtpt)
+ } else {
+ var supertpt = typedTypeConstructor(decodedtpt)
+ val supertparams = if (supertpt.hasSymbolField) supertpt.symbol.typeParams else Nil
+ if (supertparams.nonEmpty) {
+ typedPrimaryConstrBody(templ) {
+ val supertpe = PolyType(supertparams, appliedType(supertpt.tpe, supertparams map (_.tpeHK)))
+ val supercall = New(supertpe, mmap(argss)(_.duplicate))
+ val treeInfo.Applied(Select(ctor, nme.CONSTRUCTOR), _, _) = supercall
+ ctor setType supertpe // this is an essential hack, otherwise it will occasionally fail to typecheck
+ atPos(supertpt.pos.focus)(supercall)
+ } match {
+ case EmptyTree => MissingTypeArgumentsParentTpeError(supertpt)
+ case tpt => supertpt = TypeTree(tpt.tpe) setPos supertpt.pos // SI-7224: don't .focus positions of the TypeTree of a parent that exists in source
}
}
- if (supertpt.tpe.typeSymbol == AnyClass && firstParent.isTrait)
- supertpt.tpe = AnyRefClass.tpe
-
- // Determine
- // - supertparams: Missing type parameters from supertype
- // - supertpe: Given supertype, polymorphic in supertparams
- val supertparams = if (supertpt.hasSymbol) supertpt.symbol.typeParams else List()
- var supertpe = supertpt.tpe
- if (!supertparams.isEmpty)
- supertpe = PolyType(supertparams, appliedType(supertpe, supertparams map (_.tpeHK)))
-
- // A method to replace a super reference by a New in a supercall
- def transformSuperCall(scall: Tree): Tree = (scall: @unchecked) match {
- case Apply(fn, args) =>
- treeCopy.Apply(scall, transformSuperCall(fn), args map (_.duplicate))
- case Select(Super(_, _), nme.CONSTRUCTOR) =>
- treeCopy.Select(
- scall,
- atPos(supertpt.pos.focus)(New(TypeTree(supertpe)) setType supertpe),
- nme.CONSTRUCTOR)
- }
+ // this is the place where we tell the typer what argss should be used for the super call
+ // if argss are nullary or empty, then (see the docs for `typedPrimaryConstrBody`)
+ // the super call dummy is already good enough, so we don't need to do anything
+ if (argssAreTrivial) supertpt else supertpt updateAttachment SuperArgsAttachment(argss)
+ }
+ }
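// Editor-added sketch (not part of this patch): a hedged, user-level example of the situation
// typedParentType handles. Box's type argument is not written explicitly, so it has to be
// inferred from the super constructor argument, which is itself an early definition that
// typedPrimaryConstrBody typechecks as a side effect.
class Box[A](val value: A)
trait Tagged

class IntBox extends {
  val start = 0                  // early definition, visible to the super call below
} with Box(start) with Tagged    // A is inferred as Int from the argument `start`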
+ /** Typechecks the mishmash of trees that happen to be stuffed into the primary constructor of a given template.
+ * Before commencing the typecheck, replaces the `pendingSuperCall` dummy with the result of `actualSuperCall`.
+ * `actualSuperCall` can return `EmptyTree`, in which case the dummy is replaced with a literal unit.
+ *
+ * ***Return value and side effects***
+ *
+ * If a super call is present in the primary constructor and is not erased by the transform, returns it typechecked.
+ * Otherwise (e.g. if the primary constructor is missing or the super call isn't there) returns `EmptyTree`.
+ *
+ * As a side effect, this method attributes the underlying fields of early vals.
+ * Early vals aren't typechecked anywhere else, so it's essential to call `typedPrimaryConstrBody`
+ * at least once per definition. It'd be great to disentangle this logic at some point.
+ *
+ * ***Example***
+ *
+ * For the following definition:
+ *
+ * class D extends {
+ * val x = 2
+ * val y = 4
+ * } with B(x)(3) with C(y) with T
+ *
+ * the primary constructor of `templ` will be:
+ *
+ * Block(List(
+ * ValDef(NoMods, x, TypeTree(), 2)
+ * ValDef(NoMods, y, TypeTree(), 4)
+ * global.pendingSuperCall,
+ * Literal(Constant(())))
+ *
+ * Note the `pendingSuperCall` part. This is the representation of a fill-me-in-later supercall dummy,
+ * which encodes the fact that supercall argss are unknown during parsing and need to be transplanted
+ * from one of the parent types. Read more about why the argss are unknown in `tools.nsc.ast.Trees.Template`.
+ */
+ private def typedPrimaryConstrBody(templ: Template)(actualSuperCall: => Tree): Tree =
treeInfo.firstConstructor(templ.body) match {
- case constr @ DefDef(_, _, _, vparamss, _, cbody @ Block(cstats, cunit)) =>
- // Convert constructor body to block in environment and typecheck it
+ case ctor @ DefDef(_, _, _, vparamss, _, cbody @ Block(cstats, cunit)) =>
val (preSuperStats, superCall) = {
val (stats, rest) = cstats span (x => !treeInfo.isSuperConstrCall(x))
(stats map (_.duplicate), if (rest.isEmpty) EmptyTree else rest.head.duplicate)
}
- val cstats1 = if (superCall == EmptyTree) preSuperStats else preSuperStats :+ superCall
- val cbody1 = treeCopy.Block(cbody, preSuperStats, superCall match {
- case Apply(_, _) if supertparams.nonEmpty => transformSuperCall(superCall)
- case _ => cunit.duplicate
- })
- val outercontext = context.outer
-
+ val superCall1 = (superCall match {
+ case global.pendingSuperCall => actualSuperCall
+ case EmptyTree => EmptyTree
+ }) orElse cunit
+ val cbody1 = treeCopy.Block(cbody, preSuperStats, superCall1)
+ val clazz = context.owner
assert(clazz != NoSymbol, templ)
- val cscope = outercontext.makeNewScope(constr, outercontext.owner)
- val cbody2 = newTyper(cscope) // called both during completion AND typing.
- .typePrimaryConstrBody(clazz,
- cbody1, supertparams, clazz.unsafeTypeParams, vparamss map (_.map(_.duplicate)))
-
- superCall match {
- case Apply(_, _) =>
- val treeInfo.Applied(_, _, argss) = superCall
- val sarg = argss.flatten.headOption.getOrElse(EmptyTree)
- if (sarg != EmptyTree && supertpe.typeSymbol != firstParent)
- ConstrArgsInTraitParentTpeError(sarg, firstParent)
- if (!supertparams.isEmpty)
- supertpt = TypeTree(cbody2.tpe) setPos supertpt.pos
- case _ =>
- if (!supertparams.isEmpty)
- MissingTypeArgumentsParentTpeError(supertpt)
+ val cscope = context.outer.makeNewScope(ctor, context.outer.owner)
+ val cbody2 = { // called both during completion AND typing.
+ val typer1 = newTyper(cscope)
+ // XXX: see about using the class's symbol....
+ clazz.unsafeTypeParams foreach (sym => typer1.context.scope.enter(sym))
+ typer1.namer.enterValueParams(vparamss map (_.map(_.duplicate)))
+ typer1.typed(cbody1)
}
val preSuperVals = treeInfo.preSuperFields(templ.body)
if (preSuperVals.isEmpty && preSuperStats.nonEmpty)
- debugwarn("Wanted to zip empty presuper val list with " + preSuperStats)
+ devWarning("Wanted to zip empty presuper val list with " + preSuperStats)
else
- map2(preSuperStats, preSuperVals)((ldef, gdef) => gdef.tpt.tpe = ldef.symbol.tpe)
+ map2(preSuperStats, preSuperVals)((ldef, gdef) => gdef.tpt setType ldef.symbol.tpe)
+ if (superCall1 == cunit) EmptyTree
+ else cbody2 match {
+ case Block(_, expr) => expr
+ case tree => tree
+ }
case _ =>
- if (!supertparams.isEmpty)
- MissingTypeArgumentsParentTpeError(supertpt)
- }
-/* experimental: early types as type arguments
- val hasEarlyTypes = templ.body exists (treeInfo.isEarlyTypeDef)
- val earlyMap = new EarlyMap(clazz)
- List.mapConserve(supertpt :: mixins){ tpt =>
- val tpt1 = checkNoEscaping.privates(clazz, tpt)
- if (hasEarlyTypes) tpt1 else tpt1 setType earlyMap(tpt1.tpe)
+ EmptyTree
}
-*/
- //Console.println("parents("+clazz") = "+supertpt :: mixins);//DEBUG
+ /** Makes sure that the first type tree in the list of parent types is always a class.
+     * If the first parent is a trait, prepends its supertype to the list until the head of the list is a class.
+ */
+ private def normalizeFirstParent(parents: List[Tree]): List[Tree] = {
+ @annotation.tailrec
+ def explode0(parents: List[Tree]): List[Tree] = {
+ val supertpt :: rest = parents // parents is always non-empty here - it only grows
+ if (supertpt.tpe.typeSymbol == AnyClass) {
+ supertpt setType AnyRefTpe
+ parents
+ } else if (treeInfo isTraitRef supertpt) {
+ val supertpt1 = typedType(supertpt)
+ def supersuper = TypeTree(supertpt1.tpe.firstParent) setPos supertpt.pos.focus
+ if (supertpt1.isErrorTyped) rest
+ else explode0(supersuper :: supertpt1 :: rest)
+ } else parents
+ }
+
+ def explode(parents: List[Tree]) =
+ if (treeInfo isTraitRef parents.head) explode0(parents)
+ else parents
+
+ if (parents.isEmpty) Nil else explode(parents)
+ }
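// Editor-added sketch (not part of this patch): a hedged illustration of normalizeFirstParent.
// When the first parent written by the user is a trait, that trait's own superclass is
// prepended so that the head of the parent list is always a class.
trait Logging                    // a trait whose first parent is AnyRef
class Service extends Logging    // parent list is normalized to: AnyRef, Logging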
- // Certain parents are added in the parser before it is known whether
- // that class also declared them as parents. For instance, this is an
- // error unless we take corrective action here:
- //
- // case class Foo() extends Serializable
- //
- // So we strip the duplicates before typer.
- def fixDuplicates(remaining: List[Tree]): List[Tree] = remaining match {
+ /** Certain parents are added in the parser before it is known whether
+ * that class also declared them as parents. For instance, this is an
+ * error unless we take corrective action here:
+ *
+ * case class Foo() extends Serializable
+ *
+ * So we strip the duplicates before typer.
+ */
+ private def fixDuplicateSyntheticParents(parents: List[Tree]): List[Tree] = parents match {
case Nil => Nil
case x :: xs =>
val sym = x.symbol
- x :: fixDuplicates(
+ x :: fixDuplicateSyntheticParents(
if (isPossibleSyntheticParent(sym)) xs filterNot (_.symbol == sym)
else xs
)
}
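// Editor-added sketch (not part of this patch): a hedged illustration of the duplicate stripped
// by fixDuplicateSyntheticParents. The parser adds Product and Serializable to every case class,
// so the explicit Serializable parent below would otherwise appear twice in the parent list.
case class Foo() extends Serializable   // deduplicated: Serializable occurs only once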
- fixDuplicates(supertpt :: mixins) mapConserve (tpt => checkNoEscaping.privates(clazz, tpt))
- }
- catch {
+ def typedParentTypes(templ: Template): List[Tree] = templ.parents match {
+ case Nil => List(atPos(templ.pos)(TypeTree(AnyRefTpe)))
+ case first :: rest =>
+ try {
+ val supertpts = fixDuplicateSyntheticParents(normalizeFirstParent(
+ typedParentType(first, templ, inMixinPosition = false) +:
+ (rest map (typedParentType(_, templ, inMixinPosition = true)))))
+
+        // when type argument inference for a super call is required,
+        // typedParentType calls typedPrimaryConstrBody to do the inferring typecheck;
+        // as a side effect, that typecheck also assigns types to the fields underlying early vals.
+        // however, if inference is not required, the typecheck doesn't happen,
+        // and therefore early fields have their type trees not assigned.
+        // here we detect this situation and take preventive measures
+ if (treeInfo.hasUntypedPreSuperFields(templ.body))
+ typedPrimaryConstrBody(templ)(EmptyTree)
+
+ supertpts mapConserve (tpt => checkNoEscaping.privates(context.owner, tpt))
+ } catch {
case ex: TypeError =>
// fallback in case of cyclic errors
// @H none of the tests enter here but I couldn't rule it out
+ // upd. @E when a definition inherits itself, we end up here
+        // because `typedParentType` triggers `initialize` for the symbols of parent types
log("Type error calculating parents in template " + templ)
log("Error: " + ex)
ParentTypesError(templ, ex)
- List(TypeTree(AnyRefClass.tpe))
+ List(TypeTree(AnyRefTpe))
}
+ }
/** <p>Check that</p>
* <ul>
@@ -1677,14 +1777,16 @@ trait Typers extends Modes with Adaptations with Tags {
if (psym.isFinal)
pending += ParentFinalInheritanceError(parent, psym)
- if (psym.hasDeprecatedInheritanceAnnotation) {
+ val sameSourceFile = context.unit.source.file == psym.sourceFile
+
+ if (psym.hasDeprecatedInheritanceAnnotation && !sameSourceFile) {
val suffix = psym.deprecatedInheritanceMessage map (": " + _) getOrElse ""
val msg = s"inheritance from ${psym.fullLocationString} is deprecated$suffix"
unit.deprecationWarning(parent.pos, msg)
}
if (psym.isSealed && !phase.erasedTypes)
- if (context.unit.source.file == psym.sourceFile)
+ if (sameSourceFile)
psym addChild context.owner
else
pending += ParentSealedInheritanceError(parent, psym)
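// Editor-added sketch (not part of this patch): a hedged illustration of the same-source-file
// checks above. Subclassing a sealed class is only permitted in its defining file (where the
// subclass is registered via addChild); a @deprecatedInheritance warning is also suppressed there.
sealed class Base
class SameFileChild extends Base   // ok: recorded as a known child of Base
// In a different source file, `class Other extends Base` would be a sealed-inheritance error.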
@@ -1692,15 +1794,11 @@ trait Typers extends Modes with Adaptations with Tags {
if (!(selfType <:< parent.tpe.typeOfThis) &&
!phase.erasedTypes &&
!context.owner.isSynthetic && // don't check synthetic concrete classes for virtuals (part of DEVIRTUALIZE)
- !settings.noSelfCheck.value && // setting to suppress this very check
!selfType.isErroneous &&
!parent.tpe.isErroneous)
{
- //Console.println(context.owner);//DEBUG
- //Console.println(context.owner.unsafeTypeParams);//DEBUG
- //Console.println(List.fromArray(context.owner.info.closure));//DEBUG
pending += ParentSelfTypeConformanceError(parent, selfType)
- if (settings.explaintypes.value) explainTypes(selfType, parent.tpe.typeOfThis)
+ if (settings.explaintypes) explainTypes(selfType, parent.tpe.typeOfThis)
}
if (parents exists (p => p != parent && p.tpe.typeSymbol == psym && !psym.isError))
@@ -1714,13 +1812,6 @@ trait Typers extends Modes with Adaptations with Tags {
for (p <- parents) validateParentClass(p, superclazz)
}
-/*
- if (settings.Xshowcls.value != "" &&
- settings.Xshowcls.value == context.owner.fullName)
- println("INFO "+context.owner+
- ", baseclasses = "+(context.owner.info.baseClasses map (_.fullName))+
- ", lin = "+(context.owner.info.baseClasses map (context.owner.thisType.baseType)))
-*/
pending.foreach(ErrorUtils.issueTypeError)
}
@@ -1730,7 +1821,7 @@ trait Typers extends Modes with Adaptations with Tags {
for (tparam <- clazz.typeParams) {
if (classinfo.expansiveRefs(tparam) contains tparam) {
val newinfo = ClassInfoType(
- classinfo.parents map (_.instantiateTypeParams(List(tparam), List(AnyRefClass.tpe))),
+ classinfo.parents map (_.instantiateTypeParams(List(tparam), List(AnyRefTpe))),
classinfo.decls,
clazz)
clazz.setInfo {
@@ -1744,27 +1835,26 @@ trait Typers extends Modes with Adaptations with Tags {
}
}
- /**
- * @param cdef ...
- * @return ...
- */
def typedClassDef(cdef: ClassDef): Tree = {
-// attributes(cdef)
val clazz = cdef.symbol
val typedMods = typedModifiers(cdef.mods)
assert(clazz != NoSymbol, cdef)
reenterTypeParams(cdef.tparams)
val tparams1 = cdef.tparams mapConserve (typedTypeDef)
- val impl1 = newTyper(context.make(cdef.impl, clazz, newScope)).typedTemplate(cdef.impl, parentTypes(cdef.impl))
+ val impl1 = newTyper(context.make(cdef.impl, clazz, newScope)).typedTemplate(cdef.impl, typedParentTypes(cdef.impl))
val impl2 = finishMethodSynthesis(impl1, clazz, context)
if (clazz.isTrait && clazz.info.parents.nonEmpty && clazz.info.firstParent.typeSymbol == AnyClass)
checkEphemeral(clazz, impl2.body)
- if ((clazz != ClassfileAnnotationClass) &&
- (clazz isNonBottomSubClass ClassfileAnnotationClass))
- restrictionWarning(cdef.pos, unit,
- "subclassing Classfile does not\n"+
- "make your annotation visible at runtime. If that is what\n"+
- "you want, you must write the annotation class in Java.")
+
+ if ((clazz isNonBottomSubClass ClassfileAnnotationClass) && (clazz != ClassfileAnnotationClass)) {
+ if (!clazz.owner.isPackageClass)
+ unit.error(clazz.pos, "inner classes cannot be classfile annotations")
+ else restrictionWarning(cdef.pos, unit,
+ """|subclassing Classfile does not
+ |make your annotation visible at runtime. If that is what
+ |you want, you must write the annotation class in Java.""".stripMargin)
+ }
+
if (!isPastTyper) {
for (ann <- clazz.getAnnotation(DeprecatedAttr)) {
val m = companionSymbolOf(clazz, context)
@@ -1776,10 +1866,6 @@ trait Typers extends Modes with Adaptations with Tags {
.setType(NoType)
}
- /**
- * @param mdef ...
- * @return ...
- */
def typedModuleDef(mdef: ModuleDef): Tree = {
// initialize all constructors of the linked class: the type completer (Namer.methodSig)
// might add default getters to this object. example: "object T; class T(x: Int = 1)"
@@ -1797,17 +1883,20 @@ trait Typers extends Modes with Adaptations with Tags {
|| clazz.isSerializable
)
val impl1 = newTyper(context.make(mdef.impl, clazz, newScope)).typedTemplate(mdef.impl, {
- parentTypes(mdef.impl) ++ (
+ typedParentTypes(mdef.impl) ++ (
if (noSerializable) Nil
else {
clazz.makeSerializable()
- List(TypeTree(SerializableClass.tpe) setPos clazz.pos.focus)
+ List(TypeTree(SerializableTpe) setPos clazz.pos.focus)
}
)
})
val impl2 = finishMethodSynthesis(impl1, clazz, context)
+ if (mdef.symbol == PredefModule)
+ ensurePredefParentsAreInSameSourceFile(impl2)
+
// SI-5954. On second compile of a companion class contained in a package object we end up
// with some confusion of names which leads to having two symbols with the same name in the
// same owner. Until that can be straightened out we will warn on companion objects in package
@@ -1827,9 +1916,7 @@ trait Typers extends Modes with Adaptations with Tags {
def pkgObjectWarning(m : Symbol, mdef : ModuleDef, restricted : String) = {
val pkgName = mdef.symbol.ownerChain find (_.isPackage) map (_.decodedName) getOrElse mdef.symbol.toString
- val pos = if (m.pos.isDefined) m.pos else mdef.pos
- debugwarn(s"${m} should be placed directly in package ${pkgName} instead of package object ${pkgName}. Under some circumstances companion objects and case classes in package objects can fail to recompile. See https://issues.scala-lang.org/browse/SI-5954.")
- debugwarn(pos.lineContent + (if (pos.isDefined) " " * (pos.column - 1) + "^" else ""))
+ context.warning(if (m.pos.isDefined) m.pos else mdef.pos, s"${m} should be placed directly in package ${pkgName} instead of package object ${pkgName}. Under some circumstances companion objects and case classes in package objects can fail to recompile. See https://issues.scala-lang.org/browse/SI-5954.")
}
}
@@ -1838,6 +1925,12 @@ trait Typers extends Modes with Adaptations with Tags {
treeCopy.ModuleDef(mdef, typedMods, mdef.name, impl2) setType NoType
}
+
+ private def ensurePredefParentsAreInSameSourceFile(template: Template) = {
+ val parentSyms = template.parents map (_.symbol) filterNot (_ == AnyRefClass)
+ if (parentSyms exists (_.associatedFile != PredefModule.associatedFile))
+ unit.error(template.pos, s"All parents of Predef must be defined in ${PredefModule.associatedFile}.")
+ }
/** In order to override this in the TreeCheckers Typer so synthetics aren't re-added
   * all the time, it is exposed here; the module/class typing methods go through it.
* ...but it turns out it's also the ideal spot for namer/typer coordination for
@@ -1864,17 +1957,11 @@ trait Typers extends Modes with Adaptations with Tags {
if (txt eq context) namer.enterSym(tree)
else newNamer(txt).enterSym(tree)
- /**
- * @param templ ...
- * @param parents1 ...
- * <li> <!-- 2 -->
- * Check that inner classes do not inherit from Annotation
- * </li>
- * @return ...
+ /** <!-- 2 --> Check that inner classes do not inherit from Annotation
*/
def typedTemplate(templ: Template, parents1: List[Tree]): Template = {
val clazz = context.owner
- clazz.annotations.map(_.completeInfo)
+ clazz.annotations.map(_.completeInfo())
if (templ.symbol == NoSymbol)
templ setSymbol clazz.newLocalDummy(templ.pos)
val self1 = templ.self match {
@@ -1901,19 +1988,34 @@ trait Typers extends Modes with Adaptations with Tags {
// the following is necessary for templates generated later
assert(clazz.info.decls != EmptyScope, clazz)
enterSyms(context.outer.make(templ, clazz, clazz.info.decls), templ.body)
+      if (!templ.isErrorTyped) // if `typedParentTypes` has invalidated the template, don't validate it anymore
validateParentClasses(parents1, selfType)
if (clazz.isCase)
validateNoCaseAncestor(clazz)
+ if (clazz.isTrait && hasSuperArgs(parents1.head))
+ ConstrArgsInParentOfTraitError(parents1.head, clazz)
- if ((clazz isSubClass ClassfileAnnotationClass) && !clazz.owner.isPackageClass)
+ if ((clazz isSubClass ClassfileAnnotationClass) && !clazz.isTopLevel)
unit.error(clazz.pos, "inner classes cannot be classfile annotations")
if (!phase.erasedTypes && !clazz.info.resultType.isError) // @S: prevent crash for duplicated type members
checkFinitary(clazz.info.resultType.asInstanceOf[ClassInfoType])
+ val body = {
val body =
if (isPastTyper || reporter.hasErrors) templ.body
else templ.body flatMap rewrappingWrapperTrees(namer.addDerivedTrees(Typer.this, _))
+ val primaryCtor = treeInfo.firstConstructor(body)
+ val primaryCtor1 = primaryCtor match {
+ case DefDef(_, _, _, _, _, Block(earlyVals :+ global.pendingSuperCall, unit)) =>
+ val argss = superArgs(parents1.head) getOrElse Nil
+ val pos = wrappingPos(parents1.head.pos, argss.flatten)
+ val superCall = atPos(pos)(PrimarySuperCall(argss))
+ deriveDefDef(primaryCtor)(block => Block(earlyVals :+ superCall, unit) setPos pos) setPos pos
+ case _ => primaryCtor
+ }
+ body mapConserve { case `primaryCtor` => primaryCtor1; case stat => stat }
+ }
val body1 = typedStats(body, templ.symbol)
@@ -1926,28 +2028,24 @@ trait Typers extends Modes with Adaptations with Tags {
}
}
- treeCopy.Template(templ, parents1, self1, body1) setType clazz.tpe
+ treeCopy.Template(templ, parents1, self1, body1) setType clazz.tpe_*
}
/** Remove definition annotations from modifiers (they have been saved
- * into the symbol's ``annotations'' in the type completer / namer)
+ * into the symbol's `annotations` in the type completer / namer)
*
* However reification does need annotation definitions to proceed.
     * Unfortunately, AnnotationInfo doesn't provide enough info to reify it in the general case.
* The biggest problem is with the "atp: Type" field, which cannot be reified in some situations
* that involve locally defined annotations. See more about that in Reifiers.scala.
*
- * That's why the original tree gets saved into ``original'' field of AnnotationInfo (happens elsewhere).
+   * That's why the original tree gets saved into the `original` field of AnnotationInfo (happens elsewhere).
* The field doesn't get pickled/unpickled and exists only during a single compilation run.
* This simultaneously allows us to reify annotations and to preserve backward compatibility.
*/
def typedModifiers(mods: Modifiers): Modifiers =
mods.copy(annotations = Nil) setPositions mods.positions
- /**
- * @param vdef ...
- * @return ...
- */
def typedValDef(vdef: ValDef): ValDef = {
val sym = vdef.symbol
val valDefTyper = {
@@ -1964,7 +2062,7 @@ trait Typers extends Modes with Adaptations with Tags {
val sym = vdef.symbol.initialize
val typedMods = typedModifiers(vdef.mods)
- sym.annotations.map(_.completeInfo)
+ sym.annotations.map(_.completeInfo())
val tpt1 = checkNoEscaping.privates(sym, typedType(vdef.tpt))
checkNonCyclic(vdef, tpt1)
@@ -1999,10 +2097,6 @@ trait Typers extends Modes with Adaptations with Tags {
}
/** Enter all aliases of local parameter accessors.
- *
- * @param clazz ...
- * @param vparamss ...
- * @param rhs ...
*/
def computeParamAliases(clazz: Symbol, vparamss: List[List[ValDef]], rhs: Tree) {
debuglog(s"computing param aliases for $clazz:${clazz.primaryConstructor.tpe}:$rhs")
@@ -2050,7 +2144,7 @@ trait Typers extends Modes with Adaptations with Tags {
orElse (superAcc getter superAcc.owner)
filter (alias => superClazz.info.nonPrivateMember(alias.name) == alias)
)
- if (alias.exists && !alias.accessed.isVariable) {
+ if (alias.exists && !alias.accessed.isVariable && !isRepeatedParamType(alias.accessed.info)) {
val ownAcc = clazz.info decl name suchThat (_.isParamAccessor) match {
case acc if !acc.isDeferred && acc.hasAccessorFlag => acc.accessed
case acc => acc
@@ -2119,7 +2213,7 @@ trait Typers extends Modes with Adaptations with Tags {
unit.error(pos, msg)
false
}
- /** Have to examine all parameters in all lists.
+ /* Have to examine all parameters in all lists.
*/
def paramssTypes(tp: Type): List[List[Type]] = tp match {
case mt @ MethodType(_, restpe) => mt.paramTypes :: paramssTypes(restpe)
@@ -2138,10 +2232,10 @@ trait Typers extends Modes with Adaptations with Tags {
val sym = paramType.typeSymbol
def paramPos = nthParamPos(listIdx, paramIdx)
- /** Not enough to look for abstract types; have to recursively check the bounds
- * of each abstract type for more abstract types. Almost certainly there are other
- * exploitable type soundness bugs which can be seen by bounding a type parameter
- * by an abstract type which itself is bounded by an abstract type.
+ /* Not enough to look for abstract types; have to recursively check the bounds
+ * of each abstract type for more abstract types. Almost certainly there are other
+ * exploitable type soundness bugs which can be seen by bounding a type parameter
+ * by an abstract type which itself is bounded by an abstract type.
*/
def checkAbstract(tp0: Type, what: String): Boolean = {
def check(sym: Symbol): Boolean = !sym.isAbstractType || {
@@ -2165,51 +2259,6 @@ trait Typers extends Modes with Adaptations with Tags {
failStruct(ddef.tpt.pos, "a user-defined value class", where = "Result type")
}
- def typedUseCase(useCase: UseCase) {
- def stringParser(str: String): syntaxAnalyzer.Parser = {
- val file = new BatchSourceFile(context.unit.source.file, str) {
- override def positionInUltimateSource(pos: Position) = {
- pos.withSource(context.unit.source, useCase.pos.start)
- }
- }
- val unit = new CompilationUnit(file)
- new syntaxAnalyzer.UnitParser(unit)
- }
- val trees = stringParser(useCase.body+";").nonLocalDefOrDcl
- val enclClass = context.enclClass.owner
- def defineAlias(name: Name) =
- if (context.scope.lookup(name) == NoSymbol) {
- lookupVariable(name.toString.substring(1), enclClass) match {
- case Some(repl) =>
- silent(_.typedTypeConstructor(stringParser(repl).typ())) match {
- case SilentResultValue(tpt) =>
- val alias = enclClass.newAliasType(name.toTypeName, useCase.pos)
- val tparams = cloneSymbolsAtOwner(tpt.tpe.typeSymbol.typeParams, alias)
- val newInfo = genPolyType(tparams, appliedType(tpt.tpe, tparams map (_.tpe)))
- alias setInfo newInfo
- context.scope.enter(alias)
- case _ =>
- }
- case _ =>
- }
- }
- for (tree <- trees; t <- tree)
- t match {
- case Ident(name) if name startsWith '$' => defineAlias(name)
- case _ =>
- }
- useCase.aliases = context.scope.toList
- namer.enterSyms(trees)
- typedStats(trees, NoSymbol)
- useCase.defined = context.scope.toList filterNot (useCase.aliases contains _)
- if (settings.debug.value)
- useCase.defined foreach (sym => println("defined use cases: %s:%s".format(sym, sym.tpe)))
- }
-
- /**
- * @param ddef ...
- * @return ...
- */
def typedDefDef(ddef: DefDef): DefDef = {
val meth = ddef.symbol.initialize
@@ -2228,13 +2277,13 @@ trait Typers extends Modes with Adaptations with Tags {
val tparams1 = ddef.tparams mapConserve typedTypeDef
val vparamss1 = ddef.vparamss mapConserve (_ mapConserve typedValDef)
- meth.annotations.map(_.completeInfo)
+ meth.annotations.map(_.completeInfo())
for (vparams1 <- vparamss1; vparam1 <- vparams1 dropRight 1)
if (isRepeatedParamType(vparam1.symbol.tpe))
StarParamNotLastError(vparam1)
- var tpt1 = checkNoEscaping.privates(meth, typedType(ddef.tpt))
+ val tpt1 = checkNoEscaping.privates(meth, typedType(ddef.tpt))
checkNonCyclic(ddef, tpt1)
ddef.tpt.setType(tpt1.tpe)
val typedMods = typedModifiers(ddef.mods)
@@ -2246,7 +2295,7 @@ trait Typers extends Modes with Adaptations with Tags {
meth.owner.isAnonOrRefinementClass))
InvalidConstructorDefError(ddef)
typed(ddef.rhs)
- } else if (meth.isTermMacro) {
+ } else if (meth.isMacro) {
// typechecking macro bodies is sort of unconventional
// that's why we employ our custom typing scheme orchestrated outside of the typer
transformedOr(ddef.rhs, typedMacroBody(this, ddef))
@@ -2302,10 +2351,10 @@ trait Typers extends Modes with Adaptations with Tags {
reenterTypeParams(tdef.tparams)
val tparams1 = tdef.tparams mapConserve typedTypeDef
val typedMods = typedModifiers(tdef.mods)
- tdef.symbol.annotations.map(_.completeInfo)
+ tdef.symbol.annotations.map(_.completeInfo())
// @specialized should not be pickled when compiling with -no-specialize
- if (settings.nospecialization.value && currentRun.compiles(tdef.symbol)) {
+ if (settings.nospecialization && currentRun.compiles(tdef.symbol)) {
tdef.symbol.removeAnnotation(definitions.SpecializedClass)
tdef.symbol.deSkolemize.removeAnnotation(definitions.SpecializedClass)
}
@@ -2329,7 +2378,7 @@ trait Typers extends Modes with Adaptations with Tags {
case ldef @ LabelDef(_, _, _) =>
if (ldef.symbol == NoSymbol)
ldef.symbol = namer.enterInScope(
- context.owner.newLabel(ldef.name, ldef.pos) setInfo MethodType(List(), UnitClass.tpe))
+ context.owner.newLabel(ldef.name, ldef.pos) setInfo MethodType(List(), UnitTpe))
case _ =>
}
}
@@ -2338,7 +2387,7 @@ trait Typers extends Modes with Adaptations with Tags {
if (!nme.isLoopHeaderLabel(ldef.symbol.name) || isPastTyper) {
val restpe = ldef.symbol.tpe.resultType
val rhs1 = typed(ldef.rhs, restpe)
- ldef.params foreach (param => param.tpe = param.symbol.tpe)
+ ldef.params foreach (param => param setType param.symbol.tpe)
deriveLabelDef(ldef)(_ => rhs1) setType restpe
}
else {
@@ -2346,26 +2395,20 @@ trait Typers extends Modes with Adaptations with Tags {
val rhs1 = typed(ldef.rhs)
val restpe = rhs1.tpe
if (restpe == initpe) { // stable result, no need to check again
- ldef.params foreach (param => param.tpe = param.symbol.tpe)
+ ldef.params foreach (param => param setType param.symbol.tpe)
treeCopy.LabelDef(ldef, ldef.name, ldef.params, rhs1) setType restpe
} else {
context.scope.unlink(ldef.symbol)
val sym2 = namer.enterInScope(
context.owner.newLabel(ldef.name, ldef.pos) setInfo MethodType(List(), restpe))
val rhs2 = typed(resetAllAttrs(ldef.rhs), restpe)
- ldef.params foreach (param => param.tpe = param.symbol.tpe)
+ ldef.params foreach (param => param setType param.symbol.tpe)
deriveLabelDef(ldef)(_ => rhs2) setSymbol sym2 setType restpe
}
}
}
- /**
- * @param block ...
- * @param mode ...
- * @param pt ...
- * @return ...
- */
- def typedBlock(block: Block, mode: Int, pt: Type): Block = {
+ def typedBlock(block: Block, mode: Mode, pt: Type): Block = {
val syntheticPrivates = new ListBuffer[Symbol]
try {
namer.enterSyms(block.stats)
@@ -2427,7 +2470,7 @@ trait Typers extends Modes with Adaptations with Tags {
case _ => stat::Nil
})
val stats2 = typedStats(stats1, context.owner)
- val expr1 = typed(block.expr, mode & ~(FUNmode | QUALmode), pt)
+ val expr1 = typed(block.expr, mode &~ (FUNmode | QUALmode), pt)
treeCopy.Block(block, stats2, expr1)
.setType(if (treeInfo.isExprSafeToInline(block)) expr1.tpe else expr1.tpe.deconst)
} finally {
@@ -2437,12 +2480,6 @@ trait Typers extends Modes with Adaptations with Tags {
}
}
- /**
- * @param cdef ...
- * @param pattpe ...
- * @param pt ...
- * @return ...
- */
def typedCase(cdef: CaseDef, pattpe: Type, pt: Type): CaseDef = {
// verify no _* except in last position
for (Apply(_, xs) <- cdef.pat ; x <- xs dropRight 1 ; if treeInfo isStar x)
@@ -2459,83 +2496,63 @@ trait Typers extends Modes with Adaptations with Tags {
if (pat1.tpe.paramSectionCount > 0)
pat1 setType pat1.tpe.finalResultType
- if (forInteractive) {
- for (bind @ Bind(name, _) <- cdef.pat)
- if (name.toTermName != nme.WILDCARD && bind.symbol != null && bind.symbol != NoSymbol)
- namer.enterIfNotThere(bind.symbol)
- }
+ for (bind @ Bind(name, _) <- cdef.pat)
+ if (name.toTermName != nme.WILDCARD && bind.symbol != null && bind.symbol != NoSymbol)
+ namer.enterIfNotThere(bind.symbol)
val guard1: Tree = if (cdef.guard == EmptyTree) EmptyTree
- else typed(cdef.guard, BooleanClass.tpe)
+ else typed(cdef.guard, BooleanTpe)
var body1: Tree = typed(cdef.body, pt)
- val contextWithTypeBounds = context.nextEnclosing(_.tree.isInstanceOf[CaseDef])
- if (contextWithTypeBounds.savedTypeBounds.nonEmpty) {
- body1.tpe = contextWithTypeBounds restoreTypeBounds body1.tpe
-
+ if (context.enclosingCaseDef.savedTypeBounds.nonEmpty) {
+ body1 modifyType context.enclosingCaseDef.restoreTypeBounds
// insert a cast if something typechecked under the GADT constraints,
      // but not in real life (i.e., now that we've reset the method's type skolems'
// infos back to their pre-GADT-constraint state)
if (isFullyDefined(pt) && !(body1.tpe <:< pt))
- body1 = typedPos(body1.pos)(gen.mkCast(body1, pt.normalize))
-
+ body1 = typedPos(body1.pos)(gen.mkCast(body1, pt.dealiasWiden))
}
// body1 = checkNoEscaping.locals(context.scope, pt, body1)
treeCopy.CaseDef(cdef, pat1, guard1, body1) setType body1.tpe
}
- // undo adaptConstrPattern's evil deeds, as they confuse the old pattern matcher
- // the flags are used to avoid accidentally deskolemizing unrelated skolems of skolems
- object deskolemizeGADTSkolems extends TypeMap {
- def apply(tp: Type): Type = mapOver(tp) match {
- case TypeRef(pre, sym, args) if sym.isGADTSkolem =>
- typeRef(NoPrefix, sym.deSkolemize, args)
- case tp1 => tp1
- }
- }
-
def typedCases(cases: List[CaseDef], pattp: Type, pt: Type): List[CaseDef] =
cases mapConserve { cdef =>
newTyper(context.makeNewScope(cdef, context.owner)).typedCase(cdef, pattp, pt)
}
- def adaptCase(cdef: CaseDef, mode: Int, tpe: Type): CaseDef = deriveCaseDef(cdef)(adapt(_, mode, tpe))
+ def adaptCase(cdef: CaseDef, mode: Mode, tpe: Type): CaseDef = deriveCaseDef(cdef)(adapt(_, mode, tpe))
- def ptOrLub(tps: List[Type], pt: Type ) = if (isFullyDefined(pt)) (pt, false) else weakLub(tps map (_.deconst))
- def ptOrLubPacked(trees: List[Tree], pt: Type) = if (isFullyDefined(pt)) (pt, false) else weakLub(trees map (c => packedType(c, context.owner).deconst))
+ def packedTypes(trees: List[Tree]): List[Type] = trees map (c => packedType(c, context.owner).deconst)
// takes untyped sub-trees of a match and type checks them
- def typedMatch(selector: Tree, cases: List[CaseDef], mode: Int, pt: Type, tree: Tree = EmptyTree): Match = {
- val selector1 = checkDead(typed(selector, EXPRmode | BYVALmode, WildcardType))
+ def typedMatch(selector: Tree, cases: List[CaseDef], mode: Mode, pt: Type, tree: Tree = EmptyTree): Match = {
+ val selector1 = checkDead(typedByValueExpr(selector))
val selectorTp = packCaptured(selector1.tpe.widen).skolemizeExistential(context.owner, selector)
val casesTyped = typedCases(cases, selectorTp, pt)
- val (resTp, needAdapt) =
- if (opt.virtPatmat) ptOrLubPacked(casesTyped, pt)
- else ptOrLub(casesTyped map (_.tpe), pt)
+ def finish(cases: List[CaseDef], matchType: Type) =
+ treeCopy.Match(tree, selector1, cases) setType matchType
- val casesAdapted = if (!needAdapt) casesTyped else casesTyped map (adaptCase(_, mode, resTp))
-
- val matchTyped = treeCopy.Match(tree, selector1, casesAdapted) setType resTp
- if (!newPatternMatching) // TODO: remove this in 2.11 -- only needed for old pattern matcher
- new TypeMapTreeSubstituter(deskolemizeGADTSkolems).traverse(matchTyped)
- matchTyped
+ if (isFullyDefined(pt))
+ finish(casesTyped, pt)
+ else packedTypes(casesTyped) match {
+ case packed if sameWeakLubAsLub(packed) => finish(casesTyped, lub(packed))
+ case packed =>
+ val lub = weakLub(packed)
+ finish(casesTyped map (adaptCase(_, mode, lub)), lub)
+ }
}
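// Editor-added sketch (not part of this patch): a hedged illustration of the result-type
// computation in typedMatch. With no fully defined expected type, the packed case types
// Int and Double are combined with weakLub, and each case is adapted to the resulting type.
object MatchLubSketch {
  def size(big: Boolean) = big match {
    case true  => 2.0   // Double
    case false => 1     // Int, weakly adapted to Double
  }                     // inferred result type: Double
}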
- // match has been typed -- virtualize it if we're feeling experimental
- // (virtualized matches are expanded during type checking so they have the full context available)
- // otherwise, do nothing: matches are translated during phase `patmat` (unless -Xoldpatmat)
- def virtualizedMatch(match_ : Match, mode: Int, pt: Type) = {
- import patmat.{vpmName, PureMatchTranslator, OptimizingMatchTranslator}
+ // match has been typed -- virtualize it during type checking so the full context is available
+ def virtualizedMatch(match_ : Match, mode: Mode, pt: Type) = {
+ import patmat.{ vpmName, PureMatchTranslator }
// TODO: add fallback __match sentinel to predef
val matchStrategy: Tree =
- if (!(newPatternMatching && opt.experimental && context.isNameInScope(vpmName._match))) null // fast path, avoiding the next line if there's no __match to be seen
- else newTyper(context.makeImplicit(reportAmbiguousErrors = false)).silent(_.typed(Ident(vpmName._match), EXPRmode, WildcardType), reportAmbiguousErrors = false) match {
- case SilentResultValue(ms) => ms
- case _ => null
- }
+ if (!(newPatternMatching && settings.Xexperimental && context.isNameInScope(vpmName._match))) null // fast path, avoiding the next line if there's no __match to be seen
+ else newTyper(context.makeImplicit(reportAmbiguousErrors = false)).silent(_.typed(Ident(vpmName._match)), reportAmbiguousErrors = false) orElse (_ => null)
if (matchStrategy ne null) // virtualize
typed((new PureMatchTranslator(this.asInstanceOf[patmat.global.analyzer.Typer] /*TODO*/, matchStrategy)).translateMatch(match_), mode, pt)
@@ -2565,11 +2582,9 @@ trait Typers extends Modes with Adaptations with Tags {
* an alternative TODO: add partial function AST node or equivalent and get rid of this synthesis --> do everything in uncurry (or later)
* however, note that pattern matching codegen is designed to run *before* uncurry
*/
- def synthesizePartialFunction(paramName: TermName, paramPos: Position, tree: Tree, mode: Int, pt0: Type): Tree = {
- assert(pt0.typeSymbol == PartialFunctionClass, s"PartialFunction synthesis for match in $tree requires PartialFunction expected type, but got $pt0.")
-
- val pt = deskolemizeGADTSkolems(pt0)
- val targs = pt.normalize.typeArgs
+ def synthesizePartialFunction(paramName: TermName, paramPos: Position, tree: Tree, mode: Mode, pt: Type): Tree = {
+ assert(pt.typeSymbol == PartialFunctionClass, s"PartialFunction synthesis for match in $tree requires PartialFunction expected type, but got $pt.")
+ val targs = pt.dealiasWiden.typeArgs
// if targs.head isn't fully defined, we can translate --> error
targs match {
@@ -2583,7 +2598,7 @@ trait Typers extends Modes with Adaptations with Tags {
val argTp :: resTp :: Nil = targs
// targs must conform to Any for us to synthesize an applyOrElse (fallback to apply otherwise -- typically for @cps annotated targs)
- val targsValidParams = targs forall (_ <:< AnyClass.tpe)
+ val targsValidParams = targs forall (_ <:< AnyTpe)
val anonClass = (context.owner
newAnonymousFunctionClass tree.pos
@@ -2594,7 +2609,7 @@ trait Typers extends Modes with Adaptations with Tags {
val Match(sel, cases) = tree
// need to duplicate the cases before typing them to generate the apply method, or the symbols will be all messed up
- val casesTrue = cases map (c => deriveCaseDef(c)(x => atPos(x.pos.focus)(TRUE_typed)).duplicate.asInstanceOf[CaseDef])
+ val casesTrue = cases map (c => deriveCaseDef(c)(x => atPos(x.pos.focus)(TRUE)).duplicate.asInstanceOf[CaseDef])
// must generate a new tree every time
def selector: Tree = gen.mkUnchecked(
@@ -2711,12 +2726,12 @@ trait Typers extends Modes with Adaptations with Tags {
val methodBodyTyper = newTyper(context.makeNewScope(context.tree, methodSym)) // should use the DefDef for the context's tree, but it doesn't exist yet (we need the typer we're creating to create it)
methodBodyTyper.context.scope enter paramSym
- methodSym setInfo MethodType(List(paramSym), BooleanClass.tpe)
+ methodSym setInfo MethodType(List(paramSym), BooleanTpe)
- val defaultCase = mkDefaultCase(FALSE_typed)
- val match_ = methodBodyTyper.typedMatch(selector, casesTrue :+ defaultCase, mode, BooleanClass.tpe)
+ val defaultCase = mkDefaultCase(FALSE)
+ val match_ = methodBodyTyper.typedMatch(selector, casesTrue :+ defaultCase, mode, BooleanTpe)
- DefDef(methodSym, methodBodyTyper.virtualizedMatch(match_, mode, BooleanClass.tpe))
+ DefDef(methodSym, methodBodyTyper.virtualizedMatch(match_, mode, BooleanTpe))
}
// only used for @cps annotated partial functions
@@ -2725,7 +2740,7 @@ trait Typers extends Modes with Adaptations with Tags {
val methodSym = anonClass.newMethod(nme.apply, tree.pos, FINAL | OVERRIDE)
val paramSym = mkParam(methodSym)
- methodSym setInfo MethodType(List(paramSym), AnyClass.tpe)
+ methodSym setInfo MethodType(List(paramSym), AnyTpe)
val methodBodyTyper = newTyper(context.makeNewScope(context.tree, methodSym))
// should use the DefDef for the context's tree, but it doesn't exist yet (we need the typer we're creating to create it)
@@ -2761,7 +2776,7 @@ trait Typers extends Modes with Adaptations with Tags {
members foreach (m => anonClass.info.decls enter m.symbol)
val typedBlock = typedPos(tree.pos, mode, pt) {
- Block(ClassDef(anonClass, NoMods, ListOfNil, ListOfNil, members, tree.pos.focus), atPos(tree.pos.focus)(
+ Block(ClassDef(anonClass, NoMods, ListOfNil, members, tree.pos.focus), atPos(tree.pos.focus)(
Apply(Select(New(Ident(anonClass.name).setSymbol(anonClass)), nme.CONSTRUCTOR), List())
))
}
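// Editor-added sketch (not part of this patch): a hedged, user-level example of the synthesis
// above. A pattern-matching anonymous function with a PartialFunction expected type becomes an
// anonymous class whose applyOrElse/isDefinedAt methods are derived from the same cases
// (casesTrue drives isDefinedAt).
object PartialFunSketch {
  val pf: PartialFunction[Int, String] = {
    case 1 => "one"
    case 2 => "two"
  }
  val hit  = pf.isDefinedAt(1)   // true
  val miss = pf.isDefinedAt(3)   // false
}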
@@ -2773,24 +2788,17 @@ trait Typers extends Modes with Adaptations with Tags {
}
}
-
- /**
- * @param fun ...
- * @param mode ...
- * @param pt ...
- * @return ...
- */
- private def typedFunction(fun: Function, mode: Int, pt: Type): Tree = {
+ private def typedFunction(fun: Function, mode: Mode, pt: Type): Tree = {
val numVparams = fun.vparams.length
if (numVparams > definitions.MaxFunctionArity)
return MaxFunctionArityError(fun)
def decompose(pt: Type): (Symbol, List[Type], Type) =
if ((isFunctionType(pt) || (pt.typeSymbol == PartialFunctionClass && numVparams == 1 && fun.body.isInstanceOf[Match])) && // see bug901 for a reason why next conditions are needed
- ( pt.normalize.typeArgs.length - 1 == numVparams
+ ( pt.dealiasWiden.typeArgs.length - 1 == numVparams
|| fun.vparams.exists(_.tpt.isEmpty)
))
- (pt.typeSymbol, pt.normalize.typeArgs.init, pt.normalize.typeArgs.last)
+ (pt.typeSymbol, pt.dealiasWiden.typeArgs.init, pt.dealiasWiden.typeArgs.last)
else
(FunctionClass(numVparams), fun.vparams map (x => NoType), WildcardType)
@@ -2805,15 +2813,13 @@ trait Typers extends Modes with Adaptations with Tags {
else {
fun match {
case etaExpansion(vparams, fn, args) =>
- silent(_.typed(fn, forFunMode(mode), pt)) match {
- case SilentResultValue(fn1) if context.undetparams.isEmpty =>
+ silent(_.typed(fn, mode.forFunMode, pt)) filter (_ => context.undetparams.isEmpty) map { fn1 =>
              // if context.undetparams is not empty, the function was polymorphic,
// so we need the missing arguments to infer its type. See #871
//println("typing eta "+fun+":"+fn1.tpe+"/"+context.undetparams)
val ftpe = normalize(fn1.tpe) baseType FunctionClass(numVparams)
if (isFunctionType(ftpe) && isFullyDefined(ftpe))
return typedFunction(fun, mode, ftpe)
- case _ =>
}
case _ =>
}
@@ -2842,16 +2848,13 @@ trait Typers extends Modes with Adaptations with Tags {
if (context.retyping) context.scope enter vparam.symbol
vparam.symbol
}
- val vparams = fun.vparams mapConserve (typedValDef)
- // for (vparam <- vparams) {
- // checkNoEscaping.locals(context.scope, WildcardType, vparam.tpt); ()
- // }
+ val vparams = fun.vparams mapConserve typedValDef
val formals = vparamSyms map (_.tpe)
val body1 = typed(fun.body, respt)
val restpe = packedType(body1, fun.symbol).deconst.resultType
- val funtpe = typeRef(clazz.tpe.prefix, clazz, formals :+ restpe)
- // body = checkNoEscaping.locals(context.scope, restpe, body)
- treeCopy.Function(fun, vparams, body1).setType(funtpe)
+ val funtpe = appliedType(clazz, formals :+ restpe: _*)
+
+ treeCopy.Function(fun, vparams, body1) setType funtpe
}
}
}
@@ -2869,13 +2872,8 @@ trait Typers extends Modes with Adaptations with Tags {
val att = templ.attachments.get[CompoundTypeTreeOriginalAttachment].getOrElse(CompoundTypeTreeOriginalAttachment(Nil, Nil))
templ.removeAttachment[CompoundTypeTreeOriginalAttachment]
templ updateAttachment att.copy(stats = stats1)
- for (stat <- stats1 if stat.isDef) {
- val member = stat.symbol
- if (!(context.owner.ancestors forall
- (bc => member.matchingSymbol(bc, context.owner.thisType) == NoSymbol))) {
- member setFlag OVERRIDE
- }
- }
+ for (stat <- stats1 if stat.isDef && stat.symbol.isOverridingSymbol)
+ stat.symbol setFlag OVERRIDE
}
}
@@ -2883,17 +2881,6 @@ trait Typers extends Modes with Adaptations with Tags {
case Some(imp1: Import) => imp1
case _ => log("unhandled import: "+imp+" in "+unit); imp
}
- private def isWarnablePureExpression(tree: Tree) = tree match {
- case EmptyTree | Literal(Constant(())) => false
- case _ =>
- !tree.isErrorTyped && (treeInfo isExprSafeToInline tree) && {
- val sym = tree.symbol
- (sym == null) || !(sym.isModule || sym.isLazy) || {
- debuglog("'Pure' but side-effecting expression in statement position: " + tree)
- false
- }
- }
- }
def typedStats(stats: List[Tree], exprOwner: Symbol): List[Tree] = {
val inBlock = exprOwner == context.owner
@@ -2908,7 +2895,7 @@ trait Typers extends Modes with Adaptations with Tags {
case imp @ Import(_, _) =>
imp.symbol.initialize
if (!imp.symbol.isError) {
- context = context.makeNewImport(imp)
+ context = context.make(imp)
typedImport(imp)
} else EmptyTree
case _ =>
@@ -2922,7 +2909,7 @@ trait Typers extends Modes with Adaptations with Tags {
} else newTyper(context.make(stat, exprOwner))
// XXX this creates a spurious dead code warning if an exception is thrown
// in a constructor, even if it is the only thing in the constructor.
- val result = checkDead(localTyper.typed(stat, EXPRmode | BYVALmode, WildcardType))
+ val result = checkDead(localTyper.typedByValueExpr(stat))
if (treeInfo.isSelfOrSuperConstrCall(result)) {
context.inConstructorSuffix = true
@@ -2930,7 +2917,7 @@ trait Typers extends Modes with Adaptations with Tags {
ConstructorsOrderError(stat)
}
- if (isWarnablePureExpression(result)) context.warning(stat.pos,
+ if (treeInfo.isPureExprForWarningPurposes(result)) context.warning(stat.pos,
"a pure expression does nothing in statement position; " +
"you may be omitting necessary parentheses"
)
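// Editor-added sketch (not part of this patch): a hedged example of code that triggers the
// warning issued above. The first statement is side-effect free, so its value is silently dropped.
object PureExprSketch {
  def answer(): Int = {
    1 + 1   // warned: a pure expression does nothing in statement position
    42
  }
}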
@@ -2939,8 +2926,8 @@ trait Typers extends Modes with Adaptations with Tags {
}
}
- /** 'accessor' and 'accessed' are so similar it becomes very difficult to
- * follow the logic, so I renamed one to something distinct.
+ /* 'accessor' and 'accessed' are so similar it becomes very difficult to
+ * follow the logic, so I renamed one to something distinct.
*/
def accesses(looker: Symbol, accessed: Symbol) = accessed.hasLocalFlag && (
(accessed.isParamAccessor)
@@ -2981,7 +2968,7 @@ trait Typers extends Modes with Adaptations with Tags {
// SI-5877 The decls of a package include decls of the package object. But we don't want to add
// the corresponding synthetics to the package class, only to the package object class.
def shouldAdd(sym: Symbol) =
- inBlock || !isInPackageObject(sym, context.owner)
+ inBlock || !context.isInPackageObject(sym, context.owner)
for (sym <- scope if shouldAdd(sym))
for (tree <- context.unit.synthetics get sym) {
newStats += typedStat(tree) // might add even more synthetics to the scope
@@ -3034,14 +3021,14 @@ trait Typers extends Modes with Adaptations with Tags {
}
}
- def typedArg(arg: Tree, mode: Int, newmode: Int, pt: Type): Tree = {
- val typedMode = onlyStickyModes(mode) | newmode
- val t = withCondConstrTyper((mode & SCCmode) != 0)(_.typed(arg, typedMode, pt))
+ def typedArg(arg: Tree, mode: Mode, newmode: Mode, pt: Type): Tree = {
+ val typedMode = mode.onlySticky | newmode
+ val t = withCondConstrTyper(mode.inSccMode)(_.typed(arg, typedMode, pt))
checkDead.inMode(typedMode, t)
}
- def typedArgs(args: List[Tree], mode: Int) =
- args mapConserve (arg => typedArg(arg, mode, 0, WildcardType))
+ def typedArgs(args: List[Tree], mode: Mode) =
+ args mapConserve (arg => typedArg(arg, mode, NOmode, WildcardType))
/** Type trees in `args0` against corresponding expected type in `adapted0`.
*
@@ -3051,21 +3038,19 @@ trait Typers extends Modes with Adaptations with Tags {
*
* (docs reverse-engineered -- AM)
*/
- def typedArgs(args0: List[Tree], mode: Int, formals0: List[Type], adapted0: List[Type]): List[Tree] = {
- val sticky = onlyStickyModes(mode)
+ def typedArgs(args0: List[Tree], mode: Mode, formals0: List[Type], adapted0: List[Type]): List[Tree] = {
def loop(args: List[Tree], formals: List[Type], adapted: List[Type]): List[Tree] = {
if (args.isEmpty || adapted.isEmpty) Nil
else {
// No formals left or * indicates varargs.
val isVarArgs = formals.isEmpty || formals.tail.isEmpty && isRepeatedParamType(formals.head)
- val typedMode = sticky | (
- if (isVarArgs) STARmode | BYVALmode
- else if (isByNameParamType(formals.head)) 0
- else BYVALmode
- )
- val tree = typedArg(args.head, mode, typedMode, adapted.head)
+ val isByName = formals.nonEmpty && isByNameParamType(formals.head)
+ def typedMode = if (isByName) mode.onlySticky else mode.onlySticky | BYVALmode
+ def body = typedArg(args.head, mode, typedMode, adapted.head)
+ def arg1 = if (isVarArgs) context.withinStarPatterns(body) else body
+
// formals may be empty, so don't call tail
- tree :: loop(args.tail, formals drop 1, adapted.tail)
+ arg1 :: loop(args.tail, formals drop 1, adapted.tail)
}
}
loop(args0, formals0, adapted0)
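// Editor-added sketch (not part of this patch): a hedged illustration of the per-formal decisions
// taken in the loop above: by-name formals keep only the sticky modes, other formals add BYVALmode,
// and a trailing repeated formal covers all remaining arguments.
object ArgModeSketch {
  def byName(x: => Int): Int  = x + x      // argument not evaluated at the call site
  def byValue(x: Int): Int    = x + x      // argument evaluated before the call
  def variadic(xs: Int*): Int = xs.sum     // last formal repeated for each extra argument
  val r = byName(1) + byValue(2) + variadic(3, 4, 5)
}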
@@ -3111,26 +3096,25 @@ trait Typers extends Modes with Adaptations with Tags {
}
}
- def doTypedApply(tree: Tree, fun0: Tree, args: List[Tree], mode: Int, pt: Type): Tree = {
+ def doTypedApply(tree: Tree, fun0: Tree, args: List[Tree], mode: Mode, pt: Type): Tree = {
// TODO_NMT: check the assumption that args nonEmpty
def duplErrTree = setError(treeCopy.Apply(tree, fun0, args))
def duplErrorTree(err: AbsTypeError) = { issue(err); duplErrTree }
def preSelectOverloaded(fun: Tree): Tree = {
- if (fun.hasSymbol && fun.symbol.isOverloaded) {
+ if (fun.hasSymbolField && fun.symbol.isOverloaded) {
// remove alternatives with wrong number of parameters without looking at types.
- // less expensive than including them in inferMethodAlternatvie (see below).
+ // less expensive than including them in inferMethodAlternative (see below).
def shapeType(arg: Tree): Type = arg match {
case Function(vparams, body) =>
- functionType(vparams map (vparam => AnyClass.tpe), shapeType(body))
+ functionType(vparams map (_ => AnyTpe), shapeType(body))
case AssignOrNamedArg(Ident(name), rhs) =>
NamedType(name, shapeType(rhs))
case _ =>
- NothingClass.tpe
+ NothingTpe
}
val argtypes = args map shapeType
val pre = fun.symbol.tpe.prefix
-
var sym = fun.symbol filter { alt =>
// must use pt as expected type, not WildcardType (a tempting quick fix to #2665)
// now fixed by using isWeaklyCompatible in exprTypeArgs
@@ -3142,20 +3126,19 @@ trait Typers extends Modes with Adaptations with Tags {
// Types: "refs = Array(Map(), Map())". I determined that inference fails if there are at
// least two invariant type parameters. See the test case I checked in to help backstop:
// pos/isApplicableSafe.scala.
- isApplicableSafe(context.undetparams, followApply(pre.memberType(alt)), argtypes, pt)
+ isApplicableSafe(context.undetparams, followApply(pre memberType alt), argtypes, pt)
}
if (sym.isOverloaded) {
- val sym1 = sym filter (alt => {
// eliminate functions that would result from tupling transforms
// keeps alternatives with repeated params
- hasExactlyNumParams(followApply(alt.tpe), argtypes.length) ||
- // also keep alts which define at least one default
- alt.tpe.paramss.exists(_.exists(_.hasDefault))
- })
+ val sym1 = sym filter (alt =>
+ isApplicableBasedOnArity(pre memberType alt, argtypes.length, varargsStar = false, tuplingAllowed = false)
+ || alt.tpe.params.exists(_.hasDefault)
+ )
if (sym1 != NoSymbol) sym = sym1
}
if (sym == NoSymbol) fun
- else adapt(fun setSymbol sym setType pre.memberType(sym), forFunMode(mode), WildcardType)
+ else adapt(fun setSymbol sym setType pre.memberType(sym), mode.forFunMode, WildcardType)
} else fun
}
@@ -3164,28 +3147,30 @@ trait Typers extends Modes with Adaptations with Tags {
fun.tpe match {
case OverloadedType(pre, alts) =>
def handleOverloaded = {
- val undetparams = context.extractUndetparams()
-
- val argtpes = new ListBuffer[Type]
- val amode = forArgMode(fun, mode)
- val args1 = args map {
- case arg @ AssignOrNamedArg(Ident(name), rhs) =>
- // named args: only type the righthand sides ("unknown identifier" errors otherwise)
- val rhs1 = typedArg(rhs, amode, BYVALmode, WildcardType)
- argtpes += NamedType(name, rhs1.tpe.deconst)
- // the assign is untyped; that's ok because we call doTypedApply
- atPos(arg.pos) { new AssignOrNamedArg(arg.lhs, rhs1) }
- case arg =>
- val arg1 = typedArg(arg, amode, BYVALmode, WildcardType)
- argtpes += arg1.tpe.deconst
- arg1
+ val undetparams = context.undetparams
+ val (args1, argTpes) = context.savingUndeterminedTypeParams() {
+ val amode = forArgMode(fun, mode)
+ def typedArg0(tree: Tree) = typedArg(tree, amode, BYVALmode, WildcardType)
+ args.map {
+ case arg @ AssignOrNamedArg(Ident(name), rhs) =>
+ // named args: only type the right-hand sides ("unknown identifier" errors otherwise)
+ val rhs1 = typedArg0(rhs)
+ // the assign is untyped; that's ok because we call doTypedApply
+ val arg1 = treeCopy.AssignOrNamedArg(arg, arg.lhs, rhs1)
+ (arg1, NamedType(name, rhs1.tpe.deconst))
+ case arg @ treeInfo.WildcardStarArg(repeated) =>
+ val arg1 = typedArg0(arg)
+ (arg1, RepeatedType(arg1.tpe.deconst))
+ case arg =>
+ val arg1 = typedArg0(arg)
+ (arg1, arg1.tpe.deconst)
+ }.unzip
}
- context.undetparams = undetparams
if (context.hasErrors)
setError(tree)
else {
- inferMethodAlternative(fun, undetparams, argtpes.toList, pt, varArgsOnly = treeInfo.isWildcardStarArgList(args))
- doTypedApply(tree, adapt(fun, forFunMode(mode), WildcardType), args1, mode, pt)
+ inferMethodAlternative(fun, undetparams, argTpes, pt)
+ doTypedApply(tree, adapt(fun, mode.forFunMode, WildcardType), args1, mode, pt)
}
}
handleOverloaded
@@ -3193,51 +3178,47 @@ trait Typers extends Modes with Adaptations with Tags {
case mt @ MethodType(params, _) =>
val paramTypes = mt.paramTypes
// repeat vararg as often as needed, remove by-name
- val formals = formalTypes(paramTypes, args.length)
+ val argslen = args.length
+ val formals = formalTypes(paramTypes, argslen)
- /** Try packing all arguments into a Tuple and apply `fun`
- * to that. This is the last thing which is tried (after
- * default arguments)
+ /* Try packing all arguments into a Tuple and apply `fun`
+ * to that. This is the last thing which is tried (after
+ * default arguments)
*/
- def tryTupleApply: Option[Tree] = {
- // if 1 formal, 1 arg (a tuple), otherwise unmodified args
- val tupleArgs = actualArgs(tree.pos.makeTransparent, args, formals.length)
-
- if (!sameLength(tupleArgs, args) && !isUnitForVarArgs(args, params)) {
+ def tryTupleApply: Option[Tree] = (
+ if (eligibleForTupleConversion(paramTypes, argslen) && !phase.erasedTypes) {
+ val tupleArgs = List(atPos(tree.pos.makeTransparent)(gen.mkTuple(args)))
// expected one argument, but got 0 or >1 ==> try applying to tuple
// the inner "doTypedApply" does "extractUndetparams" => restore when it fails
val savedUndetparams = context.undetparams
- silent(_.doTypedApply(tree, fun, tupleArgs, mode, pt)) match {
- case SilentResultValue(t) =>
+ silent(_.doTypedApply(tree, fun, tupleArgs, mode, pt)) map { t =>
// Depending on user options, may warn or error here if
// a Unit or tuple was inserted.
Some(t) filter (tupledTree =>
- !inExprModeButNot(mode, FUNmode)
+ !mode.typingExprNotFun
|| tupledTree.symbol == null
|| checkValidAdaptation(tupledTree, args)
)
- case _ =>
- context.undetparams = savedUndetparams
- None
+ } orElse { _ => context.undetparams = savedUndetparams ; None }
}
- } else None
- }
+ else None
+ )
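A user-level sketch of the tuple conversion attempted here (hypothetical example; `TuplingDemo` and `first` are invented names): when a method expecting a single tuple parameter is applied to several arguments, the retry types the call as if the arguments were packed into one tuple.

    object TuplingDemo {
      def first(pair: (Int, String)): Int = pair._1

      def main(args: Array[String]): Unit = {
        // Two arguments for one formal: after the retry this is typed as
        // first((1, "a")), subject to the adaptation checks/warnings noted above.
        println(first(1, "a"))
        println(first((1, "a"))) // the explicit form
      }
    }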
- /** Treats an application which uses named or default arguments.
- * Also works if names + a vararg used: when names are used, the vararg
- * parameter has to be specified exactly once. Note that combining varargs
- * and defaults is ruled out by typedDefDef.
+ /* Treats an application which uses named or default arguments.
+ * Also works if names + a vararg are used: when names are used, the vararg
+ * parameter has to be specified exactly once. Note that combining varargs
+ * and defaults is ruled out by typedDefDef.
*/
def tryNamesDefaults: Tree = {
val lencmp = compareLengths(args, formals)
def checkNotMacro() = {
- if (fun.symbol != null && fun.symbol.filter(sym => sym != null && sym.isTermMacro && !sym.isErroneous) != NoSymbol)
+ if (treeInfo.isMacroApplication(fun))
tryTupleApply getOrElse duplErrorTree(NamedAndDefaultArgumentsNotSupportedForMacros(tree, fun))
}
if (mt.isErroneous) duplErrTree
- else if (inPatternMode(mode)) {
+ else if (mode.inPatternMode) {
// #2064
duplErrorTree(WrongNumberOfArgsError(tree, fun))
} else if (lencmp > 0) {
@@ -3248,10 +3229,10 @@ trait Typers extends Modes with Adaptations with Tags {
val (namelessArgs, argPos) = removeNames(Typer.this)(args, params)
if (namelessArgs exists (_.isErroneous)) {
duplErrTree
- } else if (!isIdentity(argPos) && !sameLength(formals, params))
- // !isIdentity indicates that named arguments are used to re-order arguments
+ } else if (!allArgsArePositional(argPos) && !sameLength(formals, params))
+ // !allArgsArePositional indicates that named arguments are used to re-order arguments
duplErrorTree(MultipleVarargError(tree))
- else if (isIdentity(argPos) && !isNamedApplyBlock(fun)) {
+ else if (allArgsArePositional(argPos) && !isNamedApplyBlock(fun)) {
// if there's no re-ordering, and fun is not transformed, no need to transform
// more than an optimization, e.g. important in "synchronized { x = update-x }"
checkNotMacro()
@@ -3301,7 +3282,7 @@ trait Typers extends Modes with Adaptations with Tags {
}
if (!sameLength(formals, args) || // wrong nb of arguments
- (args exists isNamed) || // uses a named argument
+ (args exists isNamedArg) || // uses a named argument
isNamedApplyBlock(fun)) { // fun was transformed to a named apply block =>
// integrate this application into the block
if (dyna.isApplyDynamicNamed(fun)) dyna.typedNamedApply(tree, fun, args, mode, pt)
@@ -3332,31 +3313,11 @@ trait Typers extends Modes with Adaptations with Tags {
// precise(foo) : foo.type => foo.type
val restpe = mt.resultType(args1 map (arg => gen.stableTypeFor(arg) getOrElse arg.tpe))
def ifPatternSkipFormals(tp: Type) = tp match {
- case MethodType(_, rtp) if (inPatternMode(mode)) => rtp
+ case MethodType(_, rtp) if (mode.inPatternMode) => rtp
case _ => tp
}
- // Replace the Delegate-Chainer methods += and -= with corresponding
- // + and - calls, which are translated in the code generator into
- // Combine and Remove
- if (forMSIL) {
- fun match {
- case Select(qual, name) =>
- if (isSubType(qual.tpe, DelegateClass.tpe)
- && (name == encode("+=") || name == encode("-="))) {
- val n = if (name == encode("+=")) nme.PLUS else nme.MINUS
- val f = Select(qual, n)
- // the compiler thinks, the PLUS method takes only one argument,
- // but he thinks it's an instance method -> still two ref's on the stack
- // -> translated by backend
- val rhs = treeCopy.Apply(tree, f, args)
- return typed(Assign(qual, rhs))
- }
- case _ => ()
- }
- }
-
- /**
+ /*
* This is translating uses of List() into Nil. This is less
* than ideal from a consistency standpoint, but it shouldn't be
* altered without due caution.
@@ -3364,7 +3325,7 @@ trait Typers extends Modes with Adaptations with Tags {
* forced during kind-arity checking, so it is guarded by additional
* tests to ensure we're sufficiently far along.
*/
- if (args.isEmpty && !forInteractive && fun.symbol.isInitialized && ListModule.hasCompleteInfo && (fun.symbol == List_apply))
+ if (args.isEmpty && canTranslateEmptyListToNil && fun.symbol.isInitialized && ListModule.hasCompleteInfo && (fun.symbol == List_apply))
atPos(tree.pos)(gen.mkNil setType restpe)
else
constfold(treeCopy.Apply(tree, fun, args1) setType ifPatternSkipFormals(restpe))
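The `List()`-to-`Nil` translation described in the comment above can be seen directly at the use site (hypothetical sketch; `ListApplyDemo` is an invented name):

    object ListApplyDemo {
      def main(args: Array[String]): Unit = {
        val xs = List()          // an empty List.apply() call; rewritten to Nil when the guards above hold
        val ys: List[Int] = Nil  // the equivalent explicit form
        println(xs == ys)        // true: both denote the empty list
      }
    }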
@@ -3378,7 +3339,7 @@ trait Typers extends Modes with Adaptations with Tags {
doTypedApply(tree, fun, args, mode, pt)
} else {
def handlePolymorphicCall = {
- assert(!inPatternMode(mode), modeString(mode)) // this case cannot arise for patterns
+ assert(!mode.inPatternMode, mode) // this case cannot arise for patterns
val lenientTargs = protoTypeArgs(tparams, formals, mt.resultApprox, pt)
val strictTargs = map2(lenientTargs, tparams)((targ, tparam) =>
if (targ == WildcardType) tparam.tpeHK else targ)
@@ -3404,9 +3365,8 @@ trait Typers extends Modes with Adaptations with Tags {
// define the undetparams which have been fixed by this param list, replace the corresponding symbols in "fun"
// returns those undetparams which have not been instantiated.
val undetparams = inferMethodInstance(fun, tparams, args1, pt)
- val result = doTypedApply(tree, fun, args1, mode, pt)
- context.undetparams = undetparams
- result
+ try doTypedApply(tree, fun, args1, mode, pt)
+ finally context.undetparams = undetparams
}
}
handlePolymorphicCall
@@ -3420,15 +3380,16 @@ trait Typers extends Modes with Adaptations with Tags {
if (!tree.isErrorTyped) setError(tree) else tree
// @H change to setError(treeCopy.Apply(tree, fun, args))
- case otpe if inPatternMode(mode) && unapplyMember(otpe).exists =>
+ case otpe if mode.inPatternMode && unapplyMember(otpe).exists =>
doTypedUnapply(tree, fun0, fun, args, mode, pt)
case _ =>
- duplErrorTree(ApplyWithoutArgsError(tree, fun))
+ if (treeInfo.isMacroApplication(tree)) duplErrorTree(MacroTooManyArgumentListsError(tree, fun.symbol))
+ else duplErrorTree(ApplyWithoutArgsError(tree, fun))
}
}
- def doTypedUnapply(tree: Tree, fun0: Tree, fun: Tree, args: List[Tree], mode: Int, pt: Type): Tree = {
+ def doTypedUnapply(tree: Tree, fun0: Tree, fun: Tree, args: List[Tree], mode: Mode, pt: Type): Tree = {
def duplErrTree = setError(treeCopy.Apply(tree, fun0, args))
def duplErrorTree(err: AbsTypeError) = { issue(err); duplErrTree }
@@ -3463,7 +3424,7 @@ trait Typers extends Modes with Adaptations with Tags {
else None
if (!isApplicableSafe(Nil, unappType, List(pt), WildcardType)) {
- //Console.println("UNAPP: need to typetest, arg.tpe = "+arg.tpe+", unappType = "+unappType)
+ //Console.println(s"UNAPP: need to typetest, arg: ${arg.tpe} unappType: $unappType")
val (freeVars, unappFormal) = freshArgType(unappType.skolemizeExistential(context.owner, tree))
val unapplyContext = context.makeNewScope(context.tree, context.owner)
freeVars foreach unapplyContext.scope.enter
@@ -3473,29 +3434,25 @@ trait Typers extends Modes with Adaptations with Tags {
// turn any unresolved type variables in freevars into existential skolems
val skolems = freeVars map (fv => unapplyContext.owner.newExistentialSkolem(fv, fv))
- arg.tpe = pattp.substSym(freeVars, skolems)
+ arg setType pattp.substSym(freeVars, skolems)
argDummy setInfo arg.tpe
}
- // setType null is necessary so that ref will be stabilized; see bug 881
- val fun1 = typedPos(fun.pos)(Apply(Select(fun setType null, unapp), List(arg)))
+ // clearing the type is necessary so that ref will be stabilized; see bug 881
+ val fun1 = typedPos(fun.pos)(Apply(Select(fun.clearType(), unapp), List(arg)))
if (fun1.tpe.isErroneous) duplErrTree
else {
- val resTp = fun1.tpe.finalResultType.normalize
+ val resTp = fun1.tpe.finalResultType.dealiasWiden
val nbSubPats = args.length
val (formals, formalsExpanded) =
extractorFormalTypes(fun0.pos, resTp, nbSubPats, fun1.symbol, treeInfo.effectivePatternArity(args))
if (formals == null) duplErrorTree(WrongNumberOfArgsError(tree, fun))
else {
val args1 = typedArgs(args, mode, formals, formalsExpanded)
- // This used to be the following (failing) assert:
- // assert(isFullyDefined(pt), tree+" ==> "+UnApply(fun1, args1)+", pt = "+pt)
- // I modified as follows. See SI-1048.
- val pt1 = if (isFullyDefined(pt)) pt else makeFullyDefined(pt)
-
+ val pt1 = ensureFullyDefined(pt) // SI-1048
val itype = glb(List(pt1, arg.tpe))
- arg.tpe = pt1 // restore type (arg is a dummy tree, just needs to pass typechecking)
+ arg setType pt1 // restore type (arg is a dummy tree, just needs to pass typechecking)
val unapply = UnApply(fun1, args1) setPos tree.pos setType itype
// if the type that the unapply method expects for its argument is uncheckable, wrap in classtag extractor
@@ -3530,16 +3487,21 @@ trait Typers extends Modes with Adaptations with Tags {
// if there's a ClassTag that allows us to turn the unchecked type test for `pt` into a checked type test
// return the corresponding extractor (an instance of ClassTag[`pt`])
- def extractorForUncheckedType(pos: Position, pt: Type): Option[Tree] = if (!opt.virtPatmat || isPastTyper) None else {
+ def extractorForUncheckedType(pos: Position, pt: Type): Option[Tree] = if (isPastTyper) None else {
// only look at top-level type, can't (reliably) do anything about unchecked type args (in general)
- pt.normalize.typeConstructor match {
+ // but at least make a proper type before passing it elsewhere
+ val pt1 = pt.dealiasWiden match {
+ case tr @ TypeRef(pre, sym, args) if args.nonEmpty => copyTypeRef(tr, pre, sym, sym.typeParams map (_.tpeHK)) // replace actual type args with dummies
+ case pt1 => pt1
+ }
+ pt1 match {
// if at least one of the types in an intersection is checkable, use the checkable ones
// this avoids problems as in run/matchonseq.scala, where the expected type is `Coll with scala.collection.SeqLike`
// Coll is an abstract type, but SeqLike of course is not
- case RefinedType(parents, _) if (parents.length >= 2) && (parents.exists(tp => !infer.containsUnchecked(tp))) =>
+ case RefinedType(ps, _) if ps.length > 1 && (ps exists infer.isCheckable) =>
None
- case ptCheckable if infer.containsUnchecked(ptCheckable) =>
+ case ptCheckable if infer isUncheckable ptCheckable =>
val classTagExtractor = resolveClassTag(pos, ptCheckable)
if (classTagExtractor != EmptyTree && unapplyMember(classTagExtractor.tpe) != NoSymbol)
@@ -3552,25 +3514,33 @@ trait Typers extends Modes with Adaptations with Tags {
/**
* Convert an annotation constructor call into an AnnotationInfo.
- *
- * @param annClass the expected annotation class
*/
- def typedAnnotation(ann: Tree, mode: Int = EXPRmode, selfsym: Symbol = NoSymbol, annClass: Symbol = AnnotationClass, requireJava: Boolean = false): AnnotationInfo = {
- lazy val annotationError = AnnotationInfo(ErrorType, Nil, Nil)
+ def typedAnnotation(ann: Tree, mode: Mode = EXPRmode, selfsym: Symbol = NoSymbol): AnnotationInfo = {
var hasError: Boolean = false
val pending = ListBuffer[AbsTypeError]()
+ def finish(res: AnnotationInfo): AnnotationInfo = {
+ if (hasError) {
+ pending.foreach(ErrorUtils.issueTypeError)
+ ErroneousAnnotation
+ }
+ else res
+ }
+
def reportAnnotationError(err: AbsTypeError) = {
pending += err
hasError = true
- annotationError
+ ErroneousAnnotation
}
- /** Calling constfold right here is necessary because some trees (negated
- * floats and literals in particular) are not yet folded.
+ /* Calling constfold right here is necessary because some trees (negated
+ * floats and literals in particular) are not yet folded.
*/
def tryConst(tr: Tree, pt: Type): Option[LiteralAnnotArg] = {
- val const: Constant = typed(constfold(tr), EXPRmode, pt) match {
+ // The typed tree may be relevantly different from the tree `tr`,
+ // e.g. it may have undergone an implicit conversion.
+ val ttree = typed(constfold(tr), pt)
+ val const: Constant = ttree match {
case l @ Literal(c) if !l.isErroneous => c
case tree => tree.tpe match {
case ConstantType(c) => c
@@ -3579,29 +3549,36 @@ trait Typers extends Modes with Adaptations with Tags {
}
if (const == null) {
- reportAnnotationError(AnnotationNotAConstantError(tr)); None
+ reportAnnotationError(AnnotationNotAConstantError(ttree)); None
} else if (const.value == null) {
reportAnnotationError(AnnotationArgNullError(tr)); None
} else
Some(LiteralAnnotArg(const))
}
- /** Converts an untyped tree to a ClassfileAnnotArg. If the conversion fails,
- * an error message is reported and None is returned.
+ /* Converts an untyped tree to a ClassfileAnnotArg. If the conversion fails,
+ * an error message is reported and None is returned.
*/
def tree2ConstArg(tree: Tree, pt: Type): Option[ClassfileAnnotArg] = tree match {
case Apply(Select(New(tpt), nme.CONSTRUCTOR), args) if (pt.typeSymbol == ArrayClass) =>
reportAnnotationError(ArrayConstantsError(tree)); None
case ann @ Apply(Select(New(tpt), nme.CONSTRUCTOR), args) =>
- val annInfo = typedAnnotation(ann, mode, NoSymbol, pt.typeSymbol, true)
+ val annInfo = typedAnnotation(ann, mode, NoSymbol)
+ val annType = annInfo.tpe
+
+ if (!annType.typeSymbol.isSubClass(pt.typeSymbol))
+ reportAnnotationError(AnnotationTypeMismatchError(tpt, annType, annType))
+ else if (!annType.typeSymbol.isSubClass(ClassfileAnnotationClass))
+ reportAnnotationError(NestedAnnotationError(ann, annType))
+
if (annInfo.atp.isErroneous) { hasError = true; None }
else Some(NestedAnnotArg(annInfo))
// use of Array.apply[T: ClassTag](xs: T*): Array[T]
// and Array.apply(x: Int, xs: Int*): Array[Int] (and similar)
case Apply(fun, args) =>
- val typedFun = typed(fun, forFunMode(mode), WildcardType)
+ val typedFun = typed(fun, mode.forFunMode)
if (typedFun.symbol.owner == ArrayModule.moduleClass && typedFun.symbol.name == nme.apply)
pt match {
case TypeRef(_, ArrayClass, targ :: _) =>
@@ -3629,44 +3606,42 @@ trait Typers extends Modes with Adaptations with Tags {
}
// begin typedAnnotation
- val (fun, argss) = {
- def extract(fun: Tree, outerArgss: List[List[Tree]]):
- (Tree, List[List[Tree]]) = fun match {
- case Apply(f, args) =>
- extract(f, args :: outerArgss)
- case Select(New(tpt), nme.CONSTRUCTOR) =>
- (fun, outerArgss)
- case _ =>
- reportAnnotationError(UnexpectedTreeAnnotation(fun))
- (setError(fun), outerArgss)
- }
- extract(ann, List())
- }
-
- val res = if (fun.isErroneous) annotationError
- else {
- val typedFun @ Select(New(tpt), _) = typed(fun, forFunMode(mode), WildcardType)
- val annType = tpt.tpe
+ val treeInfo.Applied(fun0, targs, argss) = ann
+ if (fun0.isErroneous)
+ return finish(ErroneousAnnotation)
+ val typedFun0 = typed(fun0, mode.forFunMode)
+ val typedFunPart = (
+ // If there are dummy type arguments in typeFun part, it suggests we
+ // must type the actual constructor call, not only the select. The value
+ // arguments are how the type arguments will be inferred.
+ if (targs.isEmpty && typedFun0.exists(t => t.tpe != null && isDummyAppliedType(t.tpe)))
+ logResult(s"Retyped $typedFun0 to find type args")(typed(argss.foldLeft(fun0)(Apply(_, _))))
+ else
+ typedFun0
+ )
+ val treeInfo.Applied(typedFun @ Select(New(annTpt), _), _, _) = typedFunPart
+ val annType = annTpt.tpe
- if (typedFun.isErroneous) annotationError
+ finish(
+ if (typedFun.isErroneous)
+ ErroneousAnnotation
else if (annType.typeSymbol isNonBottomSubClass ClassfileAnnotationClass) {
// annotation to be saved as java classfile annotation
val isJava = typedFun.symbol.owner.isJavaDefined
- if (!annType.typeSymbol.isNonBottomSubClass(annClass)) {
- reportAnnotationError(AnnotationTypeMismatchError(tpt, annClass.tpe, annType))
- } else if (argss.length > 1) {
+ if (argss.length > 1) {
reportAnnotationError(MultipleArgumentListForAnnotationError(ann))
- } else {
+ }
+ else {
val annScope = annType.decls
.filter(sym => sym.isMethod && !sym.isConstructor && sym.isJavaDefined)
val names = new scala.collection.mutable.HashSet[Symbol]
- def hasValue = names exists (_.name == nme.value)
names ++= (if (isJava) annScope.iterator
else typedFun.tpe.params.iterator)
+
+ def hasValue = names exists (_.name == nme.value)
val args = argss match {
- case List(List(arg)) if !isNamed(arg) && hasValue =>
- List(new AssignOrNamedArg(Ident(nme.value), arg))
- case as :: _ => as
+ case (arg :: Nil) :: Nil if !isNamedArg(arg) && hasValue => gen.mkNamedArg(nme.value, arg) :: Nil
+ case args :: Nil => args
}
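The `hasValue` shortcut above mirrors the Java-annotation convention that a lone positional argument stands for the `value` element (hypothetical sketch; `AnnotationValueDemo` is an invented name):

    object AnnotationValueDemo {
      // A single positional argument is treated as the annotation's `value` element,
      // which is what the gen.mkNamedArg(nme.value, arg) branch implements: the line
      // below is typed as if it were @SuppressWarnings(value = Array("unchecked")).
      @SuppressWarnings(Array("unchecked"))
      def cast(x: Any): String = x.asInstanceOf[String]

      def main(args: Array[String]): Unit = println(cast("ok"))
    }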
val nvPairs = args map {
@@ -3696,46 +3671,33 @@ trait Typers extends Modes with Adaptations with Tags {
reportAnnotationError(AnnotationMissingArgError(ann, annType, sym))
}
- if (hasError) annotationError
+ if (hasError) ErroneousAnnotation
else AnnotationInfo(annType, List(), nvPairs map {p => (p._1, p._2.get)}).setOriginal(Apply(typedFun, args).setPos(ann.pos))
}
- } else if (requireJava) {
- reportAnnotationError(NestedAnnotationError(ann, annType))
- } else {
+ }
+ else {
val typedAnn = if (selfsym == NoSymbol) {
// local dummy fixes SI-5544
val localTyper = newTyper(context.make(ann, context.owner.newLocalDummy(ann.pos)))
- localTyper.typed(ann, mode, annClass.tpe)
- } else {
- // Since a selfsym is supplied, the annotation should have
- // an extra "self" identifier in scope for type checking.
- // This is implemented by wrapping the rhs
- // in a function like "self => rhs" during type checking,
- // and then stripping the "self =>" and substituting
- // in the supplied selfsym.
+ localTyper.typed(ann, mode, annType)
+ }
+ else {
+ // Since a selfsym is supplied, the annotation should have an extra
+ // "self" identifier in scope for type checking. This is implemented
+ // by wrapping the rhs in a function like "self => rhs" during type
+ // checking, and then stripping the "self =>" and substituting in
+ // the supplied selfsym.
val funcparm = ValDef(NoMods, nme.self, TypeTree(selfsym.info), EmptyTree)
- val func = Function(List(funcparm), ann.duplicate)
- // The .duplicate of annot.constr
- // deals with problems that
- // accur if this annotation is
- // later typed again, which
- // the compiler sometimes does.
- // The problem is that "self"
- // ident's within annot.constr
- // will retain the old symbol
- // from the previous typing.
- val fun1clazz = FunctionClass(1)
- val funcType = typeRef(fun1clazz.tpe.prefix,
- fun1clazz,
- List(selfsym.info, annClass.tpe))
-
- (typed(func, mode, funcType): @unchecked) match {
- case t @ Function(List(arg), rhs) =>
- val subs =
- new TreeSymSubstituter(List(arg.symbol),List(selfsym))
- subs(rhs)
+ // The .duplicate of annot.constr deals with problems that occur
+ // if this annotation is later typed again, which the compiler
+ // sometimes does. The problem is that "self" idents within
+ // annot.constr will retain the old symbol from the previous typing.
+ val func = Function(funcparm :: Nil, ann.duplicate)
+ val funcType = appliedType(FunctionClass(1), selfsym.info, annType)
+ val Function(arg :: Nil, rhs) = typed(func, mode, funcType)
+
+ rhs.substituteSymbols(arg.symbol :: Nil, selfsym :: Nil)
}
- }
def annInfo(t: Tree): AnnotationInfo = t match {
case Apply(Select(New(tpt), nme.CONSTRUCTOR), args) =>
@@ -3760,22 +3722,16 @@ trait Typers extends Modes with Adaptations with Tags {
if (annType.typeSymbol == DeprecatedAttr && argss.flatten.size < 2)
unit.deprecationWarning(ann.pos, "@deprecated now takes two arguments; see the scaladoc.")
- if ((typedAnn.tpe == null) || typedAnn.tpe.isErroneous) annotationError
+ if ((typedAnn.tpe == null) || typedAnn.tpe.isErroneous) ErroneousAnnotation
else annInfo(typedAnn)
+ })
}
- }
-
- if (hasError) {
- pending.foreach(ErrorUtils.issueTypeError)
- annotationError
- } else res
- }
/** Compute an existential type from raw hidden symbols `syms` and type `tp`
*/
def packSymbols(hidden: List[Symbol], tp: Type): Type = global.packSymbols(hidden, tp, Some(context0.owner))
- def isReferencedFrom(ctx: Context, sym: Symbol): Boolean =
+ def isReferencedFrom(ctx: Context, sym: Symbol): Boolean = (
ctx.owner.isTerm &&
(ctx.scope.exists { dcl => dcl.isInitialized && (dcl.info contains sym) }) ||
{
@@ -3783,13 +3739,15 @@ trait Typers extends Modes with Adaptations with Tags {
while ((ctx1 != NoContext) && (ctx1.scope eq ctx.scope)) ctx1 = ctx1.outer
(ctx1 != NoContext) && isReferencedFrom(ctx1, sym)
}
+ )
- def isCapturedExistential(sym: Symbol) =
- (sym hasAllFlags (EXISTENTIAL | CAPTURED)) && {
- val start = if (Statistics.canEnable) Statistics.startTimer(isReferencedNanos) else null
- try !isReferencedFrom(context, sym)
- finally if (Statistics.canEnable) Statistics.stopTimer(isReferencedNanos, start)
- }
+ def isCapturedExistential(sym: Symbol) = (
+ (sym hasAllFlags EXISTENTIAL | CAPTURED) && {
+ val start = if (Statistics.canEnable) Statistics.startTimer(isReferencedNanos) else null
+ try !isReferencedFrom(context, sym)
+ finally if (Statistics.canEnable) Statistics.stopTimer(isReferencedNanos, start)
+ }
+ )
def packCaptured(tpe: Type): Type = {
val captured = mutable.Set[Symbol]()
@@ -3820,7 +3778,8 @@ trait Typers extends Modes with Adaptations with Tags {
else containsDef(owner, sym) || isRawParameter(sym) || isCapturedExistential(sym)
def containsLocal(tp: Type): Boolean =
tp exists (t => isLocal(t.typeSymbol) || isLocal(t.termSymbol))
- val normalizeLocals = new TypeMap {
+
+ val dealiasLocals = new TypeMap {
def apply(tp: Type): Type = tp match {
case TypeRef(pre, sym, args) =>
if (sym.isAliasType && containsLocal(tp)) apply(tp.dealias)
@@ -3873,25 +3832,25 @@ trait Typers extends Modes with Adaptations with Tags {
for (sym <- remainingSyms) addLocals(sym.existentialBound)
}
- val normalizedTpe = normalizeLocals(tree.tpe)
- addLocals(normalizedTpe)
- packSymbols(localSyms.toList, normalizedTpe)
+ val dealiasedType = dealiasLocals(tree.tpe)
+ addLocals(dealiasedType)
+ packSymbols(localSyms.toList, dealiasedType)
}
def typedClassOf(tree: Tree, tpt: Tree, noGen: Boolean = false) =
if (!checkClassType(tpt) && noGen) tpt
else atPos(tree.pos)(gen.mkClassOf(tpt.tpe))
- protected def typedExistentialTypeTree(tree: ExistentialTypeTree, mode: Int): Tree = {
+ protected def typedExistentialTypeTree(tree: ExistentialTypeTree, mode: Mode): Tree = {
for (wc <- tree.whereClauses)
if (wc.symbol == NoSymbol) { namer.enterSym(wc); wc.symbol setFlag EXISTENTIAL }
else context.scope enter wc.symbol
val whereClauses1 = typedStats(tree.whereClauses, context.owner)
- for (vd @ ValDef(_, _, _, _) <- tree.whereClauses)
+ for (vd @ ValDef(_, _, _, _) <- whereClauses1)
if (vd.symbol.tpe.isVolatile)
AbstractionFromVolatileTypeError(vd)
val tpt1 = typedType(tree.tpt, mode)
- existentialTransform(tree.whereClauses map (_.symbol), tpt1.tpe)((tparams, tp) => {
+ existentialTransform(whereClauses1 map (_.symbol), tpt1.tpe)((tparams, tp) => {
val original = tpt1 match {
case tpt : TypeTree => atPos(tree.pos)(ExistentialTypeTree(tpt.original, tree.whereClauses))
case _ => {
@@ -3905,7 +3864,7 @@ trait Typers extends Modes with Adaptations with Tags {
}
// lifted out of typed1 because it's needed in typedImplicit0
- protected def typedTypeApply(tree: Tree, mode: Int, fun: Tree, args: List[Tree]): Tree = fun.tpe match {
+ protected def typedTypeApply(tree: Tree, mode: Mode, fun: Tree, args: List[Tree]): Tree = fun.tpe match {
case OverloadedType(pre, alts) =>
inferPolyAlternatives(fun, args map (_.tpe))
val tparams = fun.symbol.typeParams //@M TODO: fun.symbol.info.typeParams ? (as in typedAppliedTypeTree)
@@ -3914,7 +3873,7 @@ trait Typers extends Modes with Adaptations with Tags {
// as we don't know which alternative to choose... here we do
map2Conserve(args, tparams) {
//@M! the polytype denotes the expected kind
- (arg, tparam) => typedHigherKindedType(arg, mode, GenPolyType(tparam.typeParams, AnyClass.tpe))
+ (arg, tparam) => typedHigherKindedType(arg, mode, GenPolyType(tparam.typeParams, AnyTpe))
}
} else // @M: there's probably something wrong when args.length != tparams.length... (triggered by bug #320)
// Martin, I'm using fake trees, because, if you use args or arg.map(typedType),
@@ -3930,12 +3889,12 @@ trait Typers extends Modes with Adaptations with Tags {
val targs = args map (_.tpe)
checkBounds(tree, NoPrefix, NoSymbol, tparams, targs, "")
if (fun.symbol == Predef_classOf)
- typedClassOf(tree, args.head, true)
+ typedClassOf(tree, args.head, noGen = true)
else {
if (!isPastTyper && fun.symbol == Any_isInstanceOf && targs.nonEmpty) {
val scrutineeType = fun match {
case Select(qual, _) => qual.tpe
- case _ => AnyClass.tpe
+ case _ => AnyTpe
}
checkCheckable(tree, targs.head, scrutineeType, inPattern = false)
}
@@ -3994,7 +3953,7 @@ trait Typers extends Modes with Adaptations with Tags {
// else false
}
- def typedNamedApply(orig: Tree, fun: Tree, args: List[Tree], mode: Int, pt: Type): Tree = {
+ def typedNamedApply(orig: Tree, fun: Tree, args: List[Tree], mode: Mode, pt: Type): Tree = {
def argToBinding(arg: Tree): Tree = arg match {
case AssignOrNamedArg(Ident(name), rhs) => gen.mkTuple(List(CODE.LIT(name.toString), rhs))
case _ => gen.mkTuple(List(CODE.LIT(""), arg))
@@ -4039,20 +3998,20 @@ trait Typers extends Modes with Adaptations with Tags {
def applyOp(args: List[Tree]) = if (hasNamed(args)) nme.applyDynamicNamed else nme.applyDynamic
def matches(t: Tree) = isDesugaredApply || treeInfo.dissectApplied(t).core == treeSelection
- /** Note that the trees which arrive here are potentially some distance from
- * the trees of direct interest. `cxTree` is some enclosing expression which
- * may apparently be arbitrarily larger than `tree`; and `tree` itself is
- * too small, having at least in some cases lost its explicit type parameters.
- * This logic is designed to use `tree` to pinpoint the immediately surrounding
- * Apply/TypeApply/Select node, and only then creates the dynamic call.
- * See SI-6731 among others.
+ /* Note that the trees which arrive here are potentially some distance from
+ * the trees of direct interest. `cxTree` is some enclosing expression which
+ * may apparently be arbitrarily larger than `tree`; and `tree` itself is
+ * too small, having at least in some cases lost its explicit type parameters.
+ * This logic is designed to use `tree` to pinpoint the immediately surrounding
+ * Apply/TypeApply/Select node, and only then creates the dynamic call.
+ * See SI-6731 among others.
*/
def findSelection(t: Tree): Option[(TermName, Tree)] = t match {
case Apply(fn, args) if hasStar(args) => DynamicVarArgUnsupported(tree, applyOp(args)) ; None
case Apply(fn, args) if matches(fn) => Some((applyOp(args), fn))
case Assign(lhs, _) if matches(lhs) => Some((nme.updateDynamic, lhs))
case _ if matches(t) => Some((nme.selectDynamic, t))
- case _ => t.children flatMap findSelection headOption
+ case _ => (t.children flatMap findSelection).headOption
}
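For orientation, the rewrites that `findSelection` feeds into look like this from the user's side (hypothetical sketch using `scala.language.dynamics`; `Props` and its fields are invented names):

    import scala.language.dynamics

    class Props extends Dynamic {
      private val data = scala.collection.mutable.Map[String, Any]()
      def selectDynamic(name: String): Any                           = data(name)         // p.foo
      def updateDynamic(name: String)(value: Any): Unit              = data(name) = value // p.foo = v
      def applyDynamic(name: String)(args: Any*): Any                = (name, args)       // p.foo(x)
      def applyDynamicNamed(name: String)(args: (String, Any)*): Any = (name, args)       // p.foo(a = x)
    }

    object DynamicDemo {
      def main(args: Array[String]): Unit = {
        val p = new Props
        p.answer = 42                 // rewritten to p.updateDynamic("answer")(42)
        println(p.answer)             // rewritten to p.selectDynamic("answer")
        println(p.greet("hi"))        // rewritten to p.applyDynamic("greet")("hi")
        println(p.greet(word = "hi")) // rewritten to p.applyDynamicNamed("greet")(("word", "hi"))
      }
    }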
findSelection(cxTree) match {
case Some((opName, treeInfo.Applied(_, targs, _))) =>
@@ -4064,13 +4023,9 @@ trait Typers extends Modes with Adaptations with Tags {
}
}
- def wrapErrors(tree: Tree, typeTree: Typer => Tree): Tree = {
- silent(typeTree) match {
- case SilentResultValue(r) => r
- case SilentTypeError(err) => DynamicRewriteError(tree, err)
+ def wrapErrors(tree: Tree, typeTree: Typer => Tree): Tree =
+ silent(typeTree) orElse (err => DynamicRewriteError(tree, err))
}
- }
- }
final def deindentTyping() = context.typingIndentLevel -= 2
final def indentTyping() = context.typingIndentLevel += 2
@@ -4083,22 +4038,29 @@ trait Typers extends Modes with Adaptations with Tags {
println(s)
}
- def typed1(tree: Tree, mode: Int, pt: Type): Tree = {
- def isPatternMode = inPatternMode(mode)
+ def typed1(tree: Tree, mode: Mode, pt: Type): Tree = {
+ // Lookup in the given class using the root mirror.
+ def lookupInOwner(owner: Symbol, name: Name): Symbol =
+ if (mode.inQualMode) rootMirror.missingHook(owner, name) else NoSymbol
+
+ // Lookup in the given qualifier. Used in last-ditch efforts by typedIdent and typedSelect.
+ def lookupInRoot(name: Name): Symbol = lookupInOwner(rootMirror.RootClass, name)
+ def lookupInEmpty(name: Name): Symbol = rootMirror.EmptyPackageClass.info member name
- //Console.println("typed1("+tree.getClass()+","+Integer.toHexString(mode)+","+pt+")")
- //@M! get the type of the qualifier in a Select tree, otherwise: NoType
- def prefixType(fun: Tree): Type = fun match {
- case Select(qualifier, _) => qualifier.tpe
-// case Ident(name) => ??
- case _ => NoType
+ def lookupInQualifier(qual: Tree, name: Name): Symbol = (
+ if (name == nme.ERROR || qual.tpe.widen.isErroneous)
+ NoSymbol
+ else lookupInOwner(qual.tpe.typeSymbol, name) orElse {
+ NotAMemberError(tree, qual, name)
+ NoSymbol
}
+ )
def typedAnnotated(atd: Annotated): Tree = {
val ann = atd.annot
val arg1 = typed(atd.arg, mode, pt)
- /** mode for typing the annotation itself */
- val annotMode = mode & ~TYPEmode | EXPRmode
+ /* mode for typing the annotation itself */
+ val annotMode = (mode &~ TYPEmode) | EXPRmode
def resultingTypeTree(tpe: Type) = {
// we need symbol-ful originals for reification
@@ -4117,7 +4079,7 @@ trait Typers extends Modes with Adaptations with Tags {
if (ann.tpe == null) {
// an annotated type
val selfsym =
- if (!settings.selfInAnnots.value)
+ if (!settings.selfInAnnots)
NoSymbol
else
arg1.tpe.selfsym orElse {
@@ -4153,7 +4115,7 @@ trait Typers extends Modes with Adaptations with Tags {
// Erroneous annotations were already reported in typedAnnotation
arg1 // simply drop erroneous annotations
else {
- ann.tpe = atype
+ ann setType atype
resultingTypeTree(atype)
}
} else {
@@ -4164,7 +4126,7 @@ trait Typers extends Modes with Adaptations with Tags {
else {
if (ann.tpe == null) {
val annotInfo = typedAnnotation(ann, annotMode)
- ann.tpe = arg1.tpe.withAnnotation(annotInfo)
+ ann setType arg1.tpe.withAnnotation(annotInfo)
}
val atype = ann.tpe
Typed(arg1, resultingTypeTree(atype)) setPos tree.pos setType atype
@@ -4188,7 +4150,7 @@ trait Typers extends Modes with Adaptations with Tags {
if (name != tpnme.WILDCARD) namer.enterInScope(sym)
else context.scope.enter(sym)
- tree setSymbol sym setType sym.tpe
+ tree setSymbol sym setType sym.tpeHK
case name: TermName =>
val sym =
@@ -4196,7 +4158,9 @@ trait Typers extends Modes with Adaptations with Tags {
else context.owner.newValue(name, tree.pos)
if (name != nme.WILDCARD) {
- if ((mode & ALTmode) != 0) VariableInPatternAlternativeError(tree)
+ if (context.inPatAlternative)
+ VariableInPatternAlternativeError(tree)
+
namer.enterInScope(sym)
}
@@ -4221,15 +4185,15 @@ trait Typers extends Modes with Adaptations with Tags {
def typedArrayValue(tree: ArrayValue) = {
val elemtpt1 = typedType(tree.elemtpt, mode)
- val elems1 = tree.elems mapConserve (elem => typed(elem, mode, elemtpt1.tpe))
- treeCopy.ArrayValue(tree, elemtpt1, elems1)
- .setType(
- (if (isFullyDefined(pt) && !phase.erasedTypes) pt
- else arrayType(elemtpt1.tpe)).notNull)
+ val elems1 = tree.elems mapConserve (elem => typed(elem, mode, elemtpt1.tpe))
+ // see run/t6126 for an example where `pt` does not suffice (tagged types)
+ val tpe1 = if (isFullyDefined(pt) && !phase.erasedTypes) pt else arrayType(elemtpt1.tpe)
+
+ treeCopy.ArrayValue(tree, elemtpt1, elems1) setType tpe1
}
def typedAssign(lhs: Tree, rhs: Tree): Tree = {
- val lhs1 = typed(lhs, EXPRmode | LHSmode, WildcardType)
+ val lhs1 = typed(lhs, EXPRmode | LHSmode)
val varsym = lhs1.symbol
// see #2494 for double error message example
@@ -4243,7 +4207,7 @@ trait Typers extends Modes with Adaptations with Tags {
if (treeInfo.mayBeVarGetter(varsym)) {
lhs1 match {
case treeInfo.Applied(Select(qual, name), _, _) =>
- val sel = Select(qual, nme.getterToSetter(name.toTermName)) setPos lhs.pos
+ val sel = Select(qual, name.setterName) setPos lhs.pos
val app = Apply(sel, List(rhs)) setPos tree.pos
return typed(app, mode, pt)
@@ -4254,52 +4218,54 @@ trait Typers extends Modes with Adaptations with Tags {
// // setter-rewrite has been done above, so rule out methods here, but, wait a minute, why are we assigning to non-variables after erasure?!
// (phase.erasedTypes && varsym.isValue && !varsym.isMethod)) {
if (varsym.isVariable || varsym.isValue && phase.erasedTypes) {
- val rhs1 = typed(rhs, EXPRmode | BYVALmode, lhs1.tpe)
- treeCopy.Assign(tree, lhs1, checkDead(rhs1)) setType UnitClass.tpe
+ val rhs1 = typedByValueExpr(rhs, lhs1.tpe)
+ treeCopy.Assign(tree, lhs1, checkDead(rhs1)) setType UnitTpe
}
else if(dyna.isDynamicallyUpdatable(lhs1)) {
- val rhs1 = typed(rhs, EXPRmode | BYVALmode, WildcardType)
+ val rhs1 = typedByValueExpr(rhs)
val t = Apply(lhs1, List(rhs1))
dyna.wrapErrors(t, _.typed1(t, mode, pt))
}
else fail()
}
- def typedIf(tree: If) = {
- val cond1 = checkDead(typed(tree.cond, EXPRmode | BYVALmode, BooleanClass.tpe))
- val thenp = tree.thenp
- val elsep = tree.elsep
- if (elsep.isEmpty) { // in the future, should be unnecessary
- val thenp1 = typed(thenp, UnitClass.tpe)
- treeCopy.If(tree, cond1, thenp1, elsep) setType thenp1.tpe
- } else {
- var thenp1 = typed(thenp, pt)
- var elsep1 = typed(elsep, pt)
- def thenTp = packedType(thenp1, context.owner)
- def elseTp = packedType(elsep1, context.owner)
-
- // println("typedIf: "+(thenp1.tpe, elsep1.tpe, ptOrLub(List(thenp1.tpe, elsep1.tpe)),"\n", thenTp, elseTp, thenTp =:= elseTp))
- val (owntype, needAdapt) =
- // in principle we should pack the types of each branch before lubbing, but lub doesn't really work for existentials anyway
- // in the special (though common) case where the types are equal, it pays to pack before comparing
- // especially virtpatmat needs more aggressive unification of skolemized types
- // this breaks src/library/scala/collection/immutable/TrieIterator.scala
- if ( opt.virtPatmat && !isPastTyper
- && thenp1.tpe.annotations.isEmpty && elsep1.tpe.annotations.isEmpty // annotated types need to be lubbed regardless (at least, continations break if you by pass them like this)
- && thenTp =:= elseTp
- ) (thenp1.tpe.deconst, false) // use unpacked type. Important to deconst, as is done in ptOrLub, otherwise `if (???) 0 else 0` evaluates to 0 (SI-6331)
- // TODO: skolemize (lub of packed types) when that no longer crashes on files/pos/t4070b.scala
- else ptOrLub(thenp1.tpe :: elsep1.tpe :: Nil, pt)
-
- if (needAdapt) { //isNumericValueType(owntype)) {
- thenp1 = adapt(thenp1, mode, owntype)
- elsep1 = adapt(elsep1, mode, owntype)
- }
- treeCopy.If(tree, cond1, thenp1, elsep1) setType owntype
- }
- }
-
- // under -Xexperimental (and not -Xoldpatmat), and when there's a suitable __match in scope, virtualize the pattern match
+ def typedIf(tree: If): If = {
+ val cond1 = checkDead(typedByValueExpr(tree.cond, BooleanTpe))
+ // One-legged ifs don't need a lot of analysis
+ if (tree.elsep.isEmpty)
+ return treeCopy.If(tree, cond1, typed(tree.thenp, UnitTpe), tree.elsep) setType UnitTpe
+
+ val thenp1 = typed(tree.thenp, pt)
+ val elsep1 = typed(tree.elsep, pt)
+
+ // in principle we should pack the types of each branch before lubbing, but lub doesn't really work for existentials anyway
+ // in the special (though common) case where the types are equal, it pays to pack before comparing
+ // especially virtpatmat needs more aggressive unification of skolemized types
+ // this breaks src/library/scala/collection/immutable/TrieIterator.scala
+ // annotated types need to be lubbed regardless (at least, continuations break if you bypass them like this)
+ def samePackedTypes = (
+ !isPastTyper
+ && thenp1.tpe.annotations.isEmpty
+ && elsep1.tpe.annotations.isEmpty
+ && packedType(thenp1, context.owner) =:= packedType(elsep1, context.owner)
+ )
+ def finish(ownType: Type) = treeCopy.If(tree, cond1, thenp1, elsep1) setType ownType
+ // TODO: skolemize (lub of packed types) when that no longer crashes on files/pos/t4070b.scala
+ // @PP: This was doing the samePackedTypes check BEFORE the isFullyDefined check,
+ // which based on everything I see everywhere else was a bug. I reordered it.
+ if (isFullyDefined(pt))
+ finish(pt)
+ // Important to deconst, otherwise `if (???) 0 else 0` evaluates to 0 (SI-6331)
+ else thenp1.tpe.deconst :: elsep1.tpe.deconst :: Nil match {
+ case tp :: _ if samePackedTypes => finish(tp)
+ case tpes if sameWeakLubAsLub(tpes) => finish(lub(tpes))
+ case tpes =>
+ val lub = weakLub(tpes)
+ treeCopy.If(tree, cond1, adapt(thenp1, mode, lub), adapt(elsep1, mode, lub)) setType lub
+ }
+ }
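Two details of the branch-type computation above are visible at the source level (hypothetical sketch; `IfTypingDemo` is an invented name): deconst keeps `if (cond) 0 else 0` from getting the constant type Int(0) (SI-6331), and numeric branches of different types meet at their weak lub.

    object IfTypingDemo {
      def main(args: Array[String]): Unit = {
        def cond = scala.util.Random.nextBoolean()
        val a = if (cond) 0 else 0   // typed as Int, not the constant 0, so it is not folded away
        val b = if (cond) 1 else 2.0 // weak lub of Int and Double is Double
        println((a, b))
      }
    }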
+
+ // When there's a suitable __match in scope, virtualize the pattern match
// otherwise, type the Match and leave it until phase `patmat` (immediately after typer)
// empty-selector matches are transformed into synthetic PartialFunction implementations when the expected type demands it
def typedVirtualizedMatch(tree: Match): Tree = {
@@ -4309,7 +4275,7 @@ trait Typers extends Modes with Adaptations with Tags {
if (newPatternMatching && (pt.typeSymbol == PartialFunctionClass))
synthesizePartialFunction(newTermName(context.unit.fresh.newName("x")), tree.pos, tree, mode, pt)
else {
- val arity = if (isFunctionType(pt)) pt.normalize.typeArgs.length - 1 else 1
+ val arity = if (isFunctionType(pt)) pt.dealiasWiden.typeArgs.length - 1 else 1
val params = for (i <- List.range(0, arity)) yield
atPos(tree.pos.focusStart) {
ValDef(Modifiers(PARAM | SYNTHETIC),
@@ -4336,9 +4302,9 @@ trait Typers extends Modes with Adaptations with Tags {
val DefDef(_, name, _, _, restpt, _) = enclMethod.tree
if (restpt.tpe eq null) {
ReturnWithoutTypeError(tree, enclMethod.owner)
- } else {
- context.enclMethod.returnsSeen = true
- val expr1: Tree = typed(expr, EXPRmode | BYVALmode | RETmode, restpt.tpe)
+ }
+ else {
+ val expr1 = context withinReturnExpr typedByValueExpr(expr, restpt.tpe)
// Warn about returning a value if no value can be returned.
if (restpt.tpe.typeSymbol == UnitClass) {
// The typing in expr1 says expr is Unit (it has already been coerced if
@@ -4348,7 +4314,7 @@ trait Typers extends Modes with Adaptations with Tags {
unit.warning(tree.pos, "enclosing method " + name + " has result type Unit: return value discarded")
}
val res = treeCopy.Return(tree, checkDead(expr1)).setSymbol(enclMethod.owner)
- val tp = pluginsTypedReturn(NothingClass.tpe, this, res, restpt.tpe)
+ val tp = pluginsTypedReturn(NothingTpe, this, res, restpt.tpe)
res.setType(tp)
}
}
@@ -4365,7 +4331,7 @@ trait Typers extends Modes with Adaptations with Tags {
// given a dealiased type.
val tpt0 = typedTypeConstructor(tpt) modifyType (_.dealias)
if (checkStablePrefixClassType(tpt0))
- if (tpt0.hasSymbol && !tpt0.symbol.typeParams.isEmpty) {
+ if (tpt0.hasSymbolField && !tpt0.symbol.typeParams.isEmpty) {
context.undetparams = cloneSymbols(tpt0.symbol.typeParams)
notifyUndetparamsAdded(context.undetparams)
TypeTree().setOriginal(tpt0)
@@ -4374,8 +4340,8 @@ trait Typers extends Modes with Adaptations with Tags {
else tpt0
}
- /** If current tree <tree> appears in <val x(: T)? = <tree>>
- * return `tp with x.type' else return `tp`.
+ /* If current tree <tree> appears in <val x(: T)? = <tree>>
+ * return `tp with x.type` else return `tp`.
*/
def narrowRhs(tp: Type) = { val sym = context.tree.symbol
context.tree match {
@@ -4395,7 +4361,7 @@ trait Typers extends Modes with Adaptations with Tags {
NotAMemberError(tpt, TypeTree(tp), nme.CONSTRUCTOR)
setError(tpt)
}
- else if (!( tp == sym.thisSym.tpe // when there's no explicit self type -- with (#3612) or without self variable
+ else if (!( tp == sym.thisSym.tpe_* // when there's no explicit self type -- with (#3612) or without self variable
// sym.thisSym.tpe == tp.typeOfThis (except for objects)
|| narrowRhs(tp) <:< tp.typeOfThis
|| phase.erasedTypes
@@ -4425,36 +4391,15 @@ trait Typers extends Modes with Adaptations with Tags {
else adapt(expr1, mode, functionTypeWildcard(expr1, formals.length))
case MethodType(formals, _) =>
if (isFunctionType(pt)) expr1
- else expr1 match {
- case Select(qual, name) if (forMSIL &&
- pt != WildcardType &&
- pt != ErrorType &&
- isSubType(pt, DelegateClass.tpe)) =>
- val scalaCaller = newScalaCaller(pt)
- addScalaCallerInfo(scalaCaller, expr1.symbol)
- val n: Name = scalaCaller.name
- val del = Ident(DelegateClass) setType DelegateClass.tpe
- val f = Select(del, n)
- //val f1 = TypeApply(f, List(Ident(pt.symbol) setType pt))
- val args: List[Tree] = if(expr1.symbol.isStatic) List(Literal(Constant(null)))
- else List(qual) // where the scala-method is located
- val rhs = Apply(f, args)
- typed(rhs)
- case _ =>
- adapt(expr1, mode, functionTypeWildcard(expr1, formals.length))
- }
+ else adapt(expr1, mode, functionTypeWildcard(expr1, formals.length))
case ErrorType =>
expr1
case _ =>
UnderscoreEtaError(expr1)
}
- /**
- * @param args ...
- * @return ...
- */
- def tryTypedArgs(args: List[Tree], mode: Int): Option[List[Tree]] = {
- val c = context.makeSilent(false)
+ def tryTypedArgs(args: List[Tree], mode: Mode): Option[List[Tree]] = {
+ val c = context.makeSilent(reportAmbiguousErrors = false)
c.retyping = true
try {
val res = newTyper(c).typedArgs(args, mode)
@@ -4463,16 +4408,14 @@ trait Typers extends Modes with Adaptations with Tags {
case ex: CyclicReference =>
throw ex
case te: TypeError =>
- // @H some of typer erros can still leak,
+ // @H some typer errors can still leak,
// for instance in continuations
None
- } finally {
- c.flushBuffer()
}
}
- /** Try to apply function to arguments; if it does not work, try to convert Java raw to existentials, or try to
- * insert an implicit conversion.
+ /* Try to apply function to arguments; if it does not work, try to convert Java raw to existentials, or try to
+ * insert an implicit conversion.
*/
def tryTypedApply(fun: Tree, args: List[Tree]): Tree = {
val start = if (Statistics.canEnable) Statistics.startTimer(failedApplyNanos) else null
@@ -4513,11 +4456,11 @@ trait Typers extends Modes with Adaptations with Tags {
tryTypedArgs(args, forArgMode(fun, mode)) match {
case Some(args1) =>
val qual1 =
- if (!pt.isError) adaptToArguments(qual, name, args1, pt, true, true)
+ if (!pt.isError) adaptToArguments(qual, name, args1, pt, reportAmbiguous = true, saveErrors = true)
else qual
if (qual1 ne qual) {
val tree1 = Apply(Select(qual1, name) setPos fun.pos, args1) setPos tree.pos
- return typed1(tree1, mode | SNDTRYmode, pt)
+ return context withinSecondTry typed1(tree1, mode, pt)
}
case _ => ()
}
@@ -4526,121 +4469,89 @@ trait Typers extends Modes with Adaptations with Tags {
setError(treeCopy.Apply(tree, fun, args))
}
- silent(_.doTypedApply(tree, fun, args, mode, pt)) match {
- case SilentResultValue(t) =>
- t
- case SilentTypeError(err) =>
- onError(err)
+ silent(_.doTypedApply(tree, fun, args, mode, pt)) orElse onError
}
- }
def normalTypedApply(tree: Tree, fun: Tree, args: List[Tree]) = {
+ // TODO: replace `fun.symbol.isStable` by `treeInfo.isStableIdentifierPattern(fun)`
val stableApplication = (fun.symbol ne null) && fun.symbol.isMethod && fun.symbol.isStable
- if (args.isEmpty && stableApplication && isPatternMode) {
- // treat stable function applications f() as expressions.
- //
- // [JZ] According to Martin, this is related to the old pattern matcher, which
- // needs to typecheck after a the translation of `x.f` to `x.f()` in a prior
- // compilation phase. As part of SI-7377, this has been tightened with `args.isEmpty`,
- // but we should remove it altogether in Scala 2.11.
- typed1(tree, mode & ~PATTERNmode | EXPRmode, pt)
- } else {
- val funpt = if (isPatternMode) pt else WildcardType
- val appStart = if (Statistics.canEnable) Statistics.startTimer(failedApplyNanos) else null
- val opeqStart = if (Statistics.canEnable) Statistics.startTimer(failedOpEqNanos) else null
-
- def onError(reportError: => Tree): Tree = {
- fun match {
- case Select(qual, name)
- if !isPatternMode && nme.isOpAssignmentName(newTermName(name.decode)) =>
- val qual1 = typedQualifier(qual)
- if (treeInfo.isVariableOrGetter(qual1)) {
- if (Statistics.canEnable) Statistics.stopTimer(failedOpEqNanos, opeqStart)
- convertToAssignment(fun, qual1, name, args)
- } else {
- if (Statistics.canEnable) Statistics.stopTimer(failedApplyNanos, appStart)
- reportError
- }
- case _ =>
- if (Statistics.canEnable) Statistics.stopTimer(failedApplyNanos, appStart)
- reportError
- }
- }
- silent(_.typed(fun, forFunMode(mode), funpt),
- if ((mode & EXPRmode) != 0) false else context.ambiguousErrors,
- if ((mode & EXPRmode) != 0) tree else context.tree) match {
- case SilentResultValue(fun1) =>
- val fun2 = if (stableApplication) stabilizeFun(fun1, mode, pt) else fun1
- if (Statistics.canEnable) Statistics.incCounter(typedApplyCount)
- def isImplicitMethod(tpe: Type) = tpe match {
- case mt: MethodType => mt.isImplicit
- case _ => false
- }
- val useTry = (
- !isPastTyper
- && fun2.isInstanceOf[Select]
- && !isImplicitMethod(fun2.tpe)
- && ((fun2.symbol eq null) || !fun2.symbol.isConstructor)
- && (mode & (EXPRmode | SNDTRYmode)) == EXPRmode
- )
- val res =
- if (useTry) tryTypedApply(fun2, args)
- else doTypedApply(tree, fun2, args, mode, pt)
-
- /*
- if (fun2.hasSymbol && fun2.symbol.isConstructor && (mode & EXPRmode) != 0) {
- res.tpe = res.tpe.notNull
- }
- */
- // TODO: In theory we should be able to call:
- //if (fun2.hasSymbol && fun2.symbol.name == nme.apply && fun2.symbol.owner == ArrayClass) {
- // But this causes cyclic reference for Array class in Cleanup. It is easy to overcome this
- // by calling ArrayClass.info here (or some other place before specialize).
- if (fun2.symbol == Array_apply && !res.isErrorTyped) {
- val checked = gen.mkCheckInit(res)
- // this check is needed to avoid infinite recursion in Duplicators
- // (calling typed1 more than once for the same tree)
- if (checked ne res) typed { atPos(tree.pos)(checked) }
- else res
- } else
- res
- case SilentTypeError(err) =>
- onError({issue(err); setError(tree)})
- }
+ val funpt = if (mode.inPatternMode) pt else WildcardType
+ val appStart = if (Statistics.canEnable) Statistics.startTimer(failedApplyNanos) else null
+ val opeqStart = if (Statistics.canEnable) Statistics.startTimer(failedOpEqNanos) else null
+
+ def onError(reportError: => Tree): Tree = fun match {
+ case Select(qual, name) if !mode.inPatternMode && nme.isOpAssignmentName(newTermName(name.decode)) =>
+ val qual1 = typedQualifier(qual)
+ if (treeInfo.isVariableOrGetter(qual1)) {
+ if (Statistics.canEnable) Statistics.stopTimer(failedOpEqNanos, opeqStart)
+ convertToAssignment(fun, qual1, name, args)
+ }
+ else {
+ if (Statistics.canEnable) Statistics.stopTimer(failedApplyNanos, appStart)
+ reportError
+ }
+ case _ =>
+ if (Statistics.canEnable) Statistics.stopTimer(failedApplyNanos, appStart)
+ reportError
+ }
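The `onError` fallback above is what turns a failed op-assignment call into an ordinary assignment; a minimal sketch of the user-visible effect (`OpAssignDemo` is an invented name):

    object OpAssignDemo {
      def main(args: Array[String]): Unit = {
        var total: Int = 0
        // Int has no `+=` method, so typing `total += 5` as a call fails first,
        // and convertToAssignment retries it as `total = total + 5`.
        total += 5
        println(total) // 5
      }
    }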
+ val silentResult = silent(
+ op = _.typed(fun, mode.forFunMode, funpt),
+ reportAmbiguousErrors = !mode.inExprMode && context.ambiguousErrors,
+ newtree = if (mode.inExprMode) tree else context.tree
+ )
+ silentResult match {
+ case SilentResultValue(fun1) =>
+ val fun2 = if (stableApplication) stabilizeFun(fun1, mode, pt) else fun1
+ if (Statistics.canEnable) Statistics.incCounter(typedApplyCount)
+ val noSecondTry = (
+ isPastTyper
+ || context.inSecondTry
+ || (fun2.symbol ne null) && fun2.symbol.isConstructor
+ || isImplicitMethodType(fun2.tpe)
+ )
+ val isFirstTry = fun2 match {
+ case Select(_, _) => !noSecondTry && mode.inExprMode
+ case _ => false
+ }
+ if (isFirstTry)
+ tryTypedApply(fun2, args)
+ else
+ doTypedApply(tree, fun2, args, mode, pt)
+ case SilentTypeError(err) =>
+ onError({ issue(err); setError(tree) })
}
}
- def typedApply(tree: Apply) = {
- val fun = tree.fun
- val args = tree.args
- fun match {
- case Block(stats, expr) =>
- typed1(atPos(tree.pos)(Block(stats, Apply(expr, args) setPos tree.pos.makeTransparent)), mode, pt)
- case _ =>
- normalTypedApply(tree, fun, args) match {
- case Apply(Select(New(tpt), name), args)
- if (tpt.tpe != null &&
- tpt.tpe.typeSymbol == ArrayClass &&
- args.length == 1 &&
- erasure.GenericArray.unapply(tpt.tpe).isDefined) => // !!! todo simplify by using extractor
- // convert new Array[T](len) to evidence[ClassTag[T]].newArray(len)
- // convert new Array^N[T](len) for N > 1 to evidence[ClassTag[Array[...Array[T]...]]].newArray(len), where Array HK gets applied (N-1) times
- // [Eugene] no more MaxArrayDims. ClassTags are flexible enough to allow creation of arrays of arbitrary dimensionality (w.r.t JVM restrictions)
- val Some((level, componentType)) = erasure.GenericArray.unapply(tpt.tpe)
- val tagType = List.iterate(componentType, level)(tpe => appliedType(ArrayClass.toTypeConstructor, List(tpe))).last
- atPos(tree.pos) {
- val tag = resolveClassTag(tree.pos, tagType)
- if (tag.isEmpty) MissingClassTagError(tree, tagType)
- else typed(new ApplyToImplicitArgs(Select(tag, nme.newArray), args))
+ // convert new Array[T](len) to evidence[ClassTag[T]].newArray(len)
+ // convert new Array^N[T](len) for N > 1 to evidence[ClassTag[Array[...Array[T]...]]].newArray(len)
+ // where Array HK gets applied (N-1) times
+ object ArrayInstantiation {
+ def unapply(tree: Apply) = tree match {
+ case Apply(Select(New(tpt), name), arg :: Nil) if tpt.tpe != null && tpt.tpe.typeSymbol == ArrayClass =>
+ Some(tpt.tpe) collect {
+ case erasure.GenericArray(level, componentType) =>
+ val tagType = (1 until level).foldLeft(componentType)((res, _) => arrayType(res))
+
+ resolveClassTag(tree.pos, tagType) match {
+ case EmptyTree => MissingClassTagError(tree, tagType)
+ case tag => atPos(tree.pos)(new ApplyToImplicitArgs(Select(tag, nme.newArray), arg :: Nil))
}
- case Apply(Select(fun, nme.apply), _) if treeInfo.isSuperConstrCall(fun) => //SI-5696
- TooManyArgumentListsForConstructor(tree)
- case tree1 =>
- tree1
}
+ case _ => None
}
}
+ def typedApply(tree: Apply) = tree match {
+ case Apply(Block(stats, expr), args) =>
+ typed1(atPos(tree.pos)(Block(stats, Apply(expr, args) setPos tree.pos.makeTransparent)), mode, pt)
+ case Apply(fun, args) =>
+ normalTypedApply(tree, fun, args) match {
+ case ArrayInstantiation(tree1) => typed(tree1, mode, pt)
+ case Apply(Select(fun, nme.apply), _) if treeInfo.isSuperConstrCall(fun) => TooManyArgumentListsForConstructor(tree) //SI-5696
+ case tree1 => tree1
+ }
+ }
+
def convertToAssignment(fun: Tree, qual: Tree, name: Name, args: List[Tree]): Tree = {
val prefix = name.toTermName stripSuffix nme.EQL
def mkAssign(vble: Tree): Tree =
@@ -4694,8 +4605,6 @@ trait Typers extends Modes with Adaptations with Tags {
case This(_) => qual1.symbol
case _ => qual1.tpe.typeSymbol
}
- //println(clazz+"/"+qual1.tpe.typeSymbol+"/"+qual1)
-
def findMixinSuper(site: Type): Type = {
var ps = site.parents filter (_.typeSymbol.name == mix)
if (ps.isEmpty)
@@ -4703,11 +4612,6 @@ trait Typers extends Modes with Adaptations with Tags {
if (ps.isEmpty) {
debuglog("Fatal: couldn't find site " + site + " in " + site.parents.map(_.typeSymbol.name))
if (phase.erasedTypes && context.enclClass.owner.isImplClass) {
- // println(qual1)
- // println(clazz)
- // println(site)
- // println(site.parents)
- // println(mix)
// the reference to super class got lost during erasure
restrictionError(tree.pos, unit, "traits may not select fields or methods from super[C] where C is a class")
ErrorType
@@ -4725,7 +4629,7 @@ trait Typers extends Modes with Adaptations with Tags {
val owntype = (
if (!mix.isEmpty) findMixinSuper(clazz.tpe)
- else if ((mode & SUPERCONSTRmode) != 0) clazz.info.firstParent
+ else if (context.inSuperInit) clazz.info.firstParent
else intersectionType(clazz.info.parents)
)
treeCopy.Super(tree, qual1, mix) setType SuperType(clazz.thisType, owntype)
@@ -4739,14 +4643,28 @@ trait Typers extends Modes with Adaptations with Tags {
if (isStableContext(tree, mode, pt)) tree setType clazz.thisType else tree
}
- /** Attribute a selection where <code>tree</code> is <code>qual.name</code>.
- * <code>qual</code> is already attributed.
- *
- * @param qual ...
- * @param name ...
- * @return ...
+ /* Attribute a selection where `tree` is `qual.name`.
+ * `qual` is already attributed.
*/
def typedSelect(tree: Tree, qual: Tree, name: Name): Tree = {
+ val t = typedSelectInternal(tree, qual, name)
+ // Checking for OverloadedTypes being handed out after overloading
+ // resolution has already happened.
+ if (isPastTyper) t.tpe match {
+ case OverloadedType(pre, alts) =>
+ if (alts forall (s => (s.owner == ObjectClass) || (s.owner == AnyClass) || isPrimitiveValueClass(s.owner))) ()
+ else if (settings.debug) printCaller(
+ s"""|Select received overloaded type during $phase, but typer is over.
+ |If this type reaches the backend, we are likely doomed to crash.
+ |$t has these overloads:
+ |${alts map (s => " " + s.defStringSeenAs(pre memberType s)) mkString "\n"}
+ |""".stripMargin
+ )("")
+ case _ =>
+ }
+ t
+ }
+ def typedSelectInternal(tree: Tree, qual: Tree, name: Name): Tree = {
def asDynamicCall = dyna.mkInvoke(context.tree, tree, qual, name) map { t =>
dyna.wrapErrors(t, (_.typed1(t, mode, pt)))
}
@@ -4755,82 +4673,65 @@ trait Typers extends Modes with Adaptations with Tags {
// symbol not found? --> try to convert implicitly to a type that does have the required
// member. Added `| PATTERNmode` to allow enrichment in patterns (so we can add e.g., an
// xml member to StringContext, which in turn has an unapply[Seq] method)
- if (name != nme.CONSTRUCTOR && inExprModeOr(mode, PATTERNmode)) {
- val qual1 = adaptToMemberWithArgs(tree, qual, name, mode, true, true)
+ if (name != nme.CONSTRUCTOR && mode.inAny(EXPRmode | PATTERNmode)) {
+ val qual1 = adaptToMemberWithArgs(tree, qual, name, mode, reportAmbiguous = true, saveErrors = true)
if ((qual1 ne qual) && !qual1.isErrorTyped)
return typed(treeCopy.Select(tree, qual1, name), mode, pt)
}
NoSymbol
}
if (phase.erasedTypes && qual.isInstanceOf[Super] && tree.symbol != NoSymbol)
- qual.tpe = tree.symbol.owner.tpe
+ qual setType tree.symbol.owner.tpe
if (!reallyExists(sym)) {
def handleMissing: Tree = {
- if (context.unit.isJava && name.isTypeName) {
- // SI-3120 Java uses the same syntax, A.B, to express selection from the
- // value A and from the type A. We have to try both.
- val tree1 = atPos(tree.pos) { gen.convertToSelectFromType(qual, name) }
- if (tree1 != EmptyTree) return typed1(tree1, mode, pt)
- }
-
- // try to expand according to Dynamic rules.
- asDynamicCall foreach (x => return x)
-
- debuglog(
- "qual = " + qual + ":" + qual.tpe +
- "\nSymbol=" + qual.tpe.termSymbol + "\nsymbol-info = " + qual.tpe.termSymbol.info +
- "\nscope-id = " + qual.tpe.termSymbol.info.decls.hashCode() + "\nmembers = " + qual.tpe.members +
- "\nname = " + name + "\nfound = " + sym + "\nowner = " + context.enclClass.owner)
-
- def makeInteractiveErrorTree = {
- val tree1 = tree match {
- case Select(_, _) => treeCopy.Select(tree, qual, name)
- case SelectFromTypeTree(_, _) => treeCopy.SelectFromTypeTree(tree, qual, name)
- }
- setError(tree1)
- }
-
- if (name == nme.ERROR && forInteractive)
- return makeInteractiveErrorTree
-
- if (!qual.tpe.widen.isErroneous) {
- if ((mode & QUALmode) != 0) {
- val lastTry = rootMirror.missingHook(qual.tpe.typeSymbol, name)
- if (lastTry != NoSymbol) return typed1(tree setSymbol lastTry, mode, pt)
+ def errorTree = missingSelectErrorTree(tree, qual, name)
+ def asTypeSelection = (
+ if (context.unit.isJava && name.isTypeName) {
+ // SI-3120 Java uses the same syntax, A.B, to express selection from the
+ // value A and from the type A. We have to try both.
+ atPos(tree.pos)(gen.convertToSelectFromType(qual, name)) match {
+ case EmptyTree => None
+ case tree1 => Some(typed1(tree1, mode, pt))
+ }
}
- NotAMemberError(tree, qual, name)
- }
-
- if (forInteractive) makeInteractiveErrorTree else setError(tree)
+ else None
+ )
+ debuglog(s"""
+ |qual=$qual:${qual.tpe}
+ |symbol=${qual.tpe.termSymbol.defString}
+ |scope-id=${qual.tpe.termSymbol.info.decls.hashCode}
+ |members=${qual.tpe.members mkString ", "}
+ |name=$name
+ |found=$sym
+ |owner=${context.enclClass.owner}
+ """.stripMargin)
+
+ // 1) Try converting a term selection on a java class into a type selection.
+ // 2) Try expanding according to Dynamic rules.
+ // 3) Try looking up the name in the qualifier.
+ asTypeSelection orElse asDynamicCall getOrElse (lookupInQualifier(qual, name) match {
+ case NoSymbol => setError(errorTree)
+ case found => typed1(tree setSymbol found, mode, pt)
+ })
}
handleMissing
- } else {
+ }
+ else {
val tree1 = tree match {
case Select(_, _) => treeCopy.Select(tree, qual, name)
case SelectFromTypeTree(_, _) => treeCopy.SelectFromTypeTree(tree, qual, name)
}
val (result, accessibleError) = silent(_.makeAccessible(tree1, sym, qual.tpe, qual)) match {
+ case SilentTypeError(err: AccessTypeError) =>
+ (tree1, Some(err))
case SilentTypeError(err) =>
- if (err.kind != ErrorKinds.Access) {
- context issue err
- return setError(tree)
- }
- else (tree1, Some(err))
+ context issue err
+ return setError(tree)
case SilentResultValue(treeAndPre) =>
(stabilize(treeAndPre._1, treeAndPre._2, mode, pt), None)
}
- def isPotentialNullDeference() = {
- !isPastTyper &&
- !sym.isConstructor &&
- !(qual.tpe <:< NotNullClass.tpe) && !qual.tpe.isNotNull &&
- !(List(Any_isInstanceOf, Any_asInstanceOf) contains result.symbol) // null.is/as is not a dereference
- }
- // unit is null here sometimes; how are we to know when unit might be null? (See bug #2467.)
- if (settings.warnSelectNullable.value && isPotentialNullDeference && unit != null)
- unit.warning(tree.pos, "potential null pointer dereference: "+tree)
-
result match {
// could checkAccessible (called by makeAccessible) potentially have skipped checking a type application in qual?
case SelectFromTypeTree(qual@TypeTree(), name) if qual.tpe.typeArgs.nonEmpty => // TODO: somehow the new qual is not checked in refchecks
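Step 2 of the missing-member recovery above is the Dynamic rewrite (asDynamicCall). A self-contained, user-level sketch of what that expansion enables (class and member names are made up):

```scala
import scala.language.dynamics

// A failed selection on a subtype of scala.Dynamic is rewritten by the typer
// into selectDynamic/applyDynamic calls.
class Bag extends Dynamic {
  private val data = Map("answer" -> 42)
  def selectDynamic(name: String): Int = data.getOrElse(name, 0)
}

object DynamicDemo extends App {
  val b = new Bag
  println(b.answer)    // expands to b.selectDynamic("answer") => 42
  println(b.missing)   // 0
}
```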
@@ -4845,7 +4746,7 @@ trait Typers extends Modes with Adaptations with Tags {
case _ if accessibleError.isDefined =>
// don't adapt constructor, SI-6074
val qual1 = if (name == nme.CONSTRUCTOR) qual
- else adaptToMemberWithArgs(tree, qual, name, mode, false, false)
+ else adaptToMemberWithArgs(tree, qual, name, mode, reportAmbiguous = false, saveErrors = false)
if (!qual1.isErrorTyped && (qual1 ne qual))
typed(Select(qual1, name) setPos tree.pos, mode, pt)
else
@@ -4860,320 +4761,115 @@ trait Typers extends Modes with Adaptations with Tags {
}
}
- def typedSelectOrSuperCall(tree: Select) = {
- val qual = tree.qualifier
- val name = tree.name
- qual match {
- case _: Super if name == nme.CONSTRUCTOR =>
- val qual1 =
- typed(qual, EXPRmode | QUALmode | POLYmode | SUPERCONSTRmode, WildcardType)
- // the qualifier type of a supercall constructor is its first parent class
- typedSelect(tree, qual1, nme.CONSTRUCTOR)
- case _ =>
- if (Statistics.canEnable) Statistics.incCounter(typedSelectCount)
- var qual1 = checkDead(typedQualifier(qual, mode))
- if (name.isTypeName) qual1 = checkStable(qual1)
-
- val tree1 = // temporarily use `filter` and an alternative for `withFilter`
- if (name == nme.withFilter)
- silent(_ => typedSelect(tree, qual1, name)) match {
- case SilentResultValue(result) =>
- result
- case _ =>
- silent(_ => typed1(Select(qual1, nme.filter) setPos tree.pos, mode, pt)) match {
- case SilentResultValue(result2) =>
- unit.deprecationWarning(
- tree.pos, "`withFilter' method does not yet exist on " + qual1.tpe.widen +
- ", using `filter' method instead")
- result2
- case SilentTypeError(err) =>
- WithFilterError(tree, err)
- }
- }
- else
- typedSelect(tree, qual1, name)
-
- if (tree.isInstanceOf[PostfixSelect])
- checkFeature(tree.pos, PostfixOpsFeature, name.decode)
- if (tree1.symbol != null && tree1.symbol.isOnlyRefinementMember)
- checkFeature(tree1.pos, ReflectiveCallsFeature, tree1.symbol.toString)
-
- if (qual1.hasSymbolWhich(_.isRootPackage)) treeCopy.Ident(tree1, name)
- else tree1
+ // temporarily use `filter` as an alternative for `withFilter`
+ def tryWithFilterAndFilter(tree: Select, qual: Tree): Tree = {
+ def warn() = unit.deprecationWarning(tree.pos, s"`withFilter' method does not yet exist on ${qual.tpe.widen}, using `filter' method instead")
+ silent(_ => typedSelect(tree, qual, nme.withFilter)) orElse { _ =>
+ silent(_ => typed1(Select(qual, nme.filter) setPos tree.pos, mode, pt)) match {
+ case SilentResultValue(res) => warn() ; res
+ case SilentTypeError(err) => WithFilterError(tree, err)
+ }
}
}
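tryWithFilterAndFilter above services the desugaring of guards in for-comprehensions. A minimal illustration:

```scala
// A guard in a for-comprehension desugars to withFilter; on types that only
// define filter, the typer falls back to it and issues the deprecation
// warning built in warn() above.
object WithFilterDemo extends App {
  val xs = List(1, 2, 3, 4)

  val evens          = for (x <- xs if x % 2 == 0) yield x * 10
  val evensDesugared = xs.withFilter(_ % 2 == 0).map(_ * 10)

  println(evens)            // List(20, 40)
  println(evensDesugared)   // List(20, 40)
}
```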
+ def typedSelectOrSuperCall(tree: Select) = tree match {
+ case Select(qual @ Super(_, _), nme.CONSTRUCTOR) =>
+ // the qualifier type of a supercall constructor is its first parent class
+ typedSelect(tree, typedSelectOrSuperQualifier(qual), nme.CONSTRUCTOR)
+ case Select(qual, name) =>
+ if (Statistics.canEnable) Statistics.incCounter(typedSelectCount)
+ val qualTyped = checkDead(typedQualifier(qual, mode))
+ val qualStableOrError = (
+ if (qualTyped.isErrorTyped || !name.isTypeName || treeInfo.admitsTypeSelection(qualTyped))
+ qualTyped
+ else
+ UnstableTreeError(qualTyped)
+ )
+ val tree1 = name match {
+ case nme.withFilter => tryWithFilterAndFilter(tree, qualStableOrError)
+ case _ => typedSelect(tree, qualStableOrError, name)
+ }
+ def sym = tree1.symbol
+ if (tree.isInstanceOf[PostfixSelect])
+ checkFeature(tree.pos, PostfixOpsFeature, name.decode)
+ if (sym != null && sym.isOnlyRefinementMember)
+ checkFeature(tree1.pos, ReflectiveCallsFeature, sym.toString)
+
+ qualStableOrError.symbol match {
+ case s: Symbol if s.isRootPackage => treeCopy.Ident(tree1, name)
+ case _ => tree1
+ }
+ }
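The qualStableOrError check above routes type selections from unstable prefixes to UnstableTreeError. In user terms (names are illustrative):

```scala
// Selecting a type member `p.T` needs a stable prefix p (a val/object path);
// a def is not stable.
object StableQualifierDemo {
  object Holder { type T = Int }

  val fromVal: Holder.T = 1      // ok: Holder is a stable path
  def unstable = Holder
  // val bad: unstable.T = 1     // error: the prefix is not a stable path
}
```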
- /** Attribute an identifier consisting of a simple name or an outer reference.
+ /* A symbol qualifies if:
+ * - it exists
+ * - it is not stale (stale symbols are made to disappear here)
+ * - if we are in a constructor pattern, method definitions do not qualify
+ * unless they are stable. Otherwise, 'case x :: xs' would find the :: method.
+ */
+ def qualifies(sym: Symbol) = (
+ sym.hasRawInfo
+ && reallyExists(sym)
+ && !(mode.typingConstructorPattern && sym.isMethod && !sym.isStable)
+ )
+
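The constructor-pattern restriction in qualifies above exists so that extractor names resolve to stable pattern symbols. The canonical case from the comment:

```scala
// In `case x :: rest`, the name :: must resolve to the case class scala.::
// (an extractor), not to List's :: method, which is also in scope.
object ConsPatternDemo extends App {
  def describe(xs: List[Int]): String = xs match {
    case x :: rest => s"head=$x, tail=$rest"
    case Nil       => "empty"
  }
  println(describe(List(1, 2, 3)))   // head=1, tail=List(2, 3)
  println(describe(Nil))             // empty
}
```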
+ /* Attribute an identifier consisting of a simple name or an outer reference.
*
- * @param tree The tree representing the identifier.
- * @param name The name of the identifier.
- * Transformations: (1) Prefix class members with this.
- * (2) Change imported symbols to selections
+ * @param tree The tree representing the identifier.
+ * @param name The name of the identifier.
+ * Transformations: (1) Prefix class members with this.
+ * (2) Change imported symbols to selections
*/
def typedIdent(tree: Tree, name: Name): Tree = {
- var errorContainer: AbsTypeError = null
- def ambiguousError(msg: String) = {
- assert(errorContainer == null, "Cannot set ambiguous error twice for identifier")
- errorContainer = AmbiguousIdentError(tree, name, msg)
- }
- def identError(tree: AbsTypeError) = {
- assert(errorContainer == null, "Cannot set ambiguous error twice for identifier")
- errorContainer = tree
- }
-
- var defSym: Symbol = tree.symbol // the directly found symbol
- var pre: Type = NoPrefix // the prefix type of defSym, if a class member
- var qual: Tree = EmptyTree // the qualifier tree if transformed tree is a select
- var inaccessibleSym: Symbol = NoSymbol // the first symbol that was found but that was discarded
- // for being inaccessible; used for error reporting
- var inaccessibleExplanation: String = ""
-
- // If a special setting is given, the empty package will be checked as a
- // last ditch effort before failing. This method sets defSym and returns
- // true if a member of the given name exists.
- def checkEmptyPackage(): Boolean = {
- defSym = rootMirror.EmptyPackageClass.tpe.nonPrivateMember(name)
- defSym != NoSymbol
- }
- def startingIdentContext = (
- // ignore current variable scope in patterns to enforce linearity
- if ((mode & (PATTERNmode | TYPEPATmode)) == 0) context
- else context.outer
- )
- // A symbol qualifies if it exists and is not stale. Stale symbols
- // are made to disappear here. In addition,
- // if we are in a constructor of a pattern, we ignore all definitions
- // which are methods (note: if we don't do that
- // case x :: xs in class List would return the :: method)
- // unless they are stable or are accessors (the latter exception is for better error messages).
- def qualifies(sym: Symbol): Boolean = {
- sym.hasRawInfo && // this condition avoids crashing on self-referential pattern variables
- reallyExists(sym) &&
- ((mode & PATTERNmode | FUNmode) != (PATTERNmode | FUNmode) || !sym.isSourceMethod || sym.hasFlag(ACCESSOR))
- }
-
- if (defSym == NoSymbol) {
- var defEntry: ScopeEntry = null // the scope entry of defSym, if defined in a local scope
-
- var cx = startingIdentContext
- while (defSym == NoSymbol && cx != NoContext && (cx.scope ne null)) { // cx.scope eq null arises during FixInvalidSyms in Duplicators
- pre = cx.enclClass.prefix
- defEntry = cx.scope.lookupEntry(name)
- if ((defEntry ne null) && qualifies(defEntry.sym)) {
- // Right here is where SI-1987, overloading in package objects, can be
- // seen to go wrong. There is an overloaded symbol, but when referring
- // to the unqualified identifier from elsewhere in the package, only
- // the last definition is visible. So overloading mis-resolves and is
- // definition-order dependent, bad things. See run/t1987.scala.
- //
- // I assume the actual problem involves how/where these symbols are entered
- // into the scope. But since I didn't figure out how to fix it that way, I
- // catch it here by looking up package-object-defined symbols in the prefix.
- if (isInPackageObject(defEntry.sym, pre.typeSymbol)) {
- defSym = pre.member(defEntry.sym.name)
- if (defSym ne defEntry.sym) {
- qual = gen.mkAttributedQualifier(pre)
- log(sm"""
- | !!! Overloaded package object member resolved incorrectly.
- | prefix: $pre
- | Discarded: ${defEntry.sym.defString}
- | Using: ${defSym.defString}
- """)
- }
- }
- else
- defSym = defEntry.sym
- }
- else {
- cx = cx.enclClass
- val foundSym = pre.member(name) filter qualifies
- defSym = foundSym filter (context.isAccessible(_, pre, false))
- if (defSym == NoSymbol) {
- if ((foundSym ne NoSymbol) && (inaccessibleSym eq NoSymbol)) {
- inaccessibleSym = foundSym
- inaccessibleExplanation = analyzer.lastAccessCheckDetails
- }
- cx = cx.outer
- }
- }
- }
+ // setting to enable unqualified idents in empty package (used by the repl)
+ def inEmptyPackage = if (settings.exposeEmptyPackage) lookupInEmpty(name) else NoSymbol
- val symDepth = if (defEntry eq null) cx.depth
- else cx.depth - (cx.scope.nestingLevel - defEntry.owner.nestingLevel)
- var impSym: Symbol = NoSymbol // the imported symbol
- var imports = context.imports // impSym != NoSymbol => it is imported from imports.head
+ def issue(err: AbsTypeError) = {
+ // Avoiding some spurious error messages: see SI-2388.
+ val suppress = reporter.hasErrors && (name startsWith tpnme.ANON_CLASS_NAME)
+ if (!suppress)
+ ErrorUtils.issueTypeError(err)
- // Java: A single-type-import declaration d in a compilation unit c of package p
- // that imports a type named n shadows, throughout c, the declarations of:
- //
- // 1) any top level type named n declared in another compilation unit of p
- //
- // A type-import-on-demand declaration never causes any other declaration to be shadowed.
- //
- // Scala: Bindings of different kinds have a precedence defined on them:
- //
- // 1) Definitions and declarations that are local, inherited, or made available by a
- // package clause in the same compilation unit where the definition occurs have
- // highest precedence.
- // 2) Explicit imports have next highest precedence.
- def depthOk(imp: ImportInfo) = (
- imp.depth > symDepth
- || (unit.isJava && imp.isExplicitImport(name) && imp.depth == symDepth)
- )
- while (!reallyExists(impSym) && !imports.isEmpty && depthOk(imports.head)) {
- impSym = imports.head.importedSymbol(name)
- if (!impSym.exists) imports = imports.tail
- }
-
- // detect ambiguous definition/import,
- // update `defSym` to be the final resolved symbol,
- // update `pre` to be `sym`s prefix type in case it is an imported member,
- // and compute value of:
-
- if (defSym.exists && impSym.exists) {
- // imported symbols take precedence over package-owned symbols in different
- // compilation units. Defined symbols take precedence over erroneous imports.
- if (defSym.isDefinedInPackage &&
- (!currentRun.compiles(defSym) ||
- context.unit.exists && defSym.sourceFile != context.unit.source.file))
- defSym = NoSymbol
- else if (impSym.isError || impSym.name == nme.CONSTRUCTOR)
- impSym = NoSymbol
- }
- if (defSym.exists) {
- if (impSym.exists)
- ambiguousError(
- "it is both defined in "+defSym.owner +
- " and imported subsequently by \n"+imports.head)
- else if (!defSym.owner.isClass || defSym.owner.isPackageClass || defSym.isTypeParameterOrSkolem)
- pre = NoPrefix
- else
- qual = atPos(tree.pos.focusStart)(gen.mkAttributedQualifier(pre))
- } else {
- if (impSym.exists) {
- var impSym1: Symbol = NoSymbol
- var imports1 = imports.tail
-
- /** It's possible that seemingly conflicting identifiers are
- * identifiably the same after type normalization. In such cases,
- * allow compilation to proceed. A typical example is:
- * package object foo { type InputStream = java.io.InputStream }
- * import foo._, java.io._
- */
- def ambiguousImport() = {
- // The types of the qualifiers from which the ambiguous imports come.
- // If the ambiguous name is a value, these must be the same.
- def t1 = imports.head.qual.tpe
- def t2 = imports1.head.qual.tpe
- // The types of the ambiguous symbols, seen as members of their qualifiers.
- // If the ambiguous name is a monomorphic type, we can relax this far.
- def mt1 = t1 memberType impSym
- def mt2 = t2 memberType impSym1
- def characterize = List(
- s"types: $t1 =:= $t2 ${t1 =:= t2} members: ${mt1 =:= mt2}",
- s"member type 1: $mt1",
- s"member type 2: $mt2",
- s"$impSym == $impSym1 ${impSym == impSym1}",
- s"${impSym.debugLocationString} ${impSym.getClass}",
- s"${impSym1.debugLocationString} ${impSym1.getClass}"
- ).mkString("\n ")
-
- // The symbol names are checked rather than the symbols themselves because
- // each time an overloaded member is looked up it receives a new symbol.
- // So foo.member("x") != foo.member("x") if x is overloaded. This seems
- // likely to be the cause of other bugs too...
- if (t1 =:= t2 && impSym.name == impSym1.name)
- log(s"Suppressing ambiguous import: $t1 =:= $t2 && $impSym == $impSym1")
- // Monomorphism restriction on types is in part because type aliases could have the
- // same target type but attach different variance to the parameters. Maybe it can be
- // relaxed, but doesn't seem worth it at present.
- else if (mt1 =:= mt2 && name.isTypeName && impSym.isMonomorphicType && impSym1.isMonomorphicType)
- log(s"Suppressing ambiguous import: $mt1 =:= $mt2 && $impSym and $impSym1 are equivalent")
- else {
- log(s"Import is genuinely ambiguous:\n " + characterize)
- ambiguousError(s"it is imported twice in the same scope by\n${imports.head}\nand ${imports1.head}")
- }
- }
- while (errorContainer == null && !imports1.isEmpty &&
- (!imports.head.isExplicitImport(name) ||
- imports1.head.depth == imports.head.depth)) {
- impSym1 = imports1.head.importedSymbol(name)
- if (reallyExists(impSym1)) {
- if (imports1.head.isExplicitImport(name)) {
- if (imports.head.isExplicitImport(name) ||
- imports1.head.depth != imports.head.depth) ambiguousImport()
- impSym = impSym1
- imports = imports1
- } else if (!imports.head.isExplicitImport(name) &&
- imports1.head.depth == imports.head.depth) ambiguousImport()
- }
- imports1 = imports1.tail
- }
- defSym = impSym
- val qual0 = imports.head.qual
- if (!(shortenImports && qual0.symbol.isPackage)) // optimization: don't write out package prefixes
- qual = atPos(tree.pos.focusStart)(resetPos(qual0.duplicate))
- pre = qual.tpe
- }
- else if (settings.exposeEmptyPackage.value && checkEmptyPackage())
- log("Allowing empty package member " + name + " due to settings.")
- else {
- if ((mode & QUALmode) != 0) {
- val lastTry = rootMirror.missingHook(rootMirror.RootClass, name)
- if (lastTry != NoSymbol) return typed1(tree setSymbol lastTry, mode, pt)
- }
- if (settings.debug.value) {
- log(context.imports)//debug
- }
- if (inaccessibleSym eq NoSymbol) {
- // Avoiding some spurious error messages: see SI-2388.
- if (reporter.hasErrors && (name startsWith tpnme.ANON_CLASS_NAME)) ()
- else identError(SymbolNotFoundError(tree, name, context.owner, startingIdentContext))
- } else
- identError(InferErrorGen.AccessError(
- tree, inaccessibleSym, context.enclClass.owner.thisType, context.enclClass.owner,
- inaccessibleExplanation
- ))
- defSym = context.owner.newErrorSymbol(name)
- }
- }
- }
- if (errorContainer != null) {
- ErrorUtils.issueTypeError(errorContainer)
setError(tree)
- } else {
- if (defSym.owner.isPackageClass)
- pre = defSym.owner.thisType
-
- // Inferring classOf type parameter from expected type.
- if (defSym.isThisSym) {
- typed1(This(defSym.owner) setPos tree.pos, mode, pt)
- }
+ }
+ // ignore current variable scope in patterns to enforce linearity
+ val startContext = if (mode.typingPatternOrTypePat) context.outer else context
+ val nameLookup = tree.symbol match {
+ case NoSymbol => startContext.lookupSymbol(name, qualifies)
+ case sym => LookupSucceeded(EmptyTree, sym)
+ }
+ import InferErrorGen._
+ nameLookup match {
+ case LookupAmbiguous(msg) => issue(AmbiguousIdentError(tree, name, msg))
+ case LookupInaccessible(sym, msg) => issue(AccessError(tree, sym, context, msg))
+ case LookupNotFound =>
+ inEmptyPackage orElse lookupInRoot(name) match {
+ case NoSymbol => issue(SymbolNotFoundError(tree, name, context.owner, startContext))
+ case sym => typed1(tree setSymbol sym, mode, pt)
+ }
+ case LookupSucceeded(qual, sym) =>
+ (// this -> Foo.this
+ if (sym.isThisSym)
+ typed1(This(sym.owner) setPos tree.pos, mode, pt)
// Inferring classOf type parameter from expected type. Otherwise an
// actual call to the stubbed classOf method is generated, returning null.
- else if (isPredefMemberNamed(defSym, nme.classOf) && pt.typeSymbol == ClassClass && pt.typeArgs.nonEmpty)
+ else if (isPredefMemberNamed(sym, nme.classOf) && pt.typeSymbol == ClassClass && pt.typeArgs.nonEmpty)
typedClassOf(tree, TypeTree(pt.typeArgs.head))
else {
- val tree1 = (
- if (qual == EmptyTree) tree
- // atPos necessary because qualifier might come from startContext
- else atPos(tree.pos)(Select(qual, name) setAttachments tree.attachments)
- )
- val (tree2, pre2) = makeAccessible(tree1, defSym, pre, qual)
- // assert(pre.typeArgs isEmpty) // no need to add #2416-style check here, right?
- val tree3 = stabilize(tree2, pre2, mode, pt)
+ val pre1 = if (sym.isTopLevel) sym.owner.thisType else if (qual == EmptyTree) NoPrefix else qual.tpe
+ val tree1 = if (qual == EmptyTree) tree else atPos(tree.pos)(Select(atPos(tree.pos.focusStart)(qual), name))
+ val (tree2, pre2) = makeAccessible(tree1, sym, pre1, qual)
// SI-5967 Important to replace param type A* with Seq[A] when seen from from a reference, to avoid
// inference errors in pattern matching.
- tree3 setType dropRepeatedParamType(tree3.tpe)
+ stabilize(tree2, pre2, mode, pt) modifyType dropIllegalStarTypes
+ }) setAttachments tree.attachments
}
}
- }
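The LookupAmbiguous / LookupInaccessible / LookupNotFound cases above correspond to familiar user-level failures. The ambiguity case, for instance (object names are made up):

```scala
// Two wildcard imports that each provide `size` make the simple name ambiguous;
// lookupSymbol reports it via AmbiguousIdentError ("it is imported twice in the
// same scope by ...").
object A { val size = 1 }
object B { val size = 2 }

object AmbiguousIdentDemo {
  import A._
  import B._
  // println(size)            // error: reference to size is ambiguous
  println(A.size + B.size)    // qualify to disambiguate
}
```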
def typedIdentOrWildcard(tree: Ident) = {
val name = tree.name
if (Statistics.canEnable) Statistics.incCounter(typedIdentCount)
- if ((name == nme.WILDCARD && (mode & (PATTERNmode | FUNmode)) == PATTERNmode) ||
- (name == tpnme.WILDCARD && (mode & TYPEmode) != 0))
+ if ((name == nme.WILDCARD && mode.typingPatternNotConstructor) ||
+ (name == tpnme.WILDCARD && mode.inTypeMode))
tree setType makeFullyDefined(pt)
else
typedIdent(tree, name)
@@ -5200,40 +4896,65 @@ trait Typers extends Modes with Adaptations with Tags {
}
def typedAppliedTypeTree(tree: AppliedTypeTree) = {
- val tpt = tree.tpt
- val args = tree.args
- val tpt1 = typed1(tpt, mode | FUNmode | TAPPmode, WildcardType)
+ val tpt = tree.tpt
+ val args = tree.args
+ val tpt1 = typed1(tpt, mode | FUNmode | TAPPmode, WildcardType)
+ def isPoly = tpt1.tpe.isInstanceOf[PolyType]
+ def isComplete = tpt1.symbol.rawInfo.isComplete
+
if (tpt1.isErrorTyped) {
tpt1
- } else if (!tpt1.hasSymbol) {
+ } else if (!tpt1.hasSymbolField) {
AppliedTypeNoParametersError(tree, tpt1.tpe)
} else {
val tparams = tpt1.symbol.typeParams
+
if (sameLength(tparams, args)) {
// @M: kind-arity checking is done here and in adapt, full kind-checking is in checkKindBounds (in Infer)
val args1 =
- if (!tpt1.symbol.rawInfo.isComplete)
+ if (!isComplete)
args mapConserve (typedHigherKindedType(_, mode))
// if symbol hasn't been fully loaded, can't check kind-arity
else map2Conserve(args, tparams) { (arg, tparam) =>
//@M! the polytype denotes the expected kind
- typedHigherKindedType(arg, mode, GenPolyType(tparam.typeParams, AnyClass.tpe))
+ typedHigherKindedType(arg, mode, GenPolyType(tparam.typeParams, AnyTpe))
}
val argtypes = args1 map (_.tpe)
- foreach2(args, tparams)((arg, tparam) => arg match {
- // note: can't use args1 in selector, because Bind's got replaced
- case Bind(_, _) =>
- if (arg.symbol.isAbstractType)
- arg.symbol setInfo // XXX, feedback. don't trackSymInfo here!
- TypeBounds(
- lub(List(arg.symbol.info.bounds.lo, tparam.info.bounds.lo.subst(tparams, argtypes))),
- glb(List(arg.symbol.info.bounds.hi, tparam.info.bounds.hi.subst(tparams, argtypes))))
- case _ =>
- })
+ foreach2(args, tparams) { (arg, tparam) =>
+ // note: can't use args1 in selector, because Binds got replaced
+ val asym = arg.symbol
+ def abounds = asym.info.bounds
+ def tbounds = tparam.info.bounds
+ def enhanceBounds(): Unit = {
+ val TypeBounds(lo0, hi0) = abounds
+ val TypeBounds(lo1, hi1) = tbounds.subst(tparams, argtypes)
+ val lo = lub(List(lo0, lo1))
+ val hi = glb(List(hi0, hi1))
+ if (!(lo =:= lo0 && hi =:= hi0))
+ asym setInfo logResult(s"Updating bounds of ${asym.fullLocationString} in $tree from '$abounds' to")(TypeBounds(lo, hi))
+ }
+ if (asym != null && asym.isAbstractType) {
+ // See pos/t1786 to follow what's happening here.
+ def canEnhanceIdent = (
+ asym.hasCompleteInfo
+ && tparam.exists /* sometimes it is NoSymbol */
+ && tparam.hasCompleteInfo /* SI-2940 */
+ && !tparam.isFBounded /* SI-2251 */
+ && !tparam.isHigherOrderTypeParameter
+ && !(abounds.hi <:< tbounds.hi)
+ && asym.isSynthetic /* this limits us to placeholder tparams, excluding named ones */
+ )
+ arg match {
+ case Bind(_, _) => enhanceBounds()
+ case Ident(name) if canEnhanceIdent => enhanceBounds()
+ case _ =>
+ }
+ }
+ }
val original = treeCopy.AppliedTypeTree(tree, tpt1, args1)
val result = TypeTree(appliedType(tpt1.tpe, argtypes)) setOriginal original
- if(tpt1.tpe.isInstanceOf[PolyType]) // did the type application (performed by appliedType) involve an unchecked beta-reduction?
+ if (isPoly) // did the type application (performed by appliedType) involve an unchecked beta-reduction?
TypeTreeWithDeferredRefCheck(){ () =>
// wrap the tree and include the bounds check -- refchecks will perform this check (that the beta reduction was indeed allowed) and unwrap
// we can't simply use original in refchecks because it does not contains types
@@ -5246,7 +4967,7 @@ trait Typers extends Modes with Adaptations with Tags {
AppliedTypeNoParametersError(tree, tpt1.tpe)
} else {
//Console.println("\{tpt1}:\{tpt1.symbol}:\{tpt1.symbol.info}")
- if (settings.debug.value) Console.println(tpt1+":"+tpt1.symbol+":"+tpt1.symbol.info)//debug
+ if (settings.debug) Console.println(tpt1+":"+tpt1.symbol+":"+tpt1.symbol.info)//debug
AppliedTypeWrongNumberOfArgsError(tree, tpt1, tparams)
}
}
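AppliedTypeWrongNumberOfArgsError and the kind checks above are about matching a type constructor with its arguments. A compact user-level sketch (names are illustrative):

```scala
import scala.language.higherKinds

object AppliedTypeDemo extends App {
  type Pair[A] = (A, A)                 // a unary type constructor
  val p: Pair[Int] = (1, 2)             // ok: one argument for one parameter
  // val bad: Pair[Int, String] = ???   // error: wrong number of type arguments

  // Higher-kinded parameter: F itself must take exactly one type argument,
  // which is the kind of check typedHigherKindedType performs.
  def firstOf[F[X] <: Iterable[X], A](fa: F[A]): Option[A] = fa.headOption

  println(p)                            // (1,2)
  println(firstOf(List(1, 2, 3)))       // Some(1)
}
```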
@@ -5263,27 +4984,7 @@ trait Typers extends Modes with Adaptations with Tags {
treeCopy.PackageDef(tree, pid1, stats1) setType NoType
}
- def typedDocDef(docdef: DocDef) = {
- if (forScaladoc && (sym ne null) && (sym ne NoSymbol)) {
- val comment = docdef.comment
- fillDocComment(sym, comment)
- val typer1 = newTyper(context.makeNewScope(tree, context.owner))
- for (useCase <- comment.useCases) {
- typer1.silent(_.typedUseCase(useCase)) match {
- case SilentTypeError(err) =>
- unit.warning(useCase.pos, err.errMsg)
- case _ =>
- }
- for (useCaseSym <- useCase.defined) {
- if (sym.name != useCaseSym.name)
- unit.warning(useCase.pos, "@usecase " + useCaseSym.name.decode + " does not match commented symbol: " + sym.name.decode)
- }
- }
- }
- typed(docdef.definition, mode, pt)
- }
-
- /**
+ /*
* The typer with the correct context for a method definition. If the method is a default getter for
* a constructor default, the resulting typer has a constructor context (fixes SI-5543).
*/
@@ -5294,13 +4995,14 @@ trait Typers extends Modes with Adaptations with Tags {
}
def typedAlternative(alt: Alternative) = {
- val alts1 = alt.trees mapConserve (alt => typed(alt, mode | ALTmode, pt))
- treeCopy.Alternative(tree, alts1) setType pt
+ context withinPatAlternative (
+ treeCopy.Alternative(tree, alt.trees mapConserve (alt => typed(alt, mode, pt))) setType pt
+ )
}
-
def typedStar(tree: Star) = {
- if ((mode & STARmode) == 0 && !isPastTyper)
+ if (!context.starPatterns && !isPastTyper)
StarPatternWithVarargParametersError(tree)
+
treeCopy.Star(tree, typed(tree.elem, mode, pt)) setType makeFullyDefined(pt)
}
@@ -5311,35 +5013,54 @@ trait Typers extends Modes with Adaptations with Tags {
treeCopy.UnApply(tree, fun1, args1) setType pt
}
- def typedTry(tree: Try) = {
- var block1 = typed(tree.block, pt)
- var catches1 = typedCases(tree.catches, ThrowableClass.tpe, pt)
-
- for (cdef <- catches1 if !isPastTyper && cdef.guard.isEmpty) {
- def warn(name: Name) = context.warning(cdef.pat.pos, s"This catches all Throwables. If this is really intended, use `case ${name.decoded} : Throwable` to clear this warning.")
+ def issueTryWarnings(tree: Try): Try = {
+ def checkForCatchAll(cdef: CaseDef) {
def unbound(t: Tree) = t.symbol == null || t.symbol == NoSymbol
- cdef.pat match {
+ def warn(name: Name) = {
+ val msg = s"This catches all Throwables. If this is really intended, use `case ${name.decoded} : Throwable` to clear this warning."
+ context.warning(cdef.pat.pos, msg)
+ }
+ if (cdef.guard.isEmpty) cdef.pat match {
case Bind(name, i @ Ident(_)) if unbound(i) => warn(name)
- case i @ Ident(name) if unbound(i) => warn(name)
- case _ =>
+ case i @ Ident(name) if unbound(i) => warn(name)
+ case _ =>
}
}
-
- val finalizer1 =
- if (tree.finalizer.isEmpty) tree.finalizer
- else typed(tree.finalizer, UnitClass.tpe)
- val (owntype, needAdapt) = ptOrLub(block1.tpe :: (catches1 map (_.tpe)), pt)
- if (needAdapt) {
- block1 = adapt(block1, mode, owntype)
- catches1 = catches1 map (adaptCase(_, mode, owntype))
+ if (!isPastTyper) tree match {
+ case Try(_, Nil, fin) =>
+ if (fin eq EmptyTree)
+ context.warning(tree.pos, "A try without a catch or finally is equivalent to putting its body in a block; no exceptions are handled.")
+ case Try(_, catches, _) =>
+ catches foreach checkForCatchAll
}
+ tree
+ }
- treeCopy.Try(tree, block1, catches1, finalizer1) setType owntype
+ def typedTry(tree: Try) = {
+ val Try(block, catches, fin) = tree
+ val block1 = typed(block, pt)
+ val catches1 = typedCases(catches, ThrowableTpe, pt)
+ val fin1 = if (fin.isEmpty) fin else typed(fin, UnitTpe)
+
+ def finish(ownType: Type) = treeCopy.Try(tree, block1, catches1, fin1) setType ownType
+
+ issueTryWarnings(
+ if (isFullyDefined(pt))
+ finish(pt)
+ else block1 :: catches1 map (_.tpe.deconst) match {
+ case tpes if sameWeakLubAsLub(tpes) => finish(lub(tpes))
+ case tpes =>
+ val lub = weakLub(tpes)
+ val block2 = adapt(block1, mode, lub)
+ val catches2 = catches1 map (adaptCase(_, mode, lub))
+ treeCopy.Try(tree, block2, catches2, fin1) setType lub
+ }
+ )
}
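issueTryWarnings above covers two lint-style cases: a catch-all pattern without a Throwable ascription, and a try with neither catch nor finally. In user code:

```scala
object TryWarningsDemo extends App {
  def risky(): Int = "42".toInt

  // An explicit `: Throwable` ascription avoids the "catches all Throwables" warning.
  val a = try risky() catch { case t: Throwable => -1 }

  // A try with no catch and no finally is just a block and draws the
  // "equivalent to putting its body in a block" warning added above.
  val b = try risky()

  println(a + b)   // 84
}
```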
def typedThrow(tree: Throw) = {
- val expr1 = typed(tree.expr, EXPRmode | BYVALmode, ThrowableClass.tpe)
- treeCopy.Throw(tree, expr1) setType NothingClass.tpe
+ val expr1 = typedByValueExpr(tree.expr, ThrowableTpe)
+ treeCopy.Throw(tree, expr1) setType NothingTpe
}
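typedThrow types the thrown expression by value against Throwable and gives the whole expression type Nothing, which is why a throw fits wherever any type is expected:

```scala
object ThrowDemo {
  // `throw ...` has type Nothing, and Nothing conforms to Int here.
  def orFail(o: Option[Int]): Int =
    o.getOrElse(throw new IllegalStateException("empty"))
}
```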
def typedTyped(tree: Typed) = {
@@ -5352,16 +5073,16 @@ trait Typers extends Modes with Adaptations with Tags {
// that typecheck must not trigger macro expansions, so we explicitly prohibit them
// however we cannot do `context.withMacrosDisabled`
// because `expr` might contain nested macro calls (see SI-6673)
- val exprTyped = typed1(expr updateAttachment SuppressMacroExpansionAttachment, mode, pt)
+ val exprTyped = typed1(suppressMacroExpansion(expr), mode, pt)
exprTyped match {
- case macroDef if macroDef.symbol != null && macroDef.symbol.isTermMacro && !macroDef.symbol.isErroneous =>
+ case macroDef if treeInfo.isMacroApplication(macroDef) =>
MacroEtaError(exprTyped)
case _ =>
typedEta(checkDead(exprTyped))
}
- case Ident(tpnme.WILDCARD_STAR) =>
- val exprTyped = typed(expr, onlyStickyModes(mode), WildcardType)
+ case t if treeInfo isWildcardStarType t =>
+ val exprTyped = typed(expr, mode.onlySticky)
def subArrayType(pt: Type) =
if (isPrimitiveValueClass(pt.typeSymbol) || !isFullyDefined(pt)) arrayType(pt)
else {
@@ -5370,8 +5091,8 @@ trait Typers extends Modes with Adaptations with Tags {
}
val (exprAdapted, baseClass) = exprTyped.tpe.typeSymbol match {
- case ArrayClass => (adapt(exprTyped, onlyStickyModes(mode), subArrayType(pt)), ArrayClass)
- case _ => (adapt(exprTyped, onlyStickyModes(mode), seqType(pt)), SeqClass)
+ case ArrayClass => (adapt(exprTyped, mode.onlySticky, subArrayType(pt)), ArrayClass)
+ case _ => (adapt(exprTyped, mode.onlySticky, seqType(pt)), SeqClass)
}
exprAdapted.tpe.baseType(baseClass) match {
case TypeRef(_, _, List(elemtp)) =>
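The wildcard-star branch above adapts a sequence (or array) argument for a repeated parameter. At the call site:

```scala
object SeqArgumentDemo extends App {
  def sum(xs: Int*): Int = xs.sum

  val nums = List(1, 2, 3)
  println(sum(nums: _*))           // 6  -- a Seq expanded into the repeated parameter
  println(sum(Array(4, 5): _*))    // 9  -- arrays go through the Array branch above
}
```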
@@ -5382,14 +5103,13 @@ trait Typers extends Modes with Adaptations with Tags {
case _ =>
val tptTyped = typedType(tpt, mode)
- val exprTyped = typed(expr, onlyStickyModes(mode), tptTyped.tpe.deconst)
+ val exprTyped = typed(expr, mode.onlySticky, tptTyped.tpe.deconst)
val treeTyped = treeCopy.Typed(tree, exprTyped, tptTyped)
- if (isPatternMode) {
+ if (mode.inPatternMode) {
val uncheckedTypeExtractor = extractorForUncheckedType(tpt.pos, tptTyped.tpe)
-
// make fully defined to avoid bounded wildcard types that may be in pt from calling dropExistential (SI-2038)
- val ptDefined = if (isFullyDefined(pt)) pt else makeFullyDefined(pt)
+ val ptDefined = ensureFullyDefined(pt)
val ownType = inferTypedPattern(tptTyped, tptTyped.tpe, ptDefined, canRemedy = uncheckedTypeExtractor.nonEmpty)
treeTyped setType ownType
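extractorForUncheckedType / canRemedy above deal with typed patterns whose type arguments are erased. A user-level example of the pattern in question:

```scala
object UncheckedPatternDemo extends App {
  def onlyIntList(a: Any): List[Int] = a match {
    case xs: List[Int] => xs   // warning: type argument Int is unchecked (erased)
    case _             => Nil
  }
  println(onlyIntList(List(1, 2, 3)))   // List(1, 2, 3)
}
```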
@@ -5414,7 +5134,7 @@ trait Typers extends Modes with Adaptations with Tags {
//val undets = context.undetparams
// @M: fun is typed in TAPPmode because it is being applied to its actual type parameters
- val fun1 = typed(fun, forFunMode(mode) | TAPPmode, WildcardType)
+ val fun1 = typed(fun, mode.forFunMode | TAPPmode)
val tparams = fun1.symbol.typeParams
//@M TODO: val undets_fun = context.undetparams ?
@@ -5426,7 +5146,7 @@ trait Typers extends Modes with Adaptations with Tags {
// @M maybe the well-kindedness check should be done when checking the type arguments conform to the type parameters' bounds?
val args1 = if (sameLength(args, tparams)) map2Conserve(args, tparams) {
//@M! the polytype denotes the expected kind
- (arg, tparam) => typedHigherKindedType(arg, mode, GenPolyType(tparam.typeParams, AnyClass.tpe))
+ (arg, tparam) => typedHigherKindedType(arg, mode, GenPolyType(tparam.typeParams, AnyTpe))
}
else {
//@M this branch is correctly hit for an overloaded polymorphic type. It also has to handle erroneous cases.
@@ -5444,10 +5164,9 @@ trait Typers extends Modes with Adaptations with Tags {
def typedApplyDynamic(tree: ApplyDynamic) = {
assert(phase.erasedTypes)
- val reflectiveCalls = !(settings.refinementMethodDispatch.value == "invoke-dynamic")
- val qual1 = typed(tree.qual, AnyRefClass.tpe)
- val args1 = tree.args mapConserve (arg => if (reflectiveCalls) typed(arg, AnyRefClass.tpe) else typed(arg))
- treeCopy.ApplyDynamic(tree, qual1, args1) setType (if (reflectiveCalls) AnyRefClass.tpe else tree.symbol.info.resultType)
+ val qual1 = typed(tree.qual, AnyRefTpe)
+ val args1 = tree.args mapConserve (arg => typed(arg, AnyRefTpe))
+ treeCopy.ApplyDynamic(tree, qual1, args1) setType AnyRefTpe
}
def typedReferenceToBoxed(tree: ReferenceToBoxed) = {
@@ -5459,20 +5178,51 @@ trait Typers extends Modes with Adaptations with Tags {
treeCopy.ReferenceToBoxed(tree, id1) setType tpe
}
+ // Warn about likely interpolated strings which are missing their interpolators
+ def warnMissingInterpolator(tree: Literal) {
+ // Unfortunately implicit not found strings look for all the world like
+ // missing interpolators.
+ def isArgToImplicitNotFound = context.enclosingApply.tree match {
+ case Apply(fn, _) => fn.symbol.enclClass == ImplicitNotFoundClass
+ case _ => false
+ }
+ tree.value match {
+ case Constant(s: String) =>
+ def names = InterpolatorIdentRegex findAllIn s map (n => newTermName(n stripPrefix "$"))
+ def suspicious = (
+ (InterpolatorCodeRegex findFirstIn s).nonEmpty
+ || (names exists (n => context.lookupSymbol(n, _ => true).symbol.exists))
+ )
+ val noWarn = (
+ isArgToImplicitNotFound
+ || !(s contains ' ') // another heuristic - e.g. a string with only "$asInstanceOf"
+ )
+ if (!noWarn && suspicious)
+ unit.warning(tree.pos, "looks like an interpolated String; did you forget the interpolator?")
+ case _ =>
+ }
+ }
+
def typedLiteral(tree: Literal) = {
- val value = tree.value
+ if (settings.lint)
+ warnMissingInterpolator(tree)
+
tree setType (
- if (value.tag == UnitTag) UnitClass.tpe
- else ConstantType(value))
+ if (tree.value.tag == UnitTag) UnitTpe
+ else ConstantType(tree.value))
}
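warnMissingInterpolator, called from typedLiteral under the lint setting, flags literals that look like they were meant to be interpolated. For example:

```scala
object InterpolatorLintDemo extends App {
  val name = "world"
  println("hello, $name")    // flagged: looks interpolated, but prints "$name" literally
  println(s"hello, $name")   // intended form: hello, world
}
```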
def typedSingletonTypeTree(tree: SingletonTypeTree) = {
- val ref1 = checkStable(
- context.withImplicitsDisabled(
- typed(tree.ref, EXPRmode | QUALmode | (mode & TYPEPATmode), AnyRefClass.tpe)
- )
- )
- tree setType ref1.tpe.resultType
+ val refTyped =
+ context.withImplicitsDisabled {
+ typed(tree.ref, MonoQualifierModes | mode.onlyTypePat, AnyRefTpe)
+ }
+
+ if (!refTyped.isErrorTyped)
+ tree setType refTyped.tpe.resultType
+
+ if (treeInfo.admitsTypeSelection(refTyped)) tree
+ else UnstableTreeError(refTyped)
}
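typedSingletonTypeTree now routes unstable references to UnstableTreeError. In user terms, `p.type` needs a stable p (names below are illustrative):

```scala
object SingletonTypeDemo {
  object Config { val port = 8080 }

  val ok: Config.type = Config        // ok: Config is a stable path
  var current = Config
  // val bad: current.type = current  // error: stable identifier required (a var is not stable)
}
```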
def typedSelectFromTypeTree(tree: SelectFromTypeTree) = {
@@ -5482,8 +5232,8 @@ trait Typers extends Modes with Adaptations with Tags {
}
def typedTypeBoundsTree(tree: TypeBoundsTree) = {
- val lo1 = typedType(tree.lo, mode)
- val hi1 = typedType(tree.hi, mode)
+ val lo1 = if (tree.lo.isEmpty) TypeTree(NothingTpe) else typedType(tree.lo, mode)
+ val hi1 = if (tree.hi.isEmpty) TypeTree(AnyTpe) else typedType(tree.hi, mode)
treeCopy.TypeBoundsTree(tree, lo1, hi1) setType TypeBounds(lo1.tpe, hi1.tpe)
}
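The change above makes empty bounds default explicitly to Nothing and Any, matching what can be written by hand:

```scala
// These two declarations are equivalent: omitted bounds mean >: Nothing <: Any.
class Box[A]
class BoxExplicit[A >: Nothing <: Any]
```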
@@ -5508,7 +5258,7 @@ trait Typers extends Modes with Adaptations with Tags {
// we should get here only when something before failed
// and we try again (@see tryTypedApply). In that case we can assign
// whatever type to tree; we just have to survive until a real error message is issued.
- tree setType AnyClass.tpe
+ tree setType AnyTpe
}
def typedFunction(fun: Function) = {
if (fun.symbol == NoSymbol)
@@ -5546,7 +5296,7 @@ trait Typers extends Modes with Adaptations with Tags {
case tree: TypeDef => typedTypeDef(tree)
case tree: LabelDef => labelTyper(tree).typedLabelDef(tree)
case tree: PackageDef => typedPackageDef(tree)
- case tree: DocDef => typedDocDef(tree)
+ case tree: DocDef => typedDocDef(tree, mode, pt)
case tree: Annotated => typedAnnotated(tree)
case tree: SingletonTypeTree => typedSingletonTypeTree(tree)
case tree: SelectFromTypeTree => typedSelectFromTypeTree(tree)
@@ -5562,18 +5312,11 @@ trait Typers extends Modes with Adaptations with Tags {
case tree: ApplyDynamic => typedApplyDynamic(tree)
case tree: ReferenceToBoxed => typedReferenceToBoxed(tree)
case tree: TypeTreeWithDeferredRefCheck => tree // TODO: retype the wrapped tree? TTWDRC would have to change to hold the wrapped tree (not a closure)
- case tree: Import => assert(forInteractive, "!forInteractive") ; tree setType tree.symbol.tpe // should not happen in normal circumstances.
case _ => abort(s"unexpected tree: ${tree.getClass}\n$tree")
}
}
- /**
- * @param tree ...
- * @param mode ...
- * @param pt ...
- * @return ...
- */
- def typed(tree: Tree, mode: Int, pt: Type): Tree = {
+ def typed(tree: Tree, mode: Mode, pt: Type): Tree = {
lastTreeToTyper = tree
indentTyping()
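Throughout this hunk and the ones below, the raw Int mode parameter becomes the Mode type with named queries (inTypeMode, onlySticky, forFunMode, and so on). A minimal sketch of that general technique, with made-up flag names and values rather than the compiler's actual Mode:

```scala
// Wrapping a bit set in a value class replaces scattered `(mode & FLAG) != 0`
// tests with named, typo-proof queries.
final case class Mode(bits: Int) extends AnyVal {
  def |(that: Mode): Mode        = Mode(bits | that.bits)
  def inAll(req: Mode): Boolean  = (bits & req.bits) == req.bits
  def inAny(req: Mode): Boolean  = (bits & req.bits) != 0
  def inNone(req: Mode): Boolean = (bits & req.bits) == 0
}

object Mode {
  val NOmode      = Mode(0)
  val EXPRmode    = Mode(1 << 0)
  val PATTERNmode = Mode(1 << 1)
  val TYPEmode    = Mode(1 << 2)
}
```

A caller then writes something like `mode.inAny(Mode.EXPRmode | Mode.PATTERNmode)` instead of masking bits inline, which is the shape of the new checks in this patch.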
@@ -5584,18 +5327,18 @@ trait Typers extends Modes with Adaptations with Tags {
try {
if (context.retyping &&
(tree.tpe ne null) && (tree.tpe.isErroneous || !(tree.tpe <:< ptPlugins))) {
- tree.tpe = null
- if (tree.hasSymbol) tree.symbol = NoSymbol
+ tree.clearType()
+ if (tree.hasSymbolField) tree.symbol = NoSymbol
}
val alreadyTyped = tree.tpe ne null
- var tree1: Tree = if (alreadyTyped) tree else {
+ val tree1: Tree = if (alreadyTyped) tree else {
printTyping(
ptLine("typing %s: pt = %s".format(ptTree(tree), ptPlugins),
"undetparams" -> context.undetparams,
"implicitsEnabled" -> context.implicitsEnabled,
"enrichmentEnabled" -> context.enrichmentEnabled,
- "mode" -> modeString(mode),
+ "mode" -> mode,
"silent" -> context.bufferErrors,
"context.owner" -> context.owner
)
@@ -5614,7 +5357,7 @@ trait Typers extends Modes with Adaptations with Tags {
)
}
- tree1.tpe = pluginsTyped(tree1.tpe, this, tree1, mode, ptPlugins)
+ tree1 modifyType (pluginsTyped(_, this, tree1, mode, ptPlugins))
val result =
if (tree1.isEmpty) tree1
else {
@@ -5631,14 +5374,14 @@ trait Typers extends Modes with Adaptations with Tags {
result
} catch {
case ex: TypeError =>
- tree.tpe = null
+ tree.clearType()
// The only problematic case are (recoverable) cyclic reference errors which can pop up almost anywhere.
printTyping("caught %s: while typing %s".format(ex, tree)) //DEBUG
reportTypeError(context, tree.pos, ex)
setError(tree)
case ex: Exception =>
- if (settings.debug.value) // @M causes cyclic reference error
+ if (settings.debug) // @M causes cyclic reference error
Console.println("exception when typing "+tree+", pt = "+ptPlugins)
if (context != null && context.unit.exists && tree != null)
logError("AT: " + (tree.pos).dbgString, ex)
@@ -5651,55 +5394,56 @@ trait Typers extends Modes with Adaptations with Tags {
}
def atOwner(owner: Symbol): Typer =
- newTyper(context.make(context.tree, owner))
+ newTyper(context.make(owner = owner))
def atOwner(tree: Tree, owner: Symbol): Typer =
newTyper(context.make(tree, owner))
- /** Types expression or definition <code>tree</code>.
- *
- * @param tree ...
- * @return ...
+ /** Types expression or definition `tree`.
*/
def typed(tree: Tree): Tree = {
- val ret = typed(tree, EXPRmode, WildcardType)
+ val ret = typed(tree, context.defaultModeForTyped, WildcardType)
ret
}
- def typedPos(pos: Position, mode: Int, pt: Type)(tree: Tree) = typed(atPos(pos)(tree), mode, pt)
+ def typedByValueExpr(tree: Tree, pt: Type = WildcardType): Tree = typed(tree, EXPRmode | BYVALmode, pt)
+
+ def typedPos(pos: Position, mode: Mode, pt: Type)(tree: Tree) = typed(atPos(pos)(tree), mode, pt)
def typedPos(pos: Position)(tree: Tree) = typed(atPos(pos)(tree))
// TODO: see if this formulation would impose any penalty, since
// it makes for a lot less casting.
// def typedPos[T <: Tree](pos: Position)(tree: T): T = typed(atPos(pos)(tree)).asInstanceOf[T]
- /** Types expression <code>tree</code> with given prototype <code>pt</code>.
- *
- * @param tree ...
- * @param pt ...
- * @return ...
+ /** Types expression `tree` with given prototype `pt`.
*/
def typed(tree: Tree, pt: Type): Tree =
- typed(tree, EXPRmode, pt)
+ typed(tree, context.defaultModeForTyped, pt)
+
+ def typed(tree: Tree, mode: Mode): Tree =
+ typed(tree, mode, WildcardType)
- /** Types qualifier <code>tree</code> of a select node.
- * E.g. is tree occurs in a context like <code>tree.m</code>.
+ /** Types qualifier `tree` of a select node.
+ * E.g. if tree occurs in a context like `tree.m`.
*/
- def typedQualifier(tree: Tree, mode: Int, pt: Type): Tree =
- typed(tree, EXPRmode | QUALmode | POLYmode | mode & TYPEPATmode, pt) // TR: don't set BYVALmode, since qualifier might end up as by-name param to an implicit
+ def typedQualifier(tree: Tree, mode: Mode, pt: Type): Tree =
+ typed(tree, PolyQualifierModes | mode.onlyTypePat, pt) // TR: don't set BYVALmode, since qualifier might end up as by-name param to an implicit
- /** Types qualifier <code>tree</code> of a select node.
- * E.g. is tree occurs in a context like <code>tree.m</code>.
+ /** Types qualifier `tree` of a select node.
+ * E.g. if tree occurs in a context like `tree.m`.
*/
- def typedQualifier(tree: Tree, mode: Int): Tree =
+ def typedQualifier(tree: Tree, mode: Mode): Tree =
typedQualifier(tree, mode, WildcardType)
def typedQualifier(tree: Tree): Tree = typedQualifier(tree, NOmode, WildcardType)
/** Types function part of an application */
- def typedOperator(tree: Tree): Tree =
- typed(tree, EXPRmode | FUNmode | POLYmode | TAPPmode, WildcardType)
+ def typedOperator(tree: Tree): Tree = typed(tree, OperatorModes)
- /** Types a pattern with prototype <code>pt</code> */
+ // the qualifier type of a supercall constructor is its first parent class
+ private def typedSelectOrSuperQualifier(qual: Tree) =
+ context withinSuperInit typed(qual, PolyQualifierModes)
+
+ /** Types a pattern with prototype `pt` */
def typedPattern(tree: Tree, pt: Type): Tree = {
// We disable implicits because otherwise some constructs will
// type check which should not. The pattern matcher does not
@@ -5721,30 +5465,28 @@ trait Typers extends Modes with Adaptations with Tags {
// TODO: can we achieve the pattern matching bit of the string interpolation SIP without this?
typingInPattern(context.withImplicitsDisabledAllowEnrichment(typed(tree, PATTERNmode, pt))) match {
case tpt if tpt.isType => PatternMustBeValue(tpt, pt); tpt
- case pat => pat
+ case pat => pat
}
}
/** Types a (fully parameterized) type tree */
- def typedType(tree: Tree, mode: Int): Tree =
- typed(tree, forTypeMode(mode), WildcardType)
+ def typedType(tree: Tree, mode: Mode): Tree =
+ typed(tree, mode.forTypeMode, WildcardType)
/** Types a (fully parameterized) type tree */
def typedType(tree: Tree): Tree = typedType(tree, NOmode)
/** Types a higher-kinded type tree -- pt denotes the expected kind*/
- def typedHigherKindedType(tree: Tree, mode: Int, pt: Type): Tree =
+ def typedHigherKindedType(tree: Tree, mode: Mode, pt: Type): Tree =
if (pt.typeParams.isEmpty) typedType(tree, mode) // kind is known and it's *
- else typed(tree, HKmode, pt)
-
- def typedHigherKindedType(tree: Tree, mode: Int): Tree =
- typed(tree, HKmode, WildcardType)
+ else context withinTypeConstructorAllowed typed(tree, NOmode, pt)
- def typedHigherKindedType(tree: Tree): Tree = typedHigherKindedType(tree, NOmode)
+ def typedHigherKindedType(tree: Tree, mode: Mode): Tree =
+ context withinTypeConstructorAllowed typed(tree)
/** Types a type constructor tree used in a new or supertype */
- def typedTypeConstructor(tree: Tree, mode: Int): Tree = {
- val result = typed(tree, forTypeMode(mode) | FUNmode, WildcardType)
+ def typedTypeConstructor(tree: Tree, mode: Mode): Tree = {
+ val result = typed(tree, mode.forTypeMode | FUNmode, WildcardType)
// get rid of type aliases for the following check (#1241)
result.tpe.dealias match {
@@ -5765,7 +5507,7 @@ trait Typers extends Modes with Adaptations with Tags {
def computeType(tree: Tree, pt: Type): Type = {
// macros employ different logic of `computeType`
- assert(!context.owner.isTermMacro, context.owner)
+ assert(!context.owner.isMacro, context.owner)
val tree1 = typed(tree, pt)
transformed(tree) = tree1
val tpe = packedType(tree1, context.owner)
@@ -5774,8 +5516,8 @@ trait Typers extends Modes with Adaptations with Tags {
}
def computeMacroDefType(tree: Tree, pt: Type): Type = {
- assert(context.owner.isTermMacro, context.owner)
- assert(tree.symbol.isTermMacro, tree.symbol)
+ assert(context.owner.isMacro, context.owner)
+ assert(tree.symbol.isMacro, tree.symbol)
assert(tree.isInstanceOf[DefDef], tree.getClass)
val ddef = tree.asInstanceOf[DefDef]
@@ -5793,40 +5535,29 @@ trait Typers extends Modes with Adaptations with Tags {
val isMacroBodyOkay = !tree.symbol.isErroneous && !(tree1 exists (_.isErroneous)) && tree1 != EmptyTree
val shouldInheritMacroImplReturnType = ddef.tpt.isEmpty
- if (isMacroBodyOkay && shouldInheritMacroImplReturnType) computeMacroDefTypeFromMacroImpl(ddef, tree1.symbol) else AnyClass.tpe
- }
-
- def transformedOr(tree: Tree, op: => Tree): Tree = transformed.get(tree) match {
- case Some(tree1) => transformed -= tree; tree1
- case None => op
+ if (isMacroBodyOkay && shouldInheritMacroImplReturnType) computeMacroDefTypeFromMacroImplRef(ddef, tree1) else AnyTpe
}
- def transformedOrTyped(tree: Tree, mode: Int, pt: Type): Tree = transformed.get(tree) match {
- case Some(tree1) => transformed -= tree; tree1
- case None => typed(tree, mode, pt)
+ def transformedOr(tree: Tree, op: => Tree): Tree = transformed remove tree match {
+ case Some(tree1) => tree1
+ case _ => op
}
-/*
- def convertToTypeTree(tree: Tree): Tree = tree match {
- case TypeTree() => tree
- case _ => TypeTree(tree.tpe)
+ def transformedOrTyped(tree: Tree, mode: Mode, pt: Type): Tree = transformed remove tree match {
+ case Some(tree1) => tree1
+ case _ => typed(tree, mode, pt)
}
-*/
}
}
object TypersStats {
import scala.reflect.internal.TypesStats._
- import scala.reflect.internal.BaseTypeSeqsStats._
val typedIdentCount = Statistics.newCounter("#typechecked identifiers")
val typedSelectCount = Statistics.newCounter("#typechecked selections")
val typedApplyCount = Statistics.newCounter("#typechecked applications")
val rawTypeFailed = Statistics.newSubCounter (" of which in failed", rawTypeCount)
val subtypeFailed = Statistics.newSubCounter(" of which in failed", subtypeCount)
val findMemberFailed = Statistics.newSubCounter(" of which in failed", findMemberCount)
- val compoundBaseTypeSeqCount = Statistics.newSubCounter(" of which for compound types", baseTypeSeqCount)
- val typerefBaseTypeSeqCount = Statistics.newSubCounter(" of which for typerefs", baseTypeSeqCount)
- val singletonBaseTypeSeqCount = Statistics.newSubCounter(" of which for singletons", baseTypeSeqCount)
val failedSilentNanos = Statistics.newSubTimer("time spent in failed", typerNanos)
val failedApplyNanos = Statistics.newSubTimer(" failed apply", typerNanos)
val failedOpEqNanos = Statistics.newSubTimer(" failed op=", typerNanos)
diff --git a/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala b/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala
index 31c5a61a8c..af3f772f79 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala
@@ -23,7 +23,6 @@ trait Unapplies extends ast.TreeDSL
private val unapplyParamName = nme.x_0
-
// In the typeCompleter (templateSig) of a case class (resp it's module),
// synthetic `copy` (reps `apply`, `unapply`) methods are added. To compute
// their signatures, the corresponding ClassDef is needed. During naming (in
@@ -47,17 +46,6 @@ trait Unapplies extends ast.TreeDSL
}
}
- /** returns type of the unapply method returning T_0...T_n
- * for n == 0, boolean
- * for n == 1, Some[T0]
- * else Some[Product[Ti]]
- */
- def unapplyReturnTypeExpected(argsLength: Int) = argsLength match {
- case 0 => BooleanClass.tpe
- case 1 => optionType(WildcardType)
- case n => optionType(productType((List fill n)(WildcardType)))
- }
-
/** returns unapply or unapplySeq if available */
def unapplyMember(tp: Type): Symbol = (tp member nme.unapply) match {
case NoSymbol => tp member nme.unapplySeq
@@ -128,11 +116,16 @@ trait Unapplies extends ast.TreeDSL
/** The module corresponding to a case class; overrides toString to show the module's name
*/
def caseModuleDef(cdef: ClassDef): ModuleDef = {
- // > MaxFunctionArity is caught in Namers, but for nice error reporting instead of
- // an abrupt crash we trim the list here.
- def primaries = constrParamss(cdef).head take MaxFunctionArity map (_.tpt)
- def inheritFromFun = !cdef.mods.hasAbstractFlag && cdef.tparams.isEmpty && constrParamss(cdef).length == 1
- def createFun = gen.scalaFunctionConstr(primaries, toIdent(cdef), abstractFun = true)
+ val params = constrParamss(cdef)
+ def inheritFromFun = !cdef.mods.hasAbstractFlag && cdef.tparams.isEmpty && (params match {
+ case List(ps) if ps.length <= MaxFunctionArity => true
+ case _ => false
+ })
+ def createFun = {
+ def primaries = params.head map (_.tpt)
+ gen.scalaFunctionConstr(primaries, toIdent(cdef), abstractFun = true)
+ }
+
def parents = if (inheritFromFun) List(createFun) else Nil
def toString = DefDef(
Modifiers(OVERRIDE | FINAL | SYNTHETIC),
@@ -149,7 +142,7 @@ trait Unapplies extends ast.TreeDSL
ModuleDef(
Modifiers(cdef.mods.flags & AccessFlags | SYNTHETIC, cdef.mods.privateWithin),
cdef.name.toTermName,
- Template(parents, emptyValDef, NoMods, Nil, ListOfNil, body, cdef.impl.pos.focus))
+ Template(parents, emptyValDef, NoMods, Nil, body, cdef.impl.pos.focus))
}
private val caseMods = Modifiers(SYNTHETIC | CASE)
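caseModuleDef above decides whether the synthetic companion can extend FunctionN (only for a non-abstract, non-polymorphic case class with a single parameter list of at most MaxFunctionArity parameters). The user-visible effect:

```scala
case class Point(x: Int, y: Int)

object CaseCompanionDemo extends App {
  // The synthetic companion conforms to (Int, Int) => Point.
  val make: (Int, Int) => Point = Point
  println(make(3, 4))                       // Point(3,4)
  println(List(1, 2, 3).map(Point(_, 0)))   // List(Point(1,0), Point(2,0), Point(3,0))
}
```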
diff --git a/src/compiler/scala/tools/nsc/typechecker/Variances.scala b/src/compiler/scala/tools/nsc/typechecker/Variances.scala
deleted file mode 100644
index ea436a71fb..0000000000
--- a/src/compiler/scala/tools/nsc/typechecker/Variances.scala
+++ /dev/null
@@ -1,94 +0,0 @@
-/* NSC -- new scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Martin Odersky
- */
-
-package scala.tools.nsc
-package typechecker
-
-import symtab.Flags.{ VarianceFlags => VARIANCES, _ }
-
-/** Variances form a lattice, 0 <= COVARIANT <= Variances, 0 <= CONTRAVARIANT <= VARIANCES
- */
-trait Variances {
-
- val global: Global
- import global._
-
- /** Flip between covariant and contravariant */
- private def flip(v: Int): Int = {
- if (v == COVARIANT) CONTRAVARIANT
- else if (v == CONTRAVARIANT) COVARIANT
- else v
- }
-
- /** Map everything below VARIANCES to 0 */
- private def cut(v: Int): Int =
- if (v == VARIANCES) v else 0
-
- /** Compute variance of type parameter `tparam` in types of all symbols `sym`. */
- def varianceInSyms(syms: List[Symbol])(tparam: Symbol): Int =
- (VARIANCES /: syms) ((v, sym) => v & varianceInSym(sym)(tparam))
-
- /** Compute variance of type parameter `tparam` in type of symbol `sym`. */
- def varianceInSym(sym: Symbol)(tparam: Symbol): Int =
- if (sym.isAliasType) cut(varianceInType(sym.info)(tparam))
- else varianceInType(sym.info)(tparam)
-
- /** Compute variance of type parameter `tparam` in all types `tps`. */
- def varianceInTypes(tps: List[Type])(tparam: Symbol): Int =
- (VARIANCES /: tps) ((v, tp) => v & varianceInType(tp)(tparam))
-
- /** Compute variance of type parameter `tparam` in all type arguments
- * <code>tps</code> which correspond to formal type parameters `tparams1`.
- */
- def varianceInArgs(tps: List[Type], tparams1: List[Symbol])(tparam: Symbol): Int = {
- var v: Int = VARIANCES;
- for ((tp, tparam1) <- tps zip tparams1) {
- val v1 = varianceInType(tp)(tparam)
- v = v & (if (tparam1.isCovariant) v1
- else if (tparam1.isContravariant) flip(v1)
- else cut(v1))
- }
- v
- }
-
- /** Compute variance of type parameter `tparam` in all type annotations `annots`. */
- def varianceInAttribs(annots: List[AnnotationInfo])(tparam: Symbol): Int = {
- (VARIANCES /: annots) ((v, annot) => v & varianceInAttrib(annot)(tparam))
- }
-
- /** Compute variance of type parameter `tparam` in type annotation `annot`. */
- def varianceInAttrib(annot: AnnotationInfo)(tparam: Symbol): Int = {
- varianceInType(annot.atp)(tparam)
- }
-
- /** Compute variance of type parameter <code>tparam</code> in type <code>tp</code>. */
- def varianceInType(tp: Type)(tparam: Symbol): Int = tp match {
- case ErrorType | WildcardType | NoType | NoPrefix | ThisType(_) | ConstantType(_) =>
- VARIANCES
- case BoundedWildcardType(bounds) =>
- varianceInType(bounds)(tparam)
- case SingleType(pre, sym) =>
- varianceInType(pre)(tparam)
- case TypeRef(pre, sym, args) =>
- if (sym == tparam) COVARIANT
- // tparam cannot occur in tp's args if tp is a type constructor (those don't have args)
- else if (tp.isHigherKinded) varianceInType(pre)(tparam)
- else varianceInType(pre)(tparam) & varianceInArgs(args, sym.typeParams)(tparam)
- case TypeBounds(lo, hi) =>
- flip(varianceInType(lo)(tparam)) & varianceInType(hi)(tparam)
- case RefinedType(parents, defs) =>
- varianceInTypes(parents)(tparam) & varianceInSyms(defs.toList)(tparam)
- case MethodType(params, restpe) =>
- flip(varianceInSyms(params)(tparam)) & varianceInType(restpe)(tparam)
- case NullaryMethodType(restpe) =>
- varianceInType(restpe)(tparam)
- case PolyType(tparams, restpe) =>
- flip(varianceInSyms(tparams)(tparam)) & varianceInType(restpe)(tparam)
- case ExistentialType(tparams, restpe) =>
- varianceInSyms(tparams)(tparam) & varianceInType(restpe)(tparam)
- case AnnotatedType(annots, tp, _) =>
- varianceInAttribs(annots)(tparam) & varianceInType(tp)(tparam)
- }
-}
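The deleted Variances trait computed, for each type parameter, whether it occurs co-, contra-, or invariantly. At the source level that machinery backs declarations like these:

```scala
object VarianceDemo {
  class Producer[+A](val value: A)                       // covariant occurrence: ok
  class Consumer[-A] { def accept(a: A): Unit = () }     // contravariant occurrence: ok

  val p: Producer[Any] = new Producer[Int](1)            // Producer[Int] <: Producer[Any]
  val c: Consumer[Int] = new Consumer[Any]               // Consumer[Any] <: Consumer[Int]

  // class Bad[+A] { def set(a: A): Unit = () }          // error: covariant A in contravariant position
}
```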
diff --git a/src/compiler/scala/tools/nsc/util/ClassPath.scala b/src/compiler/scala/tools/nsc/util/ClassPath.scala
index 471e2653cf..7f9b81e1ec 100644
--- a/src/compiler/scala/tools/nsc/util/ClassPath.scala
+++ b/src/compiler/scala/tools/nsc/util/ClassPath.scala
@@ -11,9 +11,9 @@ import java.net.URL
import scala.collection.{ mutable, immutable }
import io.{ File, Directory, Path, Jar, AbstractFile }
import scala.reflect.internal.util.StringOps.splitWhere
-import scala.reflect.ClassTag
import Jar.isJarOrZip
import File.pathSeparator
+import scala.collection.convert.WrapAsScala.enumerationAsScalaIterator
import java.net.MalformedURLException
import java.util.regex.PatternSyntaxException
@@ -29,14 +29,10 @@ object ClassPath {
private def expandS(pattern: String): List[String] = {
val wildSuffix = File.separator + "*"
- /** Get all subdirectories, jars, zips out of a directory. */
+ /* Get all subdirectories, jars, zips out of a directory. */
def lsDir(dir: Directory, filt: String => Boolean = _ => true) =
dir.list filter (x => filt(x.name) && (x.isDirectory || isJarOrZip(x))) map (_.path) toList
- def basedir(s: String) =
- if (s contains File.separator) s.substring(0, s.lastIndexOf(File.separator))
- else "."
-
if (pattern == "*") lsDir(Directory("."))
else if (pattern endsWith wildSuffix) lsDir(Directory(pattern dropRight 2))
else if (pattern contains '*') {
@@ -58,22 +54,6 @@ object ClassPath {
/** Split the classpath, apply a transformation function, and reassemble it. */
def map(cp: String, f: String => String): String = join(split(cp) map f: _*)
- /** Split the classpath, filter according to predicate, and reassemble. */
- def filter(cp: String, p: String => Boolean): String = join(split(cp) filter p: _*)
-
- /** Split the classpath and map them into Paths */
- def toPaths(cp: String): List[Path] = split(cp) map (x => Path(x).toAbsolute)
-
- /** Make all classpath components absolute. */
- def makeAbsolute(cp: String): String = fromPaths(toPaths(cp): _*)
-
- /** Join the paths as a classpath */
- def fromPaths(paths: Path*): String = join(paths map (_.path): _*)
- def fromURLs(urls: URL*): String = fromPaths(urls map (x => Path(x.getPath)) : _*)
-
- /** Split the classpath and map them into URLs */
- def toURLs(cp: String): List[URL] = toPaths(cp) map (_.toURL)
-
/** Expand path and possibly expanding stars */
def expandPath(path: String, expandStar: Boolean = true): List[String] =
if (expandStar) split(path) flatMap expandS
@@ -126,31 +106,29 @@ object ClassPath {
/** Creators for sub classpaths which preserve this context.
*/
def sourcesInPath(path: String): List[ClassPath[T]] =
- for (file <- expandPath(path, false) ; dir <- Option(AbstractFile getDirectory file)) yield
+ for (file <- expandPath(path, expandStar = false) ; dir <- Option(AbstractFile getDirectory file)) yield
new SourcePath[T](dir, this)
def contentsOfDirsInPath(path: String): List[ClassPath[T]] =
- for (dir <- expandPath(path, false) ; name <- expandDir(dir) ; entry <- Option(AbstractFile getDirectory name)) yield
+ for (dir <- expandPath(path, expandStar = false) ; name <- expandDir(dir) ; entry <- Option(AbstractFile getDirectory name)) yield
newClassPath(entry)
- def classesAtAllURLS(path: String): List[ClassPath[T]] =
- (path split " ").toList flatMap classesAtURL
-
- def classesAtURL(spec: String) =
- for (url <- specToURL(spec).toList ; location <- Option(AbstractFile getURL url)) yield
- newClassPath(location)
-
def classesInExpandedPath(path: String): IndexedSeq[ClassPath[T]] =
- classesInPathImpl(path, true).toIndexedSeq
+ classesInPathImpl(path, expand = true).toIndexedSeq
- def classesInPath(path: String) = classesInPathImpl(path, false)
+ def classesInPath(path: String) = classesInPathImpl(path, expand = false)
// Internal
private def classesInPathImpl(path: String, expand: Boolean) =
for (file <- expandPath(path, expand) ; dir <- Option(AbstractFile getDirectory file)) yield
newClassPath(dir)
+
+ def classesInManifest(used: Boolean) =
+ if (used) for (url <- manifests) yield newClassPath(AbstractFile getResources url) else Nil
}
+ def manifests = Thread.currentThread().getContextClassLoader().getResources("META-INF/MANIFEST.MF").filter(_.getProtocol() == "jar").toList
+
class JavaContext extends ClassPathContext[AbstractFile] {
def toBinaryName(rep: AbstractFile) = {
val name = rep.name
@@ -216,8 +194,7 @@ abstract class ClassPath[T] {
def sourcepaths: IndexedSeq[AbstractFile]
/**
- * Represents classes which can be loaded with a ClassfileLoader/MsilFileLoader
- * and / or a SourcefileLoader.
+ * Represents classes which can be loaded with a ClassfileLoader and/or SourcefileLoader.
*/
case class ClassRep(binary: Option[T], source: Option[AbstractFile]) {
def name: String = binary match {
@@ -239,7 +216,7 @@ abstract class ClassPath[T] {
* Does not support nested classes on .NET
*/
def findClass(name: String): Option[AnyClassRep] =
- splitWhere(name, _ == '.', true) match {
+ splitWhere(name, _ == '.', doDropIndex = true) match {
case Some((pkg, rest)) =>
val rep = packages find (_.name == pkg) flatMap (_ findClass rest)
rep map {
@@ -283,7 +260,7 @@ class SourcePath[T](dir: AbstractFile, val context: ClassPathContext[T]) extends
else if (f.isDirectory && validPackage(f.name))
packageBuf += new SourcePath[T](f, context)
}
- (packageBuf.result, classBuf.result)
+ (packageBuf.result(), classBuf.result())
}
lazy val (packages, classes) = traverse()
@@ -296,7 +273,7 @@ class SourcePath[T](dir: AbstractFile, val context: ClassPathContext[T]) extends
class DirectoryClassPath(val dir: AbstractFile, val context: ClassPathContext[AbstractFile]) extends ClassPath[AbstractFile] {
def name = dir.name
override def origin = dir.underlyingSource map (_.path)
- def asURLs = if (dir.file == null) Nil else List(dir.toURL)
+ def asURLs = if (dir.file == null) List(new URL(name)) else List(dir.toURL)
def asClasspathString = dir.path
val sourcepaths: IndexedSeq[AbstractFile] = IndexedSeq()
@@ -304,13 +281,26 @@ class DirectoryClassPath(val dir: AbstractFile, val context: ClassPathContext[Ab
private def traverse() = {
val classBuf = immutable.Vector.newBuilder[ClassRep]
val packageBuf = immutable.Vector.newBuilder[DirectoryClassPath]
- dir foreach { f =>
- if (!f.isDirectory && validClassFile(f.name))
- classBuf += ClassRep(Some(f), None)
- else if (f.isDirectory && validPackage(f.name))
- packageBuf += new DirectoryClassPath(f, context)
+ dir foreach {
+ f =>
+ // Optimization: We assume the file was not changed since `dir` called
+ // `Path.apply` and categorized existent files as `Directory`
+ // or `File`.
+ val isDirectory = f match {
+ case pf: io.PlainFile => pf.givenPath match {
+ case _: io.Directory => true
+ case _: io.File => false
+ case _ => f.isDirectory
+ }
+ case _ =>
+ f.isDirectory
+ }
+ if (!isDirectory && validClassFile(f.name))
+ classBuf += ClassRep(Some(f), None)
+ else if (isDirectory && validPackage(f.name))
+ packageBuf += new DirectoryClassPath(f, context)
}
- (packageBuf.result, classBuf.result)
+ (packageBuf.result(), classBuf.result())
}
lazy val (packages, classes) = traverse()
@@ -408,15 +398,3 @@ class JavaClassPath(
containers: IndexedSeq[ClassPath[AbstractFile]],
context: JavaContext)
extends MergedClassPath[AbstractFile](containers, context) { }
-
-object JavaClassPath {
- def fromURLs(urls: Seq[URL], context: JavaContext): JavaClassPath = {
- val containers = {
- for (url <- urls ; f = AbstractFile getURL url ; if f != null) yield
- new DirectoryClassPath(f, context)
- }
- new JavaClassPath(containers.toIndexedSeq, context)
- }
- def fromURLs(urls: Seq[URL]): JavaClassPath =
- fromURLs(urls, ClassPath.DefaultJavaContext)
-}
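The new manifests/classesInManifest pair above builds classpath entries from the MANIFEST.MF resources visible to the context class loader. A standalone sketch of the same lookup, using the WrapAsScala conversion this hunk imports (object and method names here are illustrative):

    import java.net.URL
    import scala.collection.convert.WrapAsScala.enumerationAsScalaIterator

    object ManifestScan {
      // every jar on the runtime classpath exposes its MANIFEST.MF as a jar: URL
      def manifestUrls: List[URL] =
        Thread.currentThread().getContextClassLoader()
          .getResources("META-INF/MANIFEST.MF")
          .filter(_.getProtocol == "jar")
          .toList

      def main(args: Array[String]): Unit = manifestUrls foreach println
    }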
diff --git a/src/compiler/scala/tools/nsc/util/CommandLineParser.scala b/src/compiler/scala/tools/nsc/util/CommandLineParser.scala
index 9cf2c535df..e8f962a9e2 100644
--- a/src/compiler/scala/tools/nsc/util/CommandLineParser.scala
+++ b/src/compiler/scala/tools/nsc/util/CommandLineParser.scala
@@ -7,7 +7,6 @@ package scala.tools.nsc
package util
import scala.util.parsing.combinator._
-import scala.util.parsing.input.{ Reader }
import scala.util.parsing.input.CharArrayReader.EofCh
import scala.collection.mutable.ListBuffer
@@ -22,7 +21,6 @@ import scala.collection.mutable.ListBuffer
trait ParserUtil extends Parsers {
protected implicit class ParserPlus[+T](underlying: Parser[T]) {
def !~>[U](p: => Parser[U]): Parser[U] = (underlying ~! p) ^^ { case a~b => b }
- def <~![U](p: => Parser[U]): Parser[T] = (underlying ~! p) ^^ { case a~b => a }
}
}
@@ -38,7 +36,6 @@ case class CommandLine(
def withUnaryArgs(xs: List[String]) = copy(unaryArguments = xs)
def withBinaryArgs(xs: List[String]) = copy(binaryArguments = xs)
- def originalArgs = args
def assumeBinary = true
def enforceArity = true
def onlyKnownOptions = false
@@ -106,7 +103,6 @@ case class CommandLine(
def isSet(arg: String) = args contains arg
def get(arg: String) = argMap get arg
- def getOrElse(arg: String, orElse: => String) = if (isSet(arg)) apply(arg) else orElse
def apply(arg: String) = argMap(arg)
override def toString() = "CommandLine(\n%s)\n" format (args map (" " + _ + "\n") mkString)
@@ -116,7 +112,6 @@ object CommandLineParser extends RegexParsers with ParserUtil {
override def skipWhitespace = false
def elemExcept(xs: Elem*): Parser[Elem] = elem("elemExcept", x => x != EofCh && !(xs contains x))
- def elemOf(xs: Elem*): Parser[Elem] = elem("elemOf", xs contains _)
def escaped(ch: Char): Parser[String] = "\\" + ch
def mkQuoted(ch: Char): Parser[String] = (
elem(ch) !~> rep(escaped(ch) | elemExcept(ch)) <~ ch ^^ (_.mkString)
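mkQuoted above (unchanged by this commit) shows how these combinators compose: consume the opening quote, keep everything that is not a quote, require the closing quote. A stripped-down, self-contained variant on plain RegexParsers, without the !~> operator and escape handling (names are illustrative):

    import scala.util.parsing.combinator.RegexParsers

    object QuotedToken extends RegexParsers {
      override def skipWhitespace = false

      def quoted: Parser[String] =
        (elem('"') ~> rep(elem("non-quote", _ != '"')) <~ elem('"')) ^^ (_.mkString)

      def main(args: Array[String]): Unit =
        println(parseAll(quoted, "\"hello world\"").get)   // hello world
    }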
diff --git a/src/compiler/scala/tools/nsc/util/DocStrings.scala b/src/compiler/scala/tools/nsc/util/DocStrings.scala
index dde53dc640..ba44126df2 100755
--- a/src/compiler/scala/tools/nsc/util/DocStrings.scala
+++ b/src/compiler/scala/tools/nsc/util/DocStrings.scala
@@ -74,7 +74,7 @@ object DocStrings {
else idx :: findAll(str, idx)(p)
}
- /** Produces a string index, which is a list of ``sections'', i.e
+ /** Produces a string index, which is a list of `sections`, i.e
* pairs of start/end positions of all tagged sections in the string.
* Every section starts with an at sign and extends to the next at sign,
* or to the end of the comment string, but excluding the final two
diff --git a/src/compiler/scala/tools/nsc/util/Exceptional.scala b/src/compiler/scala/tools/nsc/util/Exceptional.scala
index 34344263e8..1608ffa425 100644
--- a/src/compiler/scala/tools/nsc/util/Exceptional.scala
+++ b/src/compiler/scala/tools/nsc/util/Exceptional.scala
@@ -3,8 +3,6 @@ package util
import java.util.concurrent.ExecutionException
import java.lang.reflect.{ InvocationTargetException, UndeclaredThrowableException }
-import scala.reflect.internal.util.StringOps._
-import scala.language.implicitConversions
object Exceptional {
def unwrap(x: Throwable): Throwable = x match {
diff --git a/src/compiler/scala/tools/nsc/util/FreshNameCreator.scala b/src/compiler/scala/tools/nsc/util/FreshNameCreator.scala
index 5421843438..e877c990f0 100644
--- a/src/compiler/scala/tools/nsc/util/FreshNameCreator.scala
+++ b/src/compiler/scala/tools/nsc/util/FreshNameCreator.scala
@@ -14,11 +14,6 @@ trait FreshNameCreator {
*/
def newName(): String
def newName(prefix: String): String
-
- @deprecated("use newName(prefix)", "2.9.0")
- def newName(pos: scala.reflect.internal.util.Position, prefix: String): String = newName(prefix)
- @deprecated("use newName()", "2.9.0")
- def newName(pos: scala.reflect.internal.util.Position): String = newName()
}
object FreshNameCreator {
diff --git a/src/compiler/scala/tools/nsc/util/JavaCharArrayReader.scala b/src/compiler/scala/tools/nsc/util/JavaCharArrayReader.scala
index b7ed7903bc..58a5442465 100644
--- a/src/compiler/scala/tools/nsc/util/JavaCharArrayReader.scala
+++ b/src/compiler/scala/tools/nsc/util/JavaCharArrayReader.scala
@@ -3,7 +3,8 @@
* @author Martin Odersky
*/
-package scala.tools.nsc
+package scala
+package tools.nsc
package util
import scala.reflect.internal.Chars._
@@ -14,74 +15,32 @@ class JavaCharArrayReader(buf: IndexedSeq[Char], start: Int, /* startline: int,
def this(buf: IndexedSeq[Char], decodeUni: Boolean, error: String => Unit) =
this(buf, 0, /* 1, 1, */ decodeUni, error)
- /** produce a duplicate of this char array reader which starts reading
- * at current position, independent of what happens to original reader
- */
- def dup: JavaCharArrayReader = clone().asInstanceOf[JavaCharArrayReader]
-
- /** layout constant
- */
- val tabinc = 8
-
/** the line and column position of the current character
*/
var ch: Char = _
var bp = start
- var oldBp = -1
- var oldCh: Char = _
-
- //private var cline: Int = _
- //private var ccol: Int = _
def cpos = bp
var isUnicode: Boolean = _
- var lastLineStartPos: Int = 0
- var lineStartPos: Int = 0
- var lastBlankLinePos: Int = 0
-
- private var onlyBlankChars = false
- //private var nextline = startline
- //private var nextcol = startcol
-
- private def markNewLine() {
- lastLineStartPos = lineStartPos
- if (onlyBlankChars) lastBlankLinePos = lineStartPos
- lineStartPos = bp
- onlyBlankChars = true
- //nextline += 1
- //nextcol = 1
- }
-
- def hasNext: Boolean = if (bp < buf.length) true
- else {
- false
- }
- def last: Char = if (bp > start + 2) buf(bp - 2) else ' ' // XML literals
+ def hasNext = bp < buf.length
def next(): Char = {
- //cline = nextline
- //ccol = nextcol
val buf = this.buf.asInstanceOf[collection.mutable.WrappedArray[Char]].array
if(!hasNext) {
ch = SU
return SU // there is an endless stream of SU's at the end
}
- oldBp = bp
- oldCh = ch
ch = buf(bp)
isUnicode = false
bp = bp + 1
ch match {
case '\t' =>
- // nextcol = ((nextcol - 1) / tabinc * tabinc) + tabinc + 1;
case CR =>
- if (bp < buf.size && buf(bp) == LF) {
+ if (bp < buf.length && buf(bp) == LF) {
ch = LF
bp += 1
}
- markNewLine()
case LF | FF =>
- markNewLine()
case '\\' =>
def evenSlashPrefix: Boolean = {
var p = bp - 2
@@ -90,34 +49,23 @@ class JavaCharArrayReader(buf: IndexedSeq[Char], start: Int, /* startline: int,
}
def udigit: Int = {
val d = digit2int(buf(bp), 16)
- if (d >= 0) { bp += 1; /* nextcol = nextcol + 1 */ }
- else error("error in unicode escape");
+ if (d >= 0) bp += 1
+ else error("error in unicode escape")
d
}
- // nextcol += 1
if (buf(bp) == 'u' && decodeUni && evenSlashPrefix) {
do {
bp += 1 //; nextcol += 1
- } while (buf(bp) == 'u');
+ } while (buf(bp) == 'u')
val code = udigit << 12 | udigit << 8 | udigit << 4 | udigit
ch = code.asInstanceOf[Char]
isUnicode = true
}
case _ =>
- if (ch > ' ') onlyBlankChars = false
- // nextcol += 1
}
ch
}
- def rewind() {
- if (oldBp == -1) throw new IllegalArgumentException
- bp = oldBp
- ch = oldCh
- oldBp = -1
- oldCh = 'x'
- }
-
def copy: JavaCharArrayReader =
new JavaCharArrayReader(buf, bp, /* nextcol, nextline, */ decodeUni, error)
}
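The escape-decoding branch kept above reads a run of u's after a backslash and then exactly four hex digits, building the character four bits at a time. A standalone sketch of the same logic over a plain String (helper names are illustrative; the reader uses digit2int where this uses Character.digit):

    object UnicodeEscape {
      def decode(s: String, start: Int): Char = {
        require(s.charAt(start) == '\\' && s.charAt(start + 1) == 'u', "not a unicode escape")
        var i = start + 1
        while (s.charAt(i) == 'u') i += 1            // multiple u's are legal
        val code = (0 until 4).foldLeft(0) { (acc, k) =>
          val d = Character.digit(s.charAt(i + k), 16)
          if (d < 0) sys.error("error in unicode escape") else (acc << 4) | d
        }
        code.toChar
      }

      def main(args: Array[String]): Unit = {
        val src = Array('\\', 'u', '0', '0', '4', '1').mkString   // backslash, u, 0, 0, 4, 1
        println(decode(src, 0))                                   // A
      }
    }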
diff --git a/src/compiler/scala/tools/nsc/util/MsilClassPath.scala b/src/compiler/scala/tools/nsc/util/MsilClassPath.scala
deleted file mode 100644
index aa3b7c286d..0000000000
--- a/src/compiler/scala/tools/nsc/util/MsilClassPath.scala
+++ /dev/null
@@ -1,169 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2006-2013 LAMP/EPFL
- * @author Martin Odersky
- */
-
-// $Id$
-
-package scala.tools.nsc
-package util
-
-import java.io.File
-import java.net.URL
-import java.util.StringTokenizer
-import scala.util.Sorting
-import scala.collection.mutable
-import scala.tools.nsc.io.{ AbstractFile, MsilFile }
-import ch.epfl.lamp.compiler.msil.{ Type => MSILType, Assembly }
-import ClassPath.{ ClassPathContext, isTraitImplementation }
-
-/** Keeping the MSIL classpath code in its own file is important to make sure
- * we don't accidentally introduce a dependency on msil.jar in the jvm.
- */
-
-object MsilClassPath {
- def collectTypes(assemFile: AbstractFile) = {
- var res: Array[MSILType] = MSILType.EmptyTypes
- val assem = Assembly.LoadFrom(assemFile.path)
- if (assem != null) {
- // DeclaringType == null: true for non-inner classes
- res = assem.GetTypes() filter (_.DeclaringType == null)
- Sorting.stableSort(res, (t1: MSILType, t2: MSILType) => (t1.FullName compareTo t2.FullName) < 0)
- }
- res
- }
-
- /** On the java side this logic is in PathResolver, but as I'm not really
- * up to folding MSIL into that, I am encapsulating it here.
- */
- def fromSettings(settings: Settings): MsilClassPath = {
- val context =
- if (settings.inline.value) new MsilContext
- else new MsilContext { override def isValidName(name: String) = !isTraitImplementation(name) }
-
- import settings._
- new MsilClassPath(assemextdirs.value, assemrefs.value, sourcepath.value, context)
- }
-
- class MsilContext extends ClassPathContext[MsilFile] {
- def toBinaryName(rep: MsilFile) = rep.msilType.Name
- def newClassPath(assemFile: AbstractFile) = new AssemblyClassPath(MsilClassPath collectTypes assemFile, "", this)
- }
-
- private def assembleEntries(ext: String, user: String, source: String, context: MsilContext): List[ClassPath[MsilFile]] = {
- import ClassPath._
- val etr = new mutable.ListBuffer[ClassPath[MsilFile]]
- val names = new mutable.HashSet[String]
-
- // 1. Assemblies from -Xassem-extdirs
- for (dirName <- expandPath(ext, expandStar = false)) {
- val dir = AbstractFile.getDirectory(dirName)
- if (dir ne null) {
- for (file <- dir) {
- val name = file.name.toLowerCase
- if (name.endsWith(".dll") || name.endsWith(".exe")) {
- names += name
- etr += context.newClassPath(file)
- }
- }
- }
- }
-
- // 2. Assemblies from -Xassem-path
- for (fileName <- expandPath(user, expandStar = false)) {
- val file = AbstractFile.getFile(fileName)
- if (file ne null) {
- val name = file.name.toLowerCase
- if (name.endsWith(".dll") || name.endsWith(".exe")) {
- names += name
- etr += context.newClassPath(file)
- }
- }
- }
-
- def check(n: String) {
- if (!names.contains(n))
- throw new AssertionError("Cannot find assembly "+ n +
- ". Use -Xassem-extdirs or -Xassem-path to specify its location")
- }
- check("mscorlib.dll")
- check("scalaruntime.dll")
-
- // 3. Source path
- for (dirName <- expandPath(source, expandStar = false)) {
- val file = AbstractFile.getDirectory(dirName)
- if (file ne null) etr += new SourcePath[MsilFile](file, context)
- }
-
- etr.toList
- }
-}
-import MsilClassPath._
-
-/**
- * A assembly file (dll / exe) containing classes and namespaces
- */
-class AssemblyClassPath(types: Array[MSILType], namespace: String, val context: MsilContext) extends ClassPath[MsilFile] {
- def name = {
- val i = namespace.lastIndexOf('.')
- if (i < 0) namespace
- else namespace drop (i + 1)
- }
- def asURLs = List(new java.net.URL(name))
- def asClasspathString = sys.error("Unknown") // I don't know what if anything makes sense here?
-
- private lazy val first: Int = {
- var m = 0
- var n = types.length - 1
- while (m < n) {
- val l = (m + n) / 2
- val res = types(l).FullName.compareTo(namespace)
- if (res < 0) m = l + 1
- else n = l
- }
- if (types(m).FullName.startsWith(namespace)) m else types.length
- }
-
- lazy val classes = {
- val cls = new mutable.ListBuffer[ClassRep]
- var i = first
- while (i < types.length && types(i).Namespace.startsWith(namespace)) {
- // CLRTypes used to exclude java.lang.Object and java.lang.String (no idea why..)
- if (types(i).Namespace == namespace)
- cls += ClassRep(Some(new MsilFile(types(i))), None)
- i += 1
- }
- cls.toIndexedSeq
- }
-
- lazy val packages = {
- val nsSet = new mutable.HashSet[String]
- var i = first
- while (i < types.length && types(i).Namespace.startsWith(namespace)) {
- val subns = types(i).Namespace
- if (subns.length > namespace.length) {
- // example: namespace = "System", subns = "System.Reflection.Emit"
- // => find second "." and "System.Reflection" to nsSet.
- val end = subns.indexOf('.', namespace.length + 1)
- nsSet += (if (end < 0) subns
- else subns.substring(0, end))
- }
- i += 1
- }
- val xs = for (ns <- nsSet.toList)
- yield new AssemblyClassPath(types, ns, context)
-
- xs.toIndexedSeq
- }
-
- val sourcepaths: IndexedSeq[AbstractFile] = IndexedSeq()
-
- override def toString() = "assembly classpath "+ namespace
-}
-
-/**
- * The classpath when compiling with target:msil. Binary files are represented as
- * MSILType values.
- */
-class MsilClassPath(ext: String, user: String, source: String, context: MsilContext)
-extends MergedClassPath[MsilFile](MsilClassPath.assembleEntries(ext, user, source, context), context) { } \ No newline at end of file
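Among the code deleted with the MSIL classpath, AssemblyClassPath.first was a hand-rolled binary search for the first type whose FullName is at least the namespace prefix. The same idea as a generic sketch with half-open bounds (the deleted version kept an inclusive upper bound and relied on the subsequent startsWith check):

    // first index in a sorted array whose element is >= key, or xs.length if none
    def lowerBound(xs: Array[String], key: String): Int = {
      var lo = 0
      var hi = xs.length
      while (lo < hi) {
        val mid = (lo + hi) / 2
        if (xs(mid).compareTo(key) < 0) lo = mid + 1
        else hi = mid
      }
      lo
    }
    // lowerBound(Array("System", "System.IO", "System.Reflection"), "System.IO") == 1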
diff --git a/src/compiler/scala/tools/nsc/util/ShowPickled.scala b/src/compiler/scala/tools/nsc/util/ShowPickled.scala
index 2b87280c24..b804bfb842 100644
--- a/src/compiler/scala/tools/nsc/util/ShowPickled.scala
+++ b/src/compiler/scala/tools/nsc/util/ShowPickled.scala
@@ -3,17 +3,17 @@
* @author Martin Odersky
*/
-package scala.tools
+package scala
+package tools
package nsc
package util
-import java.io.{File, FileInputStream, PrintStream}
+import java.io.PrintStream
import java.lang.Long.toHexString
import java.lang.Float.intBitsToFloat
import java.lang.Double.longBitsToDouble
import scala.reflect.internal.{Flags, Names}
import scala.reflect.internal.pickling.{ PickleBuffer, PickleFormat }
-import interpreter.ByteCode.scalaSigBytesForPath
object ShowPickled extends Names {
import PickleFormat._
@@ -94,7 +94,6 @@ object ShowPickled extends Names {
case ANNOTATEDtpe => "ANNOTATEDtpe"
case ANNOTINFO => "ANNOTINFO"
case ANNOTARGARRAY => "ANNOTARGARRAY"
- // case DEBRUIJNINDEXtpe => "DEBRUIJNINDEXtpe"
case EXISTENTIALtpe => "EXISTENTIALtpe"
case TREE => "TREE"
case MODIFIERS => "MODIFIERS"
@@ -109,7 +108,7 @@ object ShowPickled extends Names {
var result = 0L
var b = 0L
do {
- b = data(idx)
+ b = data(idx).toLong
idx += 1
result = (result << 7) + (b & 0x7f)
} while((b & 0x80) != 0L)
@@ -165,7 +164,7 @@ object ShowPickled extends Names {
out.print(" %s[%s]".format(toHexString(pflags), flagString))
}
- /** Might be info or privateWithin */
+ /* Might be info or privateWithin */
val x = buf.readNat()
if (buf.readIndex == end) {
printFlags(None)
@@ -177,9 +176,9 @@ object ShowPickled extends Names {
}
}
- /** Note: the entries which require some semantic analysis to be correctly
- * interpreted are for the most part going to tell you the wrong thing.
- * It's not so easy to duplicate the logic applied in the UnPickler.
+ /* Note: the entries which require some semantic analysis to be correctly
+ * interpreted are for the most part going to tell you the wrong thing.
+ * It's not so easy to duplicate the logic applied in the UnPickler.
*/
def printEntry(i: Int) {
buf.readIndex = index(i)
@@ -251,7 +250,7 @@ object ShowPickled extends Names {
case SYMANNOT =>
printSymbolRef(); printTypeRef(); buf.until(end, printAnnotArgRef)
case ANNOTATEDtpe =>
- printTypeRef(); buf.until(end, printAnnotInfoRef);
+ printTypeRef(); buf.until(end, printAnnotInfoRef)
case ANNOTINFO =>
printTypeRef(); buf.until(end, printAnnotArgRef)
case ANNOTARGARRAY =>
@@ -272,8 +271,7 @@ object ShowPickled extends Names {
for (i <- 0 until index.length) printEntry(i)
}
- def fromFile(path: String) = fromBytes(io.File(path).toByteArray)
- def fromName(name: String) = fromBytes(scalaSigBytesForPath(name) getOrElse Array())
+ def fromFile(path: String) = fromBytes(io.File(path).toByteArray())
def fromBytes(data: => Array[Byte]): Option[PickleBuffer] =
try Some(new PickleBuffer(data, 0, data.length))
catch { case _: Exception => None }
@@ -288,7 +286,7 @@ object ShowPickled extends Names {
def main(args: Array[String]) {
args foreach { arg =>
- (fromFile(arg) orElse fromName(arg)) match {
+ fromFile(arg) match {
case Some(pb) => show(arg + ":", pb)
case _ => Console.println("Cannot read " + arg)
}
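The decoding loop patched above (adding .toLong before accumulating) reads the pickle format's variable-length naturals: big-endian base-128, with the high bit set on every byte except the last. The same loop as a standalone sketch that also returns the index past the encoded value (names are illustrative):

    def readLongNat(data: Array[Byte], start: Int): (Long, Int) = {
      var idx    = start
      var result = 0L
      var b      = 0L
      do {
        b = data(idx).toLong
        idx += 1
        result = (result << 7) + (b & 0x7f)
      } while ((b & 0x80) != 0L)
      (result, idx)
    }
    // readLongNat(Array[Byte](0x81.toByte, 0x00), 0) == (128L, 2)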
diff --git a/src/compiler/scala/tools/nsc/util/SimpleTracer.scala b/src/compiler/scala/tools/nsc/util/SimpleTracer.scala
index 2601798b96..4e1cf02a6e 100644
--- a/src/compiler/scala/tools/nsc/util/SimpleTracer.scala
+++ b/src/compiler/scala/tools/nsc/util/SimpleTracer.scala
@@ -6,7 +6,7 @@ package util
import java.io.PrintStream
/** A simple tracer
- * @param out: The print stream where trace info shoul be sent
+ * @param out: The print stream where trace info should be sent
* @param enabled: A condition that must be true for trace info to be produced.
*/
class SimpleTracer(out: PrintStream, enabled: Boolean = true) {
@@ -14,6 +14,5 @@ class SimpleTracer(out: PrintStream, enabled: Boolean = true) {
if (enabled) out.println(msg+value)
value
}
- def withOutput(out: PrintStream) = new SimpleTracer(out, enabled)
def when(enabled: Boolean): SimpleTracer = new SimpleTracer(out, enabled)
}
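With withOutput gone, the remaining API is apply and when: trace(msg)(expr) prints msg plus the expression's value when enabled and always returns the value, so it can wrap an expression in place. A REPL-style usage sketch, assuming scala.tools.nsc.util.SimpleTracer is on the classpath:

    import scala.tools.nsc.util.SimpleTracer

    val trace  = new SimpleTracer(System.out)
    val three  = trace("sum: ")(1 + 2)        // prints "sum: 3", three == 3
    val silent = trace when false
    val n      = silent("ignored: ")(42)      // prints nothing, n == 42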
diff --git a/src/compiler/scala/tools/nsc/util/WorkScheduler.scala b/src/compiler/scala/tools/nsc/util/WorkScheduler.scala
index b1f4696d3e..4f7a9ff878 100644
--- a/src/compiler/scala/tools/nsc/util/WorkScheduler.scala
+++ b/src/compiler/scala/tools/nsc/util/WorkScheduler.scala
@@ -7,9 +7,9 @@ class WorkScheduler {
type Action = () => Unit
- private var todo = new mutable.Queue[Action]
- private var throwables = new mutable.Queue[Throwable]
- private var interruptReqs = new mutable.Queue[InterruptReq]
+ private val todo = new mutable.Queue[Action]
+ private val throwables = new mutable.Queue[Throwable]
+ private val interruptReqs = new mutable.Queue[InterruptReq]
/** Called from server: block until one of todo list, throwables or interruptReqs is nonempty */
def waitForMoreWork() = synchronized {
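Changing these queues from var to val works because mutable.Queue is mutated in place; the field itself is never reassigned. A REPL-style illustration:

    import scala.collection.mutable

    val todo = new mutable.Queue[() => Unit]
    todo.enqueue(() => println("work item"))
    todo.dequeue()()    // dequeues the action and runs it: prints "work item"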
diff --git a/src/compiler/scala/tools/nsc/util/package.scala b/src/compiler/scala/tools/nsc/util/package.scala
index d34d4ee092..1e43d18900 100644
--- a/src/compiler/scala/tools/nsc/util/package.scala
+++ b/src/compiler/scala/tools/nsc/util/package.scala
@@ -3,7 +3,9 @@
* @author Paul Phillips
*/
-package scala.tools.nsc
+package scala
+package tools
+package nsc
import java.io.{ OutputStream, PrintStream, ByteArrayOutputStream, PrintWriter, StringWriter }
@@ -18,16 +20,9 @@ package object util {
type HashSet[T >: Null <: AnyRef] = scala.reflect.internal.util.HashSet[T]
val HashSet = scala.reflect.internal.util.HashSet
- def onull[T](value: T, orElse: => T): T = if (value == null) orElse else value
-
/** Apply a function and return the passed value */
def returning[T](x: T)(f: T => Unit): T = { f(x) ; x }
- /** Frequency counter */
- def freq[T](xs: Traversable[T]): Map[T, Int] = xs groupBy identity mapValues (_.size)
-
- def freqrank[T](xs: Traversable[(T, Int)]): List[(Int, T)] = xs.toList map (_.swap) sortBy (-_._1)
-
/** Execute code and then wait for all non-daemon Threads
* created and begun during its execution to complete.
*/
@@ -54,18 +49,6 @@ package object util {
(result, ts2 filterNot (ts1 contains _))
}
- /** Given a function and a block of code, evaluates code block,
- * calls function with milliseconds elapsed, and returns block result.
- */
- def millisElapsedTo[T](f: Long => Unit)(body: => T): T = {
- val start = System.currentTimeMillis
- val result = body
- val end = System.currentTimeMillis
-
- f(end - start)
- result
- }
-
/** Generate a string using a routine that wants to write on a stream. */
def stringFromWriter(writer: PrintWriter => Unit): String = {
val stringWriter = new StringWriter()
@@ -83,8 +66,19 @@ package object util {
}
def stackTraceString(ex: Throwable): String = stringFromWriter(ex printStackTrace _)
+ /** A one line string which contains the class of the exception, the
+ * message if any, and the first non-Predef location in the stack trace
+ * (to exclude assert, require, etc.)
+ */
+ def stackTraceHeadString(ex: Throwable): String = {
+ val frame = ex.getStackTrace.dropWhile(_.getClassName contains "Predef") take 1 mkString ""
+ val msg = ex.getMessage match { case null | "" => "" ; case s => s"""("$s")""" }
+ val clazz = ex.getClass.getName.split('.').last
+
+ s"$clazz$msg @ $frame"
+ }
+
lazy val trace = new SimpleTracer(System.out)
- lazy val errtrace = new SimpleTracer(System.err)
@deprecated("Moved to scala.reflect.internal.util.StringOps", "2.10.0")
val StringOps = scala.reflect.internal.util.StringOps
@@ -139,4 +133,13 @@ package object util {
@deprecated("Moved to scala.reflect.internal.util.BatchSourceFile", "2.10.0")
type BatchSourceFile = scala.reflect.internal.util.BatchSourceFile
+
+ @deprecated("Moved to scala.reflect.internal.util.AbstractFileClassLoader", "2.11.0")
+ type AbstractFileClassLoader = scala.reflect.internal.util.AbstractFileClassLoader
+
+ @deprecated("Moved to scala.reflect.internal.util.ScalaClassLoader", "2.11.0")
+ val ScalaClassLoader = scala.reflect.internal.util.ScalaClassLoader
+
+ @deprecated("Moved to scala.reflect.internal.util.ScalaClassLoader", "2.11.0")
+ type ScalaClassLoader = scala.reflect.internal.util.ScalaClassLoader
}
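The new stackTraceHeadString above condenses an exception to one line: the simple class name, the message in parentheses when present, and the first stack frame that is not in Predef, so assert/require wrappers do not hide the real caller. A REPL-style usage sketch, assuming the package object is in scope via scala.tools.nsc.util._:

    try require(false, "boom")
    catch {
      case ex: Throwable =>
        // prints something like: IllegalArgumentException("requirement failed: boom") @ <first non-Predef frame>
        println(stackTraceHeadString(ex))
    }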
diff --git a/src/compiler/scala/tools/reflect/FastTrack.scala b/src/compiler/scala/tools/reflect/FastTrack.scala
index d35ac43424..5a0ff4f6db 100644
--- a/src/compiler/scala/tools/reflect/FastTrack.scala
+++ b/src/compiler/scala/tools/reflect/FastTrack.scala
@@ -2,7 +2,10 @@ package scala.tools
package reflect
import scala.reflect.reify.Taggers
-import scala.tools.nsc.typechecker.{Analyzer, Macros}
+import scala.tools.nsc.typechecker.{ Analyzer, Macros }
+import scala.reflect.runtime.Macros.currentMirror
+import scala.reflect.api.Universe
+import scala.reflect.macros.compiler.DefaultMacroCompiler
/** Optimizes system macro expansions by hardwiring them directly to their implementations
* bypassing standard reflective load and invoke to avoid the overhead of Java/Scala reflection.
@@ -12,30 +15,32 @@ trait FastTrack {
import global._
import definitions._
-
import scala.language.implicitConversions
- private implicit def context2taggers(c0: MacroContext): Taggers { val c: c0.type } = new { val c: c0.type = c0 } with Taggers
- private implicit def context2macroimplementations(c0: MacroContext): MacroImplementations { val c: c0.type } = new { val c: c0.type = c0 } with MacroImplementations
+ import treeInfo.Applied
+
+ private implicit def context2taggers(c0: MacroContext): Taggers { val c: c0.type } =
+ new { val c: c0.type = c0 } with Taggers
+ private implicit def context2macroimplementations(c0: MacroContext): MacroImplementations { val c: c0.type } =
+ new { val c: c0.type = c0 } with MacroImplementations
+ private def make(sym: Symbol)(pf: PartialFunction[Applied, MacroContext => Tree]) =
+ sym -> new FastTrackEntry(pf)
- implicit def fastTrackEntry2MacroRuntime(entry: FastTrackEntry): MacroRuntime = args => entry.run(args.c)
- type FastTrackExpander = PartialFunction[(MacroContext, Tree), Tree]
- case class FastTrackEntry(sym: Symbol, expander: FastTrackExpander) {
- def validate(c: MacroContext): Boolean = expander.isDefinedAt((c, c.expandee))
- def run(c: MacroContext): Any = {
- val result = expander((c, c.expandee))
- c.Expr[Nothing](result)(c.WeakTypeTag.Nothing)
+ final class FastTrackEntry(pf: PartialFunction[Applied, MacroContext => Tree]) extends (MacroArgs => Any) {
+ def validate(tree: Tree) = pf isDefinedAt Applied(tree)
+ def apply(margs: MacroArgs): margs.c.Expr[Nothing] = {
+ val MacroArgs(c, _) = margs
+ // Macros validated that the pf is defined here - and there's not much we could do if it weren't.
+ c.Expr[Nothing](pf(Applied(c.expandee))(c))(c.WeakTypeTag.Nothing)
}
}
- lazy val fastTrack: Map[Symbol, FastTrackEntry] = {
- var registry = Map[Symbol, FastTrackEntry]()
- implicit class BindTo(sym: Symbol) { def bindTo(expander: FastTrackExpander): Unit = if (sym != NoSymbol) registry += sym -> FastTrackEntry(sym, expander) }
- materializeClassTag bindTo { case (c, Apply(TypeApply(_, List(tt)), List())) => c.materializeClassTag(tt.tpe) }
- materializeWeakTypeTag bindTo { case (c, Apply(TypeApply(_, List(tt)), List(u))) => c.materializeTypeTag(u, EmptyTree, tt.tpe, concrete = false) }
- materializeTypeTag bindTo { case (c, Apply(TypeApply(_, List(tt)), List(u))) => c.materializeTypeTag(u, EmptyTree, tt.tpe, concrete = true) }
- ApiUniverseReify bindTo { case (c, Apply(TypeApply(_, List(tt)), List(expr))) => c.materializeExpr(c.prefix.tree, EmptyTree, expr) }
- ReflectRuntimeCurrentMirror bindTo { case (c, _) => scala.reflect.runtime.Macros.currentMirror(c).tree }
- StringContext_f bindTo { case (c, app@Apply(Select(Apply(_, parts), _), args)) => c.macro_StringInterpolation_f(parts, args, app.pos) }
- registry
- }
+ /** A map from a set of pre-established macro symbols to their implementations. */
+ lazy val fastTrack = Map[Symbol, FastTrackEntry](
+ make( materializeClassTag) { case Applied(_, ttag :: Nil, _) => _.materializeClassTag(ttag.tpe) },
+ make( materializeWeakTypeTag) { case Applied(_, ttag :: Nil, (u :: _) :: _) => _.materializeTypeTag(u, EmptyTree, ttag.tpe, concrete = false) },
+ make( materializeTypeTag) { case Applied(_, ttag :: Nil, (u :: _) :: _) => _.materializeTypeTag(u, EmptyTree, ttag.tpe, concrete = true) },
+ make( ApiUniverseReify) { case Applied(_, ttag :: Nil, (expr :: _) :: _) => c => c.materializeExpr(c.prefix.tree, EmptyTree, expr) },
+ make( StringContext_f) { case Applied(Select(Apply(_, ps), _), _, args) => c => c.macro_StringInterpolation_f(ps, args.flatten, c.expandee.pos) },
+ make(ReflectRuntimeCurrentMirror) { case _ => c => currentMirror(c).tree }
+ )
}
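The rewrite above replaces the bindTo registration DSL with a plain Map built from make(symbol) { partial function } pairs, where FastTrackEntry keeps validate (is the partial function defined at this tree?) separate from apply (run it). A toy analogue of that shape outside the compiler, with illustrative names and Int/String standing in for trees and contexts:

    final class Entry(pf: PartialFunction[Int, String]) extends (Int => String) {
      def validate(n: Int): Boolean = pf isDefinedAt n
      def apply(n: Int): String     = pf(n)
    }
    def make(key: String)(pf: PartialFunction[Int, String]) = key -> new Entry(pf)

    val table = Map(
      make("even")  { case n if n % 2 == 0 => s"$n is even" },
      make("small") { case n if n < 10     => s"$n is small" }
    )
    // table("even").validate(4) == true ; table("even")(4) == "4 is even"
    // table("small").validate(99) == false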
diff --git a/src/compiler/scala/tools/reflect/FrontEnd.scala b/src/compiler/scala/tools/reflect/FrontEnd.scala
index f0d3d5973d..e3341a451f 100644
--- a/src/compiler/scala/tools/reflect/FrontEnd.scala
+++ b/src/compiler/scala/tools/reflect/FrontEnd.scala
@@ -21,7 +21,7 @@ trait FrontEnd {
def hasErrors = ERROR.count > 0
def hasWarnings = WARNING.count > 0
- case class Info(val pos: Position, val msg: String, val severity: Severity)
+ case class Info(pos: Position, msg: String, severity: Severity)
val infos = new scala.collection.mutable.LinkedHashSet[Info]
/** Handles incoming info */
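The explicit vals dropped above were redundant: constructor parameters of a case class are already public, stable fields. A REPL-style reminder:

    case class Point(x: Int, y: Int)   // x and y are public vals
    val p = Point(1, 2)
    p.x                                // 1
    // p.x = 3                         // does not compile: reassignment to val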
diff --git a/src/compiler/scala/tools/reflect/MacroImplementations.scala b/src/compiler/scala/tools/reflect/MacroImplementations.scala
index f4f385f8b3..8e1bcb5f87 100644
--- a/src/compiler/scala/tools/reflect/MacroImplementations.scala
+++ b/src/compiler/scala/tools/reflect/MacroImplementations.scala
@@ -1,7 +1,6 @@
package scala.tools.reflect
-import scala.reflect.macros.{ReificationException, UnexpectedReificationException}
-import scala.reflect.macros.runtime.Context
+import scala.reflect.macros.contexts.Context
import scala.collection.mutable.ListBuffer
import scala.collection.mutable.Stack
import scala.reflect.internal.util.OffsetPosition
@@ -35,7 +34,7 @@ abstract class MacroImplementations {
val argStack = Stack(args : _*)
def defval(value: Tree, tpe: Type): Unit = {
- val freshName = newTermName(c.fresh("arg$"))
+ val freshName = newTermName(c.freshName("arg$"))
evals += ValDef(Modifiers(), freshName, TypeTree(tpe) setPos value.pos.focus, value) setPos value.pos
ids += Ident(freshName)
}
@@ -106,7 +105,7 @@ abstract class MacroImplementations {
// 6) "...${smth}[%legalJavaConversion]" => okay, according to http://docs.oracle.com/javase/1.5.0/docs/api/java/util/Formatter.html
// 7) "...${smth}[%illegalJavaConversion]" => error
if (!first) {
- val arg = argStack.pop
+ val arg = argStack.pop()
if (isConversion(0)) {
// PRE str is not empty and str(0) == '%'
// argument index parameter is not allowed, thus parse
@@ -163,9 +162,9 @@ abstract class MacroImplementations {
Literal(Constant(fstring)),
newTermName("format")),
List(ids: _* )
- );
+ )
Block(evals.toList, atPos(origApplyPos.focus)(expr)) setPos origApplyPos.makeTransparent
}
-} \ No newline at end of file
+}
diff --git a/src/compiler/scala/tools/reflect/ReflectMain.scala b/src/compiler/scala/tools/reflect/ReflectMain.scala
index 116ae24cdd..3ae21b6b98 100644
--- a/src/compiler/scala/tools/reflect/ReflectMain.scala
+++ b/src/compiler/scala/tools/reflect/ReflectMain.scala
@@ -4,7 +4,6 @@ package reflect
import scala.tools.nsc.Driver
import scala.tools.nsc.Global
import scala.tools.nsc.Settings
-import scala.tools.nsc.util.ClassPath.DefaultJavaContext
import scala.tools.nsc.util.ScalaClassLoader
import scala.tools.util.PathResolver
@@ -16,4 +15,4 @@ object ReflectMain extends Driver {
}
override def newCompiler(): Global = new ReflectGlobal(settings, reporter, classloaderFromSettings(settings))
-} \ No newline at end of file
+}
diff --git a/src/compiler/scala/tools/reflect/StdTags.scala b/src/compiler/scala/tools/reflect/StdTags.scala
index a3bc9b9bd1..6c1821f8aa 100644
--- a/src/compiler/scala/tools/reflect/StdTags.scala
+++ b/src/compiler/scala/tools/reflect/StdTags.scala
@@ -1,7 +1,6 @@
package scala.tools
package reflect
-import java.lang.{Class => jClass}
import scala.reflect.{ClassTag, classTag}
import scala.reflect.api.{Mirror, TypeCreator, Universe => ApiUniverse}
@@ -24,7 +23,7 @@ trait StdTags {
}
})
- private def tagOfStaticClass[T: ClassTag]: u.TypeTag[T] =
+ protected def tagOfStaticClass[T: ClassTag]: u.TypeTag[T] =
u.TypeTag[T](
m,
new TypeCreator {
@@ -35,8 +34,6 @@ trait StdTags {
lazy val tagOfString = tagOfStaticClass[String]
lazy val tagOfFile = tagOfStaticClass[scala.tools.nsc.io.File]
lazy val tagOfDirectory = tagOfStaticClass[scala.tools.nsc.io.Directory]
- lazy val tagOfStdReplVals = tagOfStaticClass[scala.tools.nsc.interpreter.StdReplVals]
- lazy val tagOfIMain = tagOfStaticClass[scala.tools.nsc.interpreter.IMain]
lazy val tagOfThrowable = tagOfStaticClass[java.lang.Throwable]
lazy val tagOfClassLoader = tagOfStaticClass[java.lang.ClassLoader]
lazy val tagOfBigInt = tagOfStaticClass[BigInt]
diff --git a/src/compiler/scala/tools/reflect/ToolBox.scala b/src/compiler/scala/tools/reflect/ToolBox.scala
index ab814b617d..be22003114 100644
--- a/src/compiler/scala/tools/reflect/ToolBox.scala
+++ b/src/compiler/scala/tools/reflect/ToolBox.scala
@@ -101,4 +101,4 @@ trait ToolBox[U <: scala.reflect.api.Universe] {
/** Represents an error during toolboxing
*/
-case class ToolBoxError(val message: String, val cause: Throwable = null) extends Throwable(message, cause)
+case class ToolBoxError(message: String, cause: Throwable = null) extends Throwable(message, cause)
diff --git a/src/compiler/scala/tools/reflect/ToolBoxFactory.scala b/src/compiler/scala/tools/reflect/ToolBoxFactory.scala
index 64b8870619..c53d10bd87 100644
--- a/src/compiler/scala/tools/reflect/ToolBoxFactory.scala
+++ b/src/compiler/scala/tools/reflect/ToolBoxFactory.scala
@@ -1,13 +1,11 @@
-package scala.tools
+package scala
+package tools
package reflect
import scala.tools.nsc.reporters._
import scala.tools.nsc.CompilerCommand
-import scala.tools.nsc.Global
-import scala.tools.nsc.typechecker.Modes
import scala.tools.nsc.io.VirtualDirectory
-import scala.tools.nsc.interpreter.AbstractFileClassLoader
-import scala.tools.nsc.util.FreshNameCreator
+import scala.tools.nsc.util.AbstractFileClassLoader
import scala.reflect.internal.Flags._
import scala.reflect.internal.util.{BatchSourceFile, NoSourceFile, NoFile}
import java.lang.{Class => jClass}
@@ -29,8 +27,8 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf =>
lazy val classLoader = new AbstractFileClassLoader(virtualDirectory, factorySelf.mirror.classLoader)
lazy val mirror: u.Mirror = u.runtimeMirror(classLoader)
- class ToolBoxGlobal(settings: scala.tools.nsc.Settings, reporter: Reporter)
- extends ReflectGlobal(settings, reporter, toolBoxSelf.classLoader) {
+ class ToolBoxGlobal(settings: scala.tools.nsc.Settings, reporter0: Reporter)
+ extends ReflectGlobal(settings, reporter0, toolBoxSelf.classLoader) {
import definitions._
private val trace = scala.tools.nsc.util.trace when settings.debug.value
@@ -73,13 +71,14 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf =>
val typed = expr filter (t => t.tpe != null && t.tpe != NoType && !t.isInstanceOf[TypeTree])
if (!typed.isEmpty) throw ToolBoxError("reflective toolbox has failed: cannot operate on trees that are already typed")
- val freeTypes = expr.freeTypes
- if (freeTypes.length > 0) {
- var msg = "reflective toolbox has failed:" + EOL
- msg += "unresolved free type variables (namely: " + (freeTypes map (ft => "%s %s".format(ft.name, ft.origin)) mkString ", ") + "). "
- msg += "have you forgot to use TypeTag annotations for type parameters external to a reifee? "
- msg += "if you have troubles tracking free type variables, consider using -Xlog-free-types"
- throw ToolBoxError(msg)
+ if (expr.freeTypes.nonEmpty) {
+ val ft_s = expr.freeTypes map (ft => s" ${ft.name} ${ft.origin}") mkString "\n "
+ throw ToolBoxError(s"""
+ |reflective toolbox failed due to unresolved free type variables:
+ |$ft_s
+ |have you forgotten to use TypeTag annotations for type parameters external to a reifee?
+ |if you have troubles tracking free type variables, consider using -Xlog-free-types
+ """.stripMargin.trim)
}
}
@@ -100,9 +99,9 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf =>
if (namesakes.length > 0) name += ("$" + (namesakes.length + 1))
freeTermNames += (ft -> newTermName(name + nme.REIFY_FREE_VALUE_SUFFIX))
})
- var expr = new Transformer {
+ val expr = new Transformer {
override def transform(tree: Tree): Tree =
- if (tree.hasSymbol && tree.symbol.isFreeTerm) {
+ if (tree.hasSymbolField && tree.symbol.isFreeTerm) {
tree match {
case Ident(_) =>
val freeTermRef = Ident(freeTermNames(tree.symbol.asFreeTerm))
@@ -130,9 +129,9 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf =>
// it inaccessible then please put it somewhere designed for that
// rather than polluting the empty package with synthetics.
val ownerClass = rootMirror.EmptyPackageClass.newClassSymbol(newTypeName("<expression-owner>"))
- build.setTypeSignature(ownerClass, ClassInfoType(List(ObjectClass.tpe), newScope, ownerClass))
+ build.setTypeSignature(ownerClass, ClassInfoType(List(ObjectTpe), newScope, ownerClass))
val owner = ownerClass.newLocalDummy(expr.pos)
- var currentTyper = analyzer.newTyper(analyzer.rootContext(NoCompilationUnit, EmptyTree).make(expr, owner))
+ val currentTyper = analyzer.newTyper(analyzer.rootContext(NoCompilationUnit, EmptyTree).make(expr, owner))
val wrapper1 = if (!withImplicitViewsDisabled) (currentTyper.context.withImplicitsEnabled[Tree] _) else (currentTyper.context.withImplicitsDisabled[Tree] _)
val wrapper2 = if (!withMacrosDisabled) (currentTyper.context.withMacrosEnabled[Tree] _) else (currentTyper.context.withMacrosDisabled[Tree] _)
def wrapper (tree: => Tree) = wrapper1(wrapper2(tree))
@@ -143,21 +142,21 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf =>
val expr1 = wrapper(transform(currentTyper, expr))
var (dummies1, unwrapped) = expr1 match {
- case Block(dummies, unwrapped) => (dummies, unwrapped)
- case unwrapped => (Nil, unwrapped)
+ case Block(dummies, unwrapped) => ((dummies, unwrapped))
+ case unwrapped => ((Nil, unwrapped))
}
- var invertedIndex = freeTerms map (_.swap)
+ val invertedIndex = freeTerms map (_.swap)
// todo. also fixup singleton types
unwrapped = new Transformer {
override def transform(tree: Tree): Tree =
tree match {
- case Ident(name) if invertedIndex contains name =>
+ case Ident(name: TermName) if invertedIndex contains name =>
Ident(invertedIndex(name)) setType tree.tpe
case _ =>
super.transform(tree)
}
}.transform(unwrapped)
- new TreeTypeSubstituter(dummies1 map (_.symbol), dummies1 map (dummy => SingleType(NoPrefix, invertedIndex(dummy.symbol.name)))).traverse(unwrapped)
+ new TreeTypeSubstituter(dummies1 map (_.symbol), dummies1 map (dummy => SingleType(NoPrefix, invertedIndex(dummy.symbol.name.toTermName)))).traverse(unwrapped)
unwrapped = if (expr0.isTerm) unwrapped else unwrapFromTerm(unwrapped)
unwrapped
}
@@ -166,7 +165,7 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf =>
transformDuringTyper(expr, withImplicitViewsDisabled = withImplicitViewsDisabled, withMacrosDisabled = withMacrosDisabled)(
(currentTyper, expr) => {
trace("typing (implicit views = %s, macros = %s): ".format(!withImplicitViewsDisabled, !withMacrosDisabled))(showAttributed(expr, true, true, settings.Yshowsymkinds.value))
- currentTyper.silent(_.typed(expr, analyzer.EXPRmode, pt), reportAmbiguousErrors = false) match {
+ currentTyper.silent(_.typed(expr, pt), reportAmbiguousErrors = false) match {
case analyzer.SilentResultValue(result) =>
trace("success: ")(showAttributed(result, true, true, settings.Yshowsymkinds.value))
result
@@ -194,10 +193,10 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf =>
def wrap(expr0: Tree): ModuleDef = {
val (expr, freeTerms) = extractFreeTerms(expr0, wrapFreeTermRefs = true)
- val (obj, mclazz) = rootMirror.EmptyPackageClass.newModuleAndClassSymbol(
+ val (obj, _) = rootMirror.EmptyPackageClass.newModuleAndClassSymbol(
nextWrapperModuleName())
- val minfo = ClassInfoType(List(ObjectClass.tpe), newScope, obj.moduleClass)
+ val minfo = ClassInfoType(List(ObjectTpe), newScope, obj.moduleClass)
obj.moduleClass setInfo minfo
obj setInfo obj.moduleClass.tpe
@@ -207,7 +206,7 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf =>
val (fv, name) = schema
meth.newValueParameter(name, newFlags = if (fv.hasStableFlag) STABLE else 0) setInfo appliedType(definitions.FunctionClass(0).tpe, List(fv.tpe.resultType))
}
- meth setInfo MethodType(freeTerms.map(makeParam).toList, AnyClass.tpe)
+ meth setInfo MethodType(freeTerms.map(makeParam).toList, AnyTpe)
minfo.decls enter meth
def defOwner(tree: Tree): Symbol = tree find (_.isDef) map (_.symbol) match {
case Some(sym) if sym != null && sym != NoSymbol => sym.owner
@@ -219,16 +218,15 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf =>
val moduledef = ModuleDef(
obj,
Template(
- List(TypeTree(ObjectClass.tpe)),
+ List(TypeTree(ObjectTpe)),
emptyValDef,
NoMods,
List(),
- List(List()),
List(methdef),
NoPosition))
trace("wrapped: ")(showAttributed(moduledef, true, true, settings.Yshowsymkinds.value))
- var cleanedUp = resetLocalAttrs(moduledef)
+ val cleanedUp = resetLocalAttrs(moduledef)
trace("cleaned up: ")(showAttributed(cleanedUp, true, true, settings.Yshowsymkinds.value))
cleanedUp.asInstanceOf[ModuleDef]
}
@@ -244,7 +242,7 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf =>
throwIfErrors()
val className = mdef.symbol.fullName
- if (settings.debug.value) println("generated: "+className)
+ if (settings.debug) println("generated: "+className)
def moduleFileName(className: String) = className + "$"
val jclazz = jClass.forName(moduleFileName(className), true, classLoader)
val jmeth = jclazz.getDeclaredMethods.find(_.getName == wrapperMethodName).get
@@ -277,7 +275,7 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf =>
val file = new BatchSourceFile("<toolbox>", wrappedCode)
val unit = new CompilationUnit(file)
phase = run.parserPhase
- val parser = new syntaxAnalyzer.UnitParser(unit)
+ val parser = newUnitParser(unit)
val wrappedTree = parser.parse()
throwIfErrors()
val PackageDef(_, List(ModuleDef(_, _, Template(_, _, _ :: parsed)))) = wrappedTree
@@ -305,11 +303,9 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf =>
// reporter doesn't accumulate errors, but the front-end does
def throwIfErrors() = {
- if (frontEnd.hasErrors) {
- var msg = "reflective compilation has failed: " + EOL + EOL
- msg += frontEnd.infos map (_.msg) mkString EOL
- throw ToolBoxError(msg)
- }
+ if (frontEnd.hasErrors) throw ToolBoxError(
+ "reflective compilation has failed: " + EOL + EOL + (frontEnd.infos map (_.msg) mkString EOL)
+ )
}
}
@@ -329,15 +325,15 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf =>
command.settings.outputDirs setSingleOutput virtualDirectory
val instance = new ToolBoxGlobal(command.settings, frontEndToReporter(frontEnd, command.settings))
if (frontEnd.hasErrors) {
- var msg = "reflective compilation has failed: cannot initialize the compiler: " + EOL + EOL
- msg += frontEnd.infos map (_.msg) mkString EOL
- throw ToolBoxError(msg)
+ throw ToolBoxError(
+ "reflective compilation has failed: cannot initialize the compiler: " + EOL + EOL +
+ (frontEnd.infos map (_.msg) mkString EOL)
+ )
}
instance
} catch {
case ex: Throwable =>
- var msg = "reflective compilation has failed: cannot initialize the compiler due to %s".format(ex.toString)
- throw ToolBoxError(msg, ex)
+ throw ToolBoxError(s"reflective compilation has failed: cannot initialize the compiler due to $ex", ex)
}
}
@@ -345,11 +341,11 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf =>
lazy val exporter = importer.reverse
def typeCheck(tree: u.Tree, expectedType: u.Type, silent: Boolean = false, withImplicitViewsDisabled: Boolean = false, withMacrosDisabled: Boolean = false): u.Tree = compiler.withCleanupCaches {
- if (compiler.settings.verbose.value) println("importing "+tree+", expectedType = "+expectedType)
- var ctree: compiler.Tree = importer.importTree(tree)
- var cexpectedType: compiler.Type = importer.importType(expectedType)
+ if (compiler.settings.verbose) println("importing "+tree+", expectedType = "+expectedType)
+ val ctree: compiler.Tree = importer.importTree(tree)
+ val cexpectedType: compiler.Type = importer.importType(expectedType)
- if (compiler.settings.verbose.value) println("typing "+ctree+", expectedType = "+expectedType)
+ if (compiler.settings.verbose) println("typing "+ctree+", expectedType = "+expectedType)
val ttree: compiler.Tree = compiler.typeCheck(ctree, cexpectedType, silent = silent, withImplicitViewsDisabled = withImplicitViewsDisabled, withMacrosDisabled = withMacrosDisabled)
val uttree = exporter.importTree(ttree)
uttree
@@ -365,12 +361,12 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf =>
}
private def inferImplicit(tree: u.Tree, pt: u.Type, isView: Boolean, silent: Boolean, withMacrosDisabled: Boolean, pos: u.Position): u.Tree = compiler.withCleanupCaches {
- if (compiler.settings.verbose.value) println("importing "+pt, ", tree = "+tree+", pos = "+pos)
- var ctree: compiler.Tree = importer.importTree(tree)
- var cpt: compiler.Type = importer.importType(pt)
- var cpos: compiler.Position = importer.importPosition(pos)
+ if (compiler.settings.verbose) println(s"importing pt=$pt, tree=$tree, pos=$pos")
+ val ctree: compiler.Tree = importer.importTree(tree)
+ val cpt: compiler.Type = importer.importType(pt)
+ val cpos: compiler.Position = importer.importPosition(pos)
- if (compiler.settings.verbose.value) println("inferring implicit %s of type %s, macros = %s".format(if (isView) "view" else "value", pt, !withMacrosDisabled))
+ if (compiler.settings.verbose) println("inferring implicit %s of type %s, macros = %s".format(if (isView) "view" else "value", pt, !withMacrosDisabled))
val itree: compiler.Tree = compiler.inferImplicit(ctree, cpt, isView = isView, silent = silent, withMacrosDisabled = withMacrosDisabled, pos = cpos)
val uitree = exporter.importTree(itree)
uitree
@@ -390,21 +386,18 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf =>
uttree
}
- def showAttributed(tree: u.Tree, printTypes: Boolean = true, printIds: Boolean = true, printKinds: Boolean = false): String =
- compiler.showAttributed(importer.importTree(tree), printTypes, printIds, printKinds)
-
def parse(code: String): u.Tree = {
- if (compiler.settings.verbose.value) println("parsing "+code)
+ if (compiler.settings.verbose) println("parsing "+code)
val ctree: compiler.Tree = compiler.parse(code)
val utree = exporter.importTree(ctree)
utree
}
def compile(tree: u.Tree): () => Any = {
- if (compiler.settings.verbose.value) println("importing "+tree)
- var ctree: compiler.Tree = importer.importTree(tree)
+ if (compiler.settings.verbose) println("importing "+tree)
+ val ctree: compiler.Tree = importer.importTree(tree)
- if (compiler.settings.verbose.value) println("compiling "+ctree)
+ if (compiler.settings.verbose) println("compiling "+ctree)
compiler.compile(ctree)
}
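Several of the hunks above replace imperative msg += ... concatenation with a single interpolated, stripMargin'ed string, as in the free-type-variable error. The pattern in isolation (the sample values are made up):

    val freeTypes = List("T (defined by method foo)", "U (defined by class Bar)")
    val ft_s      = freeTypes map (ft => s"  $ft") mkString "\n"
    val msg = s"""
      |reflective toolbox failed due to unresolved free type variables:
      |$ft_s
      |have you forgotten to use TypeTag annotations for type parameters external to a reifee?
      """.stripMargin.trim
    println(msg)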
diff --git a/src/compiler/scala/tools/reflect/WrappedProperties.scala b/src/compiler/scala/tools/reflect/WrappedProperties.scala
index c34edb8ba0..20567719be 100644
--- a/src/compiler/scala/tools/reflect/WrappedProperties.scala
+++ b/src/compiler/scala/tools/reflect/WrappedProperties.scala
@@ -25,6 +25,7 @@ trait WrappedProperties extends PropertiesTrait {
override def clearProp(name: String) = wrap(super.clearProp(name)).orNull
override def envOrElse(name: String, alt: String) = wrap(super.envOrElse(name, alt)) getOrElse alt
override def envOrNone(name: String) = wrap(super.envOrNone(name)).flatten
+ override def envOrSome(name: String, alt: Option[String]) = wrap(super.envOrNone(name)).flatten orElse alt
def systemProperties: Iterator[(String, String)] = {
import scala.collection.JavaConverters._
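envOrSome is a new variant of envOrNone that falls back to a caller-supplied Option instead of None. Its observable behaviour, minus the SecurityException handling that wrap provides, is simply:

    def envOrSome(name: String, alt: Option[String]): Option[String] =
      sys.env.get(name) orElse alt

    // envOrSome("PATH", None)                 -> Some(...) on most systems
    // envOrSome("NO_SUCH_VAR", Some("dflt"))  -> Some("dflt")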
diff --git a/src/compiler/scala/tools/reflect/package.scala b/src/compiler/scala/tools/reflect/package.scala
index 3f880bf7f8..1055894121 100644
--- a/src/compiler/scala/tools/reflect/package.scala
+++ b/src/compiler/scala/tools/reflect/package.scala
@@ -32,7 +32,7 @@ package object reflect {
/** Creates a reporter that prints messages to the console according to the settings.
*
- * ``minSeverity'' determines minimum severity of the messages to be printed.
+ * `minSeverity` determines minimum severity of the messages to be printed.
* 0 stands for INFO, 1 stands for WARNING and 2 stands for ERROR.
*/
// todo. untangle warningsAsErrors from Reporters. I don't feel like moving this flag here!
@@ -52,7 +52,7 @@ package object reflect {
override def hasWarnings = reporter.hasWarnings
def display(info: Info): Unit = info.severity match {
- case API_INFO => reporter.info(info.pos, info.msg, false)
+ case API_INFO => reporter.info(info.pos, info.msg, force = false)
case API_WARNING => reporter.warning(info.pos, info.msg)
case API_ERROR => reporter.error(info.pos, info.msg)
}
@@ -76,7 +76,6 @@ package object reflect {
private[reflect] def frontEndToReporter(frontEnd: FrontEnd, settings0: Settings): Reporter = new AbstractReporter {
val settings = settings0
- import frontEnd.{Severity => ApiSeverity}
val API_INFO = frontEnd.INFO
val API_WARNING = frontEnd.WARNING
val API_ERROR = frontEnd.ERROR
diff --git a/src/compiler/scala/tools/util/Javap.scala b/src/compiler/scala/tools/util/Javap.scala
index c3264d0787..3cfc1eb2a1 100644
--- a/src/compiler/scala/tools/util/Javap.scala
+++ b/src/compiler/scala/tools/util/Javap.scala
@@ -6,13 +6,14 @@
package scala.tools
package util
-import java.lang.reflect.{ GenericSignatureFormatError, Method, Constructor }
-import java.lang.{ ClassLoader => JavaClassLoader }
import scala.tools.nsc.util.ScalaClassLoader
-import java.io.{ InputStream, PrintWriter, ByteArrayInputStream, FileNotFoundException }
-import scala.tools.nsc.io.File
-import Javap._
-import scala.language.reflectiveCalls
+import java.io.PrintWriter
+
+trait JpResult {
+ def isError: Boolean
+ def value: Any
+ def show(): Unit
+}
trait Javap {
def loader: ScalaClassLoader
@@ -29,147 +30,3 @@ object NoJavap extends Javap {
def tryFile(path: String): Option[Array[Byte]] = None
def tryClass(path: String): Array[Byte] = Array()
}
-
-class JavapClass(
- val loader: ScalaClassLoader = ScalaClassLoader.appLoader,
- val printWriter: PrintWriter = new PrintWriter(System.out, true)
-) extends Javap {
-
- lazy val parser = new JpOptions
-
- val EnvClass = loader.tryToInitializeClass[FakeEnvironment](Env).orNull
- val PrinterClass = loader.tryToInitializeClass[FakePrinter](Printer).orNull
- private def failed = (EnvClass eq null) || (PrinterClass eq null)
-
- val PrinterCtr = (
- if (failed) null
- else PrinterClass.getConstructor(classOf[InputStream], classOf[PrintWriter], EnvClass)
- )
-
- def findBytes(path: String): Array[Byte] =
- tryFile(path) getOrElse tryClass(path)
-
- def apply(args: Seq[String]): List[JpResult] = {
- if (failed) Nil
- else args.toList filterNot (_ startsWith "-") map { path =>
- val bytes = findBytes(path)
- if (bytes.isEmpty) new JpError("Could not find class bytes for '%s'".format(path))
- else new JpSuccess(newPrinter(new ByteArrayInputStream(bytes), newEnv(args)))
- }
- }
-
- def newPrinter(in: InputStream, env: FakeEnvironment): FakePrinter =
- if (failed) null
- else PrinterCtr.newInstance(in, printWriter, env)
-
- def newEnv(opts: Seq[String]): FakeEnvironment = {
- lazy val env: FakeEnvironment = EnvClass.newInstance()
-
- if (failed) null
- else parser(opts) foreach { case (name, value) =>
- val field = EnvClass getDeclaredField name
- field setAccessible true
- field.set(env, value.asInstanceOf[AnyRef])
- }
-
- env
- }
-
- /** Assume the string is a path and try to find the classfile
- * it represents.
- */
- def tryFile(path: String): Option[Array[Byte]] = {
- val file = File(
- if (path.endsWith(".class")) path
- else path.replace('.', '/') + ".class"
- )
- if (!file.exists) None
- else try Some(file.toByteArray) catch { case x: Exception => None }
- }
- /** Assume the string is a fully qualified class name and try to
- * find the class object it represents.
- */
- def tryClass(path: String): Array[Byte] = {
- val extName = (
- if (path endsWith ".class") (path dropRight 6).replace('/', '.')
- else path
- )
- loader.classBytes(extName)
- }
-}
-
-object Javap {
- val Env = "sun.tools.javap.JavapEnvironment"
- val Printer = "sun.tools.javap.JavapPrinter"
-
- def isAvailable(cl: ScalaClassLoader = ScalaClassLoader.appLoader) =
- cl.tryToInitializeClass[AnyRef](Env).isDefined
-
- // "documentation"
- type FakeEnvironment = AnyRef
- type FakePrinter = AnyRef
-
- def apply(path: String): Unit = apply(Seq(path))
- def apply(args: Seq[String]): Unit = new JavapClass() apply args foreach (_.show())
-
- sealed trait JpResult {
- type ResultType
- def isError: Boolean
- def value: ResultType
- def show(): Unit
- // todo
- // def header(): String
- // def fields(): List[String]
- // def methods(): List[String]
- // def signatures(): List[String]
- }
- class JpError(msg: String) extends JpResult {
- type ResultType = String
- def isError = true
- def value = msg
- def show() = println(msg)
- }
- class JpSuccess(val value: AnyRef) extends JpResult {
- type ResultType = AnyRef
- def isError = false
- def show() = value.asInstanceOf[{ def print(): Unit }].print()
- }
-
- class JpOptions {
- private object Access {
- final val PRIVATE = 0
- final val PROTECTED = 1
- final val PACKAGE = 2
- final val PUBLIC = 3
- }
- private val envActionMap: Map[String, (String, Any)] = {
- val map = Map(
- "-l" -> (("showLineAndLocal", true)),
- "-c" -> (("showDisassembled", true)),
- "-s" -> (("showInternalSigs", true)),
- "-verbose" -> (("showVerbose", true)),
- "-private" -> (("showAccess", Access.PRIVATE)),
- "-package" -> (("showAccess", Access.PACKAGE)),
- "-protected" -> (("showAccess", Access.PROTECTED)),
- "-public" -> (("showAccess", Access.PUBLIC)),
- "-all" -> (("showallAttr", true))
- )
- map ++ List(
- "-v" -> map("-verbose"),
- "-p" -> map("-private")
- )
- }
- def apply(opts: Seq[String]): Seq[(String, Any)] = {
- opts flatMap { opt =>
- envActionMap get opt match {
- case Some(pair) => List(pair)
- case _ =>
- val charOpts = opt.tail.toSeq map ("-" + _)
- if (charOpts forall (envActionMap contains _))
- charOpts map envActionMap
- else Nil
- }
- }
- }
- }
-}
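
[editor's note] The JpOptions parser above maps javap-style flags onto fields of sun.tools.javap.JavapEnvironment and also accepts bundled single-character options such as "-lc". A minimal standalone sketch of that expansion step, with a trimmed-down flag table (the object name and the reduced set of flags below are illustrative only):

object JavapFlags {
  // Trimmed-down flag table; each known flag names the environment field
  // it would set and the value to assign.
  val envActionMap: Map[String, (String, Any)] = Map(
    "-l" -> (("showLineAndLocal", true)),
    "-c" -> (("showDisassembled", true)),
    "-s" -> (("showInternalSigs", true))
  )

  // A known flag maps directly; a bundle like "-lc" is accepted only if
  // every character expands to a known flag; anything else is dropped.
  def expand(opts: Seq[String]): Seq[(String, Any)] =
    opts flatMap { opt =>
      envActionMap get opt match {
        case Some(pair) => List(pair)
        case None =>
          val charOpts = opt.tail.toSeq map ("-" + _)
          if (charOpts forall (envActionMap contains _)) charOpts map envActionMap
          else Nil
      }
    }

  def main(args: Array[String]): Unit = {
    println(expand(Seq("-lc")))  // List((showLineAndLocal,true), (showDisassembled,true))
    println(expand(Seq("-x")))   // List()
  }
}
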
diff --git a/src/compiler/scala/tools/util/PathResolver.scala b/src/compiler/scala/tools/util/PathResolver.scala
index 0af1011bda..bdd6a02043 100644
--- a/src/compiler/scala/tools/util/PathResolver.scala
+++ b/src/compiler/scala/tools/util/PathResolver.scala
@@ -3,37 +3,45 @@
* @author Paul Phillips
*/
-package scala.tools
+package scala
+package tools
package util
-import java.net.{ URL, MalformedURLException }
import scala.tools.reflect.WrappedProperties.AccessControl
-import nsc.{ Settings, GenericRunnerSettings }
-import nsc.util.{ ClassPath, JavaClassPath, ScalaClassLoader }
-import nsc.io.{ File, Directory, Path, AbstractFile }
+import scala.tools.nsc.{ Settings, GenericRunnerSettings }
+import scala.tools.nsc.util.{ ClassPath, JavaClassPath, ScalaClassLoader }
+import scala.reflect.io.{ File, Directory, Path, AbstractFile }
import ClassPath.{ JavaContext, DefaultJavaContext, join, split }
import PartialFunction.condOpt
import scala.language.postfixOps
// Loosely based on the draft specification at:
-// https://wiki.scala-lang.org/display/SW/Classpath
+// https://wiki.scala-lang.org/display/SIW/Classpath
object PathResolver {
- // Imports property/environment functions which suppress
- // security exceptions.
+ // Imports property/environment functions which suppress security exceptions.
import AccessControl._
-
- def firstNonEmpty(xs: String*) = xs find (_ != "") getOrElse ""
-
- /** Map all classpath elements to absolute paths and reconstruct the classpath.
- */
- def makeAbsolute(cp: String) = ClassPath.map(cp, x => Path(x).toAbsolute.path)
+ import scala.compat.Platform.EOL
+
+ implicit class MkLines(val t: TraversableOnce[_]) extends AnyVal {
+ def mkLines: String = t.mkString("", EOL, EOL)
+ def mkLines(header: String, indented: Boolean = false, embraced: Boolean = false): String = {
+ val space = "\u0020"
+ val sep = if (indented) EOL + space * 2 else EOL
+ val (lbrace, rbrace) = if (embraced) (space + "{", EOL + "}") else ("", "")
+ t.mkString(header + lbrace + sep, sep, rbrace + EOL)
+ }
+ }
+ implicit class AsLines(val s: String) extends AnyVal {
+ // sm"""...""" could do this in one pass
+ def asLines = s.trim.stripMargin.lines.mkLines
+ }
/** pretty print class path */
def ppcp(s: String) = split(s) match {
case Nil => ""
case Seq(x) => x
- case xs => xs map ("\n" + _) mkString
+ case xs => xs.mkString(EOL, EOL, "")
}
/** Values found solely by inspecting environment or property variables.
@@ -46,8 +54,7 @@ object PathResolver {
/** Environment variables which java pays attention to so it
* seems we do as well.
*/
- def classPathEnv = envOrElse("CLASSPATH", "")
- def sourcePathEnv = envOrElse("SOURCEPATH", "")
+ def sourcePathEnv = envOrElse("SOURCEPATH", "")
def javaBootClassPath = propOrElse("sun.boot.class.path", searchForBootClasspath)
def javaExtDirs = propOrEmpty("java.ext.dirs")
@@ -58,20 +65,14 @@ object PathResolver {
def javaUserClassPath = propOrElse("java.class.path", "")
def useJavaClassPath = propOrFalse("scala.usejavacp")
- override def toString = """
+ override def toString = s"""
|object Environment {
- | scalaHome = %s (useJavaClassPath = %s)
- | javaBootClassPath = <%d chars>
- | javaExtDirs = %s
- | javaUserClassPath = %s
- | scalaExtDirs = %s
- |}""".trim.stripMargin.format(
- scalaHome, useJavaClassPath,
- javaBootClassPath.length,
- ppcp(javaExtDirs),
- ppcp(javaUserClassPath),
- ppcp(scalaExtDirs)
- )
+ | scalaHome = $scalaHome (useJavaClassPath = $useJavaClassPath)
+ | javaBootClassPath = <${javaBootClassPath.length} chars>
+ | javaExtDirs = ${ppcp(javaExtDirs)}
+ | javaUserClassPath = ${ppcp(javaUserClassPath)}
+ | scalaExtDirs = ${ppcp(scalaExtDirs)}
+ |}""".asLines
}
/** Default values based on those in Environment as interpreted according
@@ -86,7 +87,6 @@ object PathResolver {
def scalaHome = Environment.scalaHome
def scalaHomeDir = Directory(scalaHome)
- def scalaHomeExists = scalaHomeDir.isDirectory
def scalaLibDir = Directory(scalaHomeDir / "lib")
def scalaClassesDir = Directory(scalaHomeDir / "classes")
@@ -109,33 +109,61 @@ object PathResolver {
// classpath as set up by the runner (or regular classpath under -nobootcp)
// and then again here.
def scalaBootClassPath = ""
- // scalaLibDirFound match {
- // case Some(dir) if scalaHomeExists =>
- // val paths = ClassPath expandDir dir.path
- // join(paths: _*)
- // case _ => ""
- // }
-
def scalaExtDirs = Environment.scalaExtDirs
-
def scalaPluginPath = (scalaHomeDir / "misc" / "scala-devel" / "plugins").path
- override def toString = """
+ override def toString = s"""
|object Defaults {
- | scalaHome = %s
- | javaBootClassPath = %s
- | scalaLibDirFound = %s
- | scalaLibFound = %s
- | scalaBootClassPath = %s
- | scalaPluginPath = %s
- |}""".trim.stripMargin.format(
- scalaHome,
- ppcp(javaBootClassPath),
- scalaLibDirFound, scalaLibFound,
- ppcp(scalaBootClassPath), ppcp(scalaPluginPath)
- )
+ | scalaHome = $scalaHome
+ | javaBootClassPath = ${ppcp(javaBootClassPath)}
+ | scalaLibDirFound = $scalaLibDirFound
+ | scalaLibFound = $scalaLibFound
+ | scalaBootClassPath = ${ppcp(scalaBootClassPath)}
+ | scalaPluginPath = ${ppcp(scalaPluginPath)}
+ |}""".asLines
+ }
+
+ /** Locations discovered by supplemental heuristics.
+ */
+ object SupplementalLocations {
+
+ /** The platform-specific support jar.
+ *
+ * Usually this is `tools.jar` in the jdk/lib directory of the platform distribution.
+ *
+ * The file location is determined by probing the lib directory under JDK_HOME or JAVA_HOME,
+ * if one of those environment variables is set, then the lib directory under java.home,
+ * and finally the lib directory under the parent of java.home. Or, as a last resort,
+ * search deeply under those locations (except for the parent of java.home, on the notion
+ * that if this is not a canonical installation, then that search would have little
+ * chance of succeeding).
+ */
+ def platformTools: Option[File] = {
+ val jarName = "tools.jar"
+ def jarPath(path: Path) = (path / "lib" / jarName).toFile
+ def jarAt(path: Path) = {
+ val f = jarPath(path)
+ if (f.isFile) Some(f) else None
+ }
+ val jdkDir = {
+ val d = Directory(jdkHome)
+ if (d.isDirectory) Some(d) else None
+ }
+ def deeply(dir: Directory) = dir.deepFiles find (_.name == jarName)
+
+ val home = envOrSome("JDK_HOME", envOrNone("JAVA_HOME")) map (p => Path(p))
+ val install = Some(Path(javaHome))
+
+ (home flatMap jarAt) orElse (install flatMap jarAt) orElse (install map (_.parent) flatMap jarAt) orElse
+ (jdkDir flatMap deeply)
+ }
+ override def toString = s"""
+ |object SupplementalLocations {
+ | platformTools = $platformTools
+ |}""".asLines
}
+ // called from scalap
def fromPathString(path: String, context: JavaContext = DefaultJavaContext): JavaClassPath = {
val s = new Settings()
s.classpath.value = path
@@ -153,18 +181,19 @@ object PathResolver {
}
else {
val settings = new Settings()
- val rest = settings.processArguments(args.toList, false)._2
+ val rest = settings.processArguments(args.toList, processAll = false)._2
val pr = new PathResolver(settings)
println(" COMMAND: 'scala %s'".format(args.mkString(" ")))
println("RESIDUAL: 'scala %s'\n".format(rest.mkString(" ")))
- pr.result.show
+ pr.result.show()
}
}
}
-import PathResolver.{ Defaults, Environment, firstNonEmpty, ppcp }
class PathResolver(settings: Settings, context: JavaContext) {
- def this(settings: Settings) = this(settings, if (settings.inline.value) new JavaContext else DefaultJavaContext)
+ import PathResolver.{ Defaults, Environment, AsLines, MkLines, ppcp }
+
+ def this(settings: Settings) = this(settings, if (settings.inline) new JavaContext else DefaultJavaContext)
private def cmdLineOrElse(name: String, alt: String) = {
(commandLineFor(name) match {
@@ -188,6 +217,7 @@ class PathResolver(settings: Settings, context: JavaContext) {
object Calculated {
def scalaHome = Defaults.scalaHome
def useJavaClassPath = settings.usejavacp.value || Defaults.useJavaClassPath
+ def useManifestClassPath= settings.usemanifestcp.value
def javaBootClassPath = cmdLineOrElse("javabootclasspath", Defaults.javaBootClassPath)
def javaExtDirs = cmdLineOrElse("javaextdirs", Defaults.javaExtDirs)
def javaUserClassPath = if (useJavaClassPath) Defaults.javaUserClassPath else ""
@@ -227,43 +257,37 @@ class PathResolver(settings: Settings, context: JavaContext) {
classesInPath(scalaBootClassPath), // 4. The Scala boot class path.
contentsOfDirsInPath(scalaExtDirs), // 5. The Scala extension class path.
classesInExpandedPath(userClassPath), // 6. The Scala application class path.
+ classesInManifest(useManifestClassPath), // 8. The Manifest class path.
sourcesInPath(sourcePath) // 7. The Scala source path.
)
lazy val containers = basis.flatten.distinct
- override def toString = """
+ override def toString = s"""
|object Calculated {
- | scalaHome = %s
- | javaBootClassPath = %s
- | javaExtDirs = %s
- | javaUserClassPath = %s
- | useJavaClassPath = %s
- | scalaBootClassPath = %s
- | scalaExtDirs = %s
- | userClassPath = %s
- | sourcePath = %s
- |}""".trim.stripMargin.format(
- scalaHome,
- ppcp(javaBootClassPath), ppcp(javaExtDirs), ppcp(javaUserClassPath),
- useJavaClassPath,
- ppcp(scalaBootClassPath), ppcp(scalaExtDirs), ppcp(userClassPath),
- ppcp(sourcePath)
- )
+ | scalaHome = $scalaHome
+ | javaBootClassPath = ${ppcp(javaBootClassPath)}
+ | javaExtDirs = ${ppcp(javaExtDirs)}
+ | javaUserClassPath = ${ppcp(javaUserClassPath)}
+ | useJavaClassPath = $useJavaClassPath
+ | scalaBootClassPath = ${ppcp(scalaBootClassPath)}
+ | scalaExtDirs = ${ppcp(scalaExtDirs)}
+ | userClassPath = ${ppcp(userClassPath)}
+ | sourcePath = ${ppcp(sourcePath)}
+ |}""".asLines
}
def containers = Calculated.containers
lazy val result = {
val cp = new JavaClassPath(containers.toIndexedSeq, context)
- if (settings.Ylogcp.value) {
- Console.println("Classpath built from " + settings.toConciseString)
- Console.println("Defaults: " + PathResolver.Defaults)
- Console.println("Calculated: " + Calculated)
+ if (settings.Ylogcp) {
+ Console print f"Classpath built from ${settings.toConciseString} %n"
+ Console print s"Defaults: ${PathResolver.Defaults}"
+ Console print s"Calculated: $Calculated"
val xs = (Calculated.basis drop 2).flatten.distinct
- println("After java boot/extdirs classpath has %d entries:" format xs.size)
- xs foreach (x => println(" " + x))
+ Console print (xs mkLines (s"After java boot/extdirs classpath has ${xs.size} entries:", indented = true))
}
cp
}
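
[editor's note] The SupplementalLocations.platformTools comment above spells out the probing order for tools.jar. A rough standalone sketch of the same heuristic using only java.io.File, so it can be read without the scala.reflect.io Path/Directory wrappers (the object name and the choice to deep-search only under JDK_HOME/JAVA_HOME are assumptions of this sketch, not the exact code above):

import java.io.File

object ToolsJarProbe {
  private val jarName = "tools.jar"

  private def jarAt(dir: File): Option[File] = {
    val f = new File(new File(dir, "lib"), jarName)
    if (f.isFile) Some(f) else None
  }

  // Last resort: walk a directory tree looking for tools.jar.
  private def deeply(dir: File): Option[File] = {
    val entries = Option(dir.listFiles).getOrElse(Array.empty[File])
    entries.collectFirst { case f if f.isFile && f.getName == jarName => f } orElse
      entries.filter(_.isDirectory).foldLeft(Option.empty[File])((acc, d) => acc orElse deeply(d))
  }

  // Probing order: JDK_HOME/JAVA_HOME lib, then java.home lib, then the
  // lib directory under java.home's parent, then a deep search.
  def platformTools: Option[File] = {
    val home    = (sys.env.get("JDK_HOME") orElse sys.env.get("JAVA_HOME")).map(new File(_))
    val install = Some(new File(sys.props.getOrElse("java.home", ".")))

    (home flatMap jarAt) orElse
      (install flatMap jarAt) orElse
      (install.map(_.getParentFile).filter(_ ne null) flatMap jarAt) orElse
      (home flatMap deeply)
  }

  def main(args: Array[String]): Unit = println(platformTools)
}
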
diff --git a/src/compiler/scala/tools/util/SocketServer.scala b/src/compiler/scala/tools/util/SocketServer.scala
index 1b06ce2ff2..1d39a59cf4 100644
--- a/src/compiler/scala/tools/util/SocketServer.scala
+++ b/src/compiler/scala/tools/util/SocketServer.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.tools.util
+package scala
+package tools.util
import java.net.{ ServerSocket, SocketException, SocketTimeoutException }
import java.io.{ PrintWriter, BufferedReader }
@@ -16,8 +17,8 @@ trait CompileOutputCommon {
def verbose: Boolean
def info(msg: String) = if (verbose) echo(msg)
- def echo(msg: String) = {Console println msg; Console.flush}
- def warn(msg: String) = {Console.err println msg; Console.flush}
+ def echo(msg: String) = {Console println msg; Console.flush()}
+ def warn(msg: String) = {Console.err println msg; Console.flush()}
def fatal(msg: String) = { warn(msg) ; sys.exit(1) }
}
diff --git a/src/compiler/scala/tools/util/VerifyClass.scala b/src/compiler/scala/tools/util/VerifyClass.scala
index d208a9f9c2..3c203e1cf2 100644
--- a/src/compiler/scala/tools/util/VerifyClass.scala
+++ b/src/compiler/scala/tools/util/VerifyClass.scala
@@ -3,7 +3,7 @@ package scala.tools.util
import scala.tools.nsc.io._
import java.net.URLClassLoader
import scala.collection.JavaConverters._
-
+import scala.language.postfixOps
object VerifyClass {
diff --git a/src/continuations/library/scala/util/continuations/ControlContext.scala b/src/continuations/library/scala/util/continuations/ControlContext.scala
index 44a5b537b6..c196809da9 100644
--- a/src/continuations/library/scala/util/continuations/ControlContext.scala
+++ b/src/continuations/library/scala/util/continuations/ControlContext.scala
@@ -183,7 +183,7 @@ final class ControlContext[+A,-B,+C](val fun: (A => B, Exception => B) => C, val
// need filter or other functions?
- final def flatMapCatch[A1>:A,B1<:B,C1>:C<:B1](pf: PartialFunction[Exception, ControlContext[A1,B1,C1]]): ControlContext[A1,B1,C1] = {
+ final def flatMapCatch[A1>:A,B1<:B,C1>:C<:B1](pf: PartialFunction[Exception, ControlContext[A1,B1,C1]]): ControlContext[A1,B1,C1] = { // called by codegen from SelectiveCPSTransform
if (fun eq null)
this
else {
@@ -209,7 +209,7 @@ final class ControlContext[+A,-B,+C](val fun: (A => B, Exception => B) => C, val
}
}
- final def mapFinally(f: () => Unit): ControlContext[A,B,C] = {
+ final def mapFinally(f: () => Unit): ControlContext[A,B,C] = { // called in code generated by SelectiveCPSTransform
if (fun eq null) {
try {
f()
diff --git a/src/continuations/library/scala/util/continuations/package.scala b/src/continuations/library/scala/util/continuations/package.scala
index 90bab56805..573fae85e7 100644
--- a/src/continuations/library/scala/util/continuations/package.scala
+++ b/src/continuations/library/scala/util/continuations/package.scala
@@ -166,8 +166,8 @@ package object continuations {
throw new NoSuchMethodException("this code has to be compiled with the Scala continuations plugin enabled")
}
- def shiftUnitR[A,B](x: A): ControlContext[A,B,B] = {
- new ControlContext(null, x)
+ def shiftUnitR[A,B](x: A): ControlContext[A,B,B] = { // called in code generated by SelectiveCPSTransform
+ new ControlContext[A, B, B](null, x)
}
/**
@@ -176,11 +176,11 @@ package object continuations {
* a final result.
* @see shift
*/
- def shiftR[A,B,C](fun: (A => B) => C): ControlContext[A,B,C] = {
+ def shiftR[A,B,C](fun: (A => B) => C): ControlContext[A,B,C] = { // called in code generated by SelectiveCPSTransform
new ControlContext((f:A=>B,g:Exception=>B) => fun(f), null.asInstanceOf[A])
}
- def reifyR[A,B,C](ctx: => ControlContext[A,B,C]): ControlContext[A,B,C] = {
+ def reifyR[A,B,C](ctx: => ControlContext[A,B,C]): ControlContext[A,B,C] = { // called in code generated by SelectiveCPSTransform
ctx
}
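
[editor's note] shiftUnitR, shiftR and reifyR above are the runtime entry points that SelectiveCPSTransform targets from generated code; source programs use shift and reset instead. A small usage sketch of that user-facing API (it only compiles with the continuations plugin enabled, e.g. scalac -P:continuations:enable):

import scala.util.continuations._

object ShiftResetExample {
  def main(args: Array[String]): Unit = {
    // reset delimits the continuation; shift captures "the rest of the
    // reset block" as k and decides how (and how often) to invoke it.
    val result: Int = reset {
      1 + shift { (k: Int => Int) => k(k(10)) }
    }
    println(result) // k is (1 + _), applied twice to 10, so this prints 12
  }
}
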
diff --git a/src/continuations/plugin/scala/tools/selectivecps/CPSAnnotationChecker.scala b/src/continuations/plugin/scala/tools/selectivecps/CPSAnnotationChecker.scala
index 00c72cf423..f260ee4093 100644
--- a/src/continuations/plugin/scala/tools/selectivecps/CPSAnnotationChecker.scala
+++ b/src/continuations/plugin/scala/tools/selectivecps/CPSAnnotationChecker.scala
@@ -2,11 +2,10 @@
package scala.tools.selectivecps
-import scala.tools.nsc.Global
-import scala.tools.nsc.typechecker.Modes
+import scala.tools.nsc.{ Global, Mode }
import scala.tools.nsc.MissingRequirementError
-abstract class CPSAnnotationChecker extends CPSUtils with Modes {
+abstract class CPSAnnotationChecker extends CPSUtils {
val global: Global
import global._
import analyzer.{AnalyzerPlugin, Typer}
@@ -96,8 +95,8 @@ abstract class CPSAnnotationChecker extends CPSUtils with Modes {
override def adaptBoundsToAnnotations(bounds: List[TypeBounds], tparams: List[Symbol], targs: List[Type]): List[TypeBounds] = {
if (!cpsEnabled) return bounds
- val anyAtCPS = newCpsParamsMarker(NothingClass.tpe, AnyClass.tpe)
- if (isFunctionType(tparams.head.owner.tpe) || isPartialFunctionType(tparams.head.owner.tpe)) {
+ val anyAtCPS = newCpsParamsMarker(NothingTpe, AnyTpe)
+ if (isFunctionType(tparams.head.owner.tpe_*) || isPartialFunctionType(tparams.head.owner.tpe_*)) {
vprintln("function bound: " + tparams.head.owner.tpe + "/"+bounds+"/"+targs)
if (hasCpsParamTypes(targs.last))
bounds.reverse match {
@@ -123,14 +122,14 @@ abstract class CPSAnnotationChecker extends CPSUtils with Modes {
import checker._
- override def canAdaptAnnotations(tree: Tree, typer: Typer, mode: Int, pt: Type): Boolean = {
+ override def canAdaptAnnotations(tree: Tree, typer: Typer, mode: Mode, pt: Type): Boolean = {
if (!cpsEnabled) return false
- vprintln("can adapt annotations? " + tree + " / " + tree.tpe + " / " + Integer.toHexString(mode) + " / " + pt)
+ vprintln("can adapt annotations? " + tree + " / " + tree.tpe + " / " + mode + " / " + pt)
val annots1 = cpsParamAnnotation(tree.tpe)
val annots2 = cpsParamAnnotation(pt)
- if ((mode & global.analyzer.PATTERNmode) != 0) {
+ if (mode.inPatternMode) {
//println("can adapt pattern annotations? " + tree + " / " + tree.tpe + " / " + Integer.toHexString(mode) + " / " + pt)
if (!annots1.isEmpty) {
return true
@@ -139,7 +138,7 @@ abstract class CPSAnnotationChecker extends CPSUtils with Modes {
/*
// not precise enough -- still relying on addAnnotations to remove things from ValDef symbols
- if ((mode & global.analyzer.TYPEmode) != 0 && (mode & global.analyzer.BYVALmode) != 0) {
+ if (mode.inAllModes(TYPEmode | BYVALmode)) {
if (!annots1.isEmpty) {
return true
}
@@ -148,16 +147,16 @@ abstract class CPSAnnotationChecker extends CPSUtils with Modes {
/*
this interferes with overloading resolution
- if ((mode & global.analyzer.BYVALmode) != 0 && tree.tpe <:< pt) {
+ if (mode.inByValMode && tree.tpe <:< pt) {
vprintln("already compatible, can't adapt further")
return false
}
*/
- if ((mode & global.analyzer.EXPRmode) != 0) {
+ if (mode.inExprMode) {
if ((annots1 corresponds annots2)(_.atp <:< _.atp)) {
vprintln("already same, can't adapt further")
false
- } else if (annots1.isEmpty && !annots2.isEmpty && ((mode & global.analyzer.BYVALmode) == 0)) {
+ } else if (annots1.isEmpty && !annots2.isEmpty && !mode.inByValMode) {
//println("can adapt annotations? " + tree + " / " + tree.tpe + " / " + Integer.toHexString(mode) + " / " + pt)
if (!hasPlusMarker(tree.tpe)) {
// val base = tree.tpe <:< removeAllCPSAnnotations(pt)
@@ -170,10 +169,10 @@ abstract class CPSAnnotationChecker extends CPSUtils with Modes {
true
//}
} else false
- } else if (!hasPlusMarker(tree.tpe) && annots1.isEmpty && !annots2.isEmpty && ((mode & global.analyzer.RETmode) != 0)) {
+ } else if (!hasPlusMarker(tree.tpe) && annots1.isEmpty && !annots2.isEmpty && typer.context.inReturnExpr) {
vprintln("checking enclosing method's result type without annotations")
tree.tpe <:< pt.withoutAnnotations
- } else if (!hasMinusMarker(tree.tpe) && !annots1.isEmpty && ((mode & global.analyzer.BYVALmode) != 0)) {
+ } else if (!hasMinusMarker(tree.tpe) && !annots1.isEmpty && mode.inByValMode) {
val optCpsTypes: Option[(Type, Type)] = cpsParamTypes(tree.tpe)
val optExpectedCpsTypes: Option[(Type, Type)] = cpsParamTypes(pt)
if (optCpsTypes.isEmpty || optExpectedCpsTypes.isEmpty) {
@@ -189,24 +188,20 @@ abstract class CPSAnnotationChecker extends CPSUtils with Modes {
} else false
}
- override def adaptAnnotations(tree: Tree, typer: Typer, mode: Int, pt: Type): Tree = {
+ override def adaptAnnotations(tree: Tree, typer: Typer, mode: Mode, pt: Type): Tree = {
if (!cpsEnabled) return tree
- vprintln("adapt annotations " + tree + " / " + tree.tpe + " / " + modeString(mode) + " / " + pt)
+ vprintln("adapt annotations " + tree + " / " + tree.tpe + " / " + mode + " / " + pt)
- val patMode = (mode & global.analyzer.PATTERNmode) != 0
- val exprMode = (mode & global.analyzer.EXPRmode) != 0
- val byValMode = (mode & global.analyzer.BYVALmode) != 0
- val retMode = (mode & global.analyzer.RETmode) != 0
-
- val annotsTree = cpsParamAnnotation(tree.tpe)
- val annotsExpected = cpsParamAnnotation(pt)
+ val annotsTree = cpsParamAnnotation(tree.tpe)
+ val annotsExpected = cpsParamAnnotation(pt)
+ def isMissingExpectedAnnots = annotsTree.isEmpty && annotsExpected.nonEmpty
// not sure I rephrased this comment correctly:
- // replacing `patMode` in the condition below by `patMode || ((mode & global.analyzer.TYPEmode) != 0 && (mode & global.analyzer.BYVALmode))`
+ // replacing `mode.inPatternMode` in the condition below by `mode.inPatternMode || mode.inAllModes(TYPEmode | BYVALmode)`
// doesn't work correctly -- still relying on addAnnotations to remove things from ValDef symbols
- if (patMode && !annotsTree.isEmpty) tree modifyType removeAllCPSAnnotations
- else if (exprMode && !byValMode && !hasPlusMarker(tree.tpe) && annotsTree.isEmpty && annotsExpected.nonEmpty) { // shiftUnit
+ if (mode.inPatternMode && annotsTree.nonEmpty) tree modifyType removeAllCPSAnnotations
+ else if (mode.typingExprNotValue && !hasPlusMarker(tree.tpe) && isMissingExpectedAnnots) { // shiftUnit
// add a marker annotation that will make tree.tpe behave as pt, subtyping wise
// tree will look like having any possible annotation
//println("adapt annotations " + tree + " / " + tree.tpe + " / " + Integer.toHexString(mode) + " / " + pt)
@@ -218,29 +213,29 @@ abstract class CPSAnnotationChecker extends CPSUtils with Modes {
val res = tree modifyType (_ withAnnotations newPlusMarker() :: annotsExpected) // needed for #1807
vprintln("adapted annotations (not by val) of " + tree + " to " + res.tpe)
res
- } else if (exprMode && byValMode && !hasMinusMarker(tree.tpe) && annotsTree.nonEmpty) { // dropping annotation
+ } else if (mode.typingExprByValue && !hasMinusMarker(tree.tpe) && annotsTree.nonEmpty) { // dropping annotation
// add a marker annotation that will make tree.tpe behave as pt, subtyping wise
// tree will look like having no annotation
val res = tree modifyType addMinusMarker
vprintln("adapted annotations (by val) of " + tree + " to " + res.tpe)
res
- } else if (retMode && !hasPlusMarker(tree.tpe) && annotsTree.isEmpty && annotsExpected.nonEmpty) {
+ } else if (typer.context.inReturnExpr && !hasPlusMarker(tree.tpe) && isMissingExpectedAnnots) {
// add a marker annotation that will make tree.tpe behave as pt, subtyping wise
// tree will look like having any possible annotation
-
+
// note 1: we are only adding a plus marker if the method's result type is a cps type
// (annotsExpected.nonEmpty == cpsParamAnnotation(pt).nonEmpty)
// note 2: we are not adding the expected cps annotations, since they will be added
// by adaptTypeOfReturn (see below).
- val res = tree modifyType (_ withAnnotations List(newPlusMarker()))
+ val res = tree modifyType (_ withAnnotation newPlusMarker())
vprintln("adapted annotations (return) of " + tree + " to " + res.tpe)
res
} else tree
}
/** Returns an adapted type for a return expression if the method's result type (pt) is a CPS type.
- * Otherwise, it returns the `default` type (`typedReturn` passes `NothingClass.tpe`).
- *
+ * Otherwise, it returns the `default` type (`typedReturn` passes `NothingTpe`).
+ *
* A return expression in a method that has a CPS result type is an error unless the return
* is in tail position. Therefore, we are making sure that only the types of return expressions
* are adapted which will either be removed, or lead to an error.
@@ -363,7 +358,7 @@ abstract class CPSAnnotationChecker extends CPSUtils with Modes {
global.globalError("not a single cps annotation: " + xs)
xs(0)
}
-
+
def emptyOrSingleList(xs: List[AnnotationInfo]) = if (xs.isEmpty) Nil else List(single(xs))
def transChildrenInOrder(tree: Tree, tpe: Type, childTrees: List[Tree], byName: List[Tree]) = {
@@ -400,11 +395,13 @@ abstract class CPSAnnotationChecker extends CPSUtils with Modes {
/** Modify the type that has thus far been inferred
* for a tree. All this should do is add annotations. */
- override def pluginsTyped(tpe: Type, typer: Typer, tree: Tree, mode: Int, pt: Type): Type = {
+ override def pluginsTyped(tpe: Type, typer: Typer, tree: Tree, mode: Mode, pt: Type): Type = {
import scala.util.control._
if (!cpsEnabled) {
- if (Exception.failAsValue(classOf[MissingRequirementError])(false)(hasCpsParamTypes(tpe)))
+ val report = try hasCpsParamTypes(tpe) catch { case _: MissingRequirementError => false }
+ if (report)
global.reporter.error(tree.pos, "this code must be compiled with the Scala continuations plugin enabled")
+
return tpe
}
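
[editor's note] The hunk above trades raw bit tests on an Int mode for the helpers on the Mode value class (mode.inPatternMode, mode.inExprMode, mode.inByValMode), plus typer.context.inReturnExpr for what used to be RETmode. A toy stand-in, with made-up flag values, only to contrast the two styles outside the compiler:

object ModeSketch {
  // Illustrative flag values; the real scala.tools.nsc.Mode defines more
  // flags and richer helpers.
  final val EXPRmode    = 0x01
  final val PATTERNmode = 0x02
  final val BYVALmode   = 0x04

  final class Mode(val bits: Int) extends AnyVal {
    def inAll(mask: Int)  = (bits & mask) == mask
    def inPatternMode     = inAll(PATTERNmode)
    def inExprMode        = inAll(EXPRmode)
    def inByValMode       = inAll(BYVALmode)
  }

  def describe(mode: Mode): String =
    if (mode.inPatternMode) "pattern"                   // was: (mode & PATTERNmode) != 0
    else if (mode.inExprMode && mode.inByValMode) "expr, by value"
    else "other"

  def main(args: Array[String]): Unit =
    println(describe(new Mode(EXPRmode | BYVALmode)))   // prints "expr, by value"
}
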
diff --git a/src/continuations/plugin/scala/tools/selectivecps/CPSUtils.scala b/src/continuations/plugin/scala/tools/selectivecps/CPSUtils.scala
index 46c644bcd6..29480576ea 100644
--- a/src/continuations/plugin/scala/tools/selectivecps/CPSUtils.scala
+++ b/src/continuations/plugin/scala/tools/selectivecps/CPSUtils.scala
@@ -7,7 +7,6 @@ import scala.tools.nsc.Global
trait CPSUtils {
val global: Global
import global._
- import definitions._
var cpsEnabled = false
val verbose: Boolean = System.getProperty("cpsVerbose", "false") == "true"
@@ -36,7 +35,7 @@ trait CPSUtils {
lazy val MarkerCPSAdaptMinus = rootMirror.getRequiredClass("scala.util.continuations.cpsMinus")
lazy val Context = rootMirror.getRequiredClass("scala.util.continuations.ControlContext")
- lazy val ModCPS = rootMirror.getRequiredPackage("scala.util.continuations")
+ lazy val ModCPS = rootMirror.getPackage("scala.util.continuations")
lazy val MethShiftUnit = definitions.getMember(ModCPS, cpsNames.shiftUnit)
lazy val MethShiftUnit0 = definitions.getMember(ModCPS, cpsNames.shiftUnit0)
@@ -57,12 +56,12 @@ trait CPSUtils {
protected def newMarker(sym: Symbol): AnnotationInfo = AnnotationInfo marker sym.tpe
protected def newCpsParamsMarker(tp1: Type, tp2: Type) =
- newMarker(appliedType(MarkerCPSTypes.tpe, List(tp1, tp2)))
+ newMarker(appliedType(MarkerCPSTypes, tp1, tp2))
// annotation checker
protected def annTypes(ann: AnnotationInfo): (Type, Type) = {
- val tp0 :: tp1 :: Nil = ann.atp.normalize.typeArgs
+ val tp0 :: tp1 :: Nil = ann.atp.dealiasWiden.typeArgs
((tp0, tp1))
}
protected def hasMinusMarker(tpe: Type) = tpe hasAnnotation MarkerCPSAdaptMinus
diff --git a/src/continuations/plugin/scala/tools/selectivecps/SelectiveANFTransform.scala b/src/continuations/plugin/scala/tools/selectivecps/SelectiveANFTransform.scala
index 8b39bf3961..ae95a1bdac 100644
--- a/src/continuations/plugin/scala/tools/selectivecps/SelectiveANFTransform.scala
+++ b/src/continuations/plugin/scala/tools/selectivecps/SelectiveANFTransform.scala
@@ -2,13 +2,10 @@
package scala.tools.selectivecps
-import scala.tools.nsc._
import scala.tools.nsc.transform._
import scala.tools.nsc.symtab._
import scala.tools.nsc.plugins._
-import scala.tools.nsc.ast._
-
/**
* In methods marked @cps, explicitly name results of calls to other @cps methods
*/
@@ -20,13 +17,14 @@ abstract class SelectiveANFTransform extends PluginComponent with Transform with
import definitions._ // standard classes and methods
import typer.atOwner // methods to type trees
+ override def description = "ANF pre-transform for @cps"
+
/** the following two members override abstract members in Transform */
val phaseName: String = "selectiveanf"
protected def newTransformer(unit: CompilationUnit): Transformer =
new ANFTransformer(unit)
-
class ANFTransformer(unit: CompilationUnit) extends TypingTransformer(unit) {
implicit val _unit = unit // allow code in CPSUtils.scala to report errors
@@ -131,7 +129,7 @@ abstract class SelectiveANFTransform extends PluginComponent with Transform with
def transformPureMatch(tree: Tree, selector: Tree, cases: List[CaseDef]) = {
val caseVals = cases map { case cd @ CaseDef(pat, guard, body) =>
- // if (!hasPlusMarker(body.tpe)) body.tpe = body.tpe withAnnotation newPlusMarker() // TODO: to avoid warning
+ // if (!hasPlusMarker(body.tpe)) body modifyType (_ withAnnotation newPlusMarker()) // TODO: to avoid warning
val bodyVal = transExpr(body, None, ext) // ??? triggers "cps-transformed unexpectedly" warning in transTailValue
treeCopy.CaseDef(cd, transform(pat), transform(guard), bodyVal)
}
@@ -172,7 +170,7 @@ abstract class SelectiveANFTransform extends PluginComponent with Transform with
debuglog("transforming valdef " + vd.symbol)
if (getExternalAnswerTypeAnn(tpt.tpe).isEmpty) {
-
+
atOwner(vd.symbol) {
val rhs1 = transExpr(rhs, None, None)
@@ -402,9 +400,9 @@ abstract class SelectiveANFTransform extends PluginComponent with Transform with
try {
val Some((a, b)) = cpsR
- /** Since shiftUnit is bounded [A,B,C>:B] this may not typecheck
- * if C is overly specific. So if !(B <:< C), call shiftUnit0
- * instead, which takes only two type arguments.
+ /* Since shiftUnit is bounded [A,B,C>:B] this may not typecheck
+ * if C is overly specific. So if !(B <:< C), call shiftUnit0
+ * instead, which takes only two type arguments.
*/
val conforms = a <:< b
val call = localTyper.typedPos(tree.pos)(
@@ -471,7 +469,7 @@ abstract class SelectiveANFTransform extends PluginComponent with Transform with
val sym: Symbol = (
currentOwner.newValue(newTermName(unit.fresh.newName("tmp")), tree.pos, Flags.SYNTHETIC)
setInfo valueTpe
- setAnnotations List(AnnotationInfo(MarkerCPSSym.tpe, Nil, Nil))
+ setAnnotations List(AnnotationInfo(MarkerCPSSym.tpe_*, Nil, Nil))
)
expr.changeOwner(currentOwner -> sym)
@@ -503,9 +501,7 @@ abstract class SelectiveANFTransform extends PluginComponent with Transform with
// TODO: better yet: do without annotations on symbols
val spcVal = getAnswerTypeAnn(anfRhs.tpe)
- if (spcVal.isDefined) {
- tree.symbol.setAnnotations(List(AnnotationInfo(MarkerCPSSym.tpe, Nil, Nil)))
- }
+ spcVal foreach (_ => tree.symbol setAnnotations List(AnnotationInfo(MarkerCPSSym.tpe_*, Nil, Nil)))
(stms:::List(treeCopy.ValDef(tree, mods, name, tpt, anfRhs)), linearize(spc, spcVal)(unit, tree.pos))
diff --git a/src/continuations/plugin/scala/tools/selectivecps/SelectiveCPSPlugin.scala b/src/continuations/plugin/scala/tools/selectivecps/SelectiveCPSPlugin.scala
index 237159795a..c16cce2f2c 100644
--- a/src/continuations/plugin/scala/tools/selectivecps/SelectiveCPSPlugin.scala
+++ b/src/continuations/plugin/scala/tools/selectivecps/SelectiveCPSPlugin.scala
@@ -3,15 +3,11 @@
package scala.tools.selectivecps
import scala.tools.nsc
-import scala.tools.nsc.typechecker._
import nsc.Global
-import nsc.Phase
import nsc.plugins.Plugin
import nsc.plugins.PluginComponent
class SelectiveCPSPlugin(val global: Global) extends Plugin {
- import global._
-
val name = "continuations"
val description = "applies selective cps conversion"
@@ -26,7 +22,6 @@ class SelectiveCPSPlugin(val global: Global) extends Plugin {
override val runsBefore = List("uncurry")
}
-
val components = List[PluginComponent](anfPhase, cpsPhase)
val checker = new CPSAnnotationChecker {
diff --git a/src/continuations/plugin/scala/tools/selectivecps/SelectiveCPSTransform.scala b/src/continuations/plugin/scala/tools/selectivecps/SelectiveCPSTransform.scala
index 4482bf2b7c..908ffb3713 100644
--- a/src/continuations/plugin/scala/tools/selectivecps/SelectiveCPSTransform.scala
+++ b/src/continuations/plugin/scala/tools/selectivecps/SelectiveCPSTransform.scala
@@ -2,13 +2,8 @@
package scala.tools.selectivecps
-import scala.collection._
-
-import scala.tools.nsc._
import scala.tools.nsc.transform._
import scala.tools.nsc.plugins._
-
-import scala.tools.nsc.ast.TreeBrowsers
import scala.tools.nsc.ast._
/**
@@ -22,6 +17,8 @@ abstract class SelectiveCPSTransform extends PluginComponent with
import definitions._ // standard classes and methods
import typer.atOwner // methods to type trees
+ override def description = "@cps-driven transform of selectiveanf assignments"
+
/** the following two members override abstract members in Transform */
val phaseName: String = "selectivecps"
@@ -56,7 +53,7 @@ abstract class SelectiveCPSTransform extends PluginComponent with
case _ =>
getExternalAnswerTypeAnn(tp) match {
case Some((res, outer)) =>
- appliedType(Context.tpe, List(removeAllCPSAnnotations(tp), res, outer))
+ appliedType(Context.tpeHK, List(removeAllCPSAnnotations(tp), res, outer))
case _ =>
removeAllCPSAnnotations(tp)
}
@@ -90,7 +87,7 @@ abstract class SelectiveCPSTransform extends PluginComponent with
//gen.mkAttributedSelect(gen.mkAttributedSelect(gen.mkAttributedSelect(gen.mkAttributedIdent(ScalaPackage),
//ScalaPackage.tpe.member("util")), ScalaPackage.tpe.member("util").tpe.member("continuations")), MethShiftR)
//gen.mkAttributedRef(ModCPS.tpe, MethShiftR) // TODO: correct?
- debuglog("funR.tpe = " + funR.tpe)
+ debuglog("funR.tpe: " + funR.tpe)
Apply(
TypeApply(funR, targs).setType(appliedType(funR.tpe, targs.map((t:Tree) => t.tpe))),
args.map(transform(_))
@@ -102,12 +99,12 @@ abstract class SelectiveCPSTransform extends PluginComponent with
debuglog("found shiftUnit: " + tree)
atPos(tree.pos) {
val funR = gen.mkAttributedRef(MethShiftUnitR) // TODO: correct?
- debuglog("funR.tpe = " + funR.tpe)
+ debuglog("funR.tpe: " + funR.tpe)
Apply(
TypeApply(funR, List(targs(0), targs(1))).setType(appliedType(funR.tpe,
List(targs(0).tpe, targs(1).tpe))),
args.map(transform(_))
- ).setType(appliedType(Context.tpe, List(targs(0).tpe,targs(1).tpe,targs(1).tpe)))
+ ).setType(appliedType(Context.tpeHK, List(targs(0).tpe,targs(1).tpe,targs(1).tpe)))
}
case Apply(TypeApply(fun, targs), args)
@@ -115,7 +112,7 @@ abstract class SelectiveCPSTransform extends PluginComponent with
log("found reify: " + tree)
atPos(tree.pos) {
val funR = gen.mkAttributedRef(MethReifyR) // TODO: correct?
- debuglog("funR.tpe = " + funR.tpe)
+ debuglog("funR.tpe: " + funR.tpe)
Apply(
TypeApply(funR, targs).setType(appliedType(funR.tpe, targs.map((t:Tree) => t.tpe))),
args.map(transform(_))
@@ -192,7 +189,7 @@ abstract class SelectiveCPSTransform extends PluginComponent with
val targettp = transformCPSType(tree.tpe)
val pos = catches.head.pos
- val funSym = currentOwner.newValueParameter(cpsNames.catches, pos).setInfo(appliedType(PartialFunctionClass.tpe, List(ThrowableClass.tpe, targettp)))
+ val funSym = currentOwner.newValueParameter(cpsNames.catches, pos).setInfo(appliedType(PartialFunctionClass, ThrowableTpe, targettp))
val funDef = localTyper.typedPos(pos) {
ValDef(funSym, Match(EmptyTree, catches1))
}
@@ -200,7 +197,7 @@ abstract class SelectiveCPSTransform extends PluginComponent with
Apply(Select(expr1, expr1.tpe.member(cpsNames.flatMapCatch)), List(Ident(funSym)))
}
- val exSym = currentOwner.newValueParameter(cpsNames.ex, pos).setInfo(ThrowableClass.tpe)
+ val exSym = currentOwner.newValueParameter(cpsNames.ex, pos).setInfo(ThrowableTpe)
import CODE._
// generate a case that is supported directly by the back-end
@@ -210,8 +207,8 @@ abstract class SelectiveCPSTransform extends PluginComponent with
IF ((REF(funSym) DOT nme.isDefinedAt)(REF(exSym))) THEN (REF(funSym) APPLY (REF(exSym))) ELSE Throw(REF(exSym))
)
- val catch2 = localTyper.typedCases(List(catchIfDefined), ThrowableClass.tpe, targettp)
- //typedCases(tree, catches, ThrowableClass.tpe, pt)
+ val catch2 = localTyper.typedCases(List(catchIfDefined), ThrowableTpe, targettp)
+ //typedCases(tree, catches, ThrowableTpe, pt)
patmatTransformer.transform(localTyper.typed(Block(List(funDef), treeCopy.Try(tree, treeCopy.Block(block1, stms, expr2), catch2, finalizer1))))
@@ -350,7 +347,7 @@ abstract class SelectiveCPSTransform extends PluginComponent with
val ctxSym = currentOwner.newValue(newTermName("" + vd.symbol.name + cpsNames.shiftSuffix)).setInfo(rhs1.tpe)
val ctxDef = localTyper.typed(ValDef(ctxSym, rhs1))
def ctxRef = localTyper.typed(Ident(ctxSym))
- val argSym = currentOwner.newValue(vd.symbol.name).setInfo(tpe)
+ val argSym = currentOwner.newValue(vd.symbol.name.toTermName).setInfo(tpe)
val argDef = localTyper.typed(ValDef(argSym, Select(ctxRef, ctxRef.tpe.member(cpsNames.getTrivialValue))))
val switchExpr = localTyper.typedPos(vd.symbol.pos) {
val body2 = mkBlock(bodyStms, bodyExpr).duplicate // dup before typing!
diff --git a/src/detach/library/scala/remoting/Channel.scala b/src/detach/library/scala/remoting/Channel.scala
deleted file mode 100644
index e60d16c0d5..0000000000
--- a/src/detach/library/scala/remoting/Channel.scala
+++ /dev/null
@@ -1,190 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-// $Id: Channel.scala 18365 2009-07-21 11:00:42Z michelou $
-
-package scala.remoting
-
-import java.io._
-import java.net._
-import java.rmi.server.RMIClassLoader
-
-/** <p>
- * The class <code>Channel</code> implements (basic) typed channels
- * which use <a href="http://java.sun.com/docs/books/tutorial/networking/sockets/"
- * target="_top"/>Java socket</a> communication and Scala type manifests to
- * provide type-safe send/receive operations between a localhost and another
- * remote machine by specifying some <code>host</code> and <code>port</code>.
- * </p>
- *
- * @author Stephane Micheloud
- * @version 1.1
- */
-class Channel protected (socket: Socket) {
-
- // Create a socket without a timeout
- def this(host: String, port: Int) = this(new Socket(host, port))
-
- // // Create a socket with a timeout
- // val sockaddr: SocketAddress = new InetSocketAddress(addr, port)
- // val socket = new Socket()
- // // If the timeout occurs, SocketTimeoutException is thrown.
- // socket.connect(sockaddr, 2000) // 2 seconds
-
- /** Returns the local address of this channel. */
- val host = socket.getInetAddress.getHostAddress
-
- /** Returns the port on which this channel is listening. */
- val port = socket.getLocalPort
-
- private var cl: ClassLoader =
- try {
- // requires permission in Java policy file
- val codebase = System.getProperty("java.rmi.server.codebase")
- if (codebase != null) info("codebase="+codebase)
- RMIClassLoader.getClassLoader(codebase)
- }
- catch {
- case e: Exception =>
- sys.error("Class loader undefined: " + e.getMessage)
- null
- }
- def classLoader: ClassLoader = cl
- def classLoader_=(x: ClassLoader) { cl = x }
-
- info(""+this)
-
- private class CustomObjectInputStream(in: InputStream)
- extends ObjectInputStream(in) {
- override def resolveClass(desc: ObjectStreamClass): Class[_] =
- if (cl eq null)
- super.resolveClass(desc)
- else
- try {
- info("resolve class "+desc.getName)
- cl loadClass desc.getName
- }
- catch {
- case e: ClassNotFoundException =>
- super.resolveClass(desc)
- }
- }
-
- // lazy modifier is required!
- private lazy val in =
- try {
- new CustomObjectInputStream(socket.getInputStream)
- }
- catch {
- case e: IOException =>
- sys.error("Input stream undefined: "+e.getMessage+" ("+this+")")
- null
- }
- private lazy val out =
- try {
- new ObjectOutputStream(socket.getOutputStream)
- }
- catch {
- case e: IOException =>
- sys.error("Output stream undefined: "+e.getMessage+" ("+this+")")
- null
- }
-
- /** <code>receive&lt;primtype&gt;</code> methods may throw an
- * <code>IOException</code>.
- */
- def receiveUnit = receive[Unit]
- def receiveBoolean = receive[Boolean]
- def receiveByte = receive[Byte]
- def receiveChar = receive[Char]
- def receiveShort = receive[Short]
- def receiveInt = receive[Int]
- def receiveLong = receive[Long]
- def receiveFloat = receive[Float]
- def receiveDouble = receive[Double]
- def receiveString = receive[String]
-
- /** <code>receive</code> method may throw either an
- * <code>ClassNotFoundException</code> or an <code>IOException</code>.
- *
- * @throw <code>ChannelException</code> if received value has not
- * the expected type.
- */
- @throws(classOf[ChannelException])
- def receive[T](implicit expected: scala.reflect.ClassTag[T]): T = {
- val found = in.readObject().asInstanceOf[reflect.ClassTag[_]]
- info("receive: found="+found+", expected="+expected)
- import scala.reflect.ClassTag
- val x = found match {
- case ClassTag.Unit => ()
- case ClassTag.Boolean => in.readBoolean()
- case ClassTag.Byte => in.readByte()
- case ClassTag.Char => in.readChar()
- case ClassTag.Short => in.readShort()
- case ClassTag.Int => in.readInt()
- case ClassTag.Long => in.readLong()
- case ClassTag.Float => in.readFloat()
- case ClassTag.Double => in.readDouble()
- case _ => in.readObject()
- }
- val res = if (found <:< expected)
- x.asInstanceOf[T]
- else
- throw new ChannelException(
- "\n\tfound \""+found+"\"\n\texpected \""+expected+"\"")
- info("received "+res+" (available="+in.available+")")
- res
- }
-
- /** <code>?</code> method may throw either an
- * <code>ClassNotFoundException</code> or an <code>IOException</code>.
- */
- def ?[T](implicit t: scala.reflect.ClassTag[T]): T = receive[T](t)
-
- /** <code>send</code> method may throw an <code>IOException</code>.
- */
- def send[T](x: T)(implicit t: scala.reflect.ClassTag[T]) {
- out writeObject t
- x match {
- case x: Unit => // nop
- case x: Boolean => out writeBoolean x
- case x: Byte => out writeByte x
- case x: Char => out writeChar x
- case x: Short => out writeShort x
- case x: Int => out writeInt x
- case x: Long => out writeLong x
- case x: Float => out writeFloat x
- case x: Double => out writeDouble x
- case x => out writeObject x
- }
- out.flush()
- info("sent "+x)
- }
-
- /** <code>!</code> method may throw an <code>IOException</code>.
- */
- def ![T](x: T)(implicit m: scala.reflect.ClassTag[T]) { send(x)(m) }
-
- def close() {
- try { socket.close() }
- catch { case e: IOException => }
- info(this+" closed")
- }
-
- override def toString: String = socket.toString
-
- private def info(msg: String) {
- runtime.remoting.Debug.info("[Channel] "+msg)
- }
-}
-
-/** <code>ChannelException</code> may be thrown by the operation
- * <code>receive</code> when the received data has not the expected type.
- */
-case class ChannelException(msg: String) extends IOException(msg)
-
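
[editor's note] For reference, the scala.remoting.Channel deleted here wrote a ClassTag ahead of every value and matched it against the expected tag on receive. Client-side usage of the removed API looked roughly like the sketch below (the port is arbitrary, and this no longer compiles against this tree):

import scala.remoting.Channel

object ChannelClient {
  def main(args: Array[String]): Unit = {
    val ch = new Channel("localhost", 8000)
    ch ! 42                    // writes the ClassTag, then writeInt(42)
    val echoed = ch.receiveInt // checks the received tag against ClassTag.Int
    println(echoed)
    ch.close()
  }
}
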
diff --git a/src/detach/library/scala/remoting/Debug.scala b/src/detach/library/scala/remoting/Debug.scala
deleted file mode 100644
index 79f2bcedde..0000000000
--- a/src/detach/library/scala/remoting/Debug.scala
+++ /dev/null
@@ -1,27 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-// $Id: Debug.scala 17412 2009-03-31 10:08:25Z michelou $
-
-package scala.remoting
-
-/**
- * @author Stephane Micheloud
- * @version 1.0
- */
-object Debug extends runtime.remoting.Debug {
- private val f = new java.text.SimpleDateFormat("HH:mm:ss")
- private val c = new java.util.GregorianCalendar
-
- def getTime: String = f format c.getTime
-
- def getLocation(obj: AnyRef): String = {
- val s = obj.getClass().getClassLoader().toString()
- s substring s.indexOf('[')
- }
-}
diff --git a/src/detach/library/scala/remoting/ServerChannel.scala b/src/detach/library/scala/remoting/ServerChannel.scala
deleted file mode 100644
index 7828f85a1d..0000000000
--- a/src/detach/library/scala/remoting/ServerChannel.scala
+++ /dev/null
@@ -1,68 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-// $Id: ServerChannel.scala 18365 2009-07-21 11:00:42Z michelou $
-
-package scala.remoting
-
-import java.net.{ServerSocket, Socket}
-
-/** <p>
- * Creates a server channel and binds its associated socket to the
- * specified port number.<br/>
- * Example:
- * </p><pre>
- * <b>class</b> ComputeChannel(s: Socket) <b>extends</b> Channel(s) {
- * <b>def</b> receiveFunc = receive[Int => Int]
- * }
- * <b>class</b> ComputeServer(p: Int)
- * <b>extends</b> AbstractServerChannel[ComputeChannel](p) {
- * <b>def</b> newChannel(s: Socket) = <b>new</b> ComputeChannel(s)
- * }</pre>
- *
- * @author Stephane Micheloud
- * @version 1.0
- */
-class ServerChannel(p: Int) extends AbstractServerChannel[Channel](p) {
- def newChannel(s: Socket) = new Channel(s)
-}
-
-abstract class AbstractServerChannel[T <: Channel](_port: Int) {
-
- /** Creates an input channel and binds its associated socket to any
- * free port.
- */
- def this() = this(0)
-
- // The maximum queue length for incoming requests to connect is set to 50.
- private val serverSocket = new ServerSocket(_port)
-
- /** Returns the local address of this channel. */
- val host = serverSocket.getInetAddress.getHostAddress
-
- /** Returns the port on which this channel is listening. */
- val port = serverSocket.getLocalPort
- info("Listening on port "+port)
-
- protected def newChannel(socket: Socket): T
-
- def accept: T = {
- System.gc() // required!
- newChannel(serverSocket.accept)
- }
-
- def close() {
- try { serverSocket.close() }
- catch { case e: java.io.IOException => }
- info("Server socket "+host+":"+port+" closed")
- }
-
- protected def info(msg: String) {
- runtime.remoting.Debug.info("[ServerChannel] "+msg)
- }
-}
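
[editor's note] The matching server side of the removed API, using the concrete ServerChannel defined above (reference only, not compilable after this commit; the echo behaviour and port are assumptions of the sketch):

import scala.remoting.ServerChannel

object EchoServer {
  def main(args: Array[String]): Unit = {
    val server = new ServerChannel(8000)
    val ch = server.accept     // blocks until a client connects
    ch ! ch.receiveInt         // echo one Int back to the client
    ch.close()
    server.close()
  }
}
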
diff --git a/src/detach/library/scala/remoting/detach.scala b/src/detach/library/scala/remoting/detach.scala
deleted file mode 100644
index 51a3ac515d..0000000000
--- a/src/detach/library/scala/remoting/detach.scala
+++ /dev/null
@@ -1,49 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://www.scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-// $Id: detach.scala 16901 2009-01-13 15:37:05Z michelou $
-
-package scala.remoting
-
-
-/** The <code>detach</code> object is a <em>marker object</em> which informs
- * the Scala compiler that arguments whose type is a function type are
- * eligible for remote closure generation.
- *
- * @author Stephane Micheloud
- * @version 1.0, 13/07/2005
- */
-object detach {
-
- def apply[R](f: Function0[R]): Function0[R] = f
- def apply[T0, R](f: Function1[T0, R]): Function1[T0, R] = f
- def apply[T0, T1, R](f: Function2[T0, T1, R]): Function2[T0, T1, R] = f
- def apply[T0, T1, T2, R](f: Function3[T0, T1, T2, R]): Function3[T0, T1, T2, R] = f
- def apply[T0, T1, T2, T3, R](f: Function4[T0, T1, T2, T3, R]): Function4[T0, T1, T2, T3, R] = f
- def apply[T0, T1, T2, T3, T4, R](f: Function5[T0, T1, T2, T3, T4, R]): Function5[T0, T1, T2, T3, T4, R] = f
- def apply[T0, T1, T2, T3, T4, T5, R](f: Function6[T0, T1, T2, T3, T4, T5, R]): Function6[T0, T1, T2, T3, T4, T5, R] = f
- def apply[T0, T1, T2, T3, T4, T5, T6, R](f: Function7[T0, T1, T2, T3, T4, T5, T6, R]): Function7[T0, T1, T2, T3, T4, T5, T6, R] = f
- def apply[T0, T1, T2, T3, T4, T5, T6, T7, R](f: Function8[T0, T1, T2, T3, T4, T5, T6, T7, R]): Function8[T0, T1, T2, T3, T4, T5, T6, T7, R] = f
- def apply[T0, T1, T2, T3, T4, T5, T6, T7, T8, R](f: Function9[T0, T1, T2, T3, T4, T5, T6, T7, T8, R]): Function9[T0, T1, T2, T3, T4, T5, T6, T7, T8, R] = f
-
- // since 2.7.0
- def apply[T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, R](f: Function10[T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, R]): Function10[T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, R] = f
- def apply[T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, R](f: Function11[T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, R]): Function11[T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, R] = f
- def apply[T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, R](f: Function12[T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, R]): Function12[T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, R] = f
- def apply[T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, R](f: Function13[T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, R]): Function13[T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, R] = f
- def apply[T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, R](f: Function14[T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, R]): Function14[T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, R] = f
- def apply[T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, R](f: Function15[T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, R]): Function15[T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, R] = f
- def apply[T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, R](f: Function16[T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, R]): Function16[T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, R] = f
- def apply[T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, R](f: Function17[T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, R]): Function17[T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, R] = f
- def apply[T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, R](f: Function18[T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, R]): Function18[T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, R] = f
- def apply[T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, R](f: Function19[T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, R]): Function19[T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, R] = f
- def apply[T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, R](f: Function20[T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, R]): Function20[T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, R] = f
- def apply[T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, R](f: Function21[T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, R]): Function21[T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, R] = f
- def apply[T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, R](f: Function22[T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, R]): Function22[T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, R] = f
-}
-
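
[editor's note] detach itself was only an identity marker: wrapping a function argument told the (likewise removed) detach machinery that the closure was eligible for remote closure generation. Usage of the removed object was simply the following (reference only):

import scala.remoting.detach

object DetachExample {
  // detach is identity at runtime; its only job was to mark the closure
  // for the detach compiler support being dropped in this commit.
  def main(args: Array[String]): Unit = {
    val f: Int => Int = detach { (x: Int) => x + 1 }
    println(f(41))
  }
}
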
diff --git a/src/detach/library/scala/runtime/RemoteRef.scala b/src/detach/library/scala/runtime/RemoteRef.scala
deleted file mode 100644
index e65b22cb71..0000000000
--- a/src/detach/library/scala/runtime/RemoteRef.scala
+++ /dev/null
@@ -1,182 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-// $Id: RemoteRef.scala 18365 2009-07-21 11:00:42Z michelou $
-
-package scala.runtime
-
-import java.net.{InetAddress, MalformedURLException}
-import java.rmi.{NoSuchObjectException, NotBoundException, Remote}
-import java.rmi.registry.{LocateRegistry, Registry}
-import java.rmi.server.{ExportException, RemoteObject, UnicastRemoteObject}
-
-import scala.runtime.remoting.{Debug, RemoteGC}
-
-/**
- *
- * @author Stephane Micheloud
- * @version 1.0
- */
-object RemoteRef { /*extends Thread {
- start()
-
- private class QuitException extends Exception
- private var isTerminated = false
-
- // keeps track of live remote objects
- val remoteGC = new RemoteGC
-
- override def run() {
- info("started thread")
- try {
- while (!isTerminated) {
- this.synchronized {
- try {
- wait(200)
- } catch {
- case _: InterruptedException =>
- if (isTerminated) throw new QuitException
- }
- remoteGC.gc()
- if (remoteGC.allClosed)
- throw new QuitException
- } // synchronized
-
- }
- } catch {
- case _: QuitException =>
- // allow thread to exit
- }
- }
-*/
- try {
- val prop = System.getProperty("sun.rmi.dgc.server.gcInterval")
- if (prop eq null)
- System.setProperty("sun.rmi.dgc.server.gcInterval", "10000")
- }
- catch {
- case e =>
- error(e.getMessage)
- }
-
- private val host =
- try {
- val prop = System.getProperty("java.rmi.server.hostname")
- if (prop ne null) prop else InetAddress.getLocalHost.getHostAddress
- }
- catch {
- case e =>
- warning(e.getMessage)
- InetAddress.getLocalHost.getHostAddress
- }
-
- private val port =
- try {
- val prop = System.getProperty("scala.remoting.port")
- if (prop ne null) prop.toInt else Registry.REGISTRY_PORT
- }
- catch {
- case e =>
- warning(e.getMessage)
- Registry.REGISTRY_PORT // default port
- }
-
- private val registry =
- try {
- LocateRegistry.createRegistry(port)
- }
- catch {
- case e =>
- warning(e.getMessage)
- LocateRegistry.getRegistry(host, port)
- }
-
- private val prefix = "//"+host+":"+port+"/"
- printDebugInfos
-
- // Variant 1: rebind/unbind
- def bind(name: String, x: Remote): Remote =
- try {
- registry.rebind(prefix+name, x)
- info("\""+prefix+name+"\" bound")
- val stub = RemoteObject.toStub(x)
- //remoteGC.newRef(stub)
- stub
- } catch {
- case e: MalformedURLException =>
- error(e.getMessage); null
- case e: ExportException =>
- info(""+e); null
- case e: Exception => // AlreadyBoundException, etc..
- throw e
- }
-
- def unbind(name: String) =
- try {
- registry.unbind(prefix+name)
- info("\""+name+"\" unbound")
- } catch {
- case e: java.io.EOFException =>
- warning(e.getMessage)
- case e: NotBoundException =>
- warning(e.getMessage+" already unbound")
- case e: MalformedURLException =>
- error(e.getMessage)
- case e: Exception =>
- throw e
- }
-/*
- // Variant 2: un-/exportObject
- def bind(name: String, x: Remote): Remote =
- try {
- val ex = UnicastRemoteObject.exportObject(x)
- registry.rebind(prefix+name, ex)
- info("\""+prefix+name+"\" bound")
- //val stub = RemoteObject.toStub(ex)
- //remoteGC.newRef(ex)
- ex //stub
- } catch {
- case e: MalformedURLException =>
- error(e.getMessage); null
- case e: ExportException =>
- info(""+e); null
- case e: Exception => // AlreadyBoundException, etc..
- throw e
- }
-
- def unbind(x: Remote) {
- try {
- UnicastRemoteObject.unexportObject(x, false)
- info("\""+x+"\" unbound")
- } catch {
- case e: java.io.EOFException =>
- warning(e.getMessage)
- case e: NotBoundException =>
- warning(e.getMessage+" already unbound")
- case e: MalformedURLException =>
- error(e.getMessage)
- case e: Exception =>
- throw e
- }
- }
-*/
- private def info(msg: String) { Debug.info("[RemoteRef] "+msg) }
- private def warning(msg: String) { Debug.warning("[RemoteRef] "+msg) }
- private def error(msg: String) { Debug.error("[RemoteRef] "+msg) }
-
- private def printDebugInfos() {
- def property(name: String): String =
- name+"="+(
- try { System.getProperty(name, "") }
- catch { case e => warning(e.getMessage); "?" })
- info(property("java.rmi.server.hostname"))
- info(property("sun.rmi.dgc.server.gcInterval"))
- info("registry="+registry)
- info("prefix="+prefix)
- }
-}
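
[editor's note] RemoteRef above was a thin wrapper around the standard RMI registry: create or locate a registry, rebind a name to a stub, unbind it again. A minimal sketch of the same steps with plain java.rmi (the Greeter interface and names are illustrative, not part of the removed code):

import java.rmi.Remote
import java.rmi.registry.LocateRegistry
import java.rmi.server.UnicastRemoteObject

trait Greeter extends Remote {
  @throws[java.rmi.RemoteException]
  def greet(name: String): String
}

object RmiBindSketch {
  def main(args: Array[String]): Unit = {
    val registry = LocateRegistry.createRegistry(1099)  // same default port RemoteRef used
    val impl = new Greeter { def greet(name: String) = s"hello, $name" }
    val stub = UnicastRemoteObject.exportObject(impl, 0).asInstanceOf[Greeter]
    registry.rebind("greeter", stub)                    // RemoteRef prefixed names with //host:port/
    println(registry.lookup("greeter").asInstanceOf[Greeter].greet("world"))
    registry.unbind("greeter")
    UnicastRemoteObject.unexportObject(impl, true)
  }
}
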
diff --git a/src/detach/library/scala/runtime/remoting/Debug.scala b/src/detach/library/scala/runtime/remoting/Debug.scala
deleted file mode 100644
index 06cdc67997..0000000000
--- a/src/detach/library/scala/runtime/remoting/Debug.scala
+++ /dev/null
@@ -1,85 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2007-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-// $Id: Debug.scala 17777 2009-05-19 18:16:25Z michelou $
-
-package scala.runtime.remoting
-
-/**
- * @author Stephane Micheloud
- * @version 1.0
- */
-object Debug extends Debug {
- override def info (msg: String) { if (lib) super.info(msg) }
- override def verbose(msg: String) { if (lib) super.verbose(msg) }
- override def warning(msg: String) { if (lib) super.warning(msg) }
- override def error (msg: String) { if (lib) super.error(msg) }
-}
-
-/**
- * @author Stephane Micheloud
- * @version 1.0
- */
-class Debug(tag: String) {
-
- def this() = this("")
-
- object Level extends Enumeration {
- type Level = Value
- val SILENT, ERROR, WARNING, VERBOSE, INFO = Value
- }
-
- private val level0 =
- try {
- val prop = System.getProperty("scala.remoting.logLevel")
- if (prop ne null) prop.toLowerCase else ""
- }
- catch {
- case e =>
- Console.err.println(e.getMessage)
- ""
- }
-
- import Level._
- protected var (lev, lib) = {
- val p = java.util.regex.Pattern.compile("(error|warning|verbose|info)(\\,lib)?(.*)")
- val m = p matcher level0
- val (s, b) =
- if (m.matches) (m.group(1), m.group(2) ne null)
- else ("", false)
- s match {
- case "error" => (ERROR , b)
- case "warning" => (WARNING, b)
- case "verbose" => (VERBOSE, b)
- case "info" => (INFO , b)
- case _ => (SILENT , false)
- }
- }
-
- def level = lev
- def level_= (lev: Level) = { this.lev = lev }
-
- private val tag0: String =
- if (tag != null & tag.length > 0) tag+" " else ""
-
- def info(msg: String) {
- if (lev >= INFO) Console.println(tag0 + "(info): " + msg)
- }
-
- def verbose(msg: String) {
- if (lev >= VERBOSE) Console.println(tag0 + "(verb): " + msg)
- }
-
- def warning(msg: String) {
- if (lev >= WARNING) Console.err.println(tag0 + "(warn): " + msg)
- }
-
- def error(msg: String) {
- if (lev >= ERROR) Console.err.println(tag0 + "(erro): " + msg)
- }
-}
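
The deleted Debug class reads its level from the scala.remoting.logLevel system property (e.g. -Dscala.remoting.logLevel=warning,lib). A standalone sketch of the same parsing idea, assuming only that property name; the Level encoding below is illustrative:

object LogLevelSketch {
  sealed abstract class Level(val rank: Int)
  case object Silent  extends Level(0)
  case object Error   extends Level(1)
  case object Warning extends Level(2)
  case object Verbose extends Level(3)
  case object Info    extends Level(4)

  // Accepts values such as "info" or "warning,lib"; the ",lib" suffix also
  // enables logging from the library side, mirroring the class above.
  def parse(value: String): (Level, Boolean) = {
    val Pattern = "(error|warning|verbose|info)(,lib)?.*".r
    value.toLowerCase match {
      case Pattern(level, lib) =>
        val lev = level match {
          case "error"   => Error
          case "warning" => Warning
          case "verbose" => Verbose
          case _         => Info
        }
        (lev, lib != null)
      case _ => (Silent, false)
    }
  }

  def main(args: Array[String]): Unit = {
    val raw = Option(System.getProperty("scala.remoting.logLevel")).getOrElse("")
    println(parse(raw))
  }
}
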
diff --git a/src/detach/library/scala/runtime/remoting/RegistryDelegate.scala b/src/detach/library/scala/runtime/remoting/RegistryDelegate.scala
deleted file mode 100644
index 1105832ef7..0000000000
--- a/src/detach/library/scala/runtime/remoting/RegistryDelegate.scala
+++ /dev/null
@@ -1,192 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-// $Id: RegistryDelegate.scala 18234 2009-07-07 13:21:57Z michelou $
-
-package scala.runtime.remoting
-
-import java.rmi.{RMISecurityManager, Remote, RemoteException}
-import java.rmi.registry.{LocateRegistry, Registry}
-import java.rmi.server.UnicastRemoteObject
-
-/**
- * <p>
- * This class implements the registry delegate concept
- * (see http://www.genady.net/rmi/v20/docs/delegate/RegistryDelegate.html)
- * </p>
- * <p>
- * In order to enforce some level of security, the standard RMI registry
- * implementation (e.g. <code>rmiregistry.exe</code>) only allows processes
- * on the same host to register objects in the registry (think of a bank
- * running a registry on one of its servers, and doesn't want anybody
- * modifying it). So, by design, if a process tries to
- * <code>bind(String, Remote)</code> an object to a remote registry,
- * an exception will be thrown.
- * </p>
- * <p>
- * However, the design of a distributed system may require remote clients to
- * register themselves in a central registry. If such a system is deployed in a
- * controlled and trusted environment (e.g., a firewalled intranet with tight
- * access control), the security risk may be acceptable.
- * </p>
- * <p>
- * The simplest technical solution to the remote registration problem is to
- * have a registry delegate. A registry delegate is an object that serves as
- * a proxy for the real registry. The delegate itself usually appears in the
- * registry under a well known name. It implements the Registry interface and
- * simply delegates all method calls to the appropriate methods of the real
- * registry. The delegate is allowed to perform bind and unbind operations
- * because it is running on the same host as the registry.
- * </p>
- * <p>
- * The common scenario for starting a registry and creating the delegate is
- * starting a class with the following <code>main(Array[String])</code> method:
- * </p>
- * <pre>
- * @throws(classOf[AccessException], classOf[RemoteException], classOf[AlreadyBoundException])
- * <b>object</b> namingService {
- * <b>def</b> main(args: Array[String]) {
- * <b>if</b> (System.getSecurityManager() == <b>null</b>)
- * System.setSecurityManager(<b>new</b> RMISecurityManager())
- *
- * <b>val</b> registry = LocateRegistry.createRegistry(REGISTRY_PORT)
- * registry.bind(DELEGATE_NAME, <b>new</b> RegistryDelegate());
- *
- * do {
- * <b>try</b> {
- * Thread.sleep(Long.MAX_VALUE)
- * } <b>catch</b> {
- * <b>case</b> e: InterruptedException => // do nothing
- * <b>case</b> e: Throwable => e.printStackTrace(); sys.exit(1)
- * }
- * } while (<b>true</b>)
- * }
- * }</pre>
- * <p>
- * The common usage scenario looks something like:
- * </p><pre>
- * Registry remoteRegistry = LocateRegistry.getRegistry("remotehost.mycompany.com");
- * Registry delegate = (Registry) remoteRegistry.lookup(DELEGATE_NAME);
- * delegate.bind("someName", <b>new</b> SomeRemoteObject());</pre>
- * <p>
- * The <code>getRegistryDelegate(String)</code> method is a helper method
- * that fetches the registry delegate for you.
- * </p>
- * <p>
- * The <code>main(Array[String])</code> method of this class will create a
- * local registry on the default port, create a registry delegate and bind
- * it under the well known name that you chose in the wizard
- * (<code>DELEGATE_NAME</code>).
- * </p>
- *
- * @author Genady Beryozkin, rmi-info@genady.net
- */
-
-object RMIDelegate {
- /** The name under which the delegate appears in the registry. */
- val DELEGATE_NAME = "foo"
-
- /** This method retrieves the registry delegate from a registry that is
- * running on a remote host.
- */
- @throws(classOf[RemoteException])
- def getRegistryDelegate(remoteHost: String): Registry =
- getRegistryDelegate(remoteHost, Registry.REGISTRY_PORT)
-
- /** This method retrieves the registry delegate from a registry that is
- * running on a remote host.
- */
- @throws(classOf[RemoteException])
- def getRegistryDelegate(remoteHost: String, remotePort: Int): Registry = {
- val registry = LocateRegistry.getRegistry(remoteHost, remotePort)
- (registry lookup DELEGATE_NAME).asInstanceOf[Registry]
- }
-
- /** A simple way to run a registry and bind a registry delegate. */
- @throws(classOf[RemoteException])
- def main(args: Array[String]) {
- var port = Registry.REGISTRY_PORT
-
- if (args.length > 0) {
- if (args(0) equals "-help") {
- println("Usage: rmidelegate <options> <port>")
- sys.exit(0)
- }
- try {
- port = args(0).toInt
- } catch {
- case e: NumberFormatException =>
- println("Usage: rmidelegate <options> <port>")
- sys.exit(1)
- }
- val opts = args filter (_ startsWith "-J-D")
- for (opt <- opts) {
- val x = opt.substring(4) split "="
- if (x.length == 2) System.setProperty(x(0), x(1))
- else System.setProperty(x(0), "")
- }
- }
-
- if (System.getSecurityManager() == null)
- System.setSecurityManager(new RMISecurityManager() {
- override def checkPermission(p: java.security.Permission) {}
- })
-
-
- val registry = LocateRegistry.createRegistry(port)
- registry.bind(DELEGATE_NAME, new RegistryDelegate())
-
- do {
- try {
- Thread.sleep(Long.MaxValue)
- } catch {
- case e: InterruptedException =>
- // do nothing
- case e: Throwable =>
- e.printStackTrace()
- sys.exit(1)
- }
- } while (true)
- }
-
-}
-
-/** Create a delegate for a user provided registry instance. The registry is
- * assumed to be a local registry, as there is no point in creating a delegate
- * for a remote registry.
- */
-class RegistryDelegate(reg: Registry) extends UnicastRemoteObject with Registry {
- /** The local registry */
- private val localRegistry: Registry = reg
-
- /** Create a delegate for a local registry that is bound to the default
- * local port (1099).
- */
- def this() = this(LocateRegistry.getRegistry())
-
- /** Create a delegate for a local registry that is bound to a user
- * specified port.
- */
- def this(port: Int) = this(LocateRegistry.getRegistry(port))
-
- @throws(classOf[RemoteException])
- def bind(name: String, obj: Remote) { localRegistry.bind(name, obj) }
-
- @throws(classOf[RemoteException])
- def list(): Array[String] = localRegistry.list()
-
- @throws(classOf[RemoteException])
- def lookup(name: String): Remote = localRegistry.lookup(name)
-
- @throws(classOf[RemoteException])
- def rebind(name: String, obj: Remote) { localRegistry.rebind(name, obj) }
-
- @throws(classOf[RemoteException])
- def unbind(name: String) { localRegistry.unbind(name) }
-
-}
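
The class comment above shows both sides in Java-flavoured pseudocode. For the client side, a hedged Scala sketch (the host, names and DelegateName value are placeholders, and the object handed to bind must already be an exported Remote):

import java.rmi.Remote
import java.rmi.registry.{LocateRegistry, Registry}

// Hypothetical client: fetch the delegate from a remote registry and use it to
// bind an object there, which the remote registry itself would refuse to do.
object DelegateClientSketch {
  val DelegateName = "foo" // must match the name the delegate was bound under

  def bindRemotely(host: String, name: String, obj: Remote): Unit = {
    val remoteRegistry: Registry = LocateRegistry.getRegistry(host)
    val delegate = remoteRegistry.lookup(DelegateName).asInstanceOf[Registry]
    delegate.bind(name, obj) // executed on the registry host, so it is permitted
  }
}
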
diff --git a/src/detach/library/scala/runtime/remoting/RemoteBooleanRef.scala b/src/detach/library/scala/runtime/remoting/RemoteBooleanRef.scala
deleted file mode 100644
index ff6c8f6b6c..0000000000
--- a/src/detach/library/scala/runtime/remoting/RemoteBooleanRef.scala
+++ /dev/null
@@ -1,51 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-// $Id: RemoteBooleanRef.scala 18398 2009-07-28 14:26:36Z michelou $
-
-package scala.runtime.remoting
-
-import java.rmi.server.{UnicastRemoteObject, Unreferenced}
-import scala.runtime.{BooleanRef, RemoteRef}
-
-/**
- * The trait <code>RemoteBooleanRef</code> provides a remote interface
- * for manipulating boolean references.
- *
- * @author Stephane Micheloud
- * @version 1.0
- */
-@remote
-trait RemoteBooleanRef {
- def elem_=(value: Boolean)
- def elem: Boolean
-}
-
-/**
- * The class <code>RemoteBooleanRefImpl</code> implements a remote (global)
- * boolean reference by inheriting from the class
- * <code>UnicastRemoteObject</code>.
- *
- * In particular, it forwards method invocations to the <code>elem</code>
- * accessors of class <code>runtime.BooleanRef</code> and implements the
- * <code>java.rmi.server.Unreferenced</code> interface to automatically
- * remove the no-longer-referenced binding from the registry.
- *
- * @author Stephane Micheloud
- * @version 1.0
- */
-class RemoteBooleanRefImpl(name: String, x: BooleanRef)
-extends UnicastRemoteObject with RemoteBooleanRef with Unreferenced {
- def elem_=(value: Boolean) { x.elem = value }
- def elem: Boolean = x.elem
- override def toString() = x.elem.toString
- def unreferenced() {
- Debug.info("[RemoteBooleanRefImpl] unreferenced: "+this)
- RemoteRef.unbind(name)
- }
-}
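
The trait/impl pair above is repeated below once per primitive reference type. Stripped of the @remote annotation and the RemoteRef registry (both dropped here as assumptions), the underlying pattern in plain RMI terms is roughly:

import java.rmi.{Remote, RemoteException}
import java.rmi.server.{UnicastRemoteObject, Unreferenced}
import scala.runtime.IntRef

// Remote view of one mutable cell; every remote method may throw RemoteException.
trait RemoteCell extends Remote {
  @throws(classOf[RemoteException]) def get: Int
  @throws(classOf[RemoteException]) def set(value: Int): Unit
}

// Server side: forward to a local IntRef and react once no client holds a
// reference any more (here the reaction is just a log line).
class RemoteCellImpl(name: String, x: IntRef)
  extends UnicastRemoteObject with RemoteCell with Unreferenced {
  def get: Int = x.elem
  def set(value: Int): Unit = { x.elem = value }
  def unreferenced(): Unit = println(name + " is no longer remotely referenced")
}
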
diff --git a/src/detach/library/scala/runtime/remoting/RemoteByteRef.scala b/src/detach/library/scala/runtime/remoting/RemoteByteRef.scala
deleted file mode 100644
index 335f0d9019..0000000000
--- a/src/detach/library/scala/runtime/remoting/RemoteByteRef.scala
+++ /dev/null
@@ -1,51 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-// $Id: RemoteByteRef.scala 18398 2009-07-28 14:26:36Z michelou $
-
-package scala.runtime.remoting
-
-import java.rmi.server.{UnicastRemoteObject, Unreferenced}
-import scala.runtime.{ByteRef, RemoteRef}
-
-/**
- * The trait <code>RemoteByteRef</code> provides a remote interface
- * for manipulating byte references.
- *
- * @author Stephane Micheloud
- * @version 1.0
- */
-@remote
-trait RemoteByteRef {
- def elem_=(value: Byte)
- def elem: Byte
-}
-
-/**
- * The class <code>RemoteByteRefImpl</code> implements a remote (global)
- * byte reference by inheriting from the class
- * <code>UnicastRemoteObject</code>.
- *
- * In particular, it forwards method invocations to the <code>elem</code>
- * accessors of class <code>runtime.ByteRef</code> and implements the
- * <code>java.rmi.server.Unreferenced</code> interface to automatically
- * remove the no-longer-referenced binding from the registry.
- *
- * @author Stephane Micheloud
- * @version 1.0
- */
-class RemoteByteRefImpl(name: String, x: ByteRef)
-extends UnicastRemoteObject with RemoteByteRef with Unreferenced {
- def elem_=(value: Byte) { x.elem = value }
- def elem: Byte = x.elem
- override def toString() = x.elem.toString
- def unreferenced() {
- Debug.info("[RemoteByteRefImpl] unreferenced: "+this)
- RemoteRef.unbind(name)
- }
-}
diff --git a/src/detach/library/scala/runtime/remoting/RemoteCharRef.scala b/src/detach/library/scala/runtime/remoting/RemoteCharRef.scala
deleted file mode 100644
index e0f48eb970..0000000000
--- a/src/detach/library/scala/runtime/remoting/RemoteCharRef.scala
+++ /dev/null
@@ -1,51 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-// $Id: RemoteCharRef.scala 18398 2009-07-28 14:26:36Z michelou $
-
-package scala.runtime.remoting
-
-import java.rmi.server.{UnicastRemoteObject, Unreferenced}
-import scala.runtime.{CharRef, RemoteRef}
-
-/**
- * The trait <code>RemoteCharRef</code> provides a remote interface
- * for manipulating character references.
- *
- * @author Stephane Micheloud
- * @version 1.0
- */
-@remote
-trait RemoteCharRef {
- def elem_=(value: Char)
- def elem: Char
-}
-
-/**
- * The class <code>RemoteCharRefImpl</code> implements a remote (global)
- * character reference by inheriting from the class
- * <code>UnicastRemoteObject</code>.
- *
- * In particular, it forwards method invocations to the <code>elem</code>
- * accessors of class <code>runtime.CharRef</code> and implements the
- * <code>java.rmi.server.Unreferenced</code> interface to automatically
- * remove the no-longer-referenced binding from the registry.
- *
- * @author Stephane Micheloud
- * @version 1.0
- */
-class RemoteCharRefImpl(name: String, x: CharRef)
-extends UnicastRemoteObject with RemoteCharRef with Unreferenced {
- def elem_=(value: Char) { x.elem = value }
- def elem: Char = x.elem
- override def toString() = x.elem.toString
- def unreferenced() {
- Debug.info("[RemoteCharRefImpl] unreferenced: "+this)
- RemoteRef.unbind(name)
- }
-}
diff --git a/src/detach/library/scala/runtime/remoting/RemoteDoubleRef.scala b/src/detach/library/scala/runtime/remoting/RemoteDoubleRef.scala
deleted file mode 100644
index 2e1319595a..0000000000
--- a/src/detach/library/scala/runtime/remoting/RemoteDoubleRef.scala
+++ /dev/null
@@ -1,50 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-// $Id: RemoteDoubleRef.scala 18398 2009-07-28 14:26:36Z michelou $
-
-package scala.runtime.remoting
-
-import java.rmi.server.{UnicastRemoteObject, Unreferenced}
-import scala.runtime.{DoubleRef, RemoteRef}
-
-/**
- * The trait <code>RemoteDoubleRef</code> provides a remote interface for manipulating double references.
- *
- * @author Stephane Micheloud
- * @version 1.0
- */
-@remote
-trait RemoteDoubleRef {
- def elem_=(value: Double)
- def elem: Double
-}
-
-/**
- * The class <code>RemoteDoubleRefImpl</code> implements a remote (global)
- * double reference by inheriting from the class
- * <code>UnicastRemoteObject</code>.
- *
- * In particular, it forwards method invocations to the <code>elem</code>
- * accessors of class <code>runtime.DoubleRef</code> and implements the
- * <code>java.rmi.server.Unreferenced</code> interface to automatically
- * remove the no-longer-referenced binding from the registry.
- *
- * @author Stephane Micheloud
- * @version 1.0
- */
-class RemoteDoubleRefImpl(name: String, x: DoubleRef)
-extends UnicastRemoteObject with RemoteDoubleRef with Unreferenced {
- def elem_=(value: Double) { x.elem = value }
- def elem: Double = x.elem
- override def toString() = x.elem.toString
- def unreferenced() {
- Debug.info("[RemoteDoubleRefImpl] unreferenced: "+this)
- RemoteRef.unbind(name)
- }
-}
diff --git a/src/detach/library/scala/runtime/remoting/RemoteFloatRef.scala b/src/detach/library/scala/runtime/remoting/RemoteFloatRef.scala
deleted file mode 100644
index f4e61ea6da..0000000000
--- a/src/detach/library/scala/runtime/remoting/RemoteFloatRef.scala
+++ /dev/null
@@ -1,50 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-// $Id: RemoteFloatRef.scala 18398 2009-07-28 14:26:36Z michelou $
-
-package scala.runtime.remoting
-
-import java.rmi.server.{UnicastRemoteObject, Unreferenced}
-import scala.runtime.{FloatRef, RemoteRef}
-
-/**
- * The trait <code>RemoteFloatRef</code> provides a remote interface
- * for manipulating float references.
- *
- * @author Stephane Micheloud
- * @version 1.0
- */
-@remote
-trait RemoteFloatRef {
- def elem_=(value: Float)
- def elem: Float
-}
-
-/**
- * The class <code>RemoteFloatRefImpl</code> implements a remote (global)
- * float reference by inheriting from the class
- * <code>UnicastRemoteObject</code>.
- *
- * In particular, it forwards method invocations to the <code>elem</code>
- * accessors of class <code>runtime.FloatRef</code> and implements the
- * <code>java.rmi.server.Unreferenced</code> interface.
- *
- * @author Stephane Micheloud
- * @version 1.0
- */
-class RemoteFloatRefImpl(name: String, x: FloatRef)
-extends UnicastRemoteObject with RemoteFloatRef with Unreferenced {
- def elem_=(value: Float) { x.elem = value }
- def elem: Float = x.elem
- override def toString() = x.elem.toString
- def unreferenced() {
- Debug.info("[RemoteIntFloatImpl] unreferenced: "+this)
- RemoteRef.unbind(name)
- }
-}
diff --git a/src/detach/library/scala/runtime/remoting/RemoteGC.scala b/src/detach/library/scala/runtime/remoting/RemoteGC.scala
deleted file mode 100644
index 393c031bfc..0000000000
--- a/src/detach/library/scala/runtime/remoting/RemoteGC.scala
+++ /dev/null
@@ -1,66 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-// $Id: RemoteGC.scala 17547 2009-04-21 13:56:28Z michelou $
-
-package scala.runtime.remoting
-
-import java.lang.ref.{Reference, WeakReference, ReferenceQueue}
-import java.rmi.{NoSuchObjectException, Remote}
-import java.rmi.server.UnicastRemoteObject
-import scala.collection.mutable
-
-/**
- *
- * @author Stephane Micheloud
- * @version 1.0
- */
-// Adapted from scala.actors.ActorGC
-private [runtime] class RemoteGC {
-
- private val refQueue = new ReferenceQueue[Remote]
- private val refSet = new mutable.HashSet[Reference[T] forSome { type T <: Remote }]
-
- private var liveRefs = 0
-
- def newRef(a: Remote) = synchronized {
- refSet += new WeakReference(a, refQueue)
- liveRefs += 1
- info("added object reference \""+a+"\" ("+liveRefs+")")
- }
-
- def gc() = synchronized {
- info("GC called ("+liveRefs+")")
- // check for unreachable object references
- def drain() {
- val wr = refQueue.poll
- if (wr != null) {
- val msg = try {
- UnicastRemoteObject.unexportObject(wr.get, true/*force*/)
- "removed object reference"
- }
- catch {
- case e: NoSuchObjectException =>
- "object already unbound"
- }
- info(msg+" ("+liveRefs+")")
- liveRefs -= 1
- refSet -= wr
- // continue draining
- drain()
- }
- }
- drain()
- }
-
- def allClosed: Boolean = synchronized {
- liveRefs <= 0
- }
-
- private def info(msg: String) { Debug.info("[RemoteGC] "+msg) }
-}
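
RemoteGC above tracks exported objects through weak references and drains the associated ReferenceQueue on each gc() call. The reference-queue part of that pattern, reduced to a self-contained sketch (the callback stands in for the unexport step):

import java.lang.ref.{Reference, ReferenceQueue, WeakReference}
import scala.collection.mutable

// Generic weak-reference tracker: register objects, then periodically poll the
// queue for references whose referents have been garbage collected.
class WeakTracker[T <: AnyRef](onCollected: Reference[_ <: T] => Unit) {
  private val queue = new ReferenceQueue[T]
  private val refs  = new mutable.HashSet[Reference[_ <: T]]

  def track(x: T): Unit = synchronized { refs += new WeakReference(x, queue) }

  // Poll until the queue is empty, invoking the callback for each collected
  // referent and dropping its reference from the tracked set.
  def drain(): Unit = synchronized {
    var ref = queue.poll()
    while (ref != null) {
      onCollected(ref)
      refs -= ref
      ref = queue.poll()
    }
  }
}
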
diff --git a/src/detach/library/scala/runtime/remoting/RemoteIntRef.scala b/src/detach/library/scala/runtime/remoting/RemoteIntRef.scala
deleted file mode 100644
index b14403f6ca..0000000000
--- a/src/detach/library/scala/runtime/remoting/RemoteIntRef.scala
+++ /dev/null
@@ -1,51 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-// $Id: RemoteIntRef.scala 18398 2009-07-28 14:26:36Z michelou $
-
-package scala.runtime.remoting
-
-import java.rmi.server.{UnicastRemoteObject, Unreferenced}
-import scala.runtime.{IntRef, RemoteRef}
-
-/**
- * The trait <code>RemoteIntRef</code> provides a remote interface
- * for manipulating integer references.
- *
- * @author Stephane Micheloud
- * @version 1.0
- */
-@remote
-trait RemoteIntRef {
- def elem_=(value: Int)
- def elem: Int
-}
-
-/**
- * The class <code>RemoteIntRefImpl</code> implements a remote (global)
- * integer reference by inheriting from the class
- * <code>UnicastRemoteObject</code>.
- *
- * In particular, it forwards method invocations to the <code>elem</code>
- * accessors of class <code>runtime.IntRef</code> and implements the
- * <code>java.rmi.server.Unreferenced</code> interface to automatically
- * remove the no-longer-referenced binding from the registry.
- *
- * @author Stephane Micheloud
- * @version 1.0
- */
-class RemoteIntRefImpl(name: String, x: IntRef)
-extends UnicastRemoteObject with RemoteIntRef with Unreferenced {
- def elem_=(value: Int) { x.elem = value }
- def elem: Int = x.elem
- override def toString() = x.elem.toString
- def unreferenced() {
- Debug.info("[RemoteIntRefImpl] unreferenced: "+this)
- RemoteRef.unbind(name)
- }
-}
diff --git a/src/detach/library/scala/runtime/remoting/RemoteLongRef.scala b/src/detach/library/scala/runtime/remoting/RemoteLongRef.scala
deleted file mode 100644
index da83491489..0000000000
--- a/src/detach/library/scala/runtime/remoting/RemoteLongRef.scala
+++ /dev/null
@@ -1,51 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-// $Id: RemoteLongRef.scala 18398 2009-07-28 14:26:36Z michelou $
-
-package scala.runtime.remoting
-
-import java.rmi.server.{UnicastRemoteObject, Unreferenced}
-import scala.runtime.{LongRef, RemoteRef}
-
-/**
- * The trait <code>RemoteLongRef</code> provides a remote interface
- * for manipulating long integer references.
- *
- * @author Stephane Micheloud
- * @version 1.0
- */
-@remote
-trait RemoteLongRef {
- def elem_=(value: Long)
- def elem: Long
-}
-
-/**
- * The class <code>RemoteLongRefImpl</code> implements a remote (global)
- * long integer reference by inheriting from the class
- * <code>UnicastRemoteObject</code>.
- *
- * In particular, it forwards method invocations to the <code>elem</code>
- * accessors of class <code>runtime.LongRef</code> and implements the
- * <code>java.rmi.server.Unreferenced</code> interface to automatically
- * remove the no-longer-referenced binding from the registry.
- *
- * @author Stephane Micheloud
- * @version 1.0
- */
-class RemoteLongRefImpl(name: String, x: LongRef)
-extends UnicastRemoteObject with RemoteLongRef with Unreferenced {
- def elem_=(value: Long) { x.elem = value }
- def elem: Long = x.elem
- override def toString() = x.elem.toString
- def unreferenced() {
- Debug.info("[RemoteLongRefImpl] unreferenced: "+this)
- RemoteRef.unbind(name)
- }
-}
diff --git a/src/detach/library/scala/runtime/remoting/RemoteObjectRef.scala b/src/detach/library/scala/runtime/remoting/RemoteObjectRef.scala
deleted file mode 100644
index 9f27b26114..0000000000
--- a/src/detach/library/scala/runtime/remoting/RemoteObjectRef.scala
+++ /dev/null
@@ -1,51 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-// $Id: RemoteObjectRef.scala 18398 2009-07-28 14:26:36Z michelou $
-
-package scala.runtime.remoting
-
-import java.rmi.server.{UnicastRemoteObject, Unreferenced}
-import scala.runtime.{ObjectRef, RemoteRef}
-
-/**
- * The trait <code>RemoteObjectRef</code> provides a remote interface
- * for manipulating object references.
- *
- * @author Stephane Micheloud
- * @version 1.0
- */
-@remote
-trait RemoteObjectRef {
- def elem_=(value: AnyRef)
- def elem: AnyRef
-}
-
-/**
- * The class <code>RemoteObjectRefImpl</code> implements a remote (global)
- * object reference by inheriting from the class
- * <code>UnicastRemoteObject</code>.
- *
- * In particular, it forwards method invocations to the <code>elem</code>
- * accessors of class <code>runtime.ObjectRef</code> and implements the
- * <code>java.rmi.server.Unreferenced</code> interface to automatically
- * remove the no-longer-referenced binding from the registry.
- *
- * @author Stephane Micheloud
- * @version 1.0
- */
-class RemoteObjectRefImpl(name: String, x: ObjectRef)
-extends UnicastRemoteObject with RemoteObjectRef with Unreferenced {
- def elem_=(value: AnyRef) { x.elem = value }
- def elem: AnyRef = x.elem
- override def toString() = x.elem.toString
- def unreferenced() {
- Debug.info("[RemoteObjectRefImpl] unreferenced: "+this)
- RemoteRef.unbind(name)
- }
-}
diff --git a/src/detach/library/scala/runtime/remoting/RemoteShortRef.scala b/src/detach/library/scala/runtime/remoting/RemoteShortRef.scala
deleted file mode 100644
index 2ced9dbc83..0000000000
--- a/src/detach/library/scala/runtime/remoting/RemoteShortRef.scala
+++ /dev/null
@@ -1,50 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-// $Id: RemoteShortRef.scala 18398 2009-07-28 14:26:36Z michelou $
-
-package scala.runtime.remoting
-
-import java.rmi.server.{UnicastRemoteObject, Unreferenced}
-import scala.runtime.{ShortRef, RemoteRef}
-
-/**
- * The trait <code>RemoteShortRef</code> provides a remote interface
- * for manipulating short integer references.
- *
- * @author Stephane Micheloud
- * @version 1.0
- */
-@remote
-trait RemoteShortRef {
- def elem_=(value: Short)
- def elem: Short
-}
-
-/**
- * The class <code>RemoteShortRefImpl</code> implements a remote (global)
- * short integer reference by inheriting from the class
- * <code>UnicastRemoteObject</code>.
- *
- * In particular, it forwards method invocations to the <code>elem</code>
- * accessors of class <code>runtime.ShortRef</code> and implements the
- * <code>java.rmi.server.Unreferenced</code> interface.
- *
- * @author Stephane Micheloud
- * @version 1.0
- */
-class RemoteShortRefImpl(name: String, x: ShortRef)
-extends UnicastRemoteObject with RemoteShortRef with Unreferenced {
- def elem_=(value: Short) { x.elem = value }
- def elem: Short = x.elem
- override def toString() = x.elem.toString
- def unreferenced() {
- Debug.info("[RemoteShortRefImpl] unreferenced: "+this)
- RemoteRef.unbind(name)
- }
-}
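
Once such an implementation is exported and bound under a name, a client reaches it through an ordinary registry lookup. A hedged sketch reusing the RemoteIntRef trait deleted above; the host and bound name are placeholders:

import java.rmi.registry.LocateRegistry
import scala.runtime.remoting.RemoteIntRef // the (deleted) trait shown earlier in this diff

// Hypothetical client: look up a bound integer reference and update it in place.
object RemoteIntRefClientSketch {
  def main(args: Array[String]): Unit = {
    val registry = LocateRegistry.getRegistry("localhost")
    val ref = registry.lookup("C/proxy$2").asInstanceOf[RemoteIntRef]
    ref.elem = ref.elem + 1 // elem and elem_= are invoked remotely
  }
}
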
diff --git a/src/detach/plugin/scala/tools/detach/Detach.scala b/src/detach/plugin/scala/tools/detach/Detach.scala
deleted file mode 100644
index f9a3d80da4..0000000000
--- a/src/detach/plugin/scala/tools/detach/Detach.scala
+++ /dev/null
@@ -1,1190 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Stephane Micheloud
- */
-
-package scala.tools.detach
-
-import scala.collection.{ mutable, immutable }
-import scala.collection.mutable.ListBuffer
-import scala.tools.nsc._
-import scala.tools.nsc.plugins.PluginComponent
-import scala.tools.nsc.symtab.Flags._
-import scala.tools.nsc.transform._
-
-abstract class Detach extends PluginComponent
- with Transform with TypingTransformers {
- import global._
- import definitions._
-
- /** the following two members override abstract members in Transform */
- val phaseName: String = "detach"
-
- protected def newTransformer(unit: CompilationUnit): Transformer =
- new DetachTransformer(unit)
-
- // set with the `-P:detach:enable` plugin option (see DetachPlugin) */
- protected[detach] var isEnabled = false
-
- private class DetachTransformer(unit: CompilationUnit)
- extends TypingTransformer(unit) {
- private val DEBUG = settings.debug.value
- private val PROXY_PREFIX = "proxy$" // local proxy objects
- private val PROXY_SUFFIX = "$proxy" // top-level proxy classes
- private val DETACH_SUFFIX = "$detach" // detached closures
- private val IMPL_SUFFIX = "Impl" // follows Java convention
-
- private val nme_bind = newTermName("bind")
- private val nme_unbind = newTermName("unbind")
- private val nme_unreferenced = newTermName("unreferenced")
-
- private val Functions = FunctionClass.toList // see method isFuncType
-
- private val RemoteClass =
- definitions.getClass("java.rmi.Remote")
-
- private val UIDClass =
- definitions.getClass("java.rmi.server.UID")
-
- private val UnicastRemoteObjectClass =
- definitions.getClass("java.rmi.server.UnicastRemoteObject")
-
- private val UnreferencedClass =
- definitions.getClass("java.rmi.server.Unreferenced")
-
- private val DetachModule =
- definitions.getModule("scala.remoting.detach")
-
- private val DebugModule =
- definitions.getModule("scala.remoting.Debug")
-
- private val RemoteRefModule =
- definitions.getModule("scala.runtime.RemoteRef")
-
- private val ThreadModule =
- definitions.getModule("java.lang.Thread")
-
- private val UnicastRemoteObjectModule =
- definitions.getModule("java.rmi.server.UnicastRemoteObject")
-
- private val remoteAnnotationInfo = {
- val RemoteAttr: Symbol = definitions.getClass("scala.remote")
- AnnotationInfo(RemoteAttr.tpe, List(), List())
- }
-
- private val serializableAnnotationInfo =
- AnnotationInfo(SerializableAttr.tpe, List(), List())
-/*
- private val throwsAnnotationInfo = {
- val RemoteExceptionClass = definitions.getClass("java.rmi.RemoteException")
- val ThrowsAttr = definitions.getClass("scala.throws")
- AnnotationInfo(
- ThrowsAttr.tpe,
- List(Literal(Constant(RemoteExceptionClass.tpe))),
- List()
- )
- }
-*/
- // todo: see generation of Java version UID
- private def serialVersionUIDAnnotationInfo(clazz: Symbol) = {
- def genHash(sym: Symbol): Long = {
- val sym1 = if (sym.isConstructor) sym.owner else sym
- val ts = sym.tpe match {
- case MethodType(params, rt) => (params map (_.tpe)) ::: List(rt)
- case t => List(t)
- }
- val hashes = sym1.nameString.hashCode ::
- (ts map (_.typeSymbol.nameString.hashCode))
- (0L /: hashes)((acc, h) => acc ^ h)
- }
- val hashes = for (sym <- clazz.info.decls.toList) yield genHash(sym)
- val uid: Long = (0L /: hashes) ((acc, h) => acc * 41 + h)
- val serialVersionUIDAttr = definitions.getClass("scala.SerialVersionUID")
- AnnotationInfo(
- serialVersionUIDAttr.tpe,
- List(Literal(Constant(uid))),
- List()
- )
- }
-
- private def elems(suffix: String): List[(Symbol, Symbol)] =
- for (clazz <- ObjectRefClass :: refClass.valuesIterator.toList) yield {
- val name = "scala.runtime.remoting.Remote" + clazz.name + suffix
- (clazz, definitions.getClass(name))
- }
- private val remoteRefClass = immutable.HashMap(elems(""): _*)
- private val remoteRefImpl = immutable.HashMap(elems("Impl"): _*)
-
- private val proxyInterfaceDefs = new mutable.HashMap[Symbol/*owner*/, ListBuffer[Tree]]
- private val detachedClosureApply = new mutable.HashMap[Tree, Apply]
-
- private type SymSet = mutable.HashSet[Symbol]
- private val capturedObjects = new mutable.HashMap[Symbol/*clazz*/, SymSet]
- private val capturedFuncs = new mutable.HashMap[Symbol/*clazz*/, SymSet]
- private val capturedCallers = new mutable.HashMap[Symbol/*clazz*/, SymSet]
- private val capturedThisClass = new mutable.HashMap[Symbol, Symbol]
-
- private val proxies = new mutable.HashMap[
- Symbol, //clazz
- (Symbol, Symbol, mutable.HashMap[Symbol, Symbol]) //iface, impl, accessor map
- ]
- def toInterface(clazz: Symbol) = proxies(clazz)._1
- private val classdefs = new mutable.HashMap[Symbol/*clazz*/, ClassDef]
- // detachedClosure gathers class definitions containing a "detach" apply
- private val detachedClosure = new mutable.HashMap[Symbol/*clazz*/, ClassDef]
-
- /** <p>
- * The method <code>freeObjTraverser.traverse</code> is invoked
- * in the method <code>DetachPlugin.transformUnit</code> in order to
- * gather information about objects referenced inside a detached
- * closure that will be accessed remotely through object proxies.
- * </p>
- * <p>
- * Object proxies are generated in method <code>mkClosureApply</code>
- * and their definitions are generated in method <code>genProxy</code>.
- * </p>
- */
- private val freeObjTraverser = new Traverser {
- def symSet(f: mutable.HashMap[Symbol, SymSet], sym: Symbol): SymSet = f.get(sym) match {
- case Some(ss) => ss
- case None => val ss = new mutable.HashSet[Symbol]; f(sym) = ss; ss
- }
- def getClosureApply(tree: Tree): Apply = tree match {
- case Block(_, expr) => getClosureApply(expr)
- case Typed(expr, _) => getClosureApply(expr)
- case apply @ Apply(Select(_, _), _) => apply // sel="<init>" or some "f$0"
- case Apply(fun, _) => getClosureApply(fun)
- case _ =>
- throw new Error("getClosureApply: unhandled case " + tree)
- }
- def isFuncType(tp: Type): Boolean = tp match {
- case TypeRef(pre, sym, args) =>
- Functions contains sym.tpe.typeSymbol
- case _ =>
- false
- }
- def isOuterMember(sym: Symbol): Boolean =
- sym.isOuterAccessor ||
- sym.name.endsWith(nme.OUTER/*, nme.OUTER.length*/)
- override def traverse(tree: Tree) {
- val sym = tree.symbol
- val owner =
- if (currentOwner.isModule) currentOwner
- else currentOwner.enclClass
- tree match {
- case cdef @ ClassDef(_, _, _, impl) =>
- classdefs(sym) = cdef
- super.traverse(impl)
- if (detachedClosure contains sym) {
- detachedClosure(sym) = cdef
- symSet(capturedObjects, sym) += capturedThisClass(sym)
- }
-
- case Apply(Select(qual, _), List(arg))
- if (qual.tpe <:< DetachModule.tpe) =>
- assert(isFuncType(arg.tpe))//debug
- val t = getClosureApply(arg)
- if (!t.fun.symbol.isConstructor)
- unit.error(t.pos, "detach inapplicable for " +t.fun.symbol)
- val sym = t.fun.symbol.owner
- capturedThisClass(sym) = owner
- symSet(capturedFuncs, sym)
- detachedClosureApply(tree) = t
- classdefs get sym match {
- case None =>
- detachedClosure(sym) = null // set later in case ClassDef
- case Some(cdef) =>
- detachedClosure(sym) = cdef
- symSet(capturedObjects, sym) += capturedThisClass(sym)
- }
- super.traverse(arg)
-
- case Select(qual @ This(_), name)
- if qual.symbol.isModuleClass && !qual.symbol.isPackageClass =>
- val qsym = qual.symbol
- symSet(capturedFuncs, owner) += sym
- symSet(capturedObjects, owner) += qsym
-
- case Select(qual, name)
- if (qual.hasSymbol &&
- (sym.owner != owner) &&
- !(sym.ownerChain contains ScalaPackageClass) &&
- !(sym.owner hasFlag JAVA)) =>
- val qsym = qual.symbol
- symSet(capturedFuncs, owner) += sym
- if (qsym.isStaticModule && !qsym.isPackage) {
- //println("*****1******* capturedObjects("+owner+") += "+qsym)
- symSet(capturedObjects, owner) += qsym
- }
- else if (!isOuterMember(qsym) && !(qsym isNestedIn owner)) {
- //println("*****3******* capturedCallers("+sym+") += "+qsym)
- symSet(capturedCallers, sym) += qsym
- }
-
- case _ =>
- super.traverse(tree)
- }
- }
- } //freeObjTraverser
-
- private val valueClass = immutable.HashMap(
- (for ((sym, ref) <- refClass.toList) yield (ref, sym)): _*
- ) + (ObjectRefClass -> ObjectClass)
-
- private def toValueClass(tp: Type): Type =
- if (isRefClass(tp)) valueClass(tp.typeSymbol).tpe
- else if (proxies contains tp.typeSymbol) toInterface(tp.typeSymbol).tpe
- else tp
-
- private def isRefClass(tp: Type): Boolean =
- (tp ne null) &&
- ((refClass.valuesIterator contains tp.typeSymbol) || (ObjectRefClass eq tp.typeSymbol))
-
- private def isRemoteRefClass(tp: Type): Boolean =
- (tp ne null) && (remoteRefClass.valuesIterator contains tp.typeSymbol)
-
- private def mkRemoteRefClass(tp: Type): Type = {
- assert(isRefClass(tp))
- val tp1 = remoteRefClass(tp.typeSymbol)
- typeRef(tp1.typeConstructor.prefix, tp1, Nil) // after erasure, no type anymore!
- }
-
- class TreeOuterSubstituter(from: List[Symbol], to: List[Symbol]) extends Traverser {
- if (DEBUG)
- println("\nTreeOuterSubstituter:"+
- "\n\tfrom="+from.mkString(",")+
- "\n\tto="+to.mkString(","))
- val substMap = new mutable.HashMap[Symbol, Symbol]
- override def traverse(tree: Tree) {
- def subst(from: List[Symbol], to: List[Symbol]) {
- if (!from.isEmpty)
- if (tree.symbol.tpe == from.head.tpe) {
- if (DEBUG)
- println("\nTreeOuterSubstituter\n\tsym="+tree.symbol+
- ", tpe="+tree.symbol.tpe+
- "\n\towner="+tree.symbol.owner)
- tree.symbol updateInfo to.head.tpe
- }
- else tree.symbol.tpe match {
- case MethodType(params, restp) =>
- for (p <- params if p.tpe == from.head.tpe) {
- p updateInfo to.head.tpe
- }
- if (restp == from.head.tpe) {
- if (DEBUG)
- println("\nTreeOuterSubstituter(2)\n\tsym="+tree.symbol+
- ", tpe="+tree.symbol.tpe+
- ", owner="+tree.symbol.owner)
- tree.symbol updateInfo MethodType(params, to.head.tpe)
- }
- case _ =>
- subst(from.tail, to.tail)
- }
- }
- def isOuter(sym: Symbol): Boolean =
- sym.isOuterAccessor ||
- sym.name.endsWith(nme.OUTER/*, nme.OUTER.length*/)
- if (tree.hasSymbol && isOuter(tree.symbol)) subst(from, to)
- super.traverse(tree)
- }
- }
-
- // based on class Trees.TreeTypeSubstituter
- private class TreeTypeRefSubstituter(clazz: Symbol) extends Traverser {
- override def traverse(tree: Tree) {
- val sym = tree.symbol
- if (tree.hasSymbol && isRefClass(sym.tpe) &&
- (sym.owner.enclClass == clazz) &&
- (sym.isValueParameter || sym.hasFlag(PARAMACCESSOR))) {
- sym setInfo mkRemoteRefClass(sym.tpe)
- tree.tpe = sym.tpe
- }
- if (isRefClass(tree.tpe))
- tree.tpe = mkRemoteRefClass(tree.tpe)
- super.traverse(tree)
- }
- override def apply[T <: Tree](tree: T): T = super.apply(tree)
- }
-
- private class TreeOwnerSubstituter(from: Symbol, to: Symbol) extends Traverser {
- def substType(sym: Symbol): Type = {
- def subst(tpe: Type): Type = tpe match {
- case MethodType(params, restp) =>
- println("TreeOwnerSubstituter[1]: tpe="+tpe+
- ", tpe.typeSymbol="+tpe.typeSymbol+", sym="+sym)//debug
- for (p <- params if p.tpe == from.tpe) {
- println("TreeOwnerSubstituter[2]: sym="+sym+
- ", sym.owner="+sym.owner+", p.tpe="+p.tpe)//debug
- p updateInfo to.tpe
- }
- MethodType(params, subst(restp))
- case _ =>
- if (sym.owner == from && tpe == from.tpe) {
- println("TreeOwnerSubstituter[3]: sym="+sym+
- ", owner="+sym.owner+", tpe="+tpe)//debug
- to.tpe
- } else tpe
- }
- subst(sym.tpe)
- }
- val map = new mutable.HashMap[Symbol, Symbol]
- override def traverse(tree: Tree) {
- if (tree.hasSymbol && tree.symbol != NoSymbol) {
- val sym = tree.symbol
- if (sym.owner == from) {
- val sym1 = map get sym match {
- case Some(s) => s
- case None => val s = sym.cloneSymbol(to); map(sym) = s; s
- }
- tree setSymbol sym1
- }
- val sym1 = tree.symbol
- val tp = substType(sym1)
- if (tp != sym1.tpe) {
- if (sym1.owner == to)
- println("\n%%%%%1%%%%%%% TreeOwnerSubst: tree="+tree+", sym1="+sym1+", sym1.owner="+sym1.owner)//debug
- sym1 setInfo tp
- tree setSymbol sym1
- }
- }
- super.traverse(tree)
- }
- //override def apply[T <: Tree](tree: T): T = super.apply(tree/*.duplicate*/)
- }
-
- private var inConstructorFlag = 0L
-
- private def isCaptured(clazz: Symbol, sym: Symbol): Boolean =
- if (capturedFuncs contains clazz) {
- //log("**1** isCaptured: clazz="+clazz+", sym="+sym+", ")
- capturedFuncs(clazz) contains sym
- }
- else {
- //log("**2** isCaptured: clazz="+clazz+", sym="+sym)
- sym.isMethod && !sym.isConstructor
- }
-
- private class TreeAccessorSubstituter(clazz: Symbol, objs: List[Symbol], proxySyms: List[Symbol])
- extends Transformer {
- def removeAccessors(tree: Tree): Tree = tree match {
- case Apply(fun, _) =>
- removeAccessors(fun)
- case Select(qual, _) if tree.hasSymbol && tree.symbol.isOuterAccessor =>
- removeAccessors(qual)
- case _ =>
- tree
- }
- if (DEBUG)
- println("\nTreeAccessorSubstituter: "+
- "\n\tobjs="+objs.mkString(",")+
- "\n\tproxies="+proxySyms.mkString(","))
- override def transform(tree: Tree): Tree = tree match {
- // transforms field assignment $outer.i$1.elem=..
- // into setter $outer.i$1_=(..)
- case Assign(lhs @ Select(qual1 @ Select(qual, name), name1), rhs)
- if qual1.hasSymbol && !qual1.symbol.isPrivateLocal &&
- isRemoteRefClass(qual1.tpe) =>
- if (DEBUG)
- println("\nTreeAccessorSubstituter: Assign1\n\tqual1="+qual1+", sel.tpe="+lhs.tpe+
- "\n\tqual1.tpe="+qual1.tpe+", name1="+name1+
- "\n\tqual.tpe="+qual.tpe+", tree.tpe="+tree.tpe)//debug
- val iface = toInterface(qual.tpe.typeSymbol)
- val sym = iface.tpe.decls lookup nme.getterToSetter(name)
- atPos(tree.pos)(Apply(
- Select(super.transform(qual), sym) setType lhs.tpe,
- List(super.transform(rhs))
- ) setType tree.tpe)
-
- // transforms local assignment this.x$1.elem=..
- // into setter method this.x$1_=(..)
- case Assign(lhs @ Select(qual, name), rhs)
- if qual.hasSymbol && qual.symbol.isPrivateLocal &&
- isRemoteRefClass(qual.tpe) =>
- if (DEBUG)
- println("\nTreeAccessorSubstituter: Assign2"+
- "\n\tqual="+qual+", qual.tpe="+qual.tpe+
- "\n\tname="+name)
- // substitute the 'elem' member of the reference class with
- // the corresponding setter method of the remote reference class.
- val qual1 = super.transform(qual)
- val sym = qual1.tpe.decls lookup nme.getterToSetter(name)
- val fun = gen.mkAttributedSelect(qual1, sym)
- Apply(fun, List(super.transform(rhs))) setType lhs.tpe
-
- case Assign(Select(qual, name), rhs)
- if qual.hasSymbol && (objs contains qual.symbol) =>
- val sym = qual.symbol
- val proxy = proxySyms(objs indexOf sym)
- if (DEBUG)
- println("\nTreeAccessorSubstituter: Assign3"+
- "\n\tqual="+qual+", qual.tpe="+qual.tpe+
- "\n\tproxy="+proxy+", proxy.tpe="+proxy.tpe+
- "\n\tname="+name)//debug
- // substitute the member accessor of the enclosing class with
- // the corresponding setter method of the detached interface.
- val iface = toInterface(sym)
- val substSymbols = new TreeSymSubstituter(
- sym.info.decls.toList filter { isCaptured(sym, _) },
- iface.info.decls.toList)
- substSymbols(Apply(
- Select(Ident(proxy), nme.getterToSetter(name)),
- List(super.transform(rhs))))
-
- // transforms setter invocation this.i$1_=(..)
- // into setter invocation $outer.i$1_=(..)
- case Apply(Select(qual @ This(_), name), args)
- if (objs contains qual.symbol) && nme.isSetterName(name) =>
- val proxy = proxySyms(objs indexOf qual.symbol)
- if (DEBUG)
- println("\nTreeAccessorSubstituter: Apply"+
- "\n\tqual="+qual+", qual.tpe="+qual.tpe+
- "\n\tproxy="+proxy+", proxy.tpe="+proxy.tpe+
- "\n\tname="+name+", decoded="+name.decode)
- val qual1 = gen.mkAttributedSelect(gen.mkAttributedThis(proxy.owner), proxy)
- val sym1 = proxy.info.decls lookup name.decode
- val fun = gen.mkAttributedSelect(qual1, sym1)
- Apply(fun, args map (super.transform(_))) setType tree.tpe
-
- // transforms access to field this.name$1
- // into invocation of getter method $outer.name$1()
- case Select(qual @ This(_), name)
- if objs contains qual.symbol =>
- val proxy = proxySyms(objs indexOf qual.symbol)
- if (DEBUG)
- println("\nTreeAccessorSubstituter: Select"+
- "\n\tqual="+qual+", qual.tpe="+qual.tpe+
- "\n\tproxy="+proxy+", proxy.tpe="+proxy.tpe+
- "\n\tname="+name+", decoded="+name.decode)
- val qual1 = gen.mkAttributedSelect(gen.mkAttributedThis(proxy.owner), proxy)
- val sym1 = proxy.info.decls lookup nme.originalName(name) //name
- gen.mkAttributedSelect(qual1, sym1)
-
- // transforms field $outer.name$1 into getter method $outer.name$1()
- case Select(qual @ Select(_, name1), name)
- if qual.hasSymbol && name1.endsWith(nme.OUTER/*, nme.OUTER.length*/) &&
- !tree.symbol.isMethod =>
- if (DEBUG)
- println("\nTreeAccessorSubstituter: Select0\n\tqual="+qual+
- ", qual.tpe="+qual.tpe+", name="+name)//debug
- val sym = qual.symbol
- val qual1 = gen.mkAttributedSelect(gen.mkAttributedThis(sym.owner), sym)
- val iface = toInterface(qual.tpe.typeSymbol)
- val sym1 = iface.tpe.decls lookup name
- val fun = gen.mkAttributedSelect(qual1, sym1)
- Apply(fun, List()) setType tree.tpe
-
- case Select(apply @ Apply(fun @ Select(qual, _), _), name)
- if fun.symbol.isOuterAccessor =>
- val tsym = fun.symbol.tpe.resultType.typeSymbol
- val funcs = capturedFuncs(clazz).toList filter (sym =>
- (tsym.ownerChain contains sym.owner) || (tsym isSubClass sym.owner))
- if (DEBUG)
- println("\nTreeAccessorSubstituter: Select1\n\tfun="+fun+
- ",\n\tfun.tpe="+fun.tpe+", name="+name+
- ",\n\tfuncs="+funcs)//debug
- funcs find (tree.symbol.==) match {
- case Some(sym) =>
- val qual1 =
- if (currentOwner.enclClass isNestedIn clazz) apply
- else removeAccessors(qual)
- val name1 =
- (if (tsym isSubClass qual1.tpe.typeSymbol) ""
- else tsym.fullName('$')+"$")+sym.name
- val iface = toInterface(qual1.tpe.typeSymbol)
- val sym1 = iface.tpe.decls lookup name1
- gen.mkAttributedSelect(qual1, sym1)
- case None =>
- super.transform(tree)
- }
-
- // transforms field access $outer.i$1.elem
- // into invocation of getter method $outer.i$1()
- case Select(qual @ Select(qual1, name1), name)
- if qual.hasSymbol && !qual.symbol.isPrivateLocal &&
- isRemoteRefClass(qual.tpe) =>
- if (DEBUG)
- println("\nTreeAccessorSubstituter: Select2\n\tqual="+qual+
- "\n\tqual.tpe="+qual.tpe+", tree.tpe="+tree.tpe)//debug
- val iface = toInterface(qual.symbol.owner)
- val sym1 = iface.tpe.decls lookup name1
- val fun = gen.mkAttributedSelect(qual1, sym1)
- Apply(fun, List()) setType tree.tpe
-
- // transforms local access this.i$1.elem
- // into invocation of getter method this.i$1()
- case Select(qual, name)
- if qual.hasSymbol && qual.symbol.isPrivateLocal &&
- isRemoteRefClass(qual.tpe) =>
- if (DEBUG)
- println("\nTreeAccessorSubstituter: Select3\n\tqual="+qual+
- "\n\tqual.tpe="+qual.tpe)//debug
- val sym = qual.tpe.decls lookup name
- val fun = gen.mkAttributedSelect(qual, sym)
- Apply(fun, List()) setType tree.tpe
-
- case Select(qual, name)
- if qual.hasSymbol && (objs contains qual.symbol) =>
- if (DEBUG)
- println("\nTreeAccessorSubstituter: Select4\n\tqual="+qual+
- ", qual.tpe="+qual.tpe+", name="+name)//debug
- val sym = qual.symbol
- val proxy = proxySyms(objs indexOf sym)
- // substitute the accessor of a member of the enclosing class
- // with the corresponding accessor of the detached interface
- val qual1 = gen.mkAttributedSelect(gen.mkAttributedThis(proxy.owner), proxy)
- val iface = toInterface(sym)
- val sym1 = iface.tpe.decls lookup name.decode
- gen.mkAttributedSelect(qual1, sym1)
-
- case _ =>
- super.transform(tree)
- }
- def apply[T <: Tree](tree: T): T = transform(tree).asInstanceOf[T]
- } // TreeAccessorSubstituter
-/*
- private class TreeNameSubstituter(from: Name, to: Symbol) extends Transformer {
- override def transform(tree: Tree): Tree = tree match {
- case Super(qual, mix) if tree.symbol.name == from =>
- Super(qual, mix) setSymbol to
- case This(name) if name == from =>
- This(to.name) setSymbol to
- case _ =>
- super.transform(tree)
- }
- def apply[T <: Tree](tree: T): T = transform(tree).asInstanceOf[T]
- }
-*/
- /** <p>
- * Given the closure definition (generated by previous phases)
- * </p><pre>
- * class $anonfun$1 extends Object with Function1 {
- * def this($outer: C, x$1: Int): $anonfun$1 = ..
- * def apply(x: Int): Int = x + this.$outer.x() + this.x$1
- * }</pre>
- * <p>
- * the method <code>mkClosureDef</code> transforms the above code
- * to the following:
- * </p><pre>
- * @serializable
- * class $anonfun$1$detach extends Object with Function1 {
- * def this($outer: C$proxy, x$1: Int): $anonfun$1$detach = ..
- * def apply(x: Int): Int = x + this.$outer.x() + this.x$1
- * }</pre>
- * <p>
- * In particular, it performs the following operations:
- * 1) add constructor parameter <code>proxy_n</code> to access
- * proxy of the enclosing class
- * 2) change reference types in constructor arguments to type
- * <code>Remote_type_Ref</code>
- * 3) change occurrences of <code>this</code> identifier to
- * <code>proxy_n</code> in template code
- * 4) change reference types of local value definitions associated
- * to updated constructor arguments to type <code>Remote_type_Ref</code>
- * </p>
- *
- * @param clazz the symbol of the original closure definition
- * @return the typed class definition for the detached closure.
- */
- private def mkClosureDef(clazz: Symbol): Tree = {
- val cdef = detachedClosure(clazz)
- val name = cdef.symbol.name
- if (name endsWith DETACH_SUFFIX)
- return cdef // closure already detached
-
- clazz.name = encode(clazz.name.decode + DETACH_SUFFIX)
- clazz addAnnotation serialVersionUIDAnnotationInfo(clazz)
- clazz addAnnotation serializableAnnotationInfo
-
- val thiz = capturedThisClass(clazz)
- val (List(outer), captured) =
- capturedObjects(clazz).toList partition (thiz.==)
-
- /** <p>
- * Method <code>updateConstructorParams</code> updates the class
- * symbol of the detached closure as follows:
- * 1) it appends the "$detach" suffix to the class name,
- * 2) it adds the "@serializable" annotation to class attributes,
- * 3) it adds a parameter symbol for each element of "captured".
- * </p>
- * <p>
- * and also updates the signature of the constructor symbol:
- * 1) it adds a parameter type for each element of "captured",
- * 2) it changes reference types to remote reference types.
- * </p>
- */
- def updateConstructorParams(vparams: List[ValDef]): List[Symbol] = {
- val hasOuter = !vparams.isEmpty && (vparams.head.symbol.tpe == thiz.tpe)
- val ctor = clazz.primaryConstructor
- val params = (for (sym <- captured) yield {
- val iface = toInterface(sym)
- val param = ctor.newValueParameter(ctor.pos, freshProxyName)
- .setFlag(SYNTHETIC)
- .setInfo(iface.tpe)
- param.owner = ctor
- param
- }) ::: (
- if (hasOuter) Nil
- else {
- val iface = toInterface(thiz)
- val param = ctor.newValueParameter(ctor.pos, nme.OUTER)
- .setFlag(SYNTHETIC)
- .setInfo(iface.tpe)
- param.owner = ctor
- List(param)
- }
- )
- val tp = ctor.tpe match {
- case mt @ MethodType(params1, restp) =>
- val params2 = if (hasOuter) {
- val iface = toInterface(params1.head.tpe.typeSymbol)
- ctor.newSyntheticValueParam(iface.tpe) :: params1.tail
- }
- else params1
- for (p <- params2 if isRefClass(p.tpe)) {
- p updateInfo mkRemoteRefClass(p.tpe)
- }
- MethodType(params ::: params2, restp)
- case tp =>
- tp
- }
- ctor updateInfo tp
- params
- } //updateConstructorParams
-
- /**
- */
- def updateConstructorDef(ctor: DefDef): (List[Tree], List[Symbol]) = {
- val DefDef(mods, name, tparams, List(vparams), tpt, rhs) = ctor
- val newparams = updateConstructorParams(vparams)
- val vparams0 = newparams map (sym => ValDef(sym) setType sym.tpe)
- val ctorDef = treeCopy.DefDef(ctor, mods, name, tparams, List(vparams0 ::: vparams), tpt, rhs)
- val accessors = for (sym <- newparams) yield {
- val acc = clazz.newValue(sym.pos, sym.name)
- .setFlag(SYNTHETIC | PARAMACCESSOR | PRIVATE | LOCAL)
- .setInfo(sym.tpe)
- clazz.info.decls enter acc
- acc
- }
- val accDefs = accessors map (sym => ValDef(sym) setType sym.tpe)
- (ctorDef :: accDefs, accessors)
- } //updateConstructorDef
-
- val impl = cdef.impl
- val (List(ctor: DefDef), body1) = impl.body partition (t =>
- t.isDef && t.symbol.isPrimaryConstructor)
- val (defs, accessors) = updateConstructorDef(ctor)
- val impl1 = treeCopy.Template(impl, impl.parents, impl.self, defs ::: body1)
- val (from, to) = /*List.unzip*/(
- for (obj <- captured ::: List(outer))
- yield (obj, toInterface(obj))
- ) unzip
- //val substNames = new TreeNameSubstituter(name, clazz)
- val substTypeRefs = new TreeTypeRefSubstituter(clazz)
- val substAccs = new TreeAccessorSubstituter(clazz, from, accessors)
- val substTypes = new TreeOuterSubstituter(from, to)
- val substSyms = new TreeSymSubstituter(from, to)
- val t1 = ClassDef(clazz, substSyms(substTypes(substAccs(substTypeRefs(impl1)))))
- //println("mkClosureDef: t(untyped)=\n"+nodeToString(t1))
- val t = localTyper typed t1
- detachedClosure(clazz) = t.asInstanceOf[ClassDef]
- //println("mkClosureDef: t(typed)=\n"+nodeToString(t))
- t
- } //mkClosureDef
-
- /** <p>
- * Given a class <code>C</code> with member <code>x</code>
- * which is (remotely) referenced from inside a detached closure:
- * </p><pre>
- * class C extends .. {
- * var x: Int
- * }</pre>
- * <p>
- * the method <code>addProxy</code> generates the following two
- * proxy definitions (used later in method <code>mkClosureApply</code>
- * to generate object proxies):
- * </p><pre>
- * trait C$proxy extends java.rmi.Remote {
- * def x(): Int
- * def x_=(x$1: Int): Unit
- * }
- * class C$proxyImpl
- * extends java.rmi.server.UnicastRemoteObject
- * with C$proxy with java.rmi.server.Unreferenced {
- * def this(x$0: String, x$1: C): C$ProxyImpl = ..
- * def x(): Int = this.x$1.x()
- * def x_=(x$1: Int): Unit = this.x$1.x_=(x$1)
- * def unreferenced(): Unit = RemoteRef.unbind(this.x$0)
- * }</pre>
- */
- private def addProxy(closure: Symbol, clazz: Symbol) {
- // the Sun RMI compiler crashes with the error message
- // "error: An error has occurred in the compiler; ..." with trace
- // "sun.tools.java.CompilerError: getInnerClassField" if the
- // generated proxy class does not belong to the top-level scope.
- val proxyOwner = clazz.toplevelClass.owner //clazz.owner
-
- if (DEBUG)
- println("\nadd proxy for "+clazz+" in "+proxyOwner)//debug
-
- val (proxyIntf, proxyImpl, proxyMap) = proxies get clazz match {
- case Some(proxy) =>
- proxy
- case None =>
- val iface =
- proxyOwner.newClass(clazz.pos, encode(clazz.name.decode + PROXY_SUFFIX))
- iface.sourceFile = clazz.sourceFile
- iface setFlag (ABSTRACT | TRAIT | INTERFACE) // Java interface
- val iparents = List(ObjectClass.tpe, RemoteClass.tpe)
- iface setInfo ClassInfoType(iparents, newScope, iface)
- // methods must throw RemoteException
- iface addAnnotation remoteAnnotationInfo
-
- val iclaz =
- proxyOwner.newClass(clazz.pos, encode(iface.name.decode + IMPL_SUFFIX))
- iclaz.sourceFile = clazz.sourceFile
- iclaz setFlag (SYNTHETIC | FINAL)
- // Variant 1: rebind/unbind
- val cparents = List(UnicastRemoteObjectClass.tpe, iface.tpe, UnreferencedClass.tpe)
- // Variant 2: un-/exportObject
- //val cparents = List(ObjectClass.tpe, iface.tpe, UnreferencedClass.tpe)
- iclaz setInfo ClassInfoType(cparents, newScope, iclaz)
- val proxy = (iface, iclaz, new mutable.HashMap[Symbol, Symbol])
- proxies(clazz) = proxy
- proxy
- }
-
- def addAccessors() {
- def mkGetter(sym: Symbol, name: String): Symbol = {
- val getter = if (sym.isMethod) {
- val meth = sym.cloneSymbol(proxyIntf)
- meth.name = name
- val tsym = meth.tpe.resultType.typeSymbol
- if (proxies contains tsym)
- meth updateInfo MethodType(List(), toInterface(tsym).tpe)
- meth
- }
- else {
- val meth = proxyIntf.newMethod(sym.pos, nme.getterName(sym.originalName))
- meth setFlag ACCESSOR
- meth setInfo MethodType(List(), toValueClass(sym.tpe))
- meth
- }
- getter setFlag ABSTRACT
- getter resetFlag FINAL
- getter
- }
- def mkSetter(sym: Symbol): Symbol = {
- val setter = proxyIntf.newMethod(sym.pos, nme.getterToSetter(sym.originalName))
- setter setFlag (sym.flags & ~(PRIVATE | LOCAL) | ACCESSOR | lateDEFERRED)
- val param = setter.newSyntheticValueParam(toValueClass(sym.tpe))
- setter setInfo MethodType(List(param), UnitClass.tpe)
- setter setFlag ABSTRACT
- setter resetFlag FINAL
- setter
- }
- def create(owner: Symbol, clazz: Symbol) {
- val funcs = capturedFuncs(owner).toList
- funcs find (_.isConstructor) match {
- case Some(sym) if capturedFuncs contains sym.owner =>
- create(sym.owner, clazz)
- case _ =>
- }
- val newfuncs = funcs filterNot (proxyMap.valuesIterator.toList contains)
- val (members, others) = newfuncs partition (clazz isSubClass _.owner)
- val outers = others filter (sym =>
- (clazz isNestedIn sym.owner) && clazz.isClass)
- for (sym <- outers) {
- val sym1 = mkGetter(sym, sym.fullName('$'))
- proxyIntf.info.decls enter sym1
- proxyMap(sym1) = sym
- }/*
- for (sym <- outers if capturedCallers contains sym;
- caller <- capturedCallers(sym)) {
- val sym1 = mkGetter(sym, caller.nameString+'$'+sym.nameString)
- if (clazz.isAnonymousClass)
- println("[2] clazz="+clazz+", sym1="+sym1)
- proxyIntf.info.decls enter sym1
- proxyMap(sym1) = sym
- }*/
- for (sym <- members if !sym.isConstructor) {
- val sym1 = mkGetter(sym, sym.originalName.decode)
- proxyIntf.info.decls enter sym1
- proxyMap(sym1) = sym
- }
- for (sym <- members if isRefClass(sym.tpe)) {
- val sym1 = mkSetter(sym)
- proxyIntf.info.decls enter sym1
- proxyMap(sym1) = sym
- }
- }
- create(closure, clazz)
- }
-
- addAccessors
- if (DEBUG) {
- val xs = proxyMap.keysIterator.toList
- println("\tadded "+proxyIntf+
- "\n\twith "+xs.mkString(", ")+" ["+xs.length+"]")
- }
- } //addProxy
-
- def genProxy(clazz: Symbol) {
- val (proxyIntf, proxyImpl, proxyMap) = proxies(clazz)
-
- // generate proxy interface
- val ifaceBody = proxyMap.keysIterator.toList map { DefDef(_, EmptyTree) }
- val ifaceParents =
- proxyIntf.info.parents map (t => TypeTree(t) setPos proxyIntf.pos)
- val ifaceTmpl = Template(ifaceParents, emptyValDef, ifaceBody)
- val ifaceDef = localTyper typed ClassDef(proxyIntf, ifaceTmpl)
-
- // generated proxy implementation
- // Variant 1: rebind/unbind
- val param1 =
- proxyImpl.newValueParameter(proxyImpl.pos, freshName("x$"))
- .setFlag(SYNTHETIC | PARAMACCESSOR | PRIVATE | LOCAL)
- .setInfo(StringClass.tpe)
- proxyImpl.info.decls enter param1
-
- val param2 =
- proxyImpl.newValueParameter(proxyImpl.pos, freshName("x$"))
- .setFlag(SYNTHETIC | PARAMACCESSOR | PRIVATE | LOCAL)
- .setInfo(clazz.tpe)
- proxyImpl.info.decls enter param2
-
- val unreferenced =
- proxyImpl.newMethod(proxyImpl.pos, nme_unreferenced)
- .setInfo(MethodType(List(), UnitClass.tpe))
- proxyImpl.info.decls enter unreferenced
-
- val proxyBody =
- DefDef(unreferenced, List(List()), Block(
- List(Apply( //stats
- Select(gen.mkAttributedRef(DebugModule), "info"),
- List(Apply(
- Select(Literal(Constant("unreferenced: ")), "$plus"),
- // Variant 1: rebind/unbind
- List(Select(This(proxyImpl), param1.name))
- // Variant 2: un-/exportObject
- //List(This(proxyImpl))
- ))
- )),
- Apply( //expr
- Select(gen.mkAttributedRef(RemoteRefModule), nme_unbind),
- // Variant 1: rebind/unbind
- List(Select(This(proxyImpl), param1.name))
- // Variant 2: un-/exportObject
- //List(This(proxyImpl))
- )
- )) :: (
- for (sym <- proxyIntf.info.decls.toList) yield {
- val sym1 = sym.cloneSymbol(proxyImpl)
- sym1 resetFlag (ABSTRACT | DEFERRED | lateDEFERRED)
- proxyImpl.info.decls enter sym1
- DefDef(sym1, {
- val sym2 = proxyMap(sym)
- var t = Select(This(proxyImpl), param2)
- var outerAcc =
- if (sym2.owner isSubClass param2) None
- else param2.info.decls.toList find (_.isOuterAccessor)
- while (!outerAcc.isEmpty) {
- t = Select(t, outerAcc.get)
- val outerClass = outerAcc.get.tpe.resultType.typeSymbol
- outerAcc =
- if (sym2.owner == outerClass) None
- else outerClass.info.decls.toList find (_.isOuterAccessor)
- }
- val sel = Select(t, sym2)
- if (sym2.isMethod) {
- Apply(sel, sym1.paramss(0) map { Ident(_) })
- }
- else if (isRefClass(sym2.tpe)) {
- val sel1 = Select(sel, nme.elem)
- if (sym1.tpe.paramTypes.length == 0) sel1
- else Assign(sel1, Ident(sym1.paramss(0)(0)))
- }
- else
- sel
- })
- })
- val proxyParents =
- proxyImpl.info.parents map (t => TypeTree(t) setPos proxyImpl.pos)
- val proxyTmpl = Template(proxyParents,
- emptyValDef, NoMods,
- // Variant 1: rebind/unbind
- /*vparamss*/ List(List(ValDef(param1), ValDef(param2))),
- // Variant 2: un-/exportObject
- ///*vparamss*/ List(List(ValDef(param2))),
- /*argss*/ List(List()), proxyBody, NoPosition)
- val proxyDef = localTyper typed ClassDef(proxyImpl, proxyTmpl)
-
- // remember definitions to be added by transformStats
- val proxyOwner = proxyIntf.owner
- if (! (proxyInterfaceDefs contains proxyOwner))
- proxyInterfaceDefs(proxyOwner) = new ListBuffer
- proxyInterfaceDefs(proxyOwner) += ifaceDef
- proxyInterfaceDefs(proxyOwner) += proxyDef
- } //genProxy
-
- private def freshName(s: String): Name =
- unit.fresh.newName(s)
-
- private def freshProxyName: Name =
- unit.fresh.newName(PROXY_PREFIX)
-
- /** <p>
- * Given a detached closure applied in some environment consisting
- * of an enclosing class <code>C</code> and some local variables
- * <code>x$1</code> (immutable) and <code>y$1</code> (mutable):
- * </p><pre>
- * scala.remoting.detach.apply({
- * (new $anonfun$1(C.this, x$1, y$1): Function1)
- * })</pre>
- * <p>
- * the above code is transformed to the following block:
- * </p><pre>
- * {
- * val proxy$1: C$Proxy =
- * RemoteRef.bind("C/proxy$1", new C$ProxyImpl(C.this))
- * val proxy$2: RemoteIntRef =
- * RemoteRef.bind("C/proxy$2", new RemoteIntRefImpl(y$1))
- * (new $anonfun$1detach(proxy$1, x$1, proxy$2): Function1)
- * }
- * </pre>
- */
- private def mkClosureApply(tree: Tree): Tree = {
- val apply @ Apply(fun, args) = detachedClosureApply(tree)
- assert(fun.symbol.isConstructor, fun.symbol+" is not a constructor")//debug
- val clazz = apply.tpe.typeSymbol
- val thiz = capturedThisClass(clazz)
- val cdef = mkClosureDef(clazz)
- val uid = localTyper typed {
- val sym = currentOwner.newValue(tree.pos, freshName("uid$"))
- .setFlag(SYNTHETIC)
- .setInfo(StringClass.tpe)
- val rhs = Apply(Select(
- Apply(
- Select(New(TypeTree(UIDClass.tpe)), nme.CONSTRUCTOR),
- List()
- ),
- "toString"
- ), List())
- ValDef(sym, rhs)
- }
- def cast(tree: Tree, tpe: Type): Tree =
- Apply(
- TypeApply(
- Select(tree, Object_asInstanceOf),
- List(TypeTree(tpe))
- ),
- List()
- )
-
- def mkProxy(csym: Symbol): ValDef = {
- val (iface, proxy, _) = proxies(csym)
- val sym = currentOwner.newValue(csym.pos, freshProxyName)
- .setFlag(SYNTHETIC)
- .setInfo(iface.tpe)
- val bind = Select(gen.mkAttributedRef(RemoteRefModule), nme_bind)
- val name = Apply(
- Select(Literal(Constant(sym.fullName('/')+"$")), String_+),
- List(Ident(uid.symbol))
- )
- val thiz =
- if (csym.isModule) gen.mkAttributedIdent(csym)
- else gen.mkAttributedThis(csym)
- val args = List(name,
- Apply(Select(New(TypeTree(proxy.tpe)), nme.CONSTRUCTOR),
- // Variant 1: rebind/unbind
- List(name, thiz)))
- // Variant 2: un-/exportObject
- //List(thiz)))
- val rhs = cast(Apply(bind, args), iface.tpe)
- ValDef(sym, rhs)
- }
-
- def mkObjProxies: List[ValDef] = {
- val (outer, captured) =
- capturedObjects(clazz).toList partition (thiz.==)
- (captured ::: outer) map mkProxy
- }
-
- def mkArgProxies: Map[Symbol, ValDef] = {
- def retRefs(t: Tree): List[Tree] = t match {
- case Apply(fun, args) =>
- args flatMap retRefs
- case id @ Ident(_) =>
- if (isRefClass(id.tpe)) List(id) else Nil
- case Template(_, _, body) =>
- body flatMap retRefs
- case New(tpt) =>
- retRefs(tpt)
- case thiz @ This(_) =>
- if (isRefClass(thiz.tpe)) List(thiz) else Nil
- case _ =>
- throw new Error("Internal error: " + t.getClass)
- }
- new immutable.HashMap[Symbol, ValDef] ++ (
- for (variable <- retRefs(apply)) yield {
- val param = variable.symbol
- assert(isRefClass(param.tpe), param)
- val proxy = currentOwner.newValue(param.pos, freshProxyName)
- .setFlag(SYNTHETIC)
- .setInfo(mkRemoteRefClass(param.tpe))
- val bind = Select(gen.mkAttributedRef(RemoteRefModule), nme_bind)
- //val name = Literal(Constant(proxy.fullName('/')))
- val name = Apply(
- Select(Literal(Constant(proxy.fullName('/')+"$")), String_+),
- List(Ident(uid.symbol))
- )
- val ts = param.tpe.typeSymbol
- val args = List(name,
- Apply(
- Select(New(TypeTree(remoteRefImpl(ts).tpe)), nme.CONSTRUCTOR),
- // Variant 1: rebind/unbind
- List(name, variable)))
- // Variant 2: un-/exportObject
- //List(variable)))
- val rhs = cast(Apply(bind, args), remoteRefClass(ts).tpe)
- (param, ValDef(proxy, rhs))
- }
- )
- } //mkArgProxies
-
- /** <p>
- * Method <code>mkClosureInstance</code> updates the list of actual
- * parameters passed to the closure instance.
- * </p>
- */
- def mkClosureInstance(objProxies: List[ValDef],
- argProxies: Map[Symbol, ValDef]): Tree = {
- fun.tpe = fun.symbol.tpe
- val args0 = objProxies map (tree => Ident(tree.symbol))
- val hasOuter = !args.isEmpty && (args.head.symbol.tpe == thiz.tpe)
- val args1 = (if (hasOuter) args.tail else args) map (arg =>
- argProxies get arg.symbol match {
- case Some(t) => Ident(t.symbol)
- case None => arg
- }
- )
- if (DEBUG)
- println("\nmkClosureInstance:\n\targs0="+args0+"\n\targs1="+args1)
- val t = Typed(
- Apply(fun, args0 ::: args1),
- //TypeTree(clazz.info.parents.tail.head) //interface (2.7.x)
- TypeTree(clazz.info.parents.head) //interface (2.8.x)
- )
- localTyper typed t
- } //mkClosureInstance
-
- val objProxies = mkObjProxies
- val argProxies = mkArgProxies
- val stats = uid :: objProxies ::: argProxies.valuesIterator.toList
- val expr = mkClosureInstance(objProxies, argProxies)
- localTyper typed Block(stats, expr)
- } //mkClosureApply
-
- override def transform(tree: Tree): Tree = {
- def withInConstructorFlag(inConstructorFlag: Long)(f: => Tree): Tree = {
- val savedInConstructorFlag = this.inConstructorFlag
- this.inConstructorFlag = inConstructorFlag
- val t = f
- this.inConstructorFlag = savedInConstructorFlag
- t
- }
- if (!isEnabled) return tree
- tree match {
- case ClassDef(mods, name, tparams, impl) =>
- val tree1 = super.transform(tree)
- if (!reporter.hasErrors && (capturedThisClass contains tree1.symbol))
- mkClosureDef(tree1.symbol)
- else
- tree1
-
- case Apply(Select(_, _), _) =>
- val tree1 = super.transform(tree)
- if (!reporter.hasErrors && (detachedClosureApply contains tree1))
- atPos(tree1.pos)(mkClosureApply(tree1))
- else
- tree1
-
- case Template(_, _, _) =>
- withInConstructorFlag(0) { super.transform(tree) }
-
- case _ =>
- super.transform(tree)
- }
- }
-
- /** Transform statements and add detached definitions to them. */
- override def transformStats(stats: List[Tree], exprOwner: Symbol): List[Tree] = {
- val stats1 = super.transformStats(stats, exprOwner)
- val newDefs = {
- val buf = new ListBuffer[Tree]
- if (proxyInterfaceDefs contains currentOwner)
- buf ++= proxyInterfaceDefs(currentOwner).toList
- buf.toList
- }
- if (newDefs.isEmpty) stats1 else stats1 ::: newDefs
- }
-
- private def genProxies() {
- def printDebugInfo() {
- println("\ncompilation unit : "+unit)
- for ((sym, _) <- detachedClosure) {
- println("closure to detach: "+sym+" (owner: "+sym.owner+")")
- println("captured this : "+capturedThisClass(sym))
- val objs = capturedObjects get sym match {
- case Some(ss) => ss.toList
- case None => Nil
- }
- println("captured objects : "+objs.mkString(", ")+" ["+objs.length+"]")
- }
- println("\ncalled functions :")
- for (sym <- capturedFuncs.keysIterator) {
- val xs = capturedFuncs(sym).toList map (s => {
- val callers = capturedCallers get s match {
- case Some(ss) => "|"+ss.toList.mkString(",")
- case None => ""
- }
- s+"("+s.owner.name+callers+")"
- })
- println("\t"+sym+" -> "+xs.mkString(", ")+" ["+xs.length+"]")
- }
- }
- def printDebugInfo2() {
- println("\nproxy classes :")
- for (sym <- proxies.keysIterator)
- println("\t"+sym+"("+sym.tpe+") -> "+proxies(sym))
- }
- if (DEBUG)
- printDebugInfo
- for ((closure, _) <- detachedClosure;
- captured <- capturedObjects(closure))
- addProxy(closure, captured)
- if (DEBUG)
- printDebugInfo2
- for (sym <- proxies.keysIterator)
- genProxy(sym)
- } //genProxies
-
- /** <p>
- * Method <code>transformUnit</code> performs three successive operations:
- * </p>
- * <ol>
-   *    <li>it first gathers info about free objects and detached
- * closures;</li>
- * <li>it then adds proxies for free objects;</li>
-   *    <li>finally, it transforms detached closures (both definition and
- * instantiation).</li>
- * </ol>
- */
- override def transformUnit(unit: CompilationUnit) {
- freeObjTraverser.traverse(unit.body)
- if (!reporter.hasErrors) genProxies
- super.transformUnit(unit)
- }
- }
-
-}
-
diff --git a/src/detach/plugin/scala/tools/detach/DetachPlugin.scala b/src/detach/plugin/scala/tools/detach/DetachPlugin.scala
deleted file mode 100644
index c6e18b7abe..0000000000
--- a/src/detach/plugin/scala/tools/detach/DetachPlugin.scala
+++ /dev/null
@@ -1,41 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Stephane Micheloud
- */
-
-package scala.tools.detach
-
-import scala.tools.nsc.{Global, Phase}
-import scala.tools.nsc.plugins.{Plugin, PluginComponent}
-
-class DetachPlugin(val global: Global) extends Plugin {
- import global._
-
- val name = "detach"
- val description = "Perform detaching of remote closures"
-
- object detach extends {
- val global = DetachPlugin.this.global
- val runsAfter = List("lambdalift")
- override val runsBefore = List("constructors")
- } with Detach
-
- val components = List[PluginComponent](detach)
-
- def setEnabled(flag: Boolean) { detach.isEnabled = flag }
-
- override def processOptions(options: List[String], error: String => Unit) = {
- var enabled = false
- for (option <- options) {
- if (option == "enable") {
- enabled = true
- } else {
- error("Option not understood: "+option)
- }
- }
- setEnabled(enabled)
- }
-
- override val optionsHelp: Option[String] =
- Some(" -P:detach:enable Enable detaching of remote closures")
-}
diff --git a/src/detach/plugin/scalac-plugin.xml b/src/detach/plugin/scalac-plugin.xml
deleted file mode 100644
index 6c8600e331..0000000000
--- a/src/detach/plugin/scalac-plugin.xml
+++ /dev/null
@@ -1,4 +0,0 @@
-<plugin>
- <name>detach</name>
- <classname>scala.tools.detach.DetachPlugin</classname>
-</plugin>
diff --git a/src/eclipse/README.md b/src/eclipse/README.md
index 39a3f457a0..73aa270b77 100644
--- a/src/eclipse/README.md
+++ b/src/eclipse/README.md
@@ -29,7 +29,7 @@ inside `src/library` with the following contents:
version.number=2.10.0-20120603-141530-b34313db72
maven.version.number=2.10.0-SNAPSHOT
osgi.version.number=2.10.0.v20120603-141530-b34313db72
- copyright.string=Copyright 2002-2012 LAMP/EPFL
+ copyright.string=Copyright 2002-2013 LAMP/EPFL
4. Project files are tracked by Git, so adding them to `.gitignore` won't prevent them
from being shown as dirty in `git status`. You can still ignore them by telling Git to
@@ -44,7 +44,7 @@ If you want to go back to normal (for instance, to commit your changes to projec
DETAILS
=======
-The compiler project depends on the library, reflect, asm and fjbg projects. The
+The compiler project depends on the library, reflect, and asm projects. The
builder will take care of the correct ordering, and changes in one project will
be picked up by the dependent projects.
diff --git a/src/eclipse/fjbg/.classpath b/src/eclipse/fjbg/.classpath
deleted file mode 100644
index 3e2f55f48a..0000000000
--- a/src/eclipse/fjbg/.classpath
+++ /dev/null
@@ -1,7 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<classpath>
- <classpathentry kind="src" path="fjbg"/>
- <classpathentry kind="con" path="org.scala-ide.sdt.launching.SCALA_CONTAINER"/>
- <classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/JavaSE-1.6"/>
- <classpathentry kind="output" path="libs-classes-fjbg"/>
-</classpath>
diff --git a/src/eclipse/interactive/.classpath b/src/eclipse/interactive/.classpath
new file mode 100644
index 0000000000..870cc67aec
--- /dev/null
+++ b/src/eclipse/interactive/.classpath
@@ -0,0 +1,10 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<classpath>
+ <classpathentry kind="src" path="interactive"/>
+ <classpathentry combineaccessrules="false" kind="src" path="/reflect"/>
+ <classpathentry combineaccessrules="false" kind="src" path="/scala-library"/>
+ <classpathentry combineaccessrules="false" kind="src" path="/scala-compiler"/>
+ <classpathentry combineaccessrules="false" kind="src" path="/scaladoc"/>
+ <classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER"/>
+ <classpathentry kind="output" path="build-quick-interactive"/>
+</classpath>
diff --git a/src/eclipse/interactive/.project b/src/eclipse/interactive/.project
new file mode 100644
index 0000000000..1d30e0c001
--- /dev/null
+++ b/src/eclipse/interactive/.project
@@ -0,0 +1,35 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<projectDescription>
+ <name>interactive</name>
+ <comment></comment>
+ <projects>
+ </projects>
+ <buildSpec>
+ <buildCommand>
+ <name>org.scala-ide.sdt.core.scalabuilder</name>
+ <arguments>
+ </arguments>
+ </buildCommand>
+ </buildSpec>
+ <natures>
+ <nature>org.scala-ide.sdt.core.scalanature</nature>
+ <nature>org.eclipse.jdt.core.javanature</nature>
+ </natures>
+ <linkedResources>
+ <link>
+ <name>build-quick-interactive</name>
+ <type>2</type>
+ <locationURI>SCALA_BASEDIR/build/quick/classes/interactive</locationURI>
+ </link>
+ <link>
+ <name>interactive</name>
+ <type>2</type>
+ <locationURI>SCALA_BASEDIR/src/interactive</locationURI>
+ </link>
+ <link>
+ <name>lib</name>
+ <type>2</type>
+ <locationURI>SCALA_BASEDIR/lib</locationURI>
+ </link>
+ </linkedResources>
+</projectDescription>
diff --git a/src/eclipse/partest/.classpath b/src/eclipse/partest/.classpath
index 7936d4d4b4..a990c5a1b3 100644
--- a/src/eclipse/partest/.classpath
+++ b/src/eclipse/partest/.classpath
@@ -8,8 +8,8 @@
<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER"/>
<classpathentry kind="lib" path="lib/ant/ant.jar"/>
<classpathentry kind="lib" path="lib/jline.jar"/>
- <classpathentry kind="lib" path="lib/msil.jar"/>
<classpathentry combineaccessrules="false" kind="src" path="/asm"/>
<classpathentry combineaccessrules="false" kind="src" path="/continuations-library"/>
+ <classpathentry combineaccessrules="false" kind="src" path="/repl"/>
<classpathentry kind="output" path="build-quick-partest"/>
</classpath>
diff --git a/src/eclipse/repl/.classpath b/src/eclipse/repl/.classpath
new file mode 100644
index 0000000000..30744da306
--- /dev/null
+++ b/src/eclipse/repl/.classpath
@@ -0,0 +1,11 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<classpath>
+ <classpathentry kind="src" path="repl"/>
+ <classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER"/>
+ <classpathentry combineaccessrules="false" kind="src" path="/reflect"/>
+ <classpathentry combineaccessrules="false" kind="src" path="/scala-compiler"/>
+ <classpathentry combineaccessrules="false" kind="src" path="/scala-library"/>
+ <classpathentry kind="var" path="SCALA_BASEDIR/lib/jline.jar"/>
+ <classpathentry combineaccessrules="false" kind="src" path="/asm"/>
+ <classpathentry kind="output" path="build-quick-repl"/>
+</classpath>
diff --git a/src/eclipse/repl/.project b/src/eclipse/repl/.project
new file mode 100644
index 0000000000..ea188bc262
--- /dev/null
+++ b/src/eclipse/repl/.project
@@ -0,0 +1,35 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<projectDescription>
+ <name>repl</name>
+ <comment></comment>
+ <projects>
+ </projects>
+ <buildSpec>
+ <buildCommand>
+ <name>org.scala-ide.sdt.core.scalabuilder</name>
+ <arguments>
+ </arguments>
+ </buildCommand>
+ </buildSpec>
+ <natures>
+ <nature>org.scala-ide.sdt.core.scalanature</nature>
+ <nature>org.eclipse.jdt.core.javanature</nature>
+ </natures>
+ <linkedResources>
+ <link>
+ <name>build-quick-repl</name>
+ <type>2</type>
+ <locationURI>SCALA_BASEDIR/build/quick/classes/repl</locationURI>
+ </link>
+ <link>
+ <name>lib</name>
+ <type>2</type>
+ <locationURI>SCALA_BASEDIR/lib</locationURI>
+ </link>
+ <link>
+ <name>repl</name>
+ <type>2</type>
+ <locationURI>SCALA_BASEDIR/src/repl</locationURI>
+ </link>
+ </linkedResources>
+</projectDescription>
diff --git a/src/eclipse/scala-compiler/.classpath b/src/eclipse/scala-compiler/.classpath
index d438d3e610..0488a0dc39 100644
--- a/src/eclipse/scala-compiler/.classpath
+++ b/src/eclipse/scala-compiler/.classpath
@@ -3,12 +3,10 @@
<classpathentry kind="src" path="compiler"/>
<classpathentry combineaccessrules="false" kind="src" path="/reflect"/>
<classpathentry combineaccessrules="false" kind="src" path="/scala-library"/>
- <classpathentry combineaccessrules="false" kind="src" path="/fjbg"/>
<classpathentry combineaccessrules="false" kind="src" path="/asm"/>
<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER"/>
<classpathentry kind="lib" path="lib/ant/ant.jar"/>
<classpathentry kind="lib" path="lib/jline.jar"/>
- <classpathentry kind="lib" path="lib/msil.jar"/>
<classpathentry combineaccessrules="false" kind="src" path="/continuations-library"/>
<classpathentry kind="output" path="build-quick-compiler"/>
</classpath>
diff --git a/src/eclipse/scaladoc/.classpath b/src/eclipse/scaladoc/.classpath
new file mode 100644
index 0000000000..f12ba4bb2c
--- /dev/null
+++ b/src/eclipse/scaladoc/.classpath
@@ -0,0 +1,13 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<classpath>
+ <classpathentry combineaccessrules="false" kind="src" path="/partest"/>
+ <classpathentry kind="src" path="scaladoc"/>
+ <classpathentry combineaccessrules="false" kind="src" path="/reflect"/>
+ <classpathentry combineaccessrules="false" kind="src" path="/scala-library"/>
+ <classpathentry combineaccessrules="false" kind="src" path="/scala-compiler"/>
+ <classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER"/>
+ <classpathentry kind="var" path="SCALA_BASEDIR/lib/ant/ant.jar"/>
+ <classpathentry combineaccessrules="false" kind="src" path="/continuations-library"/>
+ <classpathentry combineaccessrules="false" kind="src" path="/asm"/>
+ <classpathentry kind="output" path="build-quick-scaladoc"/>
+</classpath>
diff --git a/src/eclipse/fjbg/.project b/src/eclipse/scaladoc/.project
index 8acea9f5fe..bf7649039f 100644
--- a/src/eclipse/fjbg/.project
+++ b/src/eclipse/scaladoc/.project
@@ -1,6 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<projectDescription>
- <name>fjbg</name>
+ <name>scaladoc</name>
<comment></comment>
<projects>
</projects>
@@ -17,14 +17,19 @@
</natures>
<linkedResources>
<link>
- <name>fjbg</name>
+ <name>build-quick-scaladoc</name>
<type>2</type>
- <locationURI>SCALA_BASEDIR/src/fjbg</locationURI>
+ <locationURI>SCALA_BASEDIR/build/quick/classes/scaladoc</locationURI>
</link>
<link>
- <name>libs-classes-fjbg</name>
+ <name>lib</name>
<type>2</type>
- <locationURI>SCALA_BASEDIR/build/libs/classes/fjbg</locationURI>
+ <locationURI>SCALA_BASEDIR/lib</locationURI>
+ </link>
+ <link>
+ <name>scaladoc</name>
+ <type>2</type>
+ <locationURI>SCALA_BASEDIR/src/scaladoc</locationURI>
</link>
</linkedResources>
</projectDescription>
diff --git a/src/eclipse/scalap/.classpath b/src/eclipse/scalap/.classpath
index 16737bd9cd..0a55745702 100644
--- a/src/eclipse/scalap/.classpath
+++ b/src/eclipse/scalap/.classpath
@@ -7,7 +7,6 @@
<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER"/>
<classpathentry kind="lib" path="lib/ant/ant.jar"/>
<classpathentry kind="lib" path="lib/jline.jar"/>
- <classpathentry kind="lib" path="lib/msil.jar"/>
<classpathentry combineaccessrules="false" kind="src" path="/continuations-library"/>
<classpathentry kind="output" path="build-quick-scalap"/>
</classpath>
diff --git a/src/fjbg/ch/epfl/lamp/fjbg/FJBGContext.java b/src/fjbg/ch/epfl/lamp/fjbg/FJBGContext.java
deleted file mode 100644
index 9856dc7311..0000000000
--- a/src/fjbg/ch/epfl/lamp/fjbg/FJBGContext.java
+++ /dev/null
@@ -1,195 +0,0 @@
-/* FJBG -- Fast Java Bytecode Generator
- * Copyright 2002-2013 LAMP/EPFL
- * @author Michel Schinz
- */
-
-package ch.epfl.lamp.fjbg;
-
-import java.io.DataInputStream;
-import java.io.IOException;
-
-/**
- * Context in which FJBG executes. Used both as a factory for most
- * FJBG classes and as a repository for other factories.
- *
- * @author Michel Schinz
- * @version 1.0
- */
-
-public class FJBGContext {
- /** Class file major version */
- final int MAJOR_VERSION;
-
- /** Class file minor version */
- final int MINOR_VERSION;
-
- public FJBGContext() {
- this(45, 3);
- }
-
- public FJBGContext(int major, int minor) {
- MAJOR_VERSION = major;
- MINOR_VERSION = minor;
- }
-
- // Factory methods
- //////////////////////////////////////////////////////////////////////
-
- public JClass JClass(int accessFlags,
- String name,
- String superclassName,
- String[] interfaceNames,
- String sourceFileName) {
- return new JClass(this,
- accessFlags,
- name,
- superclassName,
- interfaceNames,
- sourceFileName);
- }
-
- public JClass JClass(DataInputStream stream)
- throws IOException {
- return new JClass(this, stream);
- }
-
- public JConstantPool JConstantPool() {
- return new JConstantPool(this);
- }
-
- public JConstantPool JConstantPool(DataInputStream stream)
- throws IOException {
- return new JConstantPool(this, stream);
- }
-
- public JField JField(JClass owner,
- int accessFlags,
- String name,
- JType type) {
- return new JField(this,
- owner,
- accessFlags,
- name,
- type);
- }
-
- public JField JField(JClass owner, DataInputStream stream)
- throws IOException {
- return new JField(this, owner, stream);
- }
-
- public JMethod JMethod(JClass owner,
- int accessFlags,
- String name,
- JType returnType,
- JType[] argTypes,
- String[] argNames) {
- return new JMethod(this,
- owner,
- accessFlags,
- name,
- returnType,
- argTypes,
- argNames);
- }
-
- public JMethod JMethod(JClass owner,
- int accessFlags,
- String name,
- JMethodType type,
- String[] argNames) {
- return JMethod(owner,
- accessFlags,
- name,
- type.getReturnType(),
- type.getArgumentTypes(),
- argNames);
- }
-
- public JMethod JMethod(JClass owner, DataInputStream stream)
- throws IOException {
- return new JMethod(this, owner, stream);
- }
-
- public JLocalVariable JLocalVariable(JMethod owner,
- JType type,
- String name,
- int index) {
- return new JLocalVariable(this, owner, type, name, index);
- }
-
- public JCode JCode(JClass clazz, JMethod owner) {
- return new JExtendedCode(this, clazz, owner);
- }
-
- public JCode JCode(JClass clazz, JMethod owner, DataInputStream stream)
- throws IOException {
- return new JCode(this, clazz, owner, stream);
- }
-
- public JAttributeFactory JAttributeFactory() {
- return new JAttributeFactory(this);
- }
-
- // Attributes
- public JCodeAttribute JCodeAttribute(JClass clazz, JMethod owner) {
- return new JCodeAttribute(this, clazz, owner);
- }
-
- public JEnclosingMethodAttribute JEnclosingMethodAttribute(JClass clazz,
- String className,
- String methodName,
- JType methodType) {
- return new JEnclosingMethodAttribute(this, clazz, className, methodName, methodType);
- }
-
- public JExceptionsAttribute JExceptionsAttribute(JClass clazz,
- JMethod owner) {
- return new JExceptionsAttribute(this, clazz, owner);
- }
-
- public JLineNumberTableAttribute JLineNumberTableAttribute(JClass clazz,
- JCode owner) {
- return new JLineNumberTableAttribute(this, clazz, owner);
- }
-
- public JLocalVariableTableAttribute JLocalVariableTableAttribute(JClass clazz,
- JCode owner) {
- return new JLocalVariableTableAttribute(this, clazz, owner);
- }
-
- public JOtherAttribute JOtherAttribute(JClass clazz,
- Object owner,
- String name,
- byte[] contents,
- int length) {
- return new JOtherAttribute(this, clazz, owner, name, contents, length);
- }
-
- public JOtherAttribute JOtherAttribute(JClass clazz,
- Object owner,
- String name,
- byte[] contents) {
- return JOtherAttribute(clazz, owner, name, contents, contents.length);
- }
-
- public JSourceFileAttribute JSourceFileAttribute(JClass clazz,
- String sourceFileName) {
- return new JSourceFileAttribute(this, clazz, sourceFileName);
- }
-
- public JStackMapTableAttribute JStackMapTableAttribute(JClass clazz,
- JCode owner) {
- return new JStackMapTableAttribute(this, clazz, owner);
- }
-
- /// Repository
- //////////////////////////////////////////////////////////////////////
-
- protected JAttributeFactory jAttributeFactory = null;
- public JAttributeFactory getJAttributeFactory() {
- if (jAttributeFactory == null)
- jAttributeFactory = JAttributeFactory();
- return jAttributeFactory;
- }
-}
diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JAccessFlags.java b/src/fjbg/ch/epfl/lamp/fjbg/JAccessFlags.java
deleted file mode 100644
index 01d8cc9a7e..0000000000
--- a/src/fjbg/ch/epfl/lamp/fjbg/JAccessFlags.java
+++ /dev/null
@@ -1,35 +0,0 @@
-/* FJBG -- Fast Java Bytecode Generator
- * Copyright 2002-2013 LAMP/EPFL
- * @author Michel Schinz
- */
-
-package ch.epfl.lamp.fjbg;
-
-/**
- * Definition of access flags for fields, methods and classes.
- *
- * @author Michel Schinz
- * @version 1.0
- */
-
-public interface JAccessFlags {
- public static int ACC_PUBLIC = 0x0001;
- public static int ACC_PRIVATE = 0x0002;
- public static int ACC_PROTECTED = 0x0004;
- public static int ACC_STATIC = 0x0008;
- public static int ACC_FINAL = 0x0010;
- public static int ACC_SUPER = 0x0020;
- public static int ACC_VOLATILE = 0x0040;
- public static int ACC_TRANSIENT = 0x0080;
- public static int ACC_NATIVE = 0x0100;
- public static int ACC_INTERFACE = 0x0200;
- public static int ACC_ABSTRACT = 0x0400;
- public static int ACC_STRICT = 0x0800;
- public static int ACC_SYNTHETIC = 0x1000;
- public static int ACC_ANNOTATION= 0x2000;
- public static int ACC_ENUM = 0x4000;
-
- // 1.5 specifics
- public static int ACC_BRIDGE = 0x0040;
- public static int ACC_VARARGS = 0x0080;
-}
diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JArrayType.java b/src/fjbg/ch/epfl/lamp/fjbg/JArrayType.java
deleted file mode 100644
index 61a04523ca..0000000000
--- a/src/fjbg/ch/epfl/lamp/fjbg/JArrayType.java
+++ /dev/null
@@ -1,62 +0,0 @@
-/* FJBG -- Fast Java Bytecode Generator
- * Copyright 2002-2013 LAMP/EPFL
- * @author Michel Schinz
- */
-
-package ch.epfl.lamp.fjbg;
-
-/**
- * Types for Java arrays.
- *
- * @author Michel Schinz
- * @version 1.0
- */
-
-public class JArrayType extends JReferenceType {
- protected final JType elementType;
- protected String signature = null;
-
- public JArrayType(JType elementType) {
- this.elementType = elementType;
- }
-
- public int getSize() { return 1; }
-
- public String getSignature() {
- if (signature == null)
- signature = "[" + elementType.getSignature();
- return signature;
- }
-
- public String getDescriptor() {
- return getSignature();
- }
-
- public int getTag() { return T_ARRAY; }
-
- public JType getElementType() { return elementType; }
-
- public String toString() {
- return elementType.toString() + "[]";
- }
-
- public boolean isArrayType() { return true; }
-
- public boolean isCompatibleWith(JType other) {
- if (other instanceof JObjectType)
- return (JObjectType)other == JObjectType.JAVA_LANG_OBJECT;
- else if (other instanceof JArrayType)
- return elementType.isCompatibleWith(((JArrayType)other).elementType);
- else return other == JType.REFERENCE;
- }
-
- public static JArrayType BOOLEAN = new JArrayType(JType.BOOLEAN);
- public static JArrayType BYTE = new JArrayType(JType.BYTE);
- public static JArrayType CHAR = new JArrayType(JType.CHAR);
- public static JArrayType SHORT = new JArrayType(JType.SHORT);
- public static JArrayType INT = new JArrayType(JType.INT);
- public static JArrayType FLOAT = new JArrayType(JType.FLOAT);
- public static JArrayType LONG = new JArrayType(JType.LONG);
- public static JArrayType DOUBLE = new JArrayType(JType.DOUBLE);
- public static JArrayType REFERENCE = new JArrayType(JType.REFERENCE);
-}
diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JAttribute.java b/src/fjbg/ch/epfl/lamp/fjbg/JAttribute.java
deleted file mode 100644
index 6a825beb18..0000000000
--- a/src/fjbg/ch/epfl/lamp/fjbg/JAttribute.java
+++ /dev/null
@@ -1,84 +0,0 @@
-/* FJBG -- Fast Java Bytecode Generator
- * Copyright 2002-2013 LAMP/EPFL
- * @author Michel Schinz
- */
-
-package ch.epfl.lamp.fjbg;
-
-import java.io.DataInputStream;
-import java.io.DataOutputStream;
-import java.io.IOException;
-import java.util.*;
-
-/**
- * Abstract superclass for attributes which can be attached to various
- * parts of a class file.
- *
- * Attributes are used for classes (section 4.2), fields (section 4.6),
- * methods (section 4.7) and the Code attribute (section 4.8.3).
- * See sections 4.2 and later of the JVM specification.
- *
- * @author Michel Schinz
- * @version 1.0
- */
-
-public abstract class JAttribute {
- protected final int nameIdx;
-
- static public void writeTo(List/*<JAttribute>*/ attrs, DataOutputStream stream)
- throws IOException {
- stream.writeShort(attrs.size());
- Iterator attrsIt = attrs.iterator();
- while (attrsIt.hasNext()) {
- JAttribute attr = (JAttribute)attrsIt.next();
- attr.writeTo(stream);
- }
- }
-
- static public List/*<JAttribute>*/ readFrom(FJBGContext context,
- JClass clazz,
- Object owner,
- DataInputStream stream)
- throws IOException {
- JAttributeFactory factory = context.getJAttributeFactory();
- int count = stream.readShort();
- ArrayList list = new ArrayList(count);
- for (int i = 0; i < count; ++i)
- list.add(factory.newInstance(clazz, owner, stream));
- return list;
- }
-
- public JAttribute(FJBGContext context, JClass clazz) {
- this.nameIdx = clazz.getConstantPool().addUtf8(getName());
- }
-
- public JAttribute(FJBGContext context, JClass clazz, String name) {
- this.nameIdx = clazz.getConstantPool().addUtf8(name);
- }
-
- abstract public String getName();
-
- /**
- * Write the attribute to a stream.
- */
- public void writeTo(DataOutputStream stream) throws IOException {
- int contentsSize = getSize();
-
- stream.writeShort(nameIdx);
- stream.writeInt(contentsSize);
- int streamSizeBefore = stream.size();
- writeContentsTo(stream);
- int streamSizeDiff = stream.size() - streamSizeBefore;
-
- assert contentsSize == streamSizeDiff
- : "invalid size for attribute " + getName()
- + " given: " + contentsSize
- + " actual: " + streamSizeDiff;
- }
-
- // Note: it is not legal to add data to the constant pool during
- // the execution of any of the following two methods.
- protected abstract int getSize();
- protected abstract void writeContentsTo(DataOutputStream stream)
- throws IOException;
-}
diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JAttributeFactory.java b/src/fjbg/ch/epfl/lamp/fjbg/JAttributeFactory.java
deleted file mode 100644
index 33cdce2760..0000000000
--- a/src/fjbg/ch/epfl/lamp/fjbg/JAttributeFactory.java
+++ /dev/null
@@ -1,101 +0,0 @@
-/* FJBG -- Fast Java Bytecode Generator
- * Copyright 2002-2013 LAMP/EPFL
- * @author Michel Schinz
- */
-
-package ch.epfl.lamp.fjbg;
-
-import java.io.DataInputStream;
-import java.io.IOException;
-import java.lang.reflect.Constructor;
-import java.lang.reflect.InvocationTargetException;
-import java.util.HashMap;
-
-/**
- * Extensible factory to build subclasses of JAttribute based on an
- * attribute name.
- *
- * @author Michel Schinz
- * @version 1.0
- */
-
-public class JAttributeFactory {
- protected FJBGContext context;
- protected HashMap/*<String, Constructor>*/ constructors = new HashMap();
-
- protected final static Class[] CONSTRUCTOR_ARGS = new Class[] {
- FJBGContext.class,
- JClass.class,
- Object.class,
- String.class,
- int.class,
- DataInputStream.class
- };
-
- protected final static Constructor defaultDefaultConstructor;
- static {
- try {
- defaultDefaultConstructor =
- JOtherAttribute.class.getConstructor(CONSTRUCTOR_ARGS);
- } catch (NoSuchMethodException e) {
- throw new RuntimeException(e);
- }
- }
-
- protected final Constructor defaultConstructor;
-
- public JAttributeFactory(FJBGContext context,
- Constructor defaultConstructor) {
- this.context = context;
- this.defaultConstructor = defaultConstructor;
- registerClass("Code", JCodeAttribute.class);
- registerClass("ConstantValue", JConstantValueAttribute.class);
- registerClass("EnclosingMethod", JEnclosingMethodAttribute.class);
- registerClass("Exceptions", JExceptionsAttribute.class);
- registerClass("InnerClasses", JInnerClassesAttribute.class);
- registerClass("LineNumberTable", JLineNumberTableAttribute.class);
- registerClass("LocalVariableTable", JLocalVariableTableAttribute.class);
- registerClass("SourceFile", JSourceFileAttribute.class);
- registerClass("StackMapTable", JStackMapTableAttribute.class);
- }
-
- public JAttributeFactory(FJBGContext context) {
- this(context, defaultDefaultConstructor);
- }
-
- public void registerClass(String attributeName,
- Class clazz) {
- if (! JAttribute.class.isAssignableFrom(clazz))
- throw new IllegalArgumentException("Not a subclass of JAttribute: "
- + clazz);
-
- try {
- Constructor constr = clazz.getConstructor(CONSTRUCTOR_ARGS);
- constructors.put(attributeName, constr);
- } catch (NoSuchMethodException e) {
- throw new IllegalArgumentException("No appropriate constructor for "
- + clazz);
- }
- }
-
- public JAttribute newInstance(JClass clazz,
- Object owner,
- DataInputStream stream)
- throws IOException {
- String name = clazz.getConstantPool().lookupUtf8(stream.readShort());
- Integer size = new Integer(stream.readInt());
- Constructor constr = (Constructor)constructors.get(name);
- if (constr == null) constr = defaultConstructor;
-
- Object[] args = new Object[] { context, clazz, owner, name, size, stream };
- try {
- return (JAttribute)constr.newInstance(args);
- } catch (InstantiationException e) {
- throw new RuntimeException(e);
- } catch (IllegalAccessException e) {
- throw new RuntimeException(e);
- } catch (InvocationTargetException e) {
- throw new RuntimeException(e);
- }
- }
-}
diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JClass.java b/src/fjbg/ch/epfl/lamp/fjbg/JClass.java
deleted file mode 100644
index bb1538ec23..0000000000
--- a/src/fjbg/ch/epfl/lamp/fjbg/JClass.java
+++ /dev/null
@@ -1,420 +0,0 @@
-/* FJBG -- Fast Java Bytecode Generator
- * Copyright 2002-2013 LAMP/EPFL
- * @author Michel Schinz
- */
-
-package ch.epfl.lamp.fjbg;
-
-import java.util.*;
-import java.io.*;
-
-/**
- * Representation of a Java class.
- *
- * @author Michel Schinz, Stephane Micheloud
- * @version 1.1
- */
-public class JClass extends JMember {
-
- /** Magic number for Java class files. */
- public final static int MAGIC_NUMBER = 0xCAFEBABE;
-
- protected final JAttributeFactory attributeFactory;
-
- protected final String superclassName;
- protected final String[] interfaceNames;
- protected final String sourceFileName;
- protected final JConstantPool pool;
-
- public final static String[] NO_INTERFACES = new String[0];
-
- protected final LinkedList/*<JMethod>*/ methods = new LinkedList();
- protected final LinkedList/*<JField>*/ fields = new LinkedList();
-
- protected JInnerClassesAttribute innerClasses;
-
- protected int major;
- protected int minor;
-
- /**
- * Creates a new class with its access flags, name, superclass name,
-   * interface names and source file name initialized to given values.
- * The constructor also initializes the pool and adds a sourceFileName
- * attribute to the class.
- * @param accessFlags the int representing the access flags of the class.
- * @param name the string representing the name of the class.
- * @param superclassName the string representing the name of the class'
- * superclass.
- * @param interfaceNames the list of strings representing the names of the
- * interfaces implemented by the class.
- * @param sourceFileName name of the file from which the class was compiled.
- */
- protected JClass(FJBGContext context,
- int accessFlags,
- String name,
- String superclassName,
- String[] interfaceNames,
- String sourceFileName) {
- super(context, accessFlags, name);
- this.attributeFactory = context.getJAttributeFactory();
-
- this.major = context.MAJOR_VERSION;
- this.minor = context.MINOR_VERSION;
-
- this.superclassName = superclassName;
- this.interfaceNames = interfaceNames;
- this.sourceFileName = sourceFileName;
- this.pool = context.JConstantPool();
- if (sourceFileName != null)
- addAttribute(context.JSourceFileAttribute(this, sourceFileName));
- }
-
- protected JClass(FJBGContext context, DataInputStream stream)
- throws IOException {
- super(context);
- this.attributeFactory = context.getJAttributeFactory();
-
- int magic = stream.readInt();
- if (magic != MAGIC_NUMBER)
- throw new IllegalArgumentException("invalid magic number: "+magic);
-
- minor = stream.readShort();
- major = stream.readShort();
- pool = context.JConstantPool(stream);
- accessFlags = stream.readShort();
-
- // This class, super class and interfaces
- name = pool.lookupClass(stream.readShort());
- superclassName = pool.lookupClass(stream.readShort());
- interfaceNames = new String[stream.readShort()];
- for (int i = 0; i < interfaceNames.length; ++i)
- interfaceNames[i] = pool.lookupClass(stream.readShort());
-
- // Fields, methods and attributes
- int fieldsCount = stream.readShort();
- for (int i = 0; i < fieldsCount; ++i)
- addField(context.JField(this, stream));
-
- int methodsCount = stream.readShort();
- for (int i = 0; i < methodsCount; ++i)
- addMethod(context.JMethod(this, stream));
-
- String fileName = null;
- int attributesCount = stream.readShort();
- for (int i = 0; i < attributesCount; ++i) {
- JAttribute attr = attributeFactory.newInstance(this, this, stream);
- if (attr instanceof JSourceFileAttribute)
- fileName = ((JSourceFileAttribute)attr).getFileName();
- else if (attr instanceof JInnerClassesAttribute)
- innerClasses = (JInnerClassesAttribute)attr;
- addAttribute(attr);
- }
- sourceFileName = fileName;
- }
-
- /**
- * Gets the name of the class' superclass.
- * @return The string representing the name of the class' superclass.
- */
- public String getSuperclassName() { return superclassName; }
-
- /**
- * Gets the names of the interfaces implemented by the class.
- * @return The array containing the string representations of the
- * names of the interfaces implemented by the class.
- */
- public String[] getInterfaceNames() { return interfaceNames; }
-
- /**
- * Gets the source file name of this class.
- * @return The string representing the source file name of this class.
- */
- public String getSourceFileName() { return sourceFileName; }
-
- /**
- * Gets the type of the objects that are instances of the class.
- * @return The type of the instances of the class.
- */
- public JType getType() { return new JObjectType(name); }
-
- public JClass getJClass() { return this; }
-
- public boolean isPublic() {
- return (accessFlags & JAccessFlags.ACC_PUBLIC) != 0;
- }
-
- public boolean isPrivate() {
- return (accessFlags & JAccessFlags.ACC_PRIVATE) != 0;
- }
-
- public boolean isProtected() {
- return (accessFlags & JAccessFlags.ACC_PROTECTED) != 0;
- }
-
- public boolean isStatic() {
- return (accessFlags & JAccessFlags.ACC_STATIC) != 0;
- }
-
- public boolean isFinal() {
- return (accessFlags & JAccessFlags.ACC_FINAL) != 0;
- }
-
- public boolean isAbstract() {
- return (accessFlags & JAccessFlags.ACC_ABSTRACT) != 0;
- }
-
- /**
-     * Sets the version number of the class.
- * @param major The int representing the major part of the version number
- * of the class.
- * @param minor The int representing the minor part of the version number
- * of the class.
- */
- public void setVersion(int major, int minor) {
- assert !frozen;
- this.major = major;
- this.minor = minor;
- }
-
- /**
- * Gets the major part of the number describing the version of the class.
- * @return The int representing the major part of the version number of
- * the class.
- */
- public int getMajorVersion() { return major; }
-
- /**
- * Gets the minor part of the number describing the version of the class.
- * @return The int representing the minor part of the version number of
- * the class.
- */
- public int getMinorVersion() { return minor; }
-
- /**
- * Gets the constant pool of the class.
- * @return The constant pool of the class.
- */
- public JConstantPool getConstantPool() { return pool; }
-
- public JInnerClassesAttribute getInnerClasses() {
- if (innerClasses == null) {
- innerClasses = new JInnerClassesAttribute(context, this);
- addAttribute(innerClasses);
- }
- return innerClasses;
- }
-
- /**
- * Decides if the class is an interface.
- * @return The boolean representing if the class is an interface or not.
- */
- public boolean isInterface() {
- return (accessFlags & JAccessFlags.ACC_INTERFACE) != 0;
- }
-
- public void addField(JField field) {
- assert !frozen;
- fields.add(field);
- }
-
- /**
- * Create and add a new field to the class.
- */
- public JField addNewField(int accessFlags, String name, JType type) {
- assert !frozen;
- JField f = context.JField(this, accessFlags, name, type);
- addField(f);
- return f;
- }
-
- protected void addMethod(JMethod method) {
- assert !frozen;
- methods.add(method);
- }
-
- /**
- * Create and add a new method to the class.
- */
- public JMethod addNewMethod(int accessFlags,
- String name,
- JType returnType,
- JType[] argTypes,
- String[] argNames) {
- assert !frozen;
- JMethod m = context.JMethod(this,
- accessFlags,
- name,
- returnType,
- argTypes,
- argNames);
- addMethod(m);
- return m;
- }
-
- /**
- * Remove a previously-added method. This makes no attempt at
- * minimising the constant pool by removing all constants which
- * were used only by this method.
- */
- public void removeMethod(JMethod m) {
- assert !frozen;
- methods.remove(m);
- }
-
- public JField[] getFields() {
- return (JField[])fields.toArray(new JField[fields.size()]);
- }
-
- public JMethod[] getMethods() {
- return (JMethod[])methods.toArray(new JMethod[methods.size()]);
- }
-
- /**
- * Freeze the contents of this class so that it can be written to
- * a file.
- */
- public void freeze() {
- assert !frozen;
- frozen = true;
- }
-
- /**
- * Writes the contents of the class to a file referenced by its name.
- * @param fileName The name of the file in which the class must be written.
- */
- public void writeTo(String fileName) throws IOException {
- writeTo(new File(fileName));
- }
-
- /**
- * Writes the contents of the class to a file.
- * @param file The file in which the class must be written.
- */
- public void writeTo(File file) throws IOException {
- File parent = file.getParentFile();
- if (parent != null && !parent.isDirectory())
- if (!parent.mkdirs())
- throw new IOException("cannot create directory " + parent);
-
- FileOutputStream fStream = new FileOutputStream(file);
- BufferedOutputStream bStream = new BufferedOutputStream(fStream);
- DataOutputStream dStream = new DataOutputStream(bStream);
- writeTo(dStream);
- dStream.close();
- bStream.close();
- fStream.close();
- }
-
- /**
- * Writes the contents of the class to a data stream.
- * @param stream The data stream in which the class must be written.
- */
- public void writeTo(DataOutputStream stream) throws IOException {
- if (!frozen) freeze();
-
- int thisClassIdx = pool.addClass(name);
- int superClassIdx = pool.addClass(superclassName);
- int[] interfacesIdx = new int[interfaceNames.length];
-
- for (int i = 0; i < interfaceNames.length; ++i)
- interfacesIdx[i] = pool.addClass(interfaceNames[i]);
-
- pool.freeze();
-
- // Magic number.
- stream.writeInt(MAGIC_NUMBER);
- // Version
- stream.writeShort(minor);
- stream.writeShort(major);
- // Constant pool
- pool.writeTo(stream);
- // Access flags
- stream.writeShort(accessFlags);
-
- // This class, super class and interfaces
- stream.writeShort(thisClassIdx);
- stream.writeShort(superClassIdx);
- stream.writeShort(interfacesIdx.length);
- for (int i = 0; i < interfacesIdx.length; ++i)
- stream.writeShort(interfacesIdx[i]);
-
- // Fields and methods
- stream.writeShort(fields.size());
- Iterator fieldsIt = fields.iterator();
- while (fieldsIt.hasNext())
- ((JField)fieldsIt.next()).writeTo(stream);
-
- stream.writeShort(methods.size());
- Iterator methodsIt = methods.iterator();
- while (methodsIt.hasNext())
- ((JMethod)methodsIt.next()).writeTo(stream);
-
- // Attributes
- JAttribute.writeTo(attributes, stream);
- }
-
- // Follows javap output format for ClassFile.
- /*@Override*/ public String toString() {
- StringBuffer buf = new StringBuffer();
- if (sourceFileName != null) {
- buf.append("Compiled from \"");
- buf.append(sourceFileName);
- buf.append("\"\n");
- }
- buf.append(getMemberName());
- buf.append(toExternalName(getName()));
- if (!isInterface()) {
- buf.append(" extends ");
- buf.append(toExternalName(getSuperclassName()));
- }
- if (interfaceNames.length > 0) {
- if (isInterface()) buf.append(" extends ");
- else buf.append(" implements ");
- for (int i = 0; i < interfaceNames.length; ++i) {
- if (i > 0) buf.append(",");
- buf.append(toExternalName(interfaceNames[i]));
- }
- }
- buf.append("\n");
- Iterator attrsIt = attributes.iterator();
- while (attrsIt.hasNext()) {
- JAttribute attr = (JAttribute)attrsIt.next();
- buf.append(attr);
- }
- buf.append(" minor version: ");
- buf.append(minor);
- buf.append("\n major version: ");
- buf.append(major);
- buf.append("\n");
- buf.append(pool);
- buf.append("\n{\n");
- JField[] jfields = getFields();
- for (int i = 0; i < jfields.length; ++i) {
- if (i > 0) buf.append("\n");
- buf.append(jfields[i]);
- }
- buf.append("\n");
- JMethod[] jmethods = getMethods();
- for (int i = 0; i < jmethods.length; ++i) {
- if (i > 0) buf.append("\n");
- buf.append(jmethods[i]);
- }
- buf.append("\n}\n");
- return buf.toString();
- }
-
- private String getMemberName() {
- StringBuffer buf = new StringBuffer();
- if (isPublic()) buf.append("public ");
- else if (isProtected()) buf.append("protected ");
- else if (isPrivate()) buf.append("private ");
- if (isInterface())
- buf.append("interface ");
- else {
- if (isAbstract()) buf.append("abstract ");
- else if (isFinal()) buf.append("final ");
- buf.append("class ");
- }
- return buf.toString();
- }
-}
diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JCode.java b/src/fjbg/ch/epfl/lamp/fjbg/JCode.java
deleted file mode 100644
index ab6934ab30..0000000000
--- a/src/fjbg/ch/epfl/lamp/fjbg/JCode.java
+++ /dev/null
@@ -1,1308 +0,0 @@
-/* FJBG -- Fast Java Bytecode Generator
- * Copyright 2002-2013 LAMP/EPFL
- * @author Michel Schinz
- */
-
-package ch.epfl.lamp.fjbg;
-
-import java.io.DataInputStream;
-import java.io.DataOutputStream;
-import java.io.IOException;
-import java.util.*;
-
-import ch.epfl.lamp.util.ByteArray;
-
-/**
- * List of instructions, to which Java byte-code instructions can be added.
- *
- * @author Michel Schinz, Thomas Friedli
- * @version 1.0
- */
-
-public class JCode {
- protected boolean frozen = false;
-
- public static int MAX_CODE_SIZE = 65535;
-
- protected final FJBGContext context;
- protected final JMethod owner;
-
- protected final ByteArray codeArray;
-
- protected final LinkedList/*<ExceptionHandler>*/ exceptionHandlers =
- new LinkedList();
-
- protected final JConstantPool pool;
-
- protected final ArrayList/*<OffsetToPatch>*/ offsetToPatch =
- new ArrayList();
-
- protected static int UNKNOWN_STACK_SIZE = Integer.MIN_VALUE;
- protected int maxStackSize = UNKNOWN_STACK_SIZE;
- protected int[] stackProduction = null;
- protected int[] stackSizes;
-
- protected JCode(FJBGContext context, JClass clazz, JMethod owner) {
- this.context = context;
- this.pool = clazz.getConstantPool();
- this.owner = owner;
- this.codeArray = new ByteArray();
- }
-
- protected JCode(FJBGContext context,
- JClass clazz,
- JMethod owner,
- DataInputStream stream)
- throws IOException {
- this.context = context;
- this.pool = clazz.getConstantPool();
- this.owner = owner;
- owner.setCode(this);
- int size = stream.readInt();
- if (size > MAX_CODE_SIZE) // section 4.10
- throw new Error("code size must be less than " + MAX_CODE_SIZE + ": " + size);
- this.codeArray = new ByteArray(stream, size);
- }
-
- /**
- * Gets the program counter, which is defined as the address of the
- * next instruction.
- * @return The int representing the value of the program counter
- */
- public int getPC() {
- return codeArray.getSize();
- }
-
- /**
- * Gets the size of the code
- * @return The number of bytes of the code
- */
- public int getSize() {
- return codeArray.getSize();
- }
-
- /**
- * Gets the method to which the code belongs
- * @return The method to which the code belongs
- */
- public JMethod getOwner() {
- return owner;
- }
-
- // Stack size
- public int getMaxStackSize() {
- if (maxStackSize == UNKNOWN_STACK_SIZE)
- maxStackSize = computeMaxStackSize();
- return maxStackSize;
- }
-
- // Freezing
- //////////////////////////////////////////////////////////////////////
-
- public static class CodeSizeTooBigException extends OffsetTooBigException {
- public int codeSize;
-
- public CodeSizeTooBigException(int size) {
- codeSize = size;
- }
- }
-
- public void freeze() throws OffsetTooBigException {
- assert !frozen;
-
- if (getSize() > MAX_CODE_SIZE) throw new CodeSizeTooBigException(getSize());
-
- patchAllOffset();
- codeArray.freeze();
- frozen = true;
- }
-
- // Attributes
- //////////////////////////////////////////////////////////////////////
-
- protected final LinkedList/*<JAttribute>*/ attributes = new LinkedList();
-
- public void addAttribute(JAttribute attr) {
- attributes.add(attr);
- }
-
- public List/*<JAttribute>*/ getAttributes() {
- return attributes;
- }
-
- // Emitting code
- //////////////////////////////////////////////////////////////////////
-
- public void emit(JOpcode opcode) {
- setStackProduction(getPC(), opcode);
- codeArray.addU1(opcode.code);
- }
-
- public void emitNOP() { emit(JOpcode.NOP); }
-
- // Constant loading.
- public void emitACONST_NULL() { emit(JOpcode.ACONST_NULL); }
- public void emitICONST_M1() { emit(JOpcode.ICONST_M1); }
- public void emitICONST_0() { emit(JOpcode.ICONST_0); }
- public void emitICONST_1() { emit(JOpcode.ICONST_1); }
- public void emitICONST_2() { emit(JOpcode.ICONST_2); }
- public void emitICONST_3() { emit(JOpcode.ICONST_3); }
- public void emitICONST_4() { emit(JOpcode.ICONST_4); }
- public void emitICONST_5() { emit(JOpcode.ICONST_5); }
- public void emitLCONST_0() { emit(JOpcode.LCONST_0); }
- public void emitLCONST_1() { emit(JOpcode.LCONST_1); }
- public void emitFCONST_0() { emit(JOpcode.FCONST_0); }
- public void emitFCONST_1() { emit(JOpcode.FCONST_1); }
- public void emitFCONST_2() { emit(JOpcode.FCONST_2); }
- public void emitDCONST_0() { emit(JOpcode.DCONST_0); }
- public void emitDCONST_1() { emit(JOpcode.DCONST_1); }
-
- public void emitBIPUSH(int b) { emitU1(JOpcode.BIPUSH, b); }
- public void emitSIPUSH(int s) { emitU2(JOpcode.SIPUSH, s); }
- public void emitLDC(int value) {
- emitU1(JOpcode.LDC, pool.addInteger(value));
- }
- public void emitLDC(float value) {
- emitU1(JOpcode.LDC, pool.addFloat(value));
- }
- public void emitLDC(String value) {
- emitU1(JOpcode.LDC, pool.addString(value));
- }
- public void emitLDC_W(int value) {
- emitU1(JOpcode.LDC_W, pool.addInteger(value));
- }
- public void emitLDC_W(float value) {
- emitU1(JOpcode.LDC_W, pool.addFloat(value));
- }
- public void emitLDC_W(String value) {
- emitU1(JOpcode.LDC_W, pool.addString(value));
- }
- public void emitLDC2_W(long value) {
- emitU2(JOpcode.LDC2_W, pool.addLong(value));
- }
- public void emitLDC2_W(double value) {
- emitU2(JOpcode.LDC2_W, pool.addDouble(value));
- }
-
- // Loading variables.
- public void emitILOAD(int index) { emitU1(JOpcode.ILOAD, index); }
- public void emitLLOAD(int index) { emitU1(JOpcode.LLOAD, index); }
- public void emitFLOAD(int index) { emitU1(JOpcode.FLOAD, index); }
- public void emitDLOAD(int index) { emitU1(JOpcode.DLOAD, index); }
- public void emitALOAD(int index) { emitU1(JOpcode.ALOAD, index); }
-
- public void emitILOAD_0() { emit(JOpcode.ILOAD_0); }
- public void emitILOAD_1() { emit(JOpcode.ILOAD_1); }
- public void emitILOAD_2() { emit(JOpcode.ILOAD_2); }
- public void emitILOAD_3() { emit(JOpcode.ILOAD_3); }
- public void emitLLOAD_0() { emit(JOpcode.LLOAD_0); }
- public void emitLLOAD_1() { emit(JOpcode.LLOAD_1); }
- public void emitLLOAD_2() { emit(JOpcode.LLOAD_2); }
- public void emitLLOAD_3() { emit(JOpcode.LLOAD_3); }
- public void emitFLOAD_0() { emit(JOpcode.FLOAD_0); }
- public void emitFLOAD_1() { emit(JOpcode.FLOAD_1); }
- public void emitFLOAD_2() { emit(JOpcode.FLOAD_2); }
- public void emitFLOAD_3() { emit(JOpcode.FLOAD_3); }
- public void emitDLOAD_0() { emit(JOpcode.DLOAD_0); }
- public void emitDLOAD_1() { emit(JOpcode.DLOAD_1); }
- public void emitDLOAD_2() { emit(JOpcode.DLOAD_2); }
- public void emitDLOAD_3() { emit(JOpcode.DLOAD_3); }
- public void emitALOAD_0() { emit(JOpcode.ALOAD_0); }
- public void emitALOAD_1() { emit(JOpcode.ALOAD_1); }
- public void emitALOAD_2() { emit(JOpcode.ALOAD_2); }
- public void emitALOAD_3() { emit(JOpcode.ALOAD_3); }
-
- public void emitIALOAD() { emit(JOpcode.IALOAD); }
- public void emitLALOAD() { emit(JOpcode.LALOAD); }
- public void emitFALOAD() { emit(JOpcode.FALOAD); }
- public void emitDALOAD() { emit(JOpcode.DALOAD); }
- public void emitAALOAD() { emit(JOpcode.AALOAD); }
- public void emitBALOAD() { emit(JOpcode.BALOAD); }
- public void emitCALOAD() { emit(JOpcode.CALOAD); }
- public void emitSALOAD() { emit(JOpcode.SALOAD); }
-
- // Storing variables.
- public void emitISTORE(int index) { emitU1(JOpcode.ISTORE, index); }
- public void emitLSTORE(int index) { emitU1(JOpcode.LSTORE, index); }
- public void emitFSTORE(int index) { emitU1(JOpcode.FSTORE, index); }
- public void emitDSTORE(int index) { emitU1(JOpcode.DSTORE, index); }
- public void emitASTORE(int index) { emitU1(JOpcode.ASTORE, index); }
-
- public void emitISTORE_0() { emit(JOpcode.ISTORE_0); }
- public void emitISTORE_1() { emit(JOpcode.ISTORE_1); }
- public void emitISTORE_2() { emit(JOpcode.ISTORE_2); }
- public void emitISTORE_3() { emit(JOpcode.ISTORE_3); }
- public void emitLSTORE_0() { emit(JOpcode.LSTORE_0); }
- public void emitLSTORE_1() { emit(JOpcode.LSTORE_1); }
- public void emitLSTORE_2() { emit(JOpcode.LSTORE_2); }
- public void emitLSTORE_3() { emit(JOpcode.LSTORE_3); }
- public void emitFSTORE_0() { emit(JOpcode.FSTORE_0); }
- public void emitFSTORE_1() { emit(JOpcode.FSTORE_1); }
- public void emitFSTORE_2() { emit(JOpcode.FSTORE_2); }
- public void emitFSTORE_3() { emit(JOpcode.FSTORE_3); }
- public void emitDSTORE_0() { emit(JOpcode.DSTORE_0); }
- public void emitDSTORE_1() { emit(JOpcode.DSTORE_1); }
- public void emitDSTORE_2() { emit(JOpcode.DSTORE_2); }
- public void emitDSTORE_3() { emit(JOpcode.DSTORE_3); }
- public void emitASTORE_0() { emit(JOpcode.ASTORE_0); }
- public void emitASTORE_1() { emit(JOpcode.ASTORE_1); }
- public void emitASTORE_2() { emit(JOpcode.ASTORE_2); }
- public void emitASTORE_3() { emit(JOpcode.ASTORE_3); }
-
- public void emitIASTORE() { emit(JOpcode.IASTORE); }
- public void emitLASTORE() { emit(JOpcode.LASTORE); }
- public void emitFASTORE() { emit(JOpcode.FASTORE); }
- public void emitDASTORE() { emit(JOpcode.DASTORE); }
- public void emitAASTORE() { emit(JOpcode.AASTORE); }
- public void emitBASTORE() { emit(JOpcode.BASTORE); }
- public void emitCASTORE() { emit(JOpcode.CASTORE); }
- public void emitSASTORE() { emit(JOpcode.SASTORE); }
-
- // Stack manipulation.
- public void emitPOP() { emit(JOpcode.POP); }
- public void emitPOP2() { emit(JOpcode.POP2); }
- public void emitDUP() { emit(JOpcode.DUP); }
- public void emitDUP_X1() { emit(JOpcode.DUP_X1); }
- public void emitDUP_X2() { emit(JOpcode.DUP_X2); }
- public void emitDUP2() { emit(JOpcode.DUP2); }
- public void emitDUP2_X1() { emit(JOpcode.DUP2_X1); }
- public void emitDUP2_X2() { emit(JOpcode.DUP2_X2); }
- public void emitSWAP() { emit(JOpcode.SWAP); }
-
-    // Arithmetic and logic operations.
- public void emitIADD() { emit(JOpcode.IADD); }
- public void emitLADD() { emit(JOpcode.LADD); }
- public void emitFADD() { emit(JOpcode.FADD); }
- public void emitDADD() { emit(JOpcode.DADD); }
-
- public void emitISUB() { emit(JOpcode.ISUB); }
- public void emitLSUB() { emit(JOpcode.LSUB); }
- public void emitFSUB() { emit(JOpcode.FSUB); }
- public void emitDSUB() { emit(JOpcode.DSUB); }
-
- public void emitIMUL() { emit(JOpcode.IMUL); }
- public void emitLMUL() { emit(JOpcode.LMUL); }
- public void emitFMUL() { emit(JOpcode.FMUL); }
- public void emitDMUL() { emit(JOpcode.DMUL); }
-
- public void emitIDIV() { emit(JOpcode.IDIV); }
- public void emitLDIV() { emit(JOpcode.LDIV); }
- public void emitFDIV() { emit(JOpcode.FDIV); }
- public void emitDDIV() { emit(JOpcode.DDIV); }
-
- public void emitIREM() { emit(JOpcode.IREM); }
- public void emitLREM() { emit(JOpcode.LREM); }
- public void emitFREM() { emit(JOpcode.FREM); }
- public void emitDREM() { emit(JOpcode.DREM); }
-
- public void emitINEG() { emit(JOpcode.INEG); }
- public void emitLNEG() { emit(JOpcode.LNEG); }
- public void emitFNEG() { emit(JOpcode.FNEG); }
- public void emitDNEG() { emit(JOpcode.DNEG); }
-
- public void emitISHL() { emit(JOpcode.ISHL); }
- public void emitLSHL() { emit(JOpcode.LSHL); }
-
- public void emitISHR() { emit(JOpcode.ISHR); }
- public void emitLSHR() { emit(JOpcode.LSHR); }
-
- public void emitIUSHR() { emit(JOpcode.IUSHR); }
- public void emitLUSHR() { emit(JOpcode.LUSHR); }
-
- public void emitIAND() { emit(JOpcode.IAND); }
- public void emitLAND() { emit(JOpcode.LAND); }
-
- public void emitIOR() { emit(JOpcode.IOR); }
- public void emitLOR() { emit(JOpcode.LOR); }
-
- public void emitIXOR() { emit(JOpcode.IXOR); }
- public void emitLXOR() { emit(JOpcode.LXOR); }
-
- public void emitIINC(int index, int increment) {
- emitU1U1(JOpcode.IINC, index, increment);
- }
-
- // (Numeric) type conversions.
- public void emitI2L() { emit(JOpcode.I2L); }
- public void emitI2F() { emit(JOpcode.I2F); }
- public void emitI2D() { emit(JOpcode.I2D); }
- public void emitL2I() { emit(JOpcode.L2I); }
- public void emitL2F() { emit(JOpcode.L2F); }
- public void emitL2D() { emit(JOpcode.L2D); }
- public void emitF2I() { emit(JOpcode.F2I); }
- public void emitF2L() { emit(JOpcode.F2L); }
- public void emitF2D() { emit(JOpcode.F2D); }
- public void emitD2I() { emit(JOpcode.D2I); }
- public void emitD2L() { emit(JOpcode.D2L); }
- public void emitD2F() { emit(JOpcode.D2F); }
- public void emitI2B() { emit(JOpcode.I2B); }
- public void emitI2C() { emit(JOpcode.I2C); }
- public void emitI2S() { emit(JOpcode.I2S); }
-
- // Comparisons and tests.
- public void emitLCMP() { emit(JOpcode.LCMP); }
- public void emitFCMPL() { emit(JOpcode.FCMPL); }
- public void emitFCMPG() { emit(JOpcode.FCMPG); }
- public void emitDCMPL() { emit(JOpcode.DCMPL); }
- public void emitDCMPG() { emit(JOpcode.DCMPG); }
-
- protected void emitGenericIF(JOpcode opcode, Label label)
- throws OffsetTooBigException {
- emitU2(opcode, label.getOffset16(getPC() + 1, getPC()));
- }
-
- public void emitIFEQ(Label label) throws OffsetTooBigException {
- emitGenericIF(JOpcode.IFEQ, label);
- }
- public void emitIFEQ(int targetPC) throws OffsetTooBigException {
- emitU2(JOpcode.IFEQ, targetPC - getPC());
- }
- public void emitIFEQ() {
- emitU2(JOpcode.IFEQ, 0);
- }
-
- public void emitIFNE(Label label) throws OffsetTooBigException {
- emitGenericIF(JOpcode.IFNE, label);
- }
- public void emitIFNE(int targetPC) throws OffsetTooBigException {
- emitU2(JOpcode.IFNE, targetPC - getPC());
- }
- public void emitIFNE() {
- emitU2(JOpcode.IFNE, 0);
- }
-
- public void emitIFLT(Label label) throws OffsetTooBigException {
- emitGenericIF(JOpcode.IFLT, label);
- }
- public void emitIFLT(int targetPC) throws OffsetTooBigException {
- emitU2(JOpcode.IFLT, targetPC - getPC());
- }
- public void emitIFLT() {
- emitU2(JOpcode.IFLT, 0);
- }
-
- public void emitIFGE(Label label) throws OffsetTooBigException {
- emitGenericIF(JOpcode.IFGE, label);
- }
- public void emitIFGE(int targetPC) throws OffsetTooBigException {
- emitU2(JOpcode.IFGE, targetPC - getPC());
- }
- public void emitIFGE() {
- emitU2(JOpcode.IFGE, 0);
- }
-
- public void emitIFGT(Label label) throws OffsetTooBigException {
- emitGenericIF(JOpcode.IFGT, label);
- }
- public void emitIFGT(int targetPC) throws OffsetTooBigException {
- emitU2(JOpcode.IFGT, targetPC - getPC());
- }
- public void emitIFGT() {
- emitU2(JOpcode.IFGT, 0);
- }
-
- public void emitIFLE(Label label) throws OffsetTooBigException {
- emitGenericIF(JOpcode.IFLE, label);
- }
- public void emitIFLE(int targetPC) throws OffsetTooBigException {
- emitU2(JOpcode.IFLE, targetPC - getPC());
- }
- public void emitIFLE() {
- emitU2(JOpcode.IFLE, 0);
- }
-
- public void emitIF_ICMPEQ(Label label) throws OffsetTooBigException {
- emitGenericIF(JOpcode.IF_ICMPEQ, label);
- }
- public void emitIF_ICMPEQ(int targetPC) throws OffsetTooBigException {
- emitU2(JOpcode.IF_ICMPEQ, targetPC - getPC());
- }
- public void emitIF_ICMPEQ() {
- emitU2(JOpcode.IF_ICMPEQ, 0);
- }
-
- public void emitIF_ICMPNE(Label label) throws OffsetTooBigException {
- emitGenericIF(JOpcode.IF_ICMPNE, label);
- }
- public void emitIF_ICMPNE(int targetPC) throws OffsetTooBigException {
- emitU2(JOpcode.IF_ICMPNE, targetPC - getPC());
- }
- public void emitIF_ICMPNE() {
- emitU2(JOpcode.IF_ICMPNE, 0);
- }
-
- public void emitIF_ICMPLT(Label label) throws OffsetTooBigException {
- emitGenericIF(JOpcode.IF_ICMPLT, label);
- }
- public void emitIF_ICMPLT(int targetPC) throws OffsetTooBigException {
- emitU2(JOpcode.IF_ICMPLT, targetPC - getPC());
- }
- public void emitIF_ICMPLT() {
- emitU2(JOpcode.IF_ICMPLT, 0);
- }
-
- public void emitIF_ICMPGE(Label label) throws OffsetTooBigException {
- emitGenericIF(JOpcode.IF_ICMPGE, label);
- }
- public void emitIF_ICMPGE(int targetPC) throws OffsetTooBigException {
- emitU2(JOpcode.IF_ICMPGE, targetPC - getPC());
- }
- public void emitIF_ICMPGE() {
- emitU2(JOpcode.IF_ICMPGE, 0);
- }
-
- public void emitIF_ICMPGT(Label label) throws OffsetTooBigException {
- emitGenericIF(JOpcode.IF_ICMPGT, label);
- }
- public void emitIF_ICMPGT(int targetPC) throws OffsetTooBigException {
- emitU2(JOpcode.IF_ICMPGT, targetPC - getPC());
- }
- public void emitIF_ICMPGT() {
- emitU2(JOpcode.IF_ICMPGT, 0);
- }
-
- public void emitIF_ICMPLE(Label label) throws OffsetTooBigException {
- emitGenericIF(JOpcode.IF_ICMPLE, label);
- }
- public void emitIF_ICMPLE(int targetPC) throws OffsetTooBigException {
- emitU2(JOpcode.IF_ICMPLE, targetPC - getPC());
- }
- public void emitIF_ICMPLE() {
- emitU2(JOpcode.IF_ICMPLE, 0);
- }
-
- public void emitIF_ACMPEQ(Label label) throws OffsetTooBigException {
- emitGenericIF(JOpcode.IF_ACMPEQ, label);
- }
- public void emitIF_ACMPEQ(int targetPC) throws OffsetTooBigException {
- emitU2(JOpcode.IF_ACMPEQ, targetPC - getPC());
- }
- public void emitIF_ACMPEQ() {
- emitU2(JOpcode.IF_ACMPEQ, 0);
- }
-
- public void emitIF_ACMPNE(Label label) throws OffsetTooBigException {
- emitGenericIF(JOpcode.IF_ACMPNE, label);
- }
- public void emitIF_ACMPNE(int targetPC) throws OffsetTooBigException {
- emitU2(JOpcode.IF_ACMPNE, targetPC - getPC());
- }
- public void emitIF_ACMPNE() {
- emitU2(JOpcode.IF_ACMPNE, 0);
- }
-
- public void emitIFNULL(Label label) throws OffsetTooBigException {
- emitGenericIF(JOpcode.IFNULL, label);
- }
- public void emitIFNULL(int targetPC) throws OffsetTooBigException {
- emitU2(JOpcode.IFNULL, targetPC - getPC());
- }
- public void emitIFNULL() {
- emitU2(JOpcode.IFNULL, 0);
- }
-
- public void emitIFNONNULL(Label label) throws OffsetTooBigException {
- emitGenericIF(JOpcode.IFNONNULL, label);
- }
- public void emitIFNONNULL(int targetPC) throws OffsetTooBigException {
- emitU2(JOpcode.IFNONNULL, targetPC - getPC());
- }
- public void emitIFNONNULL() {
- emitU2(JOpcode.IFNONNULL, 0);
- }
-
- public void emitGOTO(Label label) throws OffsetTooBigException {
- emitU2(JOpcode.GOTO, label.getOffset16(getPC() + 1, getPC()));
- }
- public void emitGOTO(int targetPC) throws OffsetTooBigException {
- int offset = targetPC - getPC();
- checkOffset16(offset);
- emitU2(JOpcode.GOTO, offset);
- }
- public void emitGOTO() {
- emitU2(JOpcode.GOTO, 0);
- }
-
- public void emitGOTO_W(Label label) {
- emitU4(JOpcode.GOTO_W, label.getOffset32(getPC() + 1, getPC()));
- }
- public void emitGOTO_W(int targetPC) {
- emitU4(JOpcode.GOTO_W, targetPC - getPC());
- }
- public void emitGOTO_W() {
- emitU4(JOpcode.GOTO_W, 0);
- }
-
- public void emitJSR(Label label) throws OffsetTooBigException {
- emitU2(JOpcode.JSR, label.getOffset16(getPC() + 1, getPC()));
- }
- public void emitJSR(int targetPC) {
- emitU2(JOpcode.JSR, targetPC - getPC());
- }
- public void emitJSR() {
- emitU2(JOpcode.JSR, 0);
- }
-
- public void emitJSR_W(Label label) {
- emitU4(JOpcode.JSR_W, label.getOffset32(getPC() + 1, getPC()));
- }
- public void emitJSR_W(int targetPC) {
- emitU4(JOpcode.JSR_W, targetPC - getPC());
- }
- public void emitJSR_W() {
- emitU4(JOpcode.JSR_W, 0);
- }
-
- /*
- public void emitRET(Label label) throws OffsetTooBigException {
- emitU2(JOpcode.RET, label.getOffset16(getPC() + 1, getPC()));
- }
- public void emitRET(int targetPC) {
- emitU1(JOpcode.RET, targetPC);
- }
- public void emitRET() {
- emitU1(JOpcode.RET, 0);
- }
- */
-
- public void emitRET(int index) {
- emitU1(JOpcode.RET, index);
- }
-
- public void emitRET(JLocalVariable var) {
- emitRET(var.getIndex());
- }
-
- public void emitTABLESWITCH(int[] keys,
- Label[] branches,
- Label defaultBranch) {
- assert keys.length == branches.length;
-
- int low = keys[0], high = keys[keys.length - 1];
- int instrPC = getPC();
-
- setStackProduction(instrPC, JOpcode.TABLESWITCH);
- codeArray.addU1(JOpcode.cTABLESWITCH);
- while (getPC() % 4 != 0) codeArray.addU1(0);
-
- codeArray.addU4(defaultBranch.getOffset32(getPC(), instrPC));
- codeArray.addU4(low);
- codeArray.addU4(high);
- for (int i = 0; i < branches.length; i++) {
- assert keys[i] == low + i;
- codeArray.addU4(branches[i].getOffset32(getPC(), instrPC));
- }
- }
-
- public void emitLOOKUPSWITCH(int[] keys,
- Label[] branches,
- Label defaultBranch) {
- assert keys.length == branches.length;
-
- int instrPC = getPC();
- setStackProduction(getPC(), JOpcode.LOOKUPSWITCH);
- codeArray.addU1(JOpcode.cLOOKUPSWITCH);
- while (getPC() % 4 != 0) codeArray.addU1(0);
-
- codeArray.addU4(defaultBranch.getOffset32(getPC(), instrPC));
- codeArray.addU4(branches.length);
- for (int i = 0; i < branches.length; i++) {
- codeArray.addU4(keys[i]);
- codeArray.addU4(branches[i].getOffset32(getPC(), instrPC));
- }
- }
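-
- // Illustrative usage sketch (a hypothetical helper, for illustration only):
- // how the two switch emitters above are typically driven. TABLESWITCH
- // requires consecutive keys (see the assertion above); the case labels may
- // be anchored later, and their 32-bit offsets are then filled in by
- // patchAllOffset().
- private static void exampleSwitch(JCode code) {
- int[] keys = { 0, 1, 2 }; // must form a dense range
- Label[] cases = code.newLabels(keys.length);
- Label dflt = code.newLabel();
- code.emitTABLESWITCH(keys, cases, dflt);
- // each cases[i].anchorToNext() is called just before emitting case i
- }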
-
- public void emitIRETURN() { emit(JOpcode.IRETURN); }
- public void emitLRETURN() { emit(JOpcode.LRETURN); }
- public void emitFRETURN() { emit(JOpcode.FRETURN); }
- public void emitDRETURN() { emit(JOpcode.DRETURN); }
- public void emitARETURN() { emit(JOpcode.ARETURN); }
- public void emitRETURN() { emit(JOpcode.RETURN); }
-
- // Field access
- public void emitGETSTATIC(String className, String name, JType type) {
- setStackProduction(getPC(), type.getSize());
- int index = pool.addFieldRef(className, name, type.getSignature());
- emitU2(JOpcode.GETSTATIC, index);
- }
- public void emitPUTSTATIC(String className, String name, JType type) {
- setStackProduction(getPC(), -type.getSize());
- int index = pool.addFieldRef(className, name, type.getSignature());
- emitU2(JOpcode.PUTSTATIC, index);
- }
- public void emitGETFIELD(String className, String name, JType type) {
- setStackProduction(getPC(), type.getSize() - 1);
- int index = pool.addFieldRef(className, name, type.getSignature());
- emitU2(JOpcode.GETFIELD, index);
- }
- public void emitPUTFIELD(String className, String name, JType type) {
- setStackProduction(getPC(), -(type.getSize() + 1));
- int index = pool.addFieldRef(className, name, type.getSignature());
- emitU2(JOpcode.PUTFIELD, index);
- }
-
- // Method invocation
- public void emitINVOKEVIRTUAL(String className,
- String name,
- JMethodType type) {
- setStackProduction(getPC(), type.getProducedStack() - 1);
- int index =
- pool.addClassMethodRef(className, name, type.getSignature());
- emitU2(JOpcode.INVOKEVIRTUAL, index);
- }
- public void emitINVOKESPECIAL(String className,
- String name,
- JMethodType type) {
- setStackProduction(getPC(), type.getProducedStack() - 1);
- int index =
- pool.addClassMethodRef(className, name, type.getSignature());
- emitU2(JOpcode.INVOKESPECIAL, index);
- }
- public void emitINVOKESTATIC(String className,
- String name,
- JMethodType type) {
- setStackProduction(getPC(), type.getProducedStack());
- int index =
- pool.addClassMethodRef(className, name, type.getSignature());
- emitU2(JOpcode.INVOKESTATIC, index);
- }
- public void emitINVOKEINTERFACE(String className,
- String name,
- JMethodType type) {
- setStackProduction(getPC(), type.getProducedStack() - 1);
- int index =
- pool.addInterfaceMethodRef(className, name, type.getSignature());
- emitU2U1U1(JOpcode.INVOKEINTERFACE, index, type.getArgsSize() + 1, 0);
- }
-
- // Object creation
- public void emitNEW(String className) {
- emitU2(JOpcode.NEW, pool.addClass(className));
- }
- public void emitNEWARRAY(JType elemType) {
- emitU1(JOpcode.NEWARRAY, elemType.getTag());
- }
- public void emitANEWARRAY(JReferenceType elemType) {
- emitU2(JOpcode.ANEWARRAY, pool.addDescriptor(elemType));
- }
- public void emitMULTIANEWARRAY(JReferenceType elemType, int dimensions) {
- setStackProduction(getPC(), -dimensions + 1);
- emitU2U1(JOpcode.MULTIANEWARRAY,
- pool.addDescriptor(elemType),
- dimensions);
- }
- public void emitARRAYLENGTH() { emit(JOpcode.ARRAYLENGTH); }
-
- // Exception throwing
- public void emitATHROW() { emit(JOpcode.ATHROW); }
-
- // Dynamic typing
- public void emitCHECKCAST(JReferenceType type) {
- emitU2(JOpcode.CHECKCAST, pool.addDescriptor(type));
- }
- public void emitINSTANCEOF(JReferenceType type) {
- emitU2(JOpcode.INSTANCEOF, pool.addDescriptor(type));
- }
-
- // Monitors
- public void emitMONITORENTER() { emit(JOpcode.MONITORENTER); }
- public void emitMONITOREXIT() { emit(JOpcode.MONITOREXIT); }
-
- // Wide variants
- // FIXME setStackProduction will raise an exception here
- public void emitWIDE(JOpcode opcode, int index) {
- assert (opcode.code == JOpcode.cILOAD)
- || (opcode.code == JOpcode.cLLOAD)
- || (opcode.code == JOpcode.cFLOAD)
- || (opcode.code == JOpcode.cDLOAD)
- || (opcode.code == JOpcode.cALOAD)
- || (opcode.code == JOpcode.cISTORE)
- || (opcode.code == JOpcode.cLSTORE)
- || (opcode.code == JOpcode.cFSTORE)
- || (opcode.code == JOpcode.cDSTORE)
- || (opcode.code == JOpcode.cASTORE)
- || (opcode.code == JOpcode.cRET)
- : "invalide opcode for WIDE: " + opcode;
-
- setStackProduction(getPC(), opcode);
- codeArray.addU1(JOpcode.WIDE.code);
- codeArray.addU1(opcode.code);
- codeArray.addU2(index);
- }
- public void emitWIDE(JOpcode opcode, int index, int constant) {
- assert opcode.code == JOpcode.cIINC
- : "invalid opcode for WIDE: " + opcode;
-
- setStackProduction(getPC(), opcode);
- codeArray.addU1(JOpcode.cWIDE);
- codeArray.addU1(opcode.code);
- codeArray.addU2(index);
- codeArray.addU2(constant);
- }
-
- protected void emitU1(JOpcode opcode, int i1) {
- setStackProduction(getPC(), opcode);
- codeArray.addU1(opcode.code);
- codeArray.addU1(i1);
- }
-
- protected void emitU1U1(JOpcode opcode, int i1, int i2) {
- setStackProduction(getPC(), opcode);
- codeArray.addU1(opcode.code);
- codeArray.addU1(i1);
- codeArray.addU1(i2);
- }
-
- protected void emitU2(JOpcode opcode, int i1) {
- setStackProduction(getPC(), opcode);
- codeArray.addU1(opcode.code);
- codeArray.addU2(i1);
- }
-
- protected void emitU2U1(JOpcode opcode, int i1, int i2) {
- setStackProduction(getPC(), opcode);
- codeArray.addU1(opcode.code);
- codeArray.addU2(i1);
- codeArray.addU1(i2);
- }
-
- protected void emitU2U1U1(JOpcode opcode, int i1, int i2, int i3) {
- setStackProduction(getPC(), opcode);
- codeArray.addU1(opcode.code);
- codeArray.addU2(i1);
- codeArray.addU1(i2);
- codeArray.addU1(i3);
- }
-
- protected void emitU4(JOpcode opcode, int i1) {
- setStackProduction(getPC(), opcode);
- codeArray.addU1(opcode.code);
- codeArray.addU4(i1);
- }
-
- protected int getU1(int sourcePos) {
- return codeArray.getU1(sourcePos);
- }
-
- protected int getU2(int sourcePos) {
- return codeArray.getU2(sourcePos);
- }
-
- protected int getU4(int sourcePos) {
- return codeArray.getU4(sourcePos);
- }
-
- protected int getS1(int sourcePos) {
- return codeArray.getS1(sourcePos);
- }
-
- protected int getS2(int sourcePos) {
- return codeArray.getS2(sourcePos);
- }
-
- protected int getS4(int sourcePos) {
- return codeArray.getS4(sourcePos);
- }
-
- // Stack size computation
- //////////////////////////////////////////////////////////////////////
-
- protected int getStackProduction(int pc) {
- if (stackProduction == null || pc >= stackProduction.length)
- return UNKNOWN_STACK_SIZE;
- else
- return stackProduction[pc];
- }
-
- protected void setStackProduction(int pc, int production) {
- if (stackProduction == null) {
- stackProduction = new int[256];
- Arrays.fill(stackProduction, UNKNOWN_STACK_SIZE);
- } else {
- while (pc >= stackProduction.length) {
- int[] newStackProduction = new int[stackProduction.length * 2];
- System.arraycopy(stackProduction, 0,
- newStackProduction, 0,
- stackProduction.length);
- Arrays.fill(newStackProduction,
- stackProduction.length,
- newStackProduction.length,
- UNKNOWN_STACK_SIZE);
- stackProduction = newStackProduction;
- }
- }
- stackProduction[pc] = production;
- }
-
- protected void setStackProduction(int pc, JOpcode opcode) {
- // TODO we should instead check whether the opcode has known
- // stack consumption/production.
- if (getStackProduction(pc) == UNKNOWN_STACK_SIZE)
-// && opcode.hasKnownProducedDataSize()
-// && opcode.hasKnownConsumedDataSize())
- setStackProduction(pc,
- opcode.getProducedDataSize()
- - opcode.getConsumedDataSize());
- }
-
- protected int computeMaxStackSize() {
- if (stackSizes == null) {
- stackSizes = new int[getSize()];
- Arrays.fill(stackSizes, UNKNOWN_STACK_SIZE);
- stackSizes[0] = 0;
- }
- int size = computeMaxStackSize(0, 0, 0);
-
- // compute stack sizes for exception handlers too
- ExceptionHandler exh = null;
- for (Iterator it = exceptionHandlers.iterator();
- it.hasNext();) {
- exh = (ExceptionHandler)it.next();
- int exhSize = computeMaxStackSize(exh.getHandlerPC(), 1, 1);
- if (size < exhSize)
- size = exhSize;
- }
-
- return size;
- }
-
- protected int computeMaxStackSize(int pc, int stackSize, int maxStackSize) {
- JCodeIterator iterator = new JCodeIterator(this, pc);
- for (;;) {
- int successors = iterator.getSuccessorCount();
- if (successors == 0)
- return maxStackSize;
- else {
- assert stackProduction[iterator.getPC()] != UNKNOWN_STACK_SIZE
- : "unknown stack production, pc=" + iterator.getPC()
- + " in method " + owner.getName();
- stackSize += stackProduction[iterator.getPC()];
- if (stackSize > maxStackSize)
- maxStackSize = stackSize;
- int nextPC = -1;
- for (int i = 0; i < successors; ++i) {
- int succPC = iterator.getSuccessorPC(i);
- assert succPC >= 0 && succPC < stackSizes.length
- : iterator.getPC() + ": invalid pc: " + succPC
- + " op: " + iterator.getOpcode();
- if (stackSizes[succPC] == UNKNOWN_STACK_SIZE) {
- stackSizes[succPC] = stackSize;
- if (nextPC == -1)
- nextPC = succPC;
- else
- maxStackSize = computeMaxStackSize(succPC,
- stackSize,
- maxStackSize);
- }
- }
- if (nextPC == -1)
- return maxStackSize;
- else
- iterator.moveTo(nextPC);
- }
- }
- }
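-
- // Worked example (illustrative): for the straight-line sequence
- // ICONST_1; ICONST_2; IADD; IRETURN the per-instruction stack
- // productions are +1, +1, -1, -1, so the running stack size is
- // 1, 2, 1, 0 and the computed max stack size is 2.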
-
- // Labels
- //////////////////////////////////////////////////////////////////////
-
- public static class OffsetTooBigException extends Exception {
- public OffsetTooBigException() { super(); }
- public OffsetTooBigException(String message) { super(message); }
- }
-
- protected void checkOffset16(int offset) throws OffsetTooBigException {
- if (offset < Short.MIN_VALUE || offset > Short.MAX_VALUE)
- throw new OffsetTooBigException("offset too big to fit"
- + " in 16 bits: " + offset);
- }
-
- public class Label {
- protected boolean anchored = false;
- protected int targetPC = 0;
-
- public void anchorToNext() {
- assert !anchored;
- this.targetPC = getPC();
- anchored = true;
- }
-
- public int getAnchor() {
- assert anchored;
- return targetPC;
- }
-
- protected int getOffset16(int pc, int instrPC)
- throws OffsetTooBigException {
- if (anchored) {
- int offset = targetPC - instrPC;
- checkOffset16(offset);
- return offset;
- } else {
- recordOffsetToPatch(pc, 16, instrPC, this);
- return 0;
- }
- }
-
- protected int getOffset32(int pc, int instrPC) {
- if (anchored)
- return targetPC - instrPC;
- else {
- recordOffsetToPatch(pc, 32, instrPC, this);
- return 0;
- }
- }
- }
-
- public Label newLabel() {
- return new Label();
- }
-
- public Label[] newLabels(int count) {
- Label[] labels = new Label[count];
- for (int i = 0; i < labels.length; ++i)
- labels[i] = newLabel();
- return labels;
- }
-
- protected static class OffsetToPatch {
- public final int pc;
- public final int size;
- public final int instrPC;
- public final Label label;
-
- public OffsetToPatch(int pc, int size, int instrPC, Label label) {
- this.pc = pc;
- this.size = size;
- this.instrPC = instrPC;
- this.label = label;
- }
- }
-
- protected void recordOffsetToPatch(int offsetPC,
- int size,
- int instrPC,
- Label label) {
- offsetToPatch.add(new OffsetToPatch(offsetPC, size, instrPC, label));
- }
-
- protected void patchAllOffset() throws OffsetTooBigException {
- Iterator offsetIt = offsetToPatch.iterator();
- while (offsetIt.hasNext()) {
- OffsetToPatch offset = (OffsetToPatch)offsetIt.next();
- int offsetValue = offset.label.getAnchor() - offset.instrPC;
- if (offset.size == 16) {
- checkOffset16(offsetValue);
- codeArray.putU2(offset.pc, offsetValue);
- } else
- codeArray.putU4(offset.pc, offsetValue);
- }
- }
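-
- // Illustrative sketch (a hypothetical helper, for illustration only):
- // a forward branch is emitted against an unanchored Label, so
- // getOffset16 records the hole via recordOffsetToPatch, and
- // patchAllOffset() later writes the real 16-bit offset once the label
- // has been anchored.
- private static void exampleForwardBranch(JCode code)
- throws OffsetTooBigException {
- Label end = code.newLabel();
- code.emitDUP();
- code.emitIFNULL(end); // offset unknown yet: 0 is emitted, patch recorded
- code.emitPOP();
- end.anchorToNext(); // the label now resolves to the current pc
- code.emitRETURN();
- }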
-
- // Exception handling
- //////////////////////////////////////////////////////////////////////
-
- public class ExceptionHandler {
- protected int startPC, endPC, handlerPC;
- protected final String catchType;
- protected final int catchTypeIndex;
-
- public void setStartPC(int pc) {
- this.startPC = pc;
- }
-
- public int getStartPC() {
- return this.startPC;
- }
-
- public void setEndPC(int pc) {
- this.endPC = pc;
- }
-
- public int getEndPC() {
- return this.endPC;
- }
-
- public void setHandlerPC(int pc) {
- this.handlerPC = pc;
- }
-
- public int getHandlerPC() {
- return this.handlerPC;
- }
-
- public ExceptionHandler(String catchType) {
- this(0, 0, 0, catchType);
- }
-
- public ExceptionHandler(int startPC,
- int endPC,
- int handlerPC,
- String catchType) {
- this.startPC = startPC;
- this.endPC = endPC;
- this.handlerPC = handlerPC;
- this.catchType = catchType;
- this.catchTypeIndex = (catchType == null
- ? 0
- : pool.addClass(catchType));
- }
-
- public ExceptionHandler(DataInputStream stream) throws IOException {
- this.startPC = stream.readShort();
- this.endPC = stream.readShort();
- this.handlerPC = stream.readShort();
- this.catchTypeIndex = stream.readShort();
- this.catchType = (catchTypeIndex == 0
- ? null
- : pool.lookupClass(catchTypeIndex));
- }
-
- public void writeTo(DataOutputStream stream) throws IOException {
- stream.writeShort(startPC);
- stream.writeShort(endPC);
- stream.writeShort(handlerPC);
- stream.writeShort(catchTypeIndex);
- }
-
- // Follows javap output format for exception handlers.
- /*@Override*/public String toString() {
- StringBuffer buf = new StringBuffer(" ");
- if (startPC < 10) buf.append(" ");
- buf.append(startPC);
- buf.append(" ");
- if (endPC < 10) buf.append(" ");
- buf.append(endPC);
- buf.append(" ");
- buf.append(handlerPC);
- buf.append(" ");
- if (catchType != null) {
- buf.append("Class ");
- buf.append(catchType);
- }
- else
- buf.append("any");
- return buf.toString();
- }
-
- }
-
- public void addExceptionHandler(ExceptionHandler handler) {
- assert !frozen;
- exceptionHandlers.add(handler);
- }
-
- public void addExceptionHandler(int startPC,
- int endPC,
- int handlerPC,
- String catchType) {
- addExceptionHandler(new ExceptionHandler(startPC,
- endPC,
- handlerPC,
- catchType));
- }
-
- public void addFinallyHandler(int startPC, int endPC, int handlerPC) {
- assert !frozen;
- addExceptionHandler(startPC, endPC, handlerPC, null);
- }
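-
- // For example (illustrative), addFinallyHandler(0, 12, 16) routes any
- // exception raised in the pc range [0, 12) to the handler code at
- // pc 16; a null catch type matches every exception class.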
-
- public List/*<ExceptionHandler>*/ getExceptionHandlers() {
- return exceptionHandlers;
- }
-
- // Line numbers
- //////////////////////////////////////////////////////////////////////
-
- protected int[] lineNumbers = null;
- protected void ensureLineNumberCapacity(int endPC) {
- assert !frozen;
- if (lineNumbers == null) {
- lineNumbers = new int[endPC];
- addAttribute(context.JLineNumberTableAttribute(owner.getOwner(),
- this));
- } else if (lineNumbers.length < endPC) {
- int[] newLN = new int[Math.max(endPC, lineNumbers.length * 2)];
- System.arraycopy(lineNumbers, 0, newLN, 0, lineNumbers.length);
- lineNumbers = newLN;
- }
- }
-
- /**
- * Set all line numbers in the interval [startPC, endPC) to
- * line, overwriting existing line numbers.
- */
- public void setLineNumber(int startPC, int endPC, int line) {
- ensureLineNumberCapacity(endPC);
- Arrays.fill(lineNumbers, startPC, endPC, line);
- }
-
- public void setLineNumber(int instrPC, int line) {
- setLineNumber(instrPC, instrPC + 1, line);
- }
-
- /** Sets all non-filled line numbers in the interval [startPC, endPC)
- * to 'line'.
- */
- public void completeLineNumber(int startPC, int endPC, int line) {
- ensureLineNumberCapacity(endPC);
- for (int pc = startPC; pc < endPC; ++pc)
- if (lineNumbers[pc] == 0) lineNumbers[pc] = line;
- }
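-
- // For example (illustrative), setLineNumber(0, 4, 7) tags the code at
- // pcs 0..3 with source line 7; a subsequent completeLineNumber(0, 4, 9)
- // then changes nothing, since it only fills entries that are still 0.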
-
- public int[] getLineNumbers() {
- assert frozen;
- if (lineNumbers == null) return new int[0];
- else if (lineNumbers.length == getPC()) return lineNumbers;
- else {
- int[] trimmedLN = new int[getPC()];
- System.arraycopy(lineNumbers, 0,
- trimmedLN, 0,
- Math.min(lineNumbers.length, trimmedLN.length));
- return trimmedLN;
- }
- }
-
- // Output
- //////////////////////////////////////////////////////////////////////
-
- public void writeTo(DataOutputStream stream) throws IOException {
- assert frozen;
- stream.writeInt(getSize());
- codeArray.writeTo(stream);
- }
-
- // Follows javap output format for opcodes.
- /*@Override*/ public String toString() {
- StringBuffer buf = new StringBuffer();
- JOpcode opcode = null;
- int pc = 0, addr = 0;
- while (pc < codeArray.getSize()) {
- buf.append("\n ");
- buf.append(pc);
- buf.append(":\t");
- opcode = JOpcode.OPCODES[codeArray.getU1(pc)];
- buf.append(decode(opcode, pc));
- if (opcode.code == JOpcode.cTABLESWITCH ||
- opcode.code == JOpcode.cLOOKUPSWITCH) {
- addr = ((pc / 4 + 1) + 1) * 4; // U4 aligned data
- int low = codeArray.getU4(addr);
- int high = codeArray.getU4(addr+4);
- pc = addr + (2/*low+high*/ + (high - low + 1)/*targets*/) * 4;
- } else
- pc += opcode.getSize();
- }
- if (exceptionHandlers.size() > 0) {
- buf.append("\n Exception table:\n from to target type\n");
- Iterator it = exceptionHandlers.iterator();
- while (it.hasNext()) {
- ExceptionHandler exh = (ExceptionHandler)it.next();
- buf.append(exh);
- buf.append("\n");
- }
- }
- return buf.toString();
- }
-
- private String decode(JOpcode opcode, int pc) {
- String ownerClassName = owner.getOwner().getName();
- int data, data2;
- StringBuilder buf = new StringBuilder();
- buf.append(opcode.name.toLowerCase());
- switch (opcode.code) {
- case JOpcode.cALOAD: case JOpcode.cASTORE: case JOpcode.cBIPUSH:
- case JOpcode.cDLOAD: case JOpcode.cDSTORE:
- case JOpcode.cFLOAD: case JOpcode.cFSTORE:
- case JOpcode.cILOAD: case JOpcode.cISTORE:
- case JOpcode.cLLOAD: case JOpcode.cLSTORE:
- data = codeArray.getU1(pc+1);
- buf.append("\t");
- buf.append(data);
- break;
- case JOpcode.cLDC:
- data = codeArray.getU1(pc+1);
- buf.append("\t#");
- buf.append(data);
- buf.append("; ");
- buf.append(pool.lookupEntry(data).toComment(ownerClassName));
- break;
- case JOpcode.cNEWARRAY:
- data = codeArray.getU1(pc+1);
- buf.append(" ");
- buf.append(JType.tagToString(data));
- break;
- case JOpcode.cIINC:
- data = codeArray.getU1(pc+1);
- data2 = codeArray.getU1(pc+2);
- buf.append("\t");
- buf.append(data);
- buf.append(", ");
- buf.append(data2);
- break;
- case JOpcode.cSIPUSH:
- data = codeArray.getU2(pc+1);
- buf.append("\t");
- buf.append(data);
- break;
- case JOpcode.cANEWARRAY: case JOpcode.cCHECKCAST:
- case JOpcode.cGETFIELD: case JOpcode.cGETSTATIC:
- case JOpcode.cINSTANCEOF:
- case JOpcode.cINVOKESPECIAL: case JOpcode.cINVOKESTATIC:
- case JOpcode.cINVOKEVIRTUAL:
- case JOpcode.cLDC_W: case JOpcode.cLDC2_W: case JOpcode.cNEW:
- case JOpcode.cPUTFIELD: case JOpcode.cPUTSTATIC:
- data = codeArray.getU2(pc+1);
- buf.append("\t#");
- buf.append(data);
- buf.append("; ");
- buf.append(pool.lookupEntry(data).toComment(ownerClassName));
- break;
- case JOpcode.cIF_ACMPEQ: case JOpcode.cIF_ACMPNE:
- case JOpcode.cIFEQ: case JOpcode.cIFGE: case JOpcode.cIFGT:
- case JOpcode.cIFLE: case JOpcode.cIFLT: case JOpcode.cIFNE:
- case JOpcode.cIFNONNULL: case JOpcode.cIFNULL:
- case JOpcode.cIF_ICMPEQ: case JOpcode.cIF_ICMPGE:
- case JOpcode.cIF_ICMPGT: case JOpcode.cIF_ICMPLE:
- case JOpcode.cIF_ICMPLT: case JOpcode.cIF_ICMPNE:
- data = codeArray.getU2(pc+1); // maybe S2 offset
- buf.append("\t");
- buf.append(pc+data);
- break;
- case JOpcode.cGOTO:
- data = codeArray.getS2(pc+1); // always S2 offset
- buf.append("\t");
- buf.append(pc+data);
- break;
- case JOpcode.cINVOKEINTERFACE:
- data = codeArray.getU2(pc+1);
- data2 = codeArray.getU1(pc+3);
- buf.append("\t#");
- buf.append(data);
- buf.append(", ");
- buf.append(data2);
- buf.append("; ");
- buf.append(pool.lookupEntry(data).toComment(ownerClassName));
- break;
- case JOpcode.cTABLESWITCH:
- buf.append("{ //");
- int addr = ((pc / 4 + 1) + 1) * 4; // U4 aligned data
- int low = codeArray.getU4(addr);
- int high = codeArray.getU4(addr+4);
- buf.append(low);
- buf.append(" to ");
- buf.append(high);
- for (int i = low; i <= high; ++i) {
- buf.append("\n\t\t");
- buf.append(i);
- buf.append(": ");
- buf.append(pc+codeArray.getU4(addr+(i-1)*4));
- buf.append(";");
- }
- buf.append("\n\t\tdefault: ");
- buf.append(pc+codeArray.getU4(addr-4));
- buf.append(" }");
- default:
- }
- return buf.toString();
- }
-}
diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JCodeAttribute.java b/src/fjbg/ch/epfl/lamp/fjbg/JCodeAttribute.java
deleted file mode 100644
index 9f3fcf8c6a..0000000000
--- a/src/fjbg/ch/epfl/lamp/fjbg/JCodeAttribute.java
+++ /dev/null
@@ -1,125 +0,0 @@
-/* FJBG -- Fast Java Bytecode Generator
- * Copyright 2002-2013 LAMP/EPFL
- * @author Michel Schinz
- */
-
-package ch.epfl.lamp.fjbg;
-
-import java.io.DataInputStream;
-import java.io.DataOutputStream;
-import java.io.IOException;
-import java.util.Iterator;
-import java.util.List;
-
-/**
- * Code attribute, containing code of methods.
- *
- * A Code attribute contains the JVM instructions and auxiliary information
- * for a single method, instance initialization method, or class or interface
- * initialization method. See section 4.8.3 of the JVM specification.
- *
- * @author Michel Schinz, Stephane Micheloud
- * @version 1.1
- */
-
-public class JCodeAttribute extends JAttribute {
- protected final JCode code;
- protected final JMethod owner;
- protected static int UNKNOWN_STACK_SIZE = Integer.MIN_VALUE;
- protected final int maxStackSize;
- protected final int maxLocals;
-
- public JCodeAttribute(FJBGContext context, JClass clazz, JMethod owner) {
- super(context, clazz);
- this.owner = owner;
-
- this.maxStackSize = UNKNOWN_STACK_SIZE;
- this.maxLocals = 0; // unknown
- this.code = owner.getCode();
-
- assert clazz == owner.getOwner();
- }
-
- public JCodeAttribute(FJBGContext context,
- JClass clazz,
- Object owner,
- String name,
- int size,
- DataInputStream stream)
- throws IOException {
- super(context, clazz, name);
- this.owner = (JMethod)owner;
-
- this.maxStackSize = stream.readShort();
- this.maxLocals = stream.readShort();
- this.code = context.JCode(clazz, (JMethod)owner, stream);
-
- int handlersCount = stream.readShort();
- for (int i = 0; i < handlersCount; ++i)
- code.addExceptionHandler(code.new ExceptionHandler(stream));
- List/*<JAttribute>*/ attributes =
- JAttribute.readFrom(context, clazz, code, stream);
- Iterator attrIt = attributes.iterator();
- while (attrIt.hasNext())
- code.addAttribute((JAttribute)attrIt.next());
-
- assert name.equals(getName());
- }
-
- public String getName() { return "Code"; }
-
- // Follows javap output format for Code attribute.
- /*@Override*/ public String toString() {
- StringBuffer buf = new StringBuffer(" Code:");
- buf.append("\n Stack=");
- buf.append(maxStackSize);
- buf.append(", Locals=");
- buf.append(maxLocals);
- buf.append(", Args_size=");
- buf.append(owner.getArgsSize());
- buf.append(code);
- buf.append("\n");
- Iterator it = code.getAttributes().iterator();
- while (it.hasNext()) {
- JAttribute attr = (JAttribute)it.next();
- buf.append(attr);
- buf.append("\n");
- }
- return buf.toString();
- }
-
- protected int getSize() {
- int handlersNum = code.getExceptionHandlers().size();
-
- int attrsSize = 0;
- Iterator attrsIt = code.getAttributes().iterator();
- while (attrsIt.hasNext()) {
- JAttribute attr = (JAttribute)attrsIt.next();
- attrsSize += attr.getSize() + 6;
- }
-
- return 2 // max stack
- + 2 // max locals
- + 4 // code size
- + code.getSize() // code
- + 2 // exception table size
- + 8 * handlersNum // exception table
- + 2 // attributes count
- + attrsSize; // attributes
- }
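-
- // For example (illustrative), a method with 10 bytes of code, one
- // exception handler and no nested attributes yields
- // 2 + 2 + 4 + 10 + 2 + 8 + 2 = 30 bytes of attribute contents.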
-
- protected void writeContentsTo(DataOutputStream stream) throws IOException {
- List/*<ExceptionHandler>*/ handlers = code.getExceptionHandlers();
-
- stream.writeShort(code.getMaxStackSize());
- stream.writeShort(owner.getMaxLocals());
-
- code.writeTo(stream);
-
- stream.writeShort(handlers.size());
- Iterator handlerIt = handlers.iterator();
- while (handlerIt.hasNext())
- ((JCode.ExceptionHandler)handlerIt.next()).writeTo(stream);
- JAttribute.writeTo(code.getAttributes(), stream);
- }
-}
diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JCodeIterator.java b/src/fjbg/ch/epfl/lamp/fjbg/JCodeIterator.java
deleted file mode 100644
index d09dfd19a4..0000000000
--- a/src/fjbg/ch/epfl/lamp/fjbg/JCodeIterator.java
+++ /dev/null
@@ -1,377 +0,0 @@
-/* FJBG -- Fast Java Bytecode Generator
- * Copyright 2002-2013 LAMP/EPFL
- * @author Michel Schinz
- */
-
-package ch.epfl.lamp.fjbg;
-
-import ch.epfl.lamp.util.ByteArray;
-
-/**
- * Iterator used to examine the contents of an instruction list.
- *
- * @author Michel Schinz, Thomas Friedli
- * @version 1.0
- */
-
-public class JCodeIterator {
- protected final JCode code;
- protected final JConstantPool pool;
- protected final ByteArray codeArray;
-
- protected int pc;
- protected JOpcode opcode;
-
- /**
- * Creates a new code iterator over the given instruction list,
- * with its pc initialized to a given value.
- */
- public JCodeIterator(JCode code, int pc) {
- this.code = code;
- this.pool = code.getOwner().getOwner().getConstantPool();
- this.codeArray = code.codeArray;
- this.pc = pc;
- setOpcode();
- }
-
- public JCodeIterator(JCode code) {
- this(code, 0);
- }
-
- /**
- * Get the current program counter.
- * @return The current program counter.
- */
- public int getPC() { return pc; }
-
- /**
- * Looks up the type of the instruction positioned at the
- * current address and updates the current opcode.
- */
- protected void setOpcode() {
- // TODO : check if the current pc is the beginning
- // of an instruction
- opcode = isValid() ? JOpcode.OPCODES[codeArray.getU1(pc)] : null;
- }
-
- /**
- * Returns the opcode of the current instruction.
- * @return The opcode of the current instruction.
- */
- public JOpcode getOpcode() {
- return opcode;
- }
-
- /**
- * Updates the program counter to a given value.
- * @param pc The new value of the program counter.
- */
- public void moveTo(int pc) {
- this.pc = pc;
- setOpcode();
- }
-
- /**
- * Checks the validity of the iterator.
- * @return true iff the iterator points to a valid address.
- */
- public boolean isValid() {
- return pc < codeArray.getSize();
- }
-
- /**
- * Updates the current instruction with the one that follows it
- * in the code.
- */
- public void moveToNext() {
- moveTo(pc + getInstructionSize());
- }
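-
- // Illustrative sketch (a hypothetical helper, for illustration only):
- // a linear walk over a method's code using the iterator above.
- public static void dump(JCode code) {
- JCodeIterator it = new JCodeIterator(code);
- while (it.isValid()) {
- System.out.println(it.getPC() + ": " + it.getOpcode());
- it.moveToNext();
- }
- }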
-
- /**
- * Updates the current instruction with one of its successors.
- * @param succ The index of the wanted successor in the list of
- * the successors of the current instruction.
- */
- public void moveToSuccessor(int succ) {
- moveTo(getSuccessorPC(succ));
- }
-
- /**
- * Updates the current instruction with the one positioned
- * at a given offset relative to the current program counter.
- * @param offset The position of the instruction relative
- * to the current one.
- */
- public void moveRelatively(int offset) {
- moveTo(pc + offset);
- }
-
- /**
- * Returns the size in bytes of the current instruction.
- * @return The size in bytes of the current instruction.
- */
- public int getInstructionSize() {
- if (opcode.size != JOpcode.UNKNOWN) {
- return opcode.size;
- } else if (opcode == JOpcode.TABLESWITCH) {
- int lowOffset = 1 + pad4(pc + 1) + 4;
- int low = codeArray.getS4(pc + lowOffset);
- int high = codeArray.getS4(pc + lowOffset + 4);
- return lowOffset + 8 + 4 * (high - low + 1);
- } else if (opcode == JOpcode.LOOKUPSWITCH) {
- int npairsOffset = 1 + pad4(pc + 1) + 4;
- int npairs = codeArray.getS4(pc + npairsOffset);
- return npairsOffset + 4 + 8 * npairs;
- } else if (opcode == JOpcode.WIDE) {
- if (codeArray.getU1(pc + 1) == JOpcode.cIINC)
- return 6;
- else
- return 4;
- } else
- throw new Error("Unknown size for instruction " + opcode);
- }
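-
- // Worked example (illustrative): a TABLESWITCH opcode at pc = 5 with
- // low = 0 and high = 3 gives lowOffset = 1 + pad4(6) + 4 = 7, hence a
- // size of 7 + 8 + 4 * (3 - 0 + 1) = 31 bytes (one opcode byte, two
- // padding bytes, the default/low/high words and four jump offsets).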
-
- /**
- * Returns the number of successors of the current instruction.
- * @return The number of successors of the current instruction.
- */
- public int getSuccessorCount() {
- if (opcode.successorCount != JOpcode.UNKNOWN) {
- return opcode.successorCount;
- } else if (opcode == JOpcode.TABLESWITCH) {
- int lowPos = pc + 1 + pad4(pc + 1) + 4;
- return 1 // default case
- + codeArray.getS4(lowPos + 4) // value of HIGH field
- - codeArray.getS4(lowPos) + 1; // value of LOW field
- } else if (opcode == JOpcode.LOOKUPSWITCH) {
- int npairsPos = pc + 1 + pad4(pc + 1) + 4;
- return 1 + codeArray.getS4(npairsPos);
- } else
- throw new Error("Unknown successors for instruction " + opcode);
- }
-
- /**
- * Returns the address of the successor of the current instruction
- * given its index in the list of successors of the current
- * instruction.
- * @param index The index of the wanted successor in the list of
- * the successors of the current instruction.
- * @return The address of the specific successor.
- */
- public int getSuccessorPC(int index) {
- assert (index >= 0) && (index < getSuccessorCount()) : index;
-
- switch (opcode.jumpKind) {
- case JOpcode.JMP_NEXT:
- return pc + getInstructionSize();
- case JOpcode.JMP_ALWAYS_S2_OFFSET:
- return pc + codeArray.getS2(pc + 1);
- case JOpcode.JMP_ALWAYS_S4_OFFSET:
- return pc + codeArray.getS4(pc + 1);
- case JOpcode.JMP_MAYBE_S2_OFFSET:
- if (index == 0)
- return pc + getInstructionSize();
- else
- return pc + codeArray.getS2(pc + 1);
- case JOpcode.JMP_TABLE: {
- int defaultPos = pc + 1 + pad4(pc + 1);
- if (index == 0)
- return pc + codeArray.getS4(defaultPos);
- else
- return pc + codeArray.getS4(defaultPos + 3*4 + 4 * (index - 1));
- }
- case JOpcode.JMP_LOOKUP: {
- int defaultPos = pc + 1 + pad4(pc + 1);
- if (index == 0)
- return pc + codeArray.getS4(defaultPos);
- else
- return pc + codeArray.getS4(defaultPos + 2*4 + 4 + 8 * (index - 1));
- }
- default:
- throw new Error();
- }
- }
-
- /**
- * Returns the total size of data words put on the stack by the current
- * instruction.
- * @return The total size of data words put on the stack by the current
- * instruction.
- */
- public int getProducedDataSize() {
- if (opcode.getProducedDataTypes() == JOpcode.UNKNOWN_TYPE) {
- switch (opcode.code) {
- case JOpcode.cLDC: case JOpcode.cLDC_W: case JOpcode.cBALOAD:
- return 1;
- case JOpcode.cLDC2_W: case JOpcode.cDUP: case JOpcode.cSWAP:
- return 2;
- case JOpcode.cDUP_X1:
- return 3;
- case JOpcode.cDUP_X2: case JOpcode.cDUP2:
- return 4;
- case JOpcode.cDUP2_X1:
- return 5;
- case JOpcode.cDUP2_X2:
- return 6;
- case JOpcode.cGETSTATIC: case JOpcode.cGETFIELD: {
- JConstantPool.FieldOrMethodRefEntry entry =
- (JConstantPool.FieldOrMethodRefEntry)
- pool.lookupEntry(codeArray.getU2(pc + 1));
- return JType.parseSignature(entry.getSignature()).getSize();
- }
- case JOpcode.cWIDE : {
- int op = codeArray.getU1(pc + 1);
- if (op >= JOpcode.cILOAD && op <= JOpcode.cALOAD) {
- JOpcode opcode2 = JOpcode.OPCODES[op];
- return JType.getTotalSize(opcode2.getProducedDataTypes());
- } else if (op >= JOpcode.cISTORE && op <= JOpcode.cASTORE)
- return 0;
- else return 0; // (IINC)
- }
- default :
- throw new Error(opcode.toString());
- }
- } else
- return JType.getTotalSize(opcode.getProducedDataTypes());
- }
-
- /**
- * Returns the total size of data words taken from the stack by the current
- * instruction.
- * @return The total size of data words taken from the stack by the current
- * instruction.
- */
- public int getConsumedDataSize() {
- if (opcode.getConsumedDataTypes() != JOpcode.UNKNOWN_TYPE)
- return JType.getTotalSize(opcode.getConsumedDataTypes());
- else {
- switch (opcode.code) {
- case JOpcode.cPOP: case JOpcode.cDUP:
- return 1;
- case JOpcode.cPOP2: case JOpcode.cSWAP:
- case JOpcode.cDUP_X1: case JOpcode.cDUP2:
- return 2;
- case JOpcode.cDUP_X2: case JOpcode.cDUP2_X1:
- return 3;
- case JOpcode.cDUP2_X2:
- return 4;
- case JOpcode.cPUTSTATIC: case JOpcode.cPUTFIELD: {
- JConstantPool.FieldOrMethodRefEntry entry =
- (JConstantPool.FieldOrMethodRefEntry)
- pool.lookupEntry(codeArray.getU2(pc + 1));
- return JType.parseSignature(entry.getSignature()).getSize();
- }
- case JOpcode.cINVOKEVIRTUAL: case JOpcode.cINVOKESPECIAL:
- case JOpcode.cINVOKESTATIC: case JOpcode.cINVOKEINTERFACE : {
- JConstantPool.FieldOrMethodRefEntry entry =
- (JConstantPool.FieldOrMethodRefEntry)
- pool.lookupEntry(codeArray.getU2(pc + 1));
- JMethodType tp = (JMethodType)
- JType.parseSignature(entry.getSignature());
- return tp.getArgsSize()
- + (opcode == JOpcode.INVOKESTATIC ? 0 : 1);
- }
- case JOpcode.cWIDE : {
- int op = codeArray.getU1(pc + 1);
- if (op >= JOpcode.cILOAD && op <= JOpcode.cALOAD)
- return 0;
- else if (op >= JOpcode.cISTORE && op <= JOpcode.cASTORE) {
- JOpcode opcode2 = JOpcode.OPCODES[op];
- return JType.getTotalSize(opcode2.getConsumedDataTypes());
- } else
- return 0; // (IINC)
- }
- case JOpcode.cMULTIANEWARRAY :
- return codeArray.getU1(pc + 3);
- default:
- throw new Error(opcode.toString());
- }
- }
- }
-
- /**
- * Returns the number of data types put on the stack by the current
- * instruction.
- * @return The number of data types put on the stack by the current
- * instruction.
- */
- public int getProducedDataTypesNumber() {
- if (opcode.getProducedDataTypes() != JOpcode.UNKNOWN_TYPE)
- return opcode.getProducedDataTypes().length;
- else {
- switch (opcode.code) {
- case JOpcode.cLDC: case JOpcode.cLDC_W: case JOpcode.cLDC2_W:
- case JOpcode.cBALOAD: case JOpcode.cGETSTATIC:
- case JOpcode.cGETFIELD:
- return 1;
- case JOpcode.cDUP: case JOpcode.cSWAP:
- return 2;
- case JOpcode.cDUP_X1:
- return 3;
- case JOpcode.cWIDE: {
- int op = codeArray.getU1(pc + 1);
- if (op >= JOpcode.cILOAD && op <= JOpcode.cALOAD)
- return 1;
- else if (op >= JOpcode.cISTORE && op <= JOpcode.cASTORE)
- return 0;
- else
- return 0; // (IINC)
- }
- default:
- throw new Error("JOpcode implementation error");
- }
- }
- }
-
- /**
- * Returns the number of data types taken from the stack by the current
- * instruction.
- * @return The number of data types taken from the stack by the current
- * instruction.
- */
-// public int getConsumedDataTypesNumber() {
-// if (opcode.getConsumedDataTypes() == JOpcode.UNKNOWN_TYPE) {
-// switch (opcode.code) {
-// case 87 : return 1; // POP
-// case 88 : return 2; // POP2
-// case 89 : return 1; // DUP
-// case 90 : return 2; // DUP_X1
-// case 91 : // DUP_X2
-// case 92 : // DUP2
-// case 93 : // DUP2_X1
-// case 94 : // DUP2_X2
-// throw new UnsupportedOperationException("Opcode " + opcode.name
-// + " has a stack-dependant"
-// + " data types consumption");
-// case 95 : return 2; // SWAP
-// case 179 : return 1; // PUTSTATIC
-// case 181 : return 1; // PUTFIELD
-// case 182 : // INVOKEVIRTUAL
-// case 183 : // INVOKESPECIAL
-// case 185 : // INVOKEINTERFACE
-// s = epool.getClassMethodRef(codeArray.getU2(pc + 1)).split(" ")[3];
-// return ((JMethodType)JType.parseSignature(s)).argTypes.length + 1;
-// case 184 : // INVOKESTATIC
-// s = epool.getClassMethodRef(codeArray.getU2(pc + 1)).split(" ")[3];
-// return ((JMethodType)JType.parseSignature(s)).argTypes.length;
-// case 196 : // WIDE
-// int op = codeArray.getU1(pc + 1);
-// if (op >= 21 && op <= 25) return 0; // (xLOAD)
-// else if (op >= 54 && op <= 58) // (xSTORE)
-// return JOpcode.OPCODES[op].getConsumedDataTypes().length;
-// else return 0; // (IINC)
-// case 197 : return codeArray.getU1(pc + 3); // MULTIANEWARRAY
-// default : throw new Error("JOpcode implementation error");
-// }
-// } else return opcode.getConsumedDataTypes().length;
-// }
-
-
- // Return the number between 0 and 3 which, if added to the given
- // value, would yield a multiple of 4.
- protected int[] padding = { 0, 3, 2, 1 };
- protected int pad4(int value) {
- return padding[value % 4];
- }
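-
- // For example (illustrative): pad4(5) == 3, pad4(6) == 2, pad4(8) == 0.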
-}
diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JConstantPool.java b/src/fjbg/ch/epfl/lamp/fjbg/JConstantPool.java
deleted file mode 100644
index 9867e01b25..0000000000
--- a/src/fjbg/ch/epfl/lamp/fjbg/JConstantPool.java
+++ /dev/null
@@ -1,771 +0,0 @@
-/* FJBG -- Fast Java Bytecode Generator
- * Copyright 2002-2013 LAMP/EPFL
- * @author Michel Schinz
- */
-
-package ch.epfl.lamp.fjbg;
-
-import java.io.DataInputStream;
-import java.io.DataOutputStream;
-import java.io.IOException;
-import java.util.HashMap;
-
-/**
- * Constant pool, holding constants for a Java class file.
- *
- * @author Michel Schinz
- * @version 2.0
- */
-
-public class JConstantPool {
- protected boolean frozen = false;
-
- protected HashMap/*<Entry,Integer>*/ entryToIndex = new HashMap();
- protected Entry[] indexToEntry;
- protected int currIndex;
-
- public static final short CONSTANT_Utf8 = 1;
- public static final short CONSTANT_Integer = 3;
- public static final short CONSTANT_Float = 4;
- public static final short CONSTANT_Long = 5;
- public static final short CONSTANT_Double = 6;
- public static final short CONSTANT_Class = 7;
- public static final short CONSTANT_String = 8;
- public static final short CONSTANT_Fieldref = 9;
- public static final short CONSTANT_Methodref = 10;
- public static final short CONSTANT_InterfaceMethodref = 11;
- public static final short CONSTANT_NameAndType = 12;
-
- protected JConstantPool(FJBGContext context) {
- indexToEntry = new Entry[8];
- currIndex = 1;
- }
-
- protected JConstantPool(FJBGContext context, DataInputStream stream)
- throws IOException {
- int count = stream.readShort();
- indexToEntry = new EntryIndex[count];
-
- currIndex = 1;
- while (currIndex < count) {
- EntryIndex e;
- int tag = stream.readByte();
-
- switch (tag) {
- case CONSTANT_Utf8:
- e = new Utf8Entry(stream);
- // no duplicates
- entryToIndex.put(e, new Integer(currIndex));
- break;
- case CONSTANT_Integer:
- e = new IntegerEntry(stream);
- break;
- case CONSTANT_Float:
- e = new FloatEntry(stream);
- break;
- case CONSTANT_Long:
- e = new LongEntry(stream);
- break;
- case CONSTANT_Double:
- e = new DoubleEntry(stream);
- break;
- case CONSTANT_Class:
- e = new DescriptorEntryIndex(stream);
- break;
- case CONSTANT_String:
- e = new StringEntryIndex(stream);
- break;
- case CONSTANT_Fieldref:
- case CONSTANT_Methodref:
- case CONSTANT_InterfaceMethodref:
- e = new FieldOrMethodRefEntryIndex(tag, stream);
- break;
- case CONSTANT_NameAndType:
- e = new NameAndTypeEntryIndex(stream);
- break;
- default:
- throw new IllegalArgumentException("unknown entry in pool: " + tag);
- }
- indexToEntry[currIndex] = e;
- currIndex += e.getSize();
- }
- }
-
- public void freeze() { frozen = true; }
-
- /**
- * Returns a string representing the type of an entry,
- * given its tag.
- * @param tag The tag representing the type of the
- * constant pool entry
- */
- public String getEntryType(int tag) {
- switch (tag) {
- case CONSTANT_Utf8 : return "Utf8";
- case CONSTANT_Integer : return "Integer";
- case CONSTANT_Float : return "Float";
- case CONSTANT_Long : return "Long";
- case CONSTANT_Double : return "Double";
- case CONSTANT_Class : return "Class";
- case CONSTANT_String : return "String";
- case CONSTANT_Fieldref : return "Field";
- case CONSTANT_Methodref : return "Method";
- case CONSTANT_InterfaceMethodref : return "InterfaceMethod";
- case CONSTANT_NameAndType : return "NameAndType";
- default : throw new Error("invalid constant pool tag : " + tag);
- }
- }
-
- public int addClass(String className) {
- return addDescriptor(className.replace('.', '/'));
- }
-
- public int addDescriptor(JReferenceType type) {
- return addDescriptor(type.getDescriptor());
- }
-
- protected int addDescriptor(String name) {
- return addEntry(new DescriptorEntryValue(name));
- }
-
- public int addClassMethodRef(String className,
- String methodName,
- String signature) {
- return addMethodRef(true, className, methodName, signature);
- }
-
- public int addInterfaceMethodRef(String className,
- String methodName,
- String signature) {
- return addMethodRef(false, className, methodName, signature);
- }
-
- public int addMethodRef(boolean isClass,
- String className,
- String methodName,
- String signature) {
- return addEntry(new FieldOrMethodRefEntryValue(isClass
- ? CONSTANT_Methodref
- : CONSTANT_InterfaceMethodref,
- className,
- methodName,
- signature));
- }
-
- public int addFieldRef(String className,
- String fieldName,
- String signature) {
- return addEntry(new FieldOrMethodRefEntryValue(CONSTANT_Fieldref,
- className,
- fieldName,
- signature));
- }
-
- public int addInteger(int value) {
- return addEntry(new IntegerEntry(value));
- }
-
- public int addFloat(float value) {
- return addEntry(new FloatEntry(value));
- }
-
- public int addLong(long value) {
- return addEntry(new LongEntry(value));
- }
-
- public int addDouble(double value) {
- return addEntry(new DoubleEntry(value));
- }
-
- public int addString(String value) {
- return addEntry(new StringEntryValue(value));
- }
-
- public int addNameAndType(String name, String descriptor) {
- return addEntry(new NameAndTypeEntryValue(name, descriptor));
- }
-
- public int addUtf8(String value) {
- return addEntry(new Utf8Entry(value));
- }
-
- public int addUtf8(byte[] value) {
- return addEntry(new Utf8Entry(value));
- }
-
- protected int addEntry(EntryValue e) {
- assert !frozen;
- Integer idx = (Integer)entryToIndex.get(e);
- if (idx != null)
- return idx.intValue();
-
- e.addChildren();
-
- int index = currIndex;
- currIndex += e.getSize();
-
- entryToIndex.put(e, new Integer(index));
- if (index >= indexToEntry.length) {
- Entry[] newI2E = new Entry[indexToEntry.length * 2];
- System.arraycopy(indexToEntry, 0, newI2E, 0, indexToEntry.length);
- indexToEntry = newI2E;
- }
- indexToEntry[index] = e;
- return index;
- }
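-
- // Illustrative sketch (a hypothetical helper, for illustration only):
- // entries are interned, so adding an equal entry twice returns the same
- // index, while long and double entries advance the index by two slots.
- private static void exampleInterning(JConstantPool pool) {
- int a = pool.addUtf8("hello");
- int b = pool.addUtf8("hello"); // same index: the existing entry is reused
- int c = pool.addLong(42L); // a long entry occupies two slots
- assert a == b;
- }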
-
- /// Lookup methods
- //////////////////////////////////////////////////////////////////////
-
- public Entry lookupEntry(int index) {
- assert index > 0 && index < currIndex
- : "invalid index: " + index;
- assert indexToEntry[index] != null
- : "invalid index (null contents): " + index;
- return indexToEntry[index];
- }
-
- public String lookupClass(int index) {
- DescriptorEntry entry = (DescriptorEntry)lookupEntry(index);
- return entry.getValue();
- }
-
- public String lookupNameAndType(int index) {
- NameAndTypeEntry entry = (NameAndTypeEntry)lookupEntry(index);
- return entry.getName()+":"+entry.getDescriptor();
- }
-
- public String lookupUtf8(int index) {
- Utf8Entry entry = (Utf8Entry)lookupEntry(index);
- return entry.getValue();
- }
-
- /// Output
- //////////////////////////////////////////////////////////////////////
-
- public void writeTo(DataOutputStream stream) throws IOException {
- if (! frozen) freeze();
-
- stream.writeShort(currIndex);
- for (int i = 0; i < currIndex; ++i) {
- Entry entry = indexToEntry[i];
- if (entry != null) {
- stream.writeByte(entry.getTag());
- entry.writeContentsTo(stream);
- }
- }
- }
-
- // Follows javap output format for constant pool.
- /*@Override*/ public String toString() {
- StringBuffer buf = new StringBuffer(" Constant pool:");
- for (int i = 0; i < currIndex; ++i) {
- Entry entry = indexToEntry[i];
- if (entry != null) {
- if (i > 0) buf.append("\n");
- buf.append("const #");
- buf.append(i);
- buf.append(" = ");
- buf.append(entry);
- }
- }
- buf.append("\n");
- return buf.toString();
- }
-
- /// Classes for the various kinds of entries
- //////////////////////////////////////////////////////////////////////
-
- public interface Entry {
- public int getTag();
-
- int getSize();
- void writeContentsTo(DataOutputStream stream) throws IOException;
- String toComment(String ownerClassName);
- }
-
- protected interface EntryValue extends Entry {
- abstract void addChildren();
- }
-
- protected interface EntryIndex extends Entry {
- abstract void fetchChildren();
- }
-
- abstract protected class ChildlessEntry implements EntryValue, EntryIndex {
- public void addChildren() {}
- public void fetchChildren() {}
- }
-
- public class IntegerEntry extends ChildlessEntry implements Entry {
- private final int value;
- public IntegerEntry(int value) { this.value = value; }
- public IntegerEntry(DataInputStream stream) throws IOException {
- this(stream.readInt());
- }
-
- public int hashCode() { return value; }
- public boolean equals(Object o) {
- return o instanceof IntegerEntry && ((IntegerEntry)o).value == value;
- }
-
- public int getTag() { return CONSTANT_Integer; }
- public int getValue() { return value; }
-
- public int getSize() { return 1; }
- public void writeContentsTo(DataOutputStream stream) throws IOException {
- stream.writeInt(value);
- }
- /*@Override*/ public String toString() {
- StringBuffer buf = new StringBuffer("int\t");
- buf.append(getValue());
- buf.append(";");
- return buf.toString();
- }
- public String toComment(String ownerClassname) {
- return "//int "+getValue();
- }
- }
-
- public class FloatEntry extends ChildlessEntry implements Entry {
- private final float value;
- public FloatEntry(float value) { this.value = value; }
- public FloatEntry(DataInputStream stream) throws IOException {
- this(stream.readFloat());
- }
-
- public int hashCode() { return (int)value; }
- public boolean equals(Object o) {
- return o instanceof FloatEntry && ((FloatEntry)o).value == value;
- }
-
- public int getTag() { return CONSTANT_Float; }
- public float getValue() { return value; }
-
- public int getSize() { return 1; }
- public void writeContentsTo(DataOutputStream stream) throws IOException {
- stream.writeFloat(value);
- }
- /*@Override*/ public String toString() {
- StringBuffer buf = new StringBuffer("float\t");
- buf.append(getValue());
- buf.append("f");
- return buf.toString();
- }
- public String toComment(String ownerClassname) {
- return "//float "+getValue()+"f";
- }
- }
-
- public class LongEntry extends ChildlessEntry implements Entry {
- private final long value;
- public LongEntry(long value) { this.value = value; }
- public LongEntry(DataInputStream stream) throws IOException {
- this(stream.readLong());
- }
-
- public int hashCode() { return (int)value; }
- public boolean equals(Object o) {
- return o instanceof LongEntry && ((LongEntry)o).value == value;
- }
-
- public int getTag() { return CONSTANT_Long; }
- public long getValue() { return value; }
-
- public int getSize() { return 2; }
- public void writeContentsTo(DataOutputStream stream) throws IOException {
- stream.writeLong(value);
- }
- /*@Override*/ public String toString() {
- StringBuffer buf = new StringBuffer("long\t");
- buf.append(getValue());
- buf.append("l;");
- return buf.toString();
- }
- public String toComment(String ownerClassname) {
- return "//long "+getValue()+"l";
- }
- }
-
- public class DoubleEntry extends ChildlessEntry implements Entry {
- private final double value;
- public DoubleEntry(double value) { this.value = value; }
- public DoubleEntry(DataInputStream stream) throws IOException {
- this(stream.readDouble());
- }
-
- public int hashCode() { return (int)value; }
- public boolean equals(Object o) {
- return o instanceof DoubleEntry && ((DoubleEntry)o).value == value;
- }
-
- public int getTag() { return CONSTANT_Double; }
- public double getValue() { return value; }
-
- public int getSize() { return 2; }
- public void writeContentsTo(DataOutputStream stream) throws IOException {
- stream.writeDouble(value);
- }
- /*@Override*/ public String toString() {
- StringBuffer buf = new StringBuffer("double\t");
- buf.append(getValue());
- return buf.toString();
- }
- public String toComment(String ownerClassname) {
- return "//double "+getValue();
- }
- }
-
- public class Utf8Entry extends ChildlessEntry implements Entry {
- private final String value;
- private final byte[] bytes;
- public Utf8Entry(String value) {
- this.value = value.intern();
- this.bytes = null;
- }
- public Utf8Entry(DataInputStream stream) throws IOException {
- this(stream.readUTF());
- }
- public Utf8Entry(byte[] bytes) {
- this.bytes = bytes;
- this.value = null;
- }
-
- public int hashCode() {
- if (bytes != null) return bytes.hashCode();
- return value.hashCode();
- }
- public boolean equals(Object o) {
- boolean isEqual = o instanceof Utf8Entry;
- if (bytes != null) {
- isEqual = isEqual && ((Utf8Entry)o).bytes == bytes;
- }
- else {
- isEqual = isEqual && ((Utf8Entry)o).value == value;
- }
- return isEqual;
- }
-
- public int getTag() { return CONSTANT_Utf8; }
- public String getValue() { return value; }
- public byte[] getBytes() { return bytes; }
-
- public int getSize() { return 1; }
- public void writeContentsTo(DataOutputStream stream) throws IOException {
- if (bytes != null) {
- if (bytes.length > 65535) {
- throw new IOException("String literal of length " + bytes.length + " does not fit in Classfile");
- }
- stream.writeShort(bytes.length);
- stream.write(bytes);
- }
- else
- stream.writeUTF(value);
- }
- // Follows javap output format for Utf8 pool entries.
- public String toString() { return "Asciz\t"+escaped(getValue())+";"; }
- public String toComment(String ownerClassname) {
- return "//Asciz "+escaped(getValue());
- }
- private String escaped(String s) {
- return s.replace("\n", "\\n");
- }
- }
-
- abstract public class StringEntry implements Entry {
- protected String value;
- protected int valueIndex;
-
- public int hashCode() {
- assert value != null;
- return value.hashCode();
- }
- public boolean equals(Object o) {
- return o instanceof StringEntry && ((StringEntry)o).value == value;
- }
-
- public int getTag() { return CONSTANT_String; }
- public String getValue() { return value; }
-
- public int getSize() { return 1; }
- public void writeContentsTo(DataOutputStream stream) throws IOException {
- stream.writeShort(valueIndex);
- }
- // Follows javap output format for String pool entries.
- public String toString() {
- return "String\t#"+valueIndex+";\t// "+escaped(getValue());
- }
- public String toComment(String ownerClassname) {
- return "//String "+escaped(getValue());
- }
- private String escaped(String s) {
- return s.replace("\n", "\\n");
- }
- }
-
- public class StringEntryValue extends StringEntry implements EntryValue {
- public StringEntryValue(String value) {
- this.value = value.intern();
- }
- public void addChildren() {
- valueIndex = addUtf8(value);
- }
- }
-
- public class StringEntryIndex extends StringEntry implements EntryIndex {
- public StringEntryIndex(int valueIndex) {
- this.valueIndex = valueIndex;
- }
- public StringEntryIndex(DataInputStream stream) throws IOException {
- this(stream.readShort());
- }
- public String getValue() {
- if (value == null) fetchChildren();
- return super.getValue();
- }
- public void fetchChildren() {
- value = lookupUtf8(valueIndex);
- }
- }
-
- abstract public class DescriptorEntry implements Entry {
- protected String name;
- protected int nameIndex;
-
- public int hashCode() {
- assert name != null;
- return name.hashCode();
- }
- public boolean equals(Object o) {
- return o instanceof DescriptorEntry && ((DescriptorEntry)o).name == name;
- }
-
- public int getTag() { return CONSTANT_Class; }
- public String getValue() { return name; }
-
- public int getSize() { return 1; }
- public void writeContentsTo(DataOutputStream stream) throws IOException {
- stream.writeShort(nameIndex);
- }
- // Follows javap output format for class pool entries.
- public String toString() {
- StringBuffer buf = new StringBuffer("class\t#");
- buf.append(nameIndex);
- buf.append(";\t// ");
- buf.append(getClassName());
- return buf.toString();
- }
- public String toComment(String ownerClassname) {
- return "//class "+getClassName();
- }
- private String getClassName() {
- StringBuffer buf = new StringBuffer();
- String value = getValue();
- if (value.startsWith("[")) buf.append("\"");
- buf.append(value);
- if (value.startsWith("[")) buf.append("\"");
- return buf.toString();
- }
- }
-
- protected class DescriptorEntryValue
- extends DescriptorEntry
- implements EntryValue {
- public DescriptorEntryValue(String name) { this.name = name.intern(); }
- public void addChildren() {
- nameIndex = addUtf8(name);
- }
- }
-
- protected class DescriptorEntryIndex
- extends DescriptorEntry
- implements EntryIndex {
- public DescriptorEntryIndex(int nameIndex) { this.nameIndex = nameIndex; }
- public DescriptorEntryIndex(DataInputStream stream) throws IOException {
- this(stream.readShort());
- }
- public String getValue() {
- if (name == null) fetchChildren();
- return super.getValue();
- }
- public void fetchChildren() {
- name = lookupUtf8(nameIndex);
- }
- }
-
- abstract public class FieldOrMethodRefEntry implements Entry {
- private final int tag;
- protected String className, thingName, signature;
- protected int classIndex, nameAndTypeIndex;
-
- public FieldOrMethodRefEntry(int tag) {
- assert tag == CONSTANT_Fieldref
- || tag == CONSTANT_Methodref
- || tag == CONSTANT_InterfaceMethodref;
-
- this.tag = tag;
- }
-
- public int hashCode() {
- return tag
- + className.hashCode()
- + thingName.hashCode()
- + signature.hashCode();
- }
- public boolean equals(Object o) {
- return o instanceof FieldOrMethodRefEntry
- && ((FieldOrMethodRefEntry)o).tag == tag
- && ((FieldOrMethodRefEntry)o).className == className
- && ((FieldOrMethodRefEntry)o).thingName == thingName
- && ((FieldOrMethodRefEntry)o).signature == signature;
- }
-
- public int getTag() { return tag; }
- public String getClassName() { return className; }
- public String getFieldOrMethodName() { return thingName; }
- public String getSignature() { return signature; }
-
- public int getSize() { return 1; }
- public void writeContentsTo(DataOutputStream stream) throws IOException {
- stream.writeShort(classIndex);
- stream.writeShort(nameAndTypeIndex);
- }
- // Follows javap output format for field/method pool entries.
- public String toString() {
- return getEntryType(tag)+"\t#"+classIndex+".#"+nameAndTypeIndex+
- ";\t// "+getName("")+":"+signature;
- }
- public String toComment(String ownerClassName) {
- return "//"+getEntryType(tag)+" "+getName(ownerClassName)+":"+signature;
- }
- private String getName(String ownerClassName) {
- String name = getFieldOrMethodName();
- if (JMethod.INSTANCE_CONSTRUCTOR_NAME.equals(name))
- name = "\""+name+"\"";
- if (!getClassName().equals(ownerClassName))
- name = getClassName()+"."+name;
- return name;
- }
- }
-
- protected class FieldOrMethodRefEntryValue
- extends FieldOrMethodRefEntry
- implements EntryValue {
- public FieldOrMethodRefEntryValue(int tag,
- String className,
- String thingName,
- String signature) {
- super(tag);
- this.className = className.intern();
- this.thingName = thingName.intern();
- this.signature = signature.intern();
- }
-
- public void addChildren() {
- classIndex = addClass(className);
- nameAndTypeIndex = addNameAndType(thingName, signature);
- }
- }
-
- protected class FieldOrMethodRefEntryIndex
- extends FieldOrMethodRefEntry
- implements EntryIndex {
- public FieldOrMethodRefEntryIndex(int tag,
- int classIndex,
- int nameAndTypeIndex) {
- super(tag);
- this.classIndex = classIndex;
- this.nameAndTypeIndex = nameAndTypeIndex;
- }
- public FieldOrMethodRefEntryIndex(int tag, DataInputStream stream)
- throws IOException {
- this(tag, stream.readShort(), stream.readShort());
- }
- public String getClassName() {
- if (className == null) fetchChildren();
- return super.getClassName();
- }
- public String getFieldOrMethodName() {
- if (thingName == null) fetchChildren();
- return super.getFieldOrMethodName();
- }
- public String getSignature() {
- if (signature == null) fetchChildren();
- return super.getSignature();
- }
- public void fetchChildren() {
- className = lookupClass(classIndex);
- NameAndTypeEntry nat = (NameAndTypeEntry)lookupEntry(nameAndTypeIndex);
- thingName = nat.getName();
- signature = nat.getDescriptor();
- }
- }
-
- abstract public class NameAndTypeEntry implements Entry {
- protected String name, descriptor;
- protected int nameIndex, descriptorIndex;
-
- public int hashCode() { return name.hashCode() + descriptor.hashCode(); }
- public boolean equals(Object o) {
- return o instanceof NameAndTypeEntry
- && ((NameAndTypeEntry)o).name == name
- && ((NameAndTypeEntry)o).descriptor == descriptor;
- }
-
- public int getTag() { return CONSTANT_NameAndType; }
- public String getName() { return name; }
- public String getDescriptor() { return descriptor; }
-
- public int getSize() { return 1; }
- public void writeContentsTo(DataOutputStream stream) throws IOException {
- stream.writeShort(nameIndex);
- stream.writeShort(descriptorIndex);
- }
- // Follows javap output format for name/type pool entries.
- public String toString() {
- String natName = getName();
- if (JMethod.INSTANCE_CONSTRUCTOR_NAME.equals(natName))
- natName = "\""+natName+"\"";
- return "NameAndType\t#"+nameIndex+":#"+descriptorIndex+
- ";// "+natName+":"+getDescriptor();
- }
- public String toComment(String ownerClassname) { return ""; }
- }
-
- protected class NameAndTypeEntryValue
- extends NameAndTypeEntry
- implements EntryValue {
- public NameAndTypeEntryValue(String name, String descriptor) {
- this.name = name.intern();
- this.descriptor = descriptor.intern();
- }
- public void addChildren() {
- nameIndex = addUtf8(name);
- descriptorIndex = addUtf8(descriptor);
- }
- }
-
- protected class NameAndTypeEntryIndex
- extends NameAndTypeEntry
- implements EntryIndex {
- public NameAndTypeEntryIndex(int nameIndex, int descriptorIndex) {
- this.nameIndex = nameIndex;
- this.descriptorIndex = descriptorIndex;
- }
- public NameAndTypeEntryIndex(DataInputStream stream) throws IOException {
- this(stream.readShort(), stream.readShort());
- }
- public String getName() {
- if (name == null) fetchChildren();
- return super.getName();
- }
- public String getDescriptor() {
- if (descriptor == null) fetchChildren();
- return super.getDescriptor();
- }
- public void fetchChildren() {
- name = lookupUtf8(nameIndex);
- descriptor = lookupUtf8(descriptorIndex);
- }
- }
-}
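
For reference, the Long and Double entries above return getSize() == 2 because long and double constants occupy two consecutive constant-pool slots, so the pool's running index advances by two for them. A minimal standalone sketch of that index accounting, assuming nothing from FJBG (PoolIndexSketch and its members are illustrative names only):

    import java.util.ArrayList;
    import java.util.List;

    final class PoolIndexSketch {
        private final List<Object> values = new ArrayList<>();
        private int nextIndex = 1; // constant-pool indices start at 1, not 0

        // Returns the index assigned to the value; long and double constants take two slots.
        int add(Object value) {
            int index = nextIndex;
            values.add(value);
            nextIndex += (value instanceof Long || value instanceof Double) ? 2 : 1;
            return index;
        }

        int nextFreeIndex() { return nextIndex; }
    }
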
diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JConstantValueAttribute.java b/src/fjbg/ch/epfl/lamp/fjbg/JConstantValueAttribute.java
deleted file mode 100644
index 6ee05e43c7..0000000000
--- a/src/fjbg/ch/epfl/lamp/fjbg/JConstantValueAttribute.java
+++ /dev/null
@@ -1,69 +0,0 @@
-/* FJBG -- Fast Java Bytecode Generator
- * Copyright 2002-2013 LAMP/EPFL
- * @author Michel Schinz
- */
-
-package ch.epfl.lamp.fjbg;
-
-import java.io.DataInputStream;
-import java.io.DataOutputStream;
-import java.io.IOException;
-
-/**
- * ConstantValue attribute representing the value of a constant field.
- *
- * There can be no more than one ConstantValue attribute in the attributes
- * table of a given field_info structure. See section 4.8.2 of the JVM
- * specification.
- *
- * @author Stephane Micheloud
- * @version 1.0
- */
-
-public class JConstantValueAttribute extends JAttribute {
- /** Constant pool of the current classfile. */
- private JConstantPool pool;
-
- protected int constantValueIndex;
-
- public JConstantValueAttribute(FJBGContext context,
- JClass clazz,
- JField field) {
- super(context, clazz);
- this.pool = clazz.pool;
-
- assert field.getOwner() == clazz;
- }
-
- public JConstantValueAttribute(FJBGContext context,
- JClass clazz,
- Object owner, // JField
- String name,
- int size,
- DataInputStream stream)
- throws IOException {
- super(context, clazz, name);
- this.pool = clazz.pool;
-
- this.constantValueIndex = stream.readShort();
-
- assert name.equals(getName());
- }
-
- public String getName() { return "ConstantValue"; }
-
- // Follows javap output format for ConstantValue attribute.
- /*@Override*/ public String toString() {
- StringBuffer buf = new StringBuffer(" Constant value: ");
- buf.append(pool.lookupEntry(constantValueIndex));
- return buf.toString();
- }
-
- protected int getSize() {
- return 2; // Short.SIZE
- }
-
- protected void writeContentsTo(DataOutputStream stream) throws IOException {
- stream.writeShort(constantValueIndex);
- }
-}
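
The class deleted above serializes only a two-byte constant-pool index; the attribute name index and length are assumed to be written by the shared JAttribute machinery. A self-contained sketch of the complete on-disk layout of a ConstantValue attribute, using plain java.io and illustrative index values:

    import java.io.ByteArrayOutputStream;
    import java.io.DataOutputStream;
    import java.io.IOException;

    final class ConstantValueSketch {
        // Layout: u2 attribute_name_index, u4 attribute_length (always 2), u2 constantvalue_index.
        static byte[] encode(int nameIndex, int constantValueIndex) throws IOException {
            ByteArrayOutputStream bytes = new ByteArrayOutputStream();
            DataOutputStream out = new DataOutputStream(bytes);
            out.writeShort(nameIndex);          // pool index of the Utf8 entry "ConstantValue"
            out.writeInt(2);                    // payload is always two bytes
            out.writeShort(constantValueIndex); // pool index of the constant itself
            return bytes.toByteArray();
        }
    }
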
diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JEnclosingMethodAttribute.java b/src/fjbg/ch/epfl/lamp/fjbg/JEnclosingMethodAttribute.java
deleted file mode 100644
index f663f00ae1..0000000000
--- a/src/fjbg/ch/epfl/lamp/fjbg/JEnclosingMethodAttribute.java
+++ /dev/null
@@ -1,83 +0,0 @@
-/* FJBG -- Fast Java Bytecode Generator
- * Copyright 2002-2013 LAMP/EPFL
- * @author Michel Schinz
- */
-
-package ch.epfl.lamp.fjbg;
-
-import java.io.DataInputStream;
-import java.io.DataOutputStream;
-import java.io.IOException;
-
-/**
- * EnclosingMethod attribute.
- *
- * A class must have an EnclosingMethod attribute if and only if it is a
- * local class or an anonymous class. A class may have no more than one
- * EnclosingMethod attribute. See section 4.8.6 of the JVM specification.
- *
- * @author Michel Schinz
- * @version 1.0
- */
-
-public class JEnclosingMethodAttribute extends JAttribute {
- /** Constant pool of the current classfile. */
- private JConstantPool pool;
-
- protected final int classIdx;
- protected final int nameAndTypeIdx;
-
- public JEnclosingMethodAttribute(FJBGContext context,
- JClass clazz,
- String className,
- String methodName,
- JType methodType) {
- super(context, clazz);
- this.pool = clazz.pool;
-
- this.classIdx = pool.addClass(className);
- this.nameAndTypeIdx = pool.addNameAndType(methodName, methodType.getSignature());
- }
-
- public JEnclosingMethodAttribute(FJBGContext context,
- JClass clazz,
- Object owner,
- String name,
- int size,
- DataInputStream stream)
- throws IOException {
- super(context, clazz, name);
- this.pool = clazz.pool;
-
- this.classIdx = stream.readShort();
- this.nameAndTypeIdx = stream.readShort();
-
- assert name.equals(getName());
- }
-
- public String getName() { return "EnclosingMethod"; }
-
- // Follows javap output format for EnclosingMethod attribute.
- /*@Override*/ public String toString() {
- StringBuffer buf = new StringBuffer(" EnclosingMethod:");
- buf.append("\n #");
- buf.append(classIdx);
- if (nameAndTypeIdx != 0) {
- buf.append(" of #");
- buf.append(nameAndTypeIdx);
- }
- buf.append(";\t// ");
- buf.append(pool.lookupEntry(classIdx));
- buf.append("\n");
- return buf.toString();
- }
-
- protected int getSize() {
- return 4; // 2 * Short.SIZE
- }
-
- protected void writeContentsTo(DataOutputStream stream) throws IOException {
- stream.writeShort(classIdx);
- stream.writeShort(nameAndTypeIdx);
- }
-}
diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JExceptionsAttribute.java b/src/fjbg/ch/epfl/lamp/fjbg/JExceptionsAttribute.java
deleted file mode 100644
index b91d0f2e93..0000000000
--- a/src/fjbg/ch/epfl/lamp/fjbg/JExceptionsAttribute.java
+++ /dev/null
@@ -1,90 +0,0 @@
-/* FJBG -- Fast Java Bytecode Generator
- * Copyright 2002-2013 LAMP/EPFL
- * @author Michel Schinz
- */
-
-package ch.epfl.lamp.fjbg;
-
-import java.io.DataInputStream;
-import java.io.DataOutputStream;
-import java.io.IOException;
-
-/**
- * Exceptions attribute.
- *
- * This table is used by compilers to indicate which exceptions a method
- * is declared to throw. See section 2.6.4 of the JVM specification.
- *
- * @author Stephane Micheloud
- * @version 1.0
- */
-
-public class JExceptionsAttribute extends JAttribute {
- /** Constant pool of the current classfile. */
- private JConstantPool pool;
-
- protected int[] indexTable;
- protected int count;
-
- public JExceptionsAttribute(FJBGContext context,
- JClass clazz,
- JMethod owner) {
- super(context, clazz);
- this.pool = clazz.pool;
-
- this.count = 0;
- this.indexTable = new int[8]; // some size > count
-
- assert clazz == owner.getOwner();
- }
-
- public JExceptionsAttribute(FJBGContext context,
- JClass clazz,
- Object owner, //JMethod
- String name,
- int size,
- DataInputStream stream)
- throws IOException {
- super(context, clazz, name);
- this.pool = clazz.pool;
-
- this.count = stream.readShort();
- this.indexTable = new int[count];
- for (int i = 0; i < count; ++i)
- indexTable[i] = stream.readShort();
-
- assert name.equals(getName());
- }
-
- public void addEntry(int classIndex) {
- if (count >= indexTable.length) {
- int[] newIT = new int[indexTable.length * 2];
- System.arraycopy(indexTable, 0, newIT, 0, indexTable.length);
- indexTable = newIT;
- }
- indexTable[count++] = classIndex;
- }
-
- public String getName() { return "Exceptions"; }
-
- // Follows javap output format for Exceptions attribute.
- /*@Override*/ public String toString() {
- StringBuffer buf = new StringBuffer(" Exceptions: ");
-        for (int i = 0; i < count; ++i) {
- buf.append("\n throws ");
- buf.append(JClass.toExternalName(pool.lookupClass(indexTable[i])));
- }
- buf.append("\n");
- return buf.toString();
- }
-
- protected int getSize() {
-        return 2 + count * 2; // u2 exception count followed by one u2 index per entry
- }
-
- protected void writeContentsTo(DataOutputStream stream) throws IOException {
- stream.writeShort(count);
- for (int i = 0; i < count; ++i)
- stream.writeShort(indexTable[i]);
- }
-}
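
addEntry above grows its index table by doubling, which keeps appends amortized constant time while writeContentsTo only emits the count entries actually in use. The same growth pattern in isolation (IntTableSketch is an illustrative name, not part of FJBG):

    import java.util.Arrays;

    final class IntTableSketch {
        private int[] table = new int[8]; // same starting capacity as the deleted addEntry
        private int count = 0;

        // Amortized O(1) append: double the backing array whenever it is full.
        void add(int value) {
            if (count == table.length)
                table = Arrays.copyOf(table, table.length * 2);
            table[count++] = value;
        }

        int[] toArray() { return Arrays.copyOf(table, count); } // only the used prefix
    }
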
diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JExtendedCode.java b/src/fjbg/ch/epfl/lamp/fjbg/JExtendedCode.java
deleted file mode 100644
index d82db8289f..0000000000
--- a/src/fjbg/ch/epfl/lamp/fjbg/JExtendedCode.java
+++ /dev/null
@@ -1,667 +0,0 @@
-/* FJBG -- Fast Java Bytecode Generator
- * Copyright 2002-2013 LAMP/EPFL
- * @author Michel Schinz
- */
-
-package ch.epfl.lamp.fjbg;
-
-/**
- * Extended list of instructions, providing pseudo-instructions which
- * are easier to use than the standard ones.
- *
- * @author Michel Schinz, Thomas Friedli
- * @version 1.0
- */
-
-public class JExtendedCode extends JCode {
- public final static int COND_EQ = 0;
- public final static int COND_NE = 1;
- public final static int COND_LT = 2;
- public final static int COND_GE = 3;
- public final static int COND_GT = 4;
- public final static int COND_LE = 5;
-
- private final JOpcode[] forbidden = new JOpcode[0];
- private final JOpcode[] nothingToDo = new JOpcode[0];
-
- private final JOpcode[][][] typeConversions = {
- {
- /* T_BOOLEAN -> T_BOOLEAN */ nothingToDo,
- /* T_BOOLEAN -> T_CHAR */ forbidden,
- /* T_BOOLEAN -> T_FLOAT */ forbidden,
- /* T_BOOLEAN -> T_DOUBLE */ forbidden,
- /* T_BOOLEAN -> T_BYTE */ forbidden,
- /* T_BOOLEAN -> T_SHORT */ forbidden,
- /* T_BOOLEAN -> T_INT */ forbidden,
- /* T_BOOLEAN -> T_LONG */ forbidden
- },
- {
- /* T_CHAR -> T_BOOLEAN */ forbidden,
- /* T_CHAR -> T_CHAR */ nothingToDo,
- /* T_CHAR -> T_FLOAT */ {JOpcode.I2F},
- /* T_CHAR -> T_DOUBLE */ {JOpcode.I2D},
- /* T_CHAR -> T_BYTE */ {JOpcode.I2B},
- /* T_CHAR -> T_SHORT */ {JOpcode.I2S},
- /* T_CHAR -> T_INT */ nothingToDo,
- /* T_CHAR -> T_LONG */ {JOpcode.I2L}
- },
- {
- /* T_FLOAT -> T_BOOLEAN */ forbidden,
- /* T_FLOAT -> T_CHAR */ {JOpcode.F2I, JOpcode.I2C},
- /* T_FLOAT -> T_FLOAT */ nothingToDo,
- /* T_FLOAT -> T_DOUBLE */ {JOpcode.F2D},
- /* T_FLOAT -> T_BYTE */ {JOpcode.F2I, JOpcode.I2B},
- /* T_FLOAT -> T_SHORT */ {JOpcode.F2I, JOpcode.I2S},
- /* T_FLOAT -> T_INT */ {JOpcode.F2I},
- /* T_FLOAT -> T_LONG */ {JOpcode.F2L}
- },
- {
- /* T_DOUBLE -> T_BOOLEAN */ forbidden,
- /* T_DOUBLE -> T_CHAR */ {JOpcode.D2I, JOpcode.I2C},
- /* T_DOUBLE -> T_FLOAT */ {JOpcode.D2F},
- /* T_DOUBLE -> T_DOUBLE */ nothingToDo,
- /* T_DOUBLE -> T_BYTE */ {JOpcode.D2I, JOpcode.I2B},
- /* T_DOUBLE -> T_SHORT */ {JOpcode.D2I, JOpcode.I2S},
- /* T_DOUBLE -> T_INT */ {JOpcode.D2I},
- /* T_DOUBLE -> T_LONG */ {JOpcode.D2L}
- },
- {
- /* T_BYTE -> T_BOOLEAN */ forbidden,
- /* T_BYTE -> T_CHAR */ {JOpcode.I2C},
- /* T_BYTE -> T_FLOAT */ {JOpcode.I2F},
- /* T_BYTE -> T_DOUBLE */ {JOpcode.I2D},
- /* T_BYTE -> T_BYTE */ nothingToDo,
- /* T_BYTE -> T_SHORT */ nothingToDo,
- /* T_BYTE -> T_INT */ nothingToDo,
- /* T_BYTE -> T_LONG */ {JOpcode.I2L}
- },
- {
- /* T_SHORT -> T_BOOLEAN */ forbidden,
- /* T_SHORT -> T_CHAR */ {JOpcode.I2C},
- /* T_SHORT -> T_FLOAT */ {JOpcode.I2F},
- /* T_SHORT -> T_DOUBLE */ {JOpcode.I2D},
- /* T_SHORT -> T_BYTE */ {JOpcode.I2B},
- /* T_SHORT -> T_SHORT */ nothingToDo,
- /* T_SHORT -> T_INT */ nothingToDo,
- /* T_SHORT -> T_LONG */ {JOpcode.I2L}
- },
- {
- /* T_INT -> T_BOOLEAN */ forbidden,
- /* T_INT -> T_CHAR */ {JOpcode.I2C},
- /* T_INT -> T_FLOAT */ {JOpcode.I2F},
- /* T_INT -> T_DOUBLE */ {JOpcode.I2D},
- /* T_INT -> T_BYTE */ {JOpcode.I2B},
- /* T_INT -> T_SHORT */ {JOpcode.I2S},
- /* T_INT -> T_INT */ nothingToDo,
- /* T_INT -> T_LONG */ {JOpcode.I2L}
- },
- {
- /* T_LONG -> T_BOOLEAN */ forbidden,
- /* T_LONG -> T_CHAR */ {JOpcode.L2I, JOpcode.I2C},
- /* T_LONG -> T_FLOAT */ {JOpcode.L2F},
- /* T_LONG -> T_DOUBLE */ {JOpcode.L2D},
- /* T_LONG -> T_BYTE */ {JOpcode.L2I, JOpcode.I2B},
- /* T_LONG -> T_SHORT */ {JOpcode.L2I, JOpcode.I2S},
- /* T_LONG -> T_INT */ {JOpcode.L2I},
- /* T_LONG -> T_LONG */ nothingToDo
- }
- };
-
- public JExtendedCode(FJBGContext context,
- JClass clazz,
- JMethod owner) {
- super(context, clazz, owner);
- }
-
- public void emitPUSH(boolean value) { emitPUSH(value ? 1 : 0); }
- public void emitPUSH(Boolean value) { emitPUSH(value.booleanValue()); }
-
- public void emitPUSH(byte value) {
- switch (value) {
- case -1: emitICONST_M1(); break;
- case 0: emitICONST_0(); break;
- case 1: emitICONST_1(); break;
- case 2: emitICONST_2(); break;
- case 3: emitICONST_3(); break;
- case 4: emitICONST_4(); break;
- case 5: emitICONST_5(); break;
- default:
- emitBIPUSH(value);
- }
- }
- public void emitPUSH(Byte value) { emitPUSH(value.byteValue()); }
-
- public void emitPUSH(short value) {
- switch (value) {
- case -1: emitICONST_M1(); break;
- case 0: emitICONST_0(); break;
- case 1: emitICONST_1(); break;
- case 2: emitICONST_2(); break;
- case 3: emitICONST_3(); break;
- case 4: emitICONST_4(); break;
- case 5: emitICONST_5(); break;
- default:
- if (value >= Byte.MIN_VALUE && value <= Byte.MAX_VALUE)
- emitBIPUSH((byte)value);
- else
- emitSIPUSH(value);
- }
- }
- public void emitPUSH(Short value) { emitPUSH(value.shortValue()); }
-
- // TODO check that we do the right thing here
- public void emitPUSH(char value) { emitPUSH((int)value); }
- public void emitPUSH(Character value) { emitPUSH(value.charValue()); }
-
- public void emitPUSH(int value) {
- switch (value) {
- case -1: emitICONST_M1(); break;
- case 0: emitICONST_0(); break;
- case 1: emitICONST_1(); break;
- case 2: emitICONST_2(); break;
- case 3: emitICONST_3(); break;
- case 4: emitICONST_4(); break;
- case 5: emitICONST_5(); break;
- default:
- if (value >= Byte.MIN_VALUE && value <= Byte.MAX_VALUE)
- emitBIPUSH((byte)value);
- else if (value >= Short.MIN_VALUE && value <= Short.MAX_VALUE)
- emitSIPUSH((short)value);
- else
- emitPUSH_index(pool.addInteger(value));
- break;
- }
- }
- public void emitPUSH(Integer value) { emitPUSH(value.intValue()); }
-
- public void emitPUSH(long value) {
- if (value == 0L)
- emitLCONST_0();
- else if (value == 1L)
- emitLCONST_1();
- else
- emitLDC2_W(value);
- }
- public void emitPUSH(Long value) { emitPUSH(value.longValue()); }
-
- private static final Float ZEROF = Float.valueOf(0f);
- private static final Float ONEF = Float.valueOf(1f);
- private static final Float TWOF = Float.valueOf(2f);
- public void emitPUSH(Float value) {
- if (ZEROF.equals(value))
- emitFCONST_0();
- else if (ONEF.equals(value))
- emitFCONST_1();
- else if (TWOF.equals(value))
- emitFCONST_2();
- else
- emitPUSH_index(pool.addFloat(value.floatValue()));
- }
- public void emitPUSH(float value) { emitPUSH(Float.valueOf(value)); }
-
- private static final Double ZEROD = Double.valueOf(0d);
- private static final Double ONED = Double.valueOf(1d);
- public void emitPUSH(Double value) {
- if (ZEROD.equals(value))
- emitDCONST_0();
- else if (ONED.equals(value))
- emitDCONST_1();
- else
- emitLDC2_W(value.doubleValue());
- }
- public void emitPUSH(double value) { emitPUSH(Double.valueOf(value)); }
-
- public void emitPUSH(String s) {
- emitPUSH_index(pool.addString(s));
- }
-
- /** Pushes a class literal on the stack */
- public void emitPUSH(JReferenceType type) {
- assert owner.owner.major >= 49;
- emitPUSH_index(pool.addClass(type.getDescriptor()));
- }
-
- protected void emitPUSH_index(int index) {
- if (index <= 0xFF)
- emitU1(JOpcode.LDC, index);
- else
- emitU2(JOpcode.LDC_W, index);
- }
-
- public void emitLOAD(int index, JType type) {
- JOpcode opcode;
-
- switch (type.getTag()) {
- case JType.T_BOOLEAN: case JType.T_BYTE: case JType.T_CHAR:
- case JType.T_SHORT: case JType.T_INT:
- switch (index) {
- case 0: emitILOAD_0(); return;
- case 1: emitILOAD_1(); return;
- case 2: emitILOAD_2(); return;
- case 3: emitILOAD_3(); return;
- default: opcode = JOpcode.ILOAD;
- } break;
- case JType.T_FLOAT:
- switch (index) {
- case 0: emitFLOAD_0(); return;
- case 1: emitFLOAD_1(); return;
- case 2: emitFLOAD_2(); return;
- case 3: emitFLOAD_3(); return;
- default: opcode = JOpcode.FLOAD;
- } break;
- case JType.T_LONG:
- switch (index) {
- case 0: emitLLOAD_0(); return;
- case 1: emitLLOAD_1(); return;
- case 2: emitLLOAD_2(); return;
- case 3: emitLLOAD_3(); return;
- default: opcode = JOpcode.LLOAD;
- } break;
- case JType.T_DOUBLE:
- switch (index) {
- case 0: emitDLOAD_0(); return;
- case 1: emitDLOAD_1(); return;
- case 2: emitDLOAD_2(); return;
- case 3: emitDLOAD_3(); return;
- default: opcode = JOpcode.DLOAD;
- } break;
- case JType.T_ARRAY: case JType.T_OBJECT:
- switch (index) {
- case 0: emitALOAD_0(); return;
- case 1: emitALOAD_1(); return;
- case 2: emitALOAD_2(); return;
- case 3: emitALOAD_3(); return;
- default: opcode = JOpcode.ALOAD;
- } break;
- default:
- throw new IllegalArgumentException("invalid type for load "+type);
- }
-
- if (index > 0xFF)
- emitWIDE(opcode, index);
- else
- emitU1(opcode, index);
- }
- public void emitLOAD(JLocalVariable var) {
- emitLOAD(var.index, var.type);
- }
-
- public void emitSTORE(int index, JType type) {
- JOpcode opcode;
-
- switch (type.getTag()) {
- case JType.T_BOOLEAN: case JType.T_BYTE: case JType.T_CHAR:
- case JType.T_SHORT: case JType.T_INT:
- switch (index) {
- case 0: emitISTORE_0(); return;
- case 1: emitISTORE_1(); return;
- case 2: emitISTORE_2(); return;
- case 3: emitISTORE_3(); return;
- default: opcode = JOpcode.ISTORE;
- } break;
- case JType.T_FLOAT:
- switch (index) {
- case 0: emitFSTORE_0(); return;
- case 1: emitFSTORE_1(); return;
- case 2: emitFSTORE_2(); return;
- case 3: emitFSTORE_3(); return;
- default: opcode = JOpcode.FSTORE;
- } break;
- case JType.T_LONG:
- switch (index) {
- case 0: emitLSTORE_0(); return;
- case 1: emitLSTORE_1(); return;
- case 2: emitLSTORE_2(); return;
- case 3: emitLSTORE_3(); return;
- default: opcode = JOpcode.LSTORE;
- } break;
- case JType.T_DOUBLE:
- switch (index) {
- case 0: emitDSTORE_0(); return;
- case 1: emitDSTORE_1(); return;
- case 2: emitDSTORE_2(); return;
- case 3: emitDSTORE_3(); return;
- default: opcode = JOpcode.DSTORE;
- } break;
- case JType.T_ARRAY: case JType.T_OBJECT: case JType.T_ADDRESS:
- switch (index) {
- case 0: emitASTORE_0(); return;
- case 1: emitASTORE_1(); return;
- case 2: emitASTORE_2(); return;
- case 3: emitASTORE_3(); return;
- default: opcode = JOpcode.ASTORE;
- } break;
- default:
- throw new IllegalArgumentException("invalid type for store "+type);
- }
-
- if (index > 0xFF)
- emitWIDE(opcode, index);
- else
- emitU1(opcode, index);
- }
- public void emitSTORE(JLocalVariable var) {
- emitSTORE(var.index, var.type);
- }
-
- public void emitALOAD(JType type) {
- switch (type.getTag()) {
- case JType.T_BOOLEAN:
- case JType.T_BYTE:
- emitBALOAD();
- break;
- case JType.T_CHAR:
- emitCALOAD();
- break;
- case JType.T_SHORT:
- emitSALOAD();
- break;
- case JType.T_INT:
- emitIALOAD();
- break;
- case JType.T_FLOAT:
- emitFALOAD();
- break;
- case JType.T_LONG:
- emitLALOAD();
- break;
- case JType.T_DOUBLE:
- emitDALOAD();
- break;
- case JType.T_ARRAY:
- case JType.T_OBJECT:
- emitAALOAD();
- break;
- default:
- throw new IllegalArgumentException("invalid type for aload " + type);
- }
- }
-
- public void emitASTORE(JType type) {
- switch (type.getTag()) {
- case JType.T_BOOLEAN:
- case JType.T_BYTE:
- emitBASTORE();
- break;
- case JType.T_CHAR:
- emitCASTORE();
- break;
- case JType.T_SHORT:
- emitSASTORE();
- break;
- case JType.T_INT:
- emitIASTORE();
- break;
- case JType.T_FLOAT:
- emitFASTORE();
- break;
- case JType.T_LONG:
- emitLASTORE();
- break;
- case JType.T_DOUBLE:
- emitDASTORE();
- break;
- case JType.T_ARRAY:
- case JType.T_OBJECT:
- emitAASTORE();
- break;
- default:
- throw new IllegalArgumentException("invalid type for astore " + type);
- }
- }
-
- public void emitRETURN(JType type) {
- if (type.isValueType()) {
- switch (type.getTag()) {
- case JType.T_BOOLEAN:
- case JType.T_BYTE:
- case JType.T_CHAR:
- case JType.T_SHORT:
- case JType.T_INT:
- emitIRETURN();
- break;
- case JType.T_FLOAT:
- emitFRETURN();
- break;
- case JType.T_LONG:
- emitLRETURN();
- break;
- case JType.T_DOUBLE:
- emitDRETURN();
- break;
- }
- } else if (type.isArrayType() || type.isObjectType())
- emitARETURN();
- else if (type == JType.VOID)
- emitRETURN();
- else
- throw new IllegalArgumentException("invalid type for RETURN " + type);
- }
-
- public void emitADD(JType type) {
- switch (type.getTag()) {
- case JType.T_BOOLEAN: case JType.T_BYTE: case JType.T_CHAR:
- case JType.T_SHORT: case JType.T_INT:
- emitIADD(); break;
- case JType.T_FLOAT:
- emitFADD(); break;
- case JType.T_LONG:
- emitLADD(); break;
- case JType.T_DOUBLE:
- emitDADD(); break;
- }
- }
-
- /**
-     * Emits a basic type conversion instruction chosen according to the
-     * types given as parameters.
- *
- * @param fromType The type of the value to be cast into another type.
- * @param toType The type the value will be cast into.
- */
- public void emitT2T(JType fromType, JType toType) {
- assert fromType.getTag() >= JType.T_BOOLEAN
- && fromType.getTag() <= JType.T_LONG
- && toType.getTag() >= JType.T_BOOLEAN
- && toType.getTag() <= JType.T_LONG;
-
- JOpcode[] conv = typeConversions[fromType.getTag() - 4][toType.getTag() - 4];
- if (conv == forbidden) {
- throw new Error("inconvertible types : " + fromType.toString()
- + " -> " + toType.toString());
- } else if (conv != nothingToDo) {
- for (int i = 0; i < conv.length; i++) {
- emit(conv[i]);
- }
- }
- }
-
- public void emitIF(int cond, Label label) throws OffsetTooBigException {
- assert cond >= COND_EQ && cond <= COND_LE;
- emitU2(JOpcode.OPCODES[153 + cond], label.getOffset16(getPC() + 1, getPC()));
- }
- public void emitIF(int cond, int targetPC) throws OffsetTooBigException {
- int offset = targetPC - getPC();
- emitU2(JOpcode.OPCODES[153 + cond], offset);
- }
- public void emitIF(int cond) throws OffsetTooBigException {
- emitIF(cond, 0);
- }
-
- public void emitIF_ICMP(int cond, Label label) throws OffsetTooBigException {
- assert cond >= COND_EQ && cond <= COND_LE;
- emitU2(JOpcode.OPCODES[159 + cond], label.getOffset16(getPC() + 1, getPC()));
- }
- public void emitIF_ICMP(int cond, int targetPC) throws OffsetTooBigException {
- int offset = targetPC - getPC();
- emitU2(JOpcode.OPCODES[159 + cond], offset);
- }
- public void emitIF_ICMP(int cond) throws OffsetTooBigException {
- emitIF_ICMP(cond, 0);
- }
-
- public void emitIF_ACMP(int cond, Label label) throws OffsetTooBigException {
- assert cond == COND_EQ || cond == COND_NE;
- emitU2(JOpcode.OPCODES[165 + cond], label.getOffset16(getPC() + 1, getPC()));
- }
- public void emitIF_ACMP(int cond, int targetPC) throws OffsetTooBigException {
- int offset = targetPC - getPC();
- emitU2(JOpcode.OPCODES[165 + cond], offset);
- }
- public void emitIF_ACMP(int cond) throws OffsetTooBigException {
- emitIF_ACMP(cond, 0);
- }
-
- public void emitGOTO_maybe_W(Label label, boolean defaultToWide) {
- if (label.anchored)
- emitGOTO_maybe_W(label.targetPC);
- else {
- if (defaultToWide)
- emitGOTO_W(label);
- else {
- try {
- emitGOTO(label);
- } catch (OffsetTooBigException e) {
- throw new Error(e);
- }
- }
- }
- }
-
- public void emitGOTO_maybe_W(int targetPC) {
- int offset = targetPC - (getPC() + 1);
- if (offset < Short.MIN_VALUE || offset > Short.MAX_VALUE)
- emitGOTO_W(targetPC);
- else {
- try {
- emitGOTO(targetPC);
- } catch (OffsetTooBigException e) {
- throw new Error(e);
- }
- }
- }
-
- /**
-     * Emits a switch instruction chosen according to the characteristics
-     * of the given key sets and the given minimum density.
- *
- * @param keySets The array of all keys that must be compared to the
- * value on stack.
- * @param branches The labels representing the jump addresses linked
- * with the corresponding keys.
- * @param defaultBranch The label representing the default branch
- * address.
- */
- public void emitSWITCH(int[][] keySets,
- Label[] branches,
- Label defaultBranch,
- double minDensity) {
- assert keySets.length == branches.length;
-
- int flatSize = 0;
- for (int i = 0; i < keySets.length; ++i)
- flatSize += keySets[i].length;
-
- int[] flatKeys = new int[flatSize];
- Label[] flatBranches = new Label[flatSize];
- int flatI = 0;
- for (int i = 0; i < keySets.length; ++i) {
- Label branch = branches[i];
- int[] keys = keySets[i];
-            for (int j = 0; j < keys.length; ++j) {
-                flatKeys[flatI] = keys[j];
-                flatBranches[flatI] = branch;
-                ++flatI;
-            }
- }
- assert flatI == flatSize;
- emitSWITCH(flatKeys, flatBranches, defaultBranch, minDensity);
- }
-
- /**
-     * Emits a switch instruction chosen according to the characteristics
-     * of the given list of keys and the given minimum density.
- *
- * @param keys The array of all keys that must be compared to the
- * value on stack.
- * @param branches The labels representing the jump addresses linked
- * with the corresponding keys.
- * @param defaultBranch The label representing the default branch
- * address.
- * @param minDensity The minimum density to use for TABLESWITCH.
- */
- public void emitSWITCH(int[] keys,
- Label[] branches,
- Label defaultBranch,
- double minDensity) {
- assert keys.length == branches.length;
-
- //The special case for empty keys. It makes sense to allow
- //empty keys and generate LOOKUPSWITCH with defaultBranch
-        //only. This is exactly what javac does for a switch statement
-        //that has only a default case.
- if (keys.length == 0) {
- emitLOOKUPSWITCH(keys, branches, defaultBranch);
- return;
- }
- //the rest of the code assumes that keys.length > 0
-
- // sorting the tables
- // FIXME use quicksort
- for (int i = 1; i < keys.length; i++) {
- for (int j = 1; j <= keys.length - i; j++) {
- if (keys[j] < keys[j - 1]) {
- int tmp = keys[j];
- keys[j] = keys[j - 1];
- keys[j - 1] = tmp;
-
- Label tmp_l = branches[j];
- branches[j] = branches[j - 1];
- branches[j - 1] = tmp_l;
- }
- }
- }
-
- int keyMin = keys[0], keyMax = keys[keys.length - 1];
- /** Calculate in long to guard against overflow. */
- long keyRange = (long)keyMax - keyMin + 1;
- if ((double)keys.length / (double)keyRange >= minDensity) {
- // Keys are dense enough, use a table in which holes are
- // filled with defaultBranch.
- int[] newKeys = new int[(int)keyRange];
- Label[] newBranches = new Label[(int)keyRange];
- int oldPos = 0;
- for (int i = 0; i < keyRange; ++i) {
- int key = keyMin + i;
- newKeys[i] = key;
- if (keys[oldPos] == key) {
- newBranches[i] = branches[oldPos];
- ++oldPos;
- } else
- newBranches[i] = defaultBranch;
- }
- assert oldPos == keys.length;
- emitTABLESWITCH(newKeys, newBranches, defaultBranch);
- } else
- emitLOOKUPSWITCH(keys, branches, defaultBranch);
- }
-
- /**
- * Emits a method invocation instruction choosen according to
- * the caracteristics of the given method.
- *
- * @param method The method to be invoked.
- */
- public void emitINVOKE(JMethod method) {
- String mName = method.getName();
- String cName = method.getOwner().getName();
- JMethodType mType = (JMethodType)method.getType();
- if (method.isStatic())
- emitINVOKESTATIC(cName, mName, mType);
- else if (method.getOwner().isInterface())
- emitINVOKEINTERFACE(cName, mName, mType);
- else
- emitINVOKEVIRTUAL(cName, mName, mType);
- }
-
-}
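
The key decision in the deleted emitSWITCH is whether the sorted keys are dense enough for TABLESWITCH or should fall back to LOOKUPSWITCH. A standalone sketch of that heuristic (SwitchDensitySketch and useTableSwitch are illustrative names, not FJBG API):

    final class SwitchDensitySketch {
        // Use TABLESWITCH when the fraction of the key range actually covered by keys
        // reaches minDensity; otherwise a LOOKUPSWITCH is smaller.
        static boolean useTableSwitch(int[] sortedKeys, double minDensity) {
            if (sortedKeys.length == 0) return false; // degenerate case: lookup switch with only a default
            long range = (long) sortedKeys[sortedKeys.length - 1] - sortedKeys[0] + 1; // long guards against overflow
            return (double) sortedKeys.length / (double) range >= minDensity;
        }
    }
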
diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JField.java b/src/fjbg/ch/epfl/lamp/fjbg/JField.java
deleted file mode 100644
index 29d826ba99..0000000000
--- a/src/fjbg/ch/epfl/lamp/fjbg/JField.java
+++ /dev/null
@@ -1,62 +0,0 @@
-/* FJBG -- Fast Java Bytecode Generator
- * Copyright 2002-2013 LAMP/EPFL
- * @author Michel Schinz
- */
-
-package ch.epfl.lamp.fjbg;
-
-import java.io.DataInputStream;
-import java.io.IOException;
-
-/**
- * Java class field.
- *
- * @author Michel Schinz
- * @version 1.0
- */
-
-public class JField extends JFieldOrMethod {
-
- protected JField(FJBGContext context,
- JClass owner,
- int accessFlags,
- String name,
- JType type) {
- super(context, owner, accessFlags, name, type);
- }
-
- protected JField(FJBGContext context,
- JClass owner,
- DataInputStream stream)
- throws IOException {
- super(context, owner, stream);
- }
-
- // Follows javap output format for fields.
- /*@Override*/ public String toString() {
- StringBuffer buf = new StringBuffer(flagsToString());
- buf.append(toExternalName(getType()));
- buf.append(" ");
- buf.append(getName());
- buf.append(";\n");
- java.util.Iterator attrsIt = attributes.iterator();
- while (attrsIt.hasNext()) {
- JAttribute attrs = (JAttribute)attrsIt.next();
- buf.append(attrs);
- }
- return buf.toString();
- }
-
- private String flagsToString() {
- StringBuffer buf = new StringBuffer();
- if (isPublic()) buf.append("public ");
- else if (isProtected()) buf.append("protected ");
- else if (isPrivate()) buf.append("private ");
- if (isStatic()) buf.append("static ");
- else if (isTransient()) buf.append("transient ");
- else if (isVolatile()) buf.append("volatile ");
- if (isAbstract()) buf.append("abstract ");
- else if (isFinal()) buf.append("final ");
- return buf.toString();
- }
-}
diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JFieldOrMethod.java b/src/fjbg/ch/epfl/lamp/fjbg/JFieldOrMethod.java
deleted file mode 100644
index 794c0f13b5..0000000000
--- a/src/fjbg/ch/epfl/lamp/fjbg/JFieldOrMethod.java
+++ /dev/null
@@ -1,138 +0,0 @@
-/* FJBG -- Fast Java Bytecode Generator
- * Copyright 2002-2013 LAMP/EPFL
- * @author Michel Schinz
- */
-
-package ch.epfl.lamp.fjbg;
-
-import java.io.DataInputStream;
-import java.io.DataOutputStream;
-import java.io.IOException;
-
-/**
- * Abstract superclass for a Java field or method.
- *
- * No two methods or fields in one class file may have the same name and
- * descriptor. See sections 4.6 and 4.7 of the JVM specification.
- *
- * @author Michel Schinz
- * @version 1.0
- */
-
-abstract public class JFieldOrMethod extends JMember {
-
- protected final JClass owner;
- protected final JType type;
-
- protected final int nameIndex, signatureIndex;
-
- protected JFieldOrMethod(FJBGContext context,
- JClass owner,
- int accessFlags,
- String name,
- JType type) {
- super(context, accessFlags, name);
- this.owner = owner;
- this.type = type;
-
- nameIndex = owner.pool.addUtf8(name);
- signatureIndex = owner.pool.addUtf8(type.getSignature());
- }
-
- protected JFieldOrMethod(FJBGContext context,
- JClass owner,
- DataInputStream stream)
- throws IOException {
- super(context);
- this.owner = owner;
- this.accessFlags = stream.readShort();
- this.nameIndex = stream.readShort();
- this.name = owner.pool.lookupUtf8(nameIndex);
- this.signatureIndex = stream.readShort();
- this.type = JType.parseSignature(owner.pool.lookupUtf8(signatureIndex));
- this.attributes.addAll(JAttribute.readFrom(context, owner, this, stream));
- }
-
- public void freeze() throws JCode.OffsetTooBigException {
- assert !frozen;
- frozen = true;
- }
-
- public JClass getOwner() { return owner; }
-
- public JType getType() { return type; }
-
- public JClass getJClass() { return owner; }
-
- public boolean isPublic() {
- return (accessFlags & JAccessFlags.ACC_PUBLIC) != 0;
- }
-
- public boolean isPrivate() {
- return (accessFlags & JAccessFlags.ACC_PRIVATE) != 0;
- }
-
- public boolean isProtected() {
- return (accessFlags & JAccessFlags.ACC_PROTECTED) != 0;
- }
-
- public boolean isStatic() {
- return (accessFlags & JAccessFlags.ACC_STATIC) != 0;
- }
-
- public boolean isFinal() {
- return (accessFlags & JAccessFlags.ACC_FINAL) != 0;
- }
-
- public boolean isSuper() {
- return (accessFlags & JAccessFlags.ACC_SUPER) != 0;
- }
-
- public boolean isVolatile() {
- return (accessFlags & JAccessFlags.ACC_VOLATILE) != 0;
- }
-
- public boolean isTransient() {
- return (accessFlags & JAccessFlags.ACC_TRANSIENT) != 0;
- }
-
- public boolean isNative() {
- return (accessFlags & JAccessFlags.ACC_NATIVE) != 0;
- }
-
- public boolean isInterface() {
- return (accessFlags & JAccessFlags.ACC_INTERFACE) != 0;
- }
-
- public boolean isAbstract() {
- return (accessFlags & JAccessFlags.ACC_ABSTRACT) != 0;
- }
-
- public boolean isStrict() {
- return (accessFlags & JAccessFlags.ACC_STRICT) != 0;
- }
-
- // 1.5 specifics
- public boolean isBridge() {
- return (accessFlags & JAccessFlags.ACC_BRIDGE) != 0;
- }
-
- public boolean hasVarargs() {
- return (accessFlags & JAccessFlags.ACC_VARARGS) != 0;
- }
-
- public void writeTo(DataOutputStream stream) throws IOException {
- if (! frozen) {
- try {
- freeze();
- }
- catch (JCode.OffsetTooBigException e) {
- throw new Error(e);
- }
- }
- stream.writeShort(accessFlags);
- stream.writeShort(nameIndex);
- stream.writeShort(signatureIndex);
- JAttribute.writeTo(getAttributes(), stream);
- }
-}
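
The isPublic/isStatic/isFinal family above is plain bit testing against the JVM access-flags word. A compact sketch with the standard flag values written out (the constants are the values the JVM specification defines and that JAccessFlags presumably mirrors; AccessFlagsSketch is an illustrative name):

    final class AccessFlagsSketch {
        static final int ACC_PUBLIC = 0x0001, ACC_PRIVATE = 0x0002, ACC_PROTECTED = 0x0004,
                         ACC_STATIC = 0x0008, ACC_FINAL   = 0x0010;

        // Renders a javap-style modifier prefix from a raw access_flags word.
        static String modifiers(int accessFlags) {
            StringBuilder buf = new StringBuilder();
            if ((accessFlags & ACC_PUBLIC) != 0)         buf.append("public ");
            else if ((accessFlags & ACC_PROTECTED) != 0) buf.append("protected ");
            else if ((accessFlags & ACC_PRIVATE) != 0)   buf.append("private ");
            if ((accessFlags & ACC_STATIC) != 0)         buf.append("static ");
            if ((accessFlags & ACC_FINAL) != 0)          buf.append("final ");
            return buf.toString();
        }
    }
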
diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JInnerClassesAttribute.java b/src/fjbg/ch/epfl/lamp/fjbg/JInnerClassesAttribute.java
deleted file mode 100644
index 1c1ced500d..0000000000
--- a/src/fjbg/ch/epfl/lamp/fjbg/JInnerClassesAttribute.java
+++ /dev/null
@@ -1,201 +0,0 @@
-/* FJBG -- Fast Java Bytecode Generator
- * Copyright 2002-2013 LAMP/EPFL
- * @author Michel Schinz
- */
-
-package ch.epfl.lamp.fjbg;
-
-import java.io.DataInputStream;
-import java.io.DataOutputStream;
-import java.io.IOException;
-import java.util.Iterator;
-import java.util.LinkedHashMap;
-import java.util.Map;
-
-/**
- * InnerClasses attribute.
- *
- * The ClassFile structure of a class/interface C must have exactly one
- * InnerClasses attribute in its attributes table if the constant pool of C
- * contains a CONSTANT_Class_info entry which represents a class or interface
- * that is not a member of a package. See section 4.8.5 of the JVM Specification.
- *
- * @author Iulian Dragos, Stephane Micheloud
- * @version 1.1
- */
-public class JInnerClassesAttribute extends JAttribute {
- /** Constant pool of the current classfile. */
- private JConstantPool pool;
-
- /** InnerClass entries */
- private Map/*<String, Entry>*/ entries = new LinkedHashMap();
-
- public JInnerClassesAttribute(FJBGContext context, JClass clazz) {
- super(context, clazz);
- this.pool = clazz.pool;
- }
-
- public JInnerClassesAttribute(FJBGContext context,
- JClass clazz,
- Object owner,
- String name,
- int size,
- DataInputStream stream)
- throws IOException {
- super(context, clazz, name);
- this.pool = clazz.pool;
-
- String inner = null;
- int count = stream.readShort();
- for (int i = 0; i < count; ++i) {
- int innerIdx = stream.readShort();
- int outerIdx = stream.readShort();
- int nameIdx = stream.readShort();
- int flags = stream.readShort();
- inner = pool.lookupClass(innerIdx);
- entries.put(inner, new Entry(innerIdx, outerIdx, nameIdx, flags));
- }
-
- assert name.equals(getName());
- }
-
- public void addEntry(String inner, String outer, String name, int flags) {
- int innerIdx = pool.addClass(inner);
- int outerIdx = 0;
- if (outer != null) outerIdx = pool.addClass(outer);
- int nameIdx = 0;
- if (name != null) nameIdx = pool.addUtf8(name);
-
- Entry e = new Entry(innerIdx, outerIdx, nameIdx, flags);
-
- if (entries.containsKey(inner)) {
- Entry other = (Entry) entries.get(inner);
- assert other.outerInfo == e.outerInfo && other.originalName == e.originalName && other.innerFlags == e.innerFlags
- : inner + " already declared as " + other;
- } else
- entries.put(inner, e);
- }
-
- public String getName() { return "InnerClasses"; }
-
- // Follows javap output format for the InnerClass attribute.
- /*@Override*/ public String toString() {
- // Here we intentionally use "InnerClass" as javap :-(
- StringBuffer buf = new StringBuffer(" InnerClass: ");
- for (Iterator it = entries.values().iterator(); it.hasNext(); ) {
- Entry e = (Entry)it.next();
- buf.append("\n ");
- buf.append(e.innerFlagsToString());
- buf.append("#");
- if (e.originalName != 0) {
- buf.append(e.originalName);
- buf.append("= #");
- }
- buf.append(e.innerInfo);
- if (e.outerInfo != 0) {
- buf.append(" of #");
- buf.append(e.outerInfo);
- }
- buf.append("; //");
- if (e.originalName != 0) {
- buf.append(pool.lookupUtf8(e.originalName));
- buf.append("=");
- }
- buf.append("class ");
- buf.append(pool.lookupClass(e.innerInfo));
- if (e.outerInfo != 0) {
- buf.append(" of class ");
- buf.append(pool.lookupClass(e.outerInfo));
- }
- }
- buf.append("\n");
- return buf.toString();
- }
-
- protected int getSize() {
- return 2 + entries.size() * 8;
- }
-
- protected void writeContentsTo(DataOutputStream stream) throws IOException {
- stream.writeShort(entries.size());
- for (Iterator it = entries.values().iterator(); it.hasNext(); ) {
- Entry e = (Entry)it.next();
- stream.writeShort(e.innerInfo);
- stream.writeShort(e.outerInfo);
- stream.writeShort(e.originalName);
- stream.writeShort(e.innerFlags);
- }
- }
-
- /** An entry in the InnerClasses attribute, as defined by the JVM Spec. */
- private class Entry {
- /** CONSTANT_Class_info index in the pool for the inner class (mangled). */
- int innerInfo;
-
- /** CONSTANT_Class_info index in the pool for the outer class (mangled). */
- int outerInfo;
-
- /** CONSTANT_Utf8_info index in the pool for the original name of the inner class. */
- int originalName;
-
- /** Short int for modifier flags. */
- int innerFlags;
-
- public Entry(int iI, int oI, int oN, int f) {
- this.innerInfo = iI;
- this.outerInfo = oI;
- this.originalName = oN;
- this.innerFlags = f;
- }
-
- public Entry(String innerClass, String outerClass, String name, int flags) {
- this(pool.addClass(innerClass), pool.addClass(outerClass), pool.addUtf8(name), flags);
- }
-
- /** Two entries are equal if they refer to the same inner class.
- * innerInfo represents a unique name (mangled).
- */
- public boolean equals(Object other) {
- if (other instanceof Entry) {
- Entry otherEntry = (Entry) other;
- return otherEntry.innerInfo == this.innerInfo;
- }
- return false;
- }
-
- public String innerFlagsToString() {
- StringBuffer buf = new StringBuffer();
- if (isPublic()) buf.append("public ");
- else if (isProtected()) buf.append("protected ");
- else if (isPrivate()) buf.append("private ");
- //if (isStatic()) buf.append("static "); // as javap
- if (isAbstract()) buf.append("abstract ");
- else if (isFinal()) buf.append("final ");
- return buf.toString();
- }
-
- private boolean isPublic() {
- return (innerFlags & JAccessFlags.ACC_PUBLIC) != 0;
- }
-
- private boolean isPrivate() {
- return (innerFlags & JAccessFlags.ACC_PRIVATE) != 0;
- }
-
- private boolean isProtected() {
- return (innerFlags & JAccessFlags.ACC_PROTECTED) != 0;
- }
-
- private boolean isStatic() {
- return (innerFlags & JAccessFlags.ACC_STATIC) != 0;
- }
-
- private boolean isFinal() {
- return (innerFlags & JAccessFlags.ACC_FINAL) != 0;
- }
-
- private boolean isAbstract() {
- return (innerFlags & JAccessFlags.ACC_ABSTRACT) != 0;
- }
- }
-}
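
The deleted attribute keys its rows by inner-class name in a LinkedHashMap, so registration order is preserved and a duplicate registration is accepted only when it agrees with the existing row. The same bookkeeping in isolation (InnerClassDedupSketch and its int[] row encoding are illustrative, not the FJBG Entry type):

    import java.util.Arrays;
    import java.util.LinkedHashMap;
    import java.util.Map;

    final class InnerClassDedupSketch {
        // row = { innerInfo, outerInfo, originalName, innerFlags }
        private final Map<String, int[]> entries = new LinkedHashMap<>();

        void add(String innerName, int innerIdx, int outerIdx, int nameIdx, int flags) {
            int[] row = { innerIdx, outerIdx, nameIdx, flags };
            int[] previous = entries.putIfAbsent(innerName, row);
            if (previous != null && !Arrays.equals(previous, row))
                throw new IllegalStateException(innerName + " already declared differently");
        }

        int sizeInBytes() { return 2 + entries.size() * 8; } // u2 count + four u2 fields per row
    }
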
diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JLabel.java b/src/fjbg/ch/epfl/lamp/fjbg/JLabel.java
deleted file mode 100644
index 96f3b4ebef..0000000000
--- a/src/fjbg/ch/epfl/lamp/fjbg/JLabel.java
+++ /dev/null
@@ -1,30 +0,0 @@
-/* FJBG -- Fast Java Bytecode Generator
- * Copyright 2002-2013 LAMP/EPFL
- * @author Michel Schinz
- */
-
-package ch.epfl.lamp.fjbg;
-
-/**
- * Labels which can be attached to instructions.
- *
- * @author Michel Schinz
- * @version 1.0
- */
-
-public class JLabel {
- public final static int UNDEFINED_ANCHOR = -1;
- protected int anchor = UNDEFINED_ANCHOR;
-
- public boolean isAnchored() { return anchor != UNDEFINED_ANCHOR; }
-
- public int getAnchor() {
- assert isAnchored();
- return anchor;
- }
-
- public void setAnchor(int anchor) {
- assert !isAnchored();
- this.anchor = anchor;
- }
-}
diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JLineNumberTableAttribute.java b/src/fjbg/ch/epfl/lamp/fjbg/JLineNumberTableAttribute.java
deleted file mode 100644
index f8c09b8ef8..0000000000
--- a/src/fjbg/ch/epfl/lamp/fjbg/JLineNumberTableAttribute.java
+++ /dev/null
@@ -1,121 +0,0 @@
-/* FJBG -- Fast Java Bytecode Generator
- * Copyright 2002-2013 LAMP/EPFL
- * @author Michel Schinz
- */
-
-package ch.epfl.lamp.fjbg;
-
-import java.io.DataInputStream;
-import java.io.DataOutputStream;
-import java.io.IOException;
-
-/**
- * Attribute storing the correspondence between instructions and source
- * line numbers.
- *
- * @author Michel Schinz
- * @version 1.0
- */
-
-public class JLineNumberTableAttribute extends JAttribute {
- protected final JCode code;
-
- public JLineNumberTableAttribute(FJBGContext context,
- JClass clazz,
- JCode owner) {
- super(context, clazz);
- this.code = owner;
-
- assert owner.getOwner().getOwner() == clazz;
- }
-
- public JLineNumberTableAttribute(FJBGContext context,
- JClass clazz,
- Object owner,
- String name,
- int size,
- DataInputStream stream)
- throws IOException {
- super(context, clazz, name);
- this.code = (JCode)owner;
-
- int[] mapping = new int[code.getSize()];
-
- int count = stream.readShort();
- for (int i = 0; i < count; ++i) {
- int startPC = stream.readShort();
- int lineNum = stream.readShort();
- mapping[startPC] = lineNum;
- }
-
- // Avoids duplication of LineNumberTable attribute
- // (see method ensureLineNumberCapacity in class JCode).
- assert code.lineNumbers == null;
- code.lineNumbers = new int[0];
-
- int lineNum = 0;
- for (int pc = 0; pc < mapping.length; ++pc) {
- if (mapping[pc] != 0) lineNum = mapping[pc];
- if (lineNum != 0) code.setLineNumber(pc, lineNum);
- }
-
- assert name.equals(getName());
- }
-
- public String getName() { return "LineNumberTable"; }
-
- // Follows javap output format for LineNumberTable attribute.
- /*@Override*/ public String toString() {
- StringBuffer buf = new StringBuffer(" LineNumberTable: ");
- int[] encoding = encode();
- for (int i = 0; i < encoding.length/2; ++i) {
- buf.append("\n line ");
- buf.append(encoding[i * 2 + 1]);
- buf.append(": ");
- buf.append(encoding[i * 2]);
- }
- buf.append("\n");
- return buf.toString();
- }
-
- protected int[] encoding;
- protected int[] encode() {
- if (encoding == null) {
- int[] lineNumbers = code.getLineNumbers();
- int[] preEncoding = new int[lineNumbers.length * 2];
- int prevLineNum = 0;
-
- int i = 0;
- for (int pc = 0; pc < lineNumbers.length; ++pc) {
- int lineNum = lineNumbers[pc];
-            if (lineNum != 0 && lineNum != prevLineNum) {
- preEncoding[i++] = pc;
- preEncoding[i++] = lineNum;
- prevLineNum = lineNum;
- }
- }
- if (i == preEncoding.length)
- encoding = preEncoding;
- else {
- encoding = new int[i];
- System.arraycopy(preEncoding, 0, encoding, 0, i);
- }
- }
- return encoding;
- }
-
- protected int getSize() {
- int[] encoding = encode();
- return 2 + encoding.length * 2;
- }
-
- protected void writeContentsTo(DataOutputStream stream) throws IOException {
- int[] encoding = encode();
- int entries = encoding.length / 2;
- stream.writeShort(entries);
- for (int i = 0; i < entries; ++i) {
- stream.writeShort(encoding[i * 2]);
- stream.writeShort(encoding[i * 2 + 1]);
- }
- }
-}
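
encode() above compresses a per-pc line array into (start_pc, line) pairs, emitting a pair only when the line number changes. A standalone sketch of that pass (LineNumberEncodeSketch is an illustrative name):

    import java.util.ArrayList;
    import java.util.List;

    final class LineNumberEncodeSketch {
        // lineNumbers is indexed by bytecode pc; 0 means "no line recorded at this pc".
        static List<int[]> encode(int[] lineNumbers) {
            List<int[]> pairs = new ArrayList<>();
            int previousLine = 0;
            for (int pc = 0; pc < lineNumbers.length; ++pc) {
                int line = lineNumbers[pc];
                if (line != 0 && line != previousLine) {
                    pairs.add(new int[] { pc, line });   // (start_pc, line_number)
                    previousLine = line;
                }
            }
            return pairs;
        }
    }
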
diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JLocalVariable.java b/src/fjbg/ch/epfl/lamp/fjbg/JLocalVariable.java
deleted file mode 100644
index af7980656f..0000000000
--- a/src/fjbg/ch/epfl/lamp/fjbg/JLocalVariable.java
+++ /dev/null
@@ -1,42 +0,0 @@
-/* FJBG -- Fast Java Bytecode Generator
- * Copyright 2002-2013 LAMP/EPFL
- * @author Michel Schinz
- */
-
-package ch.epfl.lamp.fjbg;
-
-/**
- * Representation of a local variable or method argument.
- *
- * @author Michel Schinz
- * @version 1.0
- */
-
-public class JLocalVariable {
- protected final JMethod owner;
- protected final JType type;
- protected final String name;
- protected final int index;
-
- protected JLocalVariable(FJBGContext context,
- JMethod owner,
- JType type,
- String name,
- int index) {
- this.owner = owner;
- this.type = type;
- this.name = name;
- this.index = index;
-
- assert index < 0xFFFF : "index too big for local variable: " + index;
- }
-
- public JMethod getOwner() { return owner; }
- public int getIndex() { return index; }
- public String getName() { return name; }
- public JType getType() { return type; }
-
- /*@Override*/ public String toString() {
- return "0\t"+type.getSize()+"\t"+index+"\t"+name+"\t"+type;
- }
-}
diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JLocalVariableTableAttribute.java b/src/fjbg/ch/epfl/lamp/fjbg/JLocalVariableTableAttribute.java
deleted file mode 100644
index b277cc71c0..0000000000
--- a/src/fjbg/ch/epfl/lamp/fjbg/JLocalVariableTableAttribute.java
+++ /dev/null
@@ -1,167 +0,0 @@
-/* FJBG -- Fast Java Bytecode Generator
- * Copyright 2002-2013 LAMP/EPFL
- * @author Michel Schinz
- */
-
-package ch.epfl.lamp.fjbg;
-
-import java.io.DataInputStream;
-import java.io.DataOutputStream;
-import java.io.IOException;
-import java.util.Iterator;
-import java.util.LinkedList;
-
-import ch.epfl.lamp.fjbg.JConstantPool.*;
-
-/**
- * Attribute storing local variables.
- *
- * @author Stephane Micheloud
- * @version 1.0
- */
-
-public class JLocalVariableTableAttribute extends JAttribute {
- /** Constant pool of the current classfile. */
- private JConstantPool pool;
-
- protected final LinkedList/*<Entry>*/ entries = new LinkedList();
- protected int localVariableIndex = 0;
-
- public JLocalVariableTableAttribute(FJBGContext context,
- JClass clazz,
- JCode code) {
- super(context, clazz);
- this.pool = clazz.pool;
-
- assert code.getOwner().getOwner() == clazz;
- }
-
- public JLocalVariableTableAttribute(FJBGContext context,
- JClass clazz,
- Object owner,
- String name,
- int size,
- DataInputStream stream)
- throws IOException {
- super(context, clazz, name);
- this.pool = clazz.pool;
-
- int count = stream.readShort();
- for (int i = 0; i < count; ++i) {
- int startPc = stream.readShort();
- int length = stream.readShort();
- int nameIndex = stream.readShort();
- int descIndex = stream.readShort();
- int index = stream.readShort();
- addEntry(startPc, length, nameIndex, descIndex, index);
- }
-
- assert name.equals(getName());
- }
-
- public void addEntry(int startPc, int length, int nameIndex,
- int descIndex, int index) {
- entries.add(new Entry(startPc, length, nameIndex, descIndex, index));
- }
-
- public void addEntry(int startPc, int length, String name,
- String desc, int index) {
- Entry e = new Entry(startPc, length, name, desc, index);
- Entry other = getEntry(index);
- if (other != null) {
- assert other.nameIndex == e.nameIndex && other.descIndex == e.descIndex
- : e + " already declared as " + other;
- } else
- entries.add(e);
- }
-
- public void addEntry(int startPc, int length, String name, String desc) {
- entries.add(new Entry(startPc, length, name, desc));
- }
-
- public String getName() { return "LocalVariableTable"; }
-
- // Follows javap output format for LocalVariableTable attribute.
- /*@Override*/ public String toString() {
- StringBuffer buf = new StringBuffer(" LocalVariableTable: ");
- buf.append("\n Start Length Slot Name Signature");
- for (Iterator it = entries.iterator(); it.hasNext(); ) {
- buf.append("\n ");
- Entry e = (Entry)it.next();
- Utf8Entry name = (Utf8Entry)pool.lookupEntry(e.nameIndex);
- Utf8Entry sig = (Utf8Entry)pool.lookupEntry(e.descIndex);
- buf.append(e.startPc);
- buf.append(" ");
- buf.append(e.length);
- buf.append(" ");
- buf.append(e.index);
- buf.append(" ");
- buf.append(name.getValue());
- buf.append(" ");
- buf.append(sig.getValue());
- }
- buf.append("\n");
- return buf.toString();
- }
-
- public int getMaxLocals() {
- return localVariableIndex;
- }
-
- public int getSize() {
- return 2 + entries.size() * 10;
- }
-
- protected void writeContentsTo(DataOutputStream stream) throws IOException {
- stream.writeShort(entries.size());
- for (Iterator it = entries.iterator(); it.hasNext(); ) {
- Entry e = (Entry)it.next();
- stream.writeShort(e.startPc);
- stream.writeShort(e.length);
- stream.writeShort(e.nameIndex);
- stream.writeShort(e.descIndex);
- stream.writeShort(e.index);
- }
- }
-
- private Entry getEntry(int index) {
- Entry e = null;
- try { e = (Entry)entries.get(index); } catch (Exception ex) {}
- return e;
- }
-
- private class Entry {
- int startPc;
- int length;
- int nameIndex;
- int descIndex;
- int index;
-
- public Entry(int startPc, int length, int nameIndex, int descIndex, int index) {
- this.startPc = startPc;
- this.length = length;
- this.nameIndex = nameIndex;
- this.descIndex = descIndex;
- this.index = index;
- localVariableIndex += length;
- }
-
- public Entry(int startPc, int length, String name, String desc, int index) {
- this(startPc, length, pool.addUtf8(name), pool.addUtf8(desc), index);
- }
-
- public Entry(int startPc, int length, String name, String desc) {
- this(startPc, length, pool.addUtf8(name), pool.addUtf8(desc), localVariableIndex);
- }
-
- /** Two entries are equal if they refer to the same index.
- */
- public boolean equals(Object other) {
- if (other instanceof Entry) {
- Entry otherEntry = (Entry) other;
- return otherEntry.index == this.index;
- }
- return false;
- }
- }
-}
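
For reference, the attribute deleted above writes the standard LocalVariableTable layout: a u2 entry count followed by five u2 fields per entry, which is exactly what getSize() (2 + 10 * entries) and writeContentsTo() encode. A minimal usage sketch, assuming a JClass clazz, its FJBGContext context, a JCode code and its length codeLength already exist elsewhere; the variable names are illustrative only:

    JLocalVariableTableAttribute lvt =
        new JLocalVariableTableAttribute(context, clazz, code);
    lvt.addEntry(0, codeLength, "this", clazz.getType().getSignature(), 0); // receiver in slot 0
    lvt.addEntry(0, codeLength, "x", "I", 1);                               // an int parameter in slot 1
    int contentBytes = lvt.getSize();   // 2 + 2 * 10 = 22 bytes of attribute content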
diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JMember.java b/src/fjbg/ch/epfl/lamp/fjbg/JMember.java
deleted file mode 100644
index 6356cc874d..0000000000
--- a/src/fjbg/ch/epfl/lamp/fjbg/JMember.java
+++ /dev/null
@@ -1,109 +0,0 @@
-/* FJBG -- Fast Java Bytecode Generator
- * Copyright 2002-2013 LAMP/EPFL
- * @author Michel Schinz
- */
-
-package ch.epfl.lamp.fjbg;
-
-import java.util.Iterator;
-import java.util.LinkedList;
-import java.util.List;
-
-/**
- * Abstract superclass for a Java class, field or method.
- *
- * @author Nikolay Mihaylov
- * @version 1.0
- */
-
-abstract public class JMember {
-
- protected boolean frozen = false;
-
- protected final FJBGContext context;
-
- protected String name;
-
- protected int accessFlags;
-
- protected final List/*<JAttribute>*/ attributes = new LinkedList();
-
- protected JMember(FJBGContext context) { this.context = context; }
-
- protected JMember(FJBGContext context, int accessFlags, String name) {
- this(context);
- this.name = name;
- this.accessFlags = accessFlags;
- }
-
- /**
- * Gets the access flags of the member.
- * @return The int representing the access flags of the member.
- */
- public int getAccessFlags() { return accessFlags; }
-
- /**
- * Gets the name of the member.
- * @return The string representing the name of the member.
- */
- public String getName() { return name; }
-
- /**
- * Gets the type of this member (for a class, the type of its instances).
- * @return The type of this member.
- */
- public abstract JType getType();
-
- /**
- * Gets the class that owns this member.
- * @return The class owning this member, or the class itself if this member is a class.
- */
- public abstract JClass getJClass();
-
- /**
- * Gets the constant pool of the class.
- * @return The constant pool of the class.
- */
- public JConstantPool getConstantPool() { return getJClass().getConstantPool(); }
-
- public FJBGContext getContext() { return context; }
-
- /**
- * Adds an attribute to the member.
- * @param attr The attribute to be added.
- */
- public void addAttribute(JAttribute attr) {
- assert !frozen;
- attributes.add(attr);
- }
-
- /**
- * Gets the list of all attributes of this member.
- * @return The list of attributes attached to this member.
- */
- public List/*<JAttribute>*/ getAttributes() {
- return attributes;
- }
-
- /**
- * Get the attribute with the given name, or null if it doesn't
- * exist.
- */
- public JAttribute getAttribute(String name) {
- Iterator attrIt = getAttributes().iterator();
- while (attrIt.hasNext()) {
- JAttribute attr = (JAttribute)attrIt.next();
- if (attr.getName().equals(name))
- return attr;
- }
- return null;
- }
-
- protected static String toExternalName(String name) {
- return name.replace('/', '.');
- }
-
- protected static String toExternalName(JType tpe) {
- return tpe.toString().replace(':', '.');
- }
-}
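
getAttribute(String) above does a simple linear, name-based scan of the attribute list, so callers retrieve attributes by their classfile name. A small sketch, assuming method is a JMethod built elsewhere and that its code attribute reports the standard name "Code":

    JAttribute codeAttr = method.getAttribute("Code");
    if (codeAttr != null)
        System.out.print(codeAttr);   // javap-style dump via the attribute's toString()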
diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JMethod.java b/src/fjbg/ch/epfl/lamp/fjbg/JMethod.java
deleted file mode 100644
index 01d58a45c7..0000000000
--- a/src/fjbg/ch/epfl/lamp/fjbg/JMethod.java
+++ /dev/null
@@ -1,199 +0,0 @@
-/* FJBG -- Fast Java Bytecode Generator
- * Copyright 2002-2013 LAMP/EPFL
- * @author Michel Schinz
- */
-
-package ch.epfl.lamp.fjbg;
-
-import java.io.DataInputStream;
-import java.io.IOException;
-import java.util.Iterator;
-import java.util.LinkedList;
-import java.util.List;
-
-/**
- * Representation of a Java method.
- *
- * @author Michel Schinz
- * @version 1.0
- */
-
-public class JMethod extends JFieldOrMethod {
- public final static String CLASS_CONSTRUCTOR_NAME = "<clinit>";
- public final static String INSTANCE_CONSTRUCTOR_NAME = "<init>";
-
- protected /*final*/ JCode code;
- protected final String[] argNames;
-
- protected final LinkedList/*<JLocalVariable>*/ localVariables =
- new LinkedList();
- protected int localVariableIndex = 0;
-
-
- protected JMethod(FJBGContext context,
- JClass owner,
- int accessFlags,
- String name,
- JType returnType,
- JType[] argTypes,
- String[] argNames) {
- super(context,
- owner,
- accessFlags,
- name,
- new JMethodType(returnType, argTypes));
- this.argNames = argNames;
-
- assert argTypes.length == argNames.length;
-
- if (isAbstract() || isNative()) {
- code = null;
- } else {
- code = context.JCode(owner, this);
- addAttribute(context.JCodeAttribute(owner, this));
-
- if (!isStatic())
- addNewLocalVariable(owner.getType(), "this");
-
- for (int i = 0; i < argTypes.length; ++i)
- addNewLocalVariable(argTypes[i], argNames[i]);
- }
- }
-
- protected JMethod(FJBGContext context,
- JClass owner,
- DataInputStream stream)
- throws IOException {
- super(context, owner, stream);
-
- assert isAbstract() || isNative() || code != null;
-
- int n = 0;
- if (code != null) {
- for (Iterator it = code.getAttributes().iterator(); it.hasNext(); ) {
- JAttribute attr = (JAttribute)it.next();
- if (attr instanceof JLocalVariableTableAttribute)
- n = ((JLocalVariableTableAttribute)attr).getMaxLocals();
- }
- }
- this.localVariableIndex = n;
-
-
- JType[] argTypes = ((JMethodType)getType()).getArgumentTypes();
- argNames = new String[argTypes.length]; // TODO get from attribute
- for (int i = 0; i < argNames.length; ++i)
- argNames[i] = "v"+i;
- }
-
- public void freeze() throws JCode.OffsetTooBigException {
- if (code != null) code.freeze();
- super.freeze();
- }
-
- public JType getReturnType() {
- return ((JMethodType)type).getReturnType();
- }
-
- public JType[] getArgumentTypes() {
- return ((JMethodType)type).getArgumentTypes();
- }
-
- public int getArgsSize() {
- int size = ((JMethodType)type).getArgsSize();
- if (!isStatic()) size += 1; // for this
- return size;
- }
-
- public String[] getArgumentNames() {
- return argNames;
- }
-
- public JCode getCode() {
- assert !isAbstract();
- return code;
- }
-
- // Invoked by the JCode constructor
- protected void setCode(JCode code) {
- assert null == this.code;
- this.code = code;
- }
-
- public JCodeIterator codeIterator() {
- return new JCodeIterator(code);
- }
-
- // Local variables
- // FIXME : find a better management method for local variables
- public JLocalVariable addNewLocalVariable(JType type, String name) {
- assert !frozen;
- JLocalVariable var =
- context.JLocalVariable(this, type, name, localVariableIndex);
- localVariableIndex += type.getSize();
- localVariables.add(var);
- return var;
- }
-
- public JLocalVariable getLocalVariable(int index) {
- for (int i = 0; i < localVariables.size(); i++) {
- if (((JLocalVariable)localVariables.get(i)).index == index)
- return (JLocalVariable)localVariables.get(i);
- }
- return null;
- }
-
- public JLocalVariable[] getLocalVariables() {
- return (JLocalVariable[])localVariables
- .toArray(new JLocalVariable[localVariables.size()]);
- }
-
-
- public int getMaxLocals() {
- return localVariableIndex;
- }
-
- // Follows javap output format for methods.
- /*@Override*/ public String toString() {
- StringBuffer buf = new StringBuffer(flagsToString());
- String name = getName();
- if (CLASS_CONSTRUCTOR_NAME.equals(name))
- buf.append("{}");
- else {
- if (INSTANCE_CONSTRUCTOR_NAME.equals(name))
- name = getOwner().getName();
- else {
- buf.append(toExternalName(getReturnType()));
- buf.append(" ");
- }
- buf.append(toExternalName(name));
- buf.append("(");
- JType[] ts = getArgumentTypes();
- for (int i = 0; i < ts.length; ++i) {
- if (i > 0) buf.append(", ");
- buf.append(toExternalName(ts[i]));
- }
- buf.append(")");
- }
- buf.append(";\n");
- Iterator it = attributes.iterator();
- while(it.hasNext()) {
- JAttribute attr = (JAttribute)it.next();
- buf.append(attr);
- }
- return buf.toString();
- }
-
- private String flagsToString() {
- StringBuffer buf = new StringBuffer();
- if (isPublic()) buf.append("public ");
- else if (isProtected()) buf.append("protected ");
- else if (isPrivate()) buf.append("private ");
- if (isBridge()) buf.append("<bridge> ");
- if (hasVarargs()) buf.append("<varargs> ");
- if (isStatic()) buf.append("static ");
- else if (isNative()) buf.append("native ");
- if (isAbstract()) buf.append("abstract ");
- else if (isFinal()) buf.append("final ");
- return buf.toString();
- }
-}
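
The first JMethod constructor above reserves slot 0 for this on instance methods and then assigns argument slots by type width, so local slots line up with the JVM's two-slot longs and doubles. A hedged sketch, assuming the FJBGContext.JMethod factory and the JAccessFlags.ACC_PUBLIC constant exist in the surrounding library (they are not shown in this diff):

    JMethod m = context.JMethod(clazz,
                                JAccessFlags.ACC_PUBLIC,   // assumed flag constant
                                "f",
                                JType.LONG,
                                new JType[] { JType.INT, JType.LONG },
                                new String[] { "a", "b" });
    int maxLocals = m.getMaxLocals();  // 4: this -> 0, a -> 1, b -> 2..3
    int argsSize  = m.getArgsSize();   // 4 as well: receiver (1) + int (1) + long (2)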
diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JMethodType.java b/src/fjbg/ch/epfl/lamp/fjbg/JMethodType.java
deleted file mode 100644
index cd3d71fd9c..0000000000
--- a/src/fjbg/ch/epfl/lamp/fjbg/JMethodType.java
+++ /dev/null
@@ -1,87 +0,0 @@
-/* FJBG -- Fast Java Bytecode Generator
- * Copyright 2002-2013 LAMP/EPFL
- * @author Michel Schinz
- */
-
-package ch.epfl.lamp.fjbg;
-
-/**
- * Type for Java methods. Such types have no source-level counterpart in Java,
- * but they correspond to JVM method descriptors and are useful in several places.
- *
- * @author Michel Schinz
- * @version 1.0
- */
-
-public class JMethodType extends JType {
- protected final JType returnType;
- protected final JType[] argTypes;
- protected String signature = null;
-
- public final static JMethodType ARGLESS_VOID_FUNCTION =
- new JMethodType(JType.VOID, JType.EMPTY_ARRAY);
-
- public JMethodType(JType returnType, JType[] argTypes) {
- this.returnType = returnType;
- this.argTypes = argTypes;
- }
-
- public JType getReturnType() { return returnType; }
- public JType[] getArgumentTypes() { return argTypes; }
-
- public int getSize() {
- throw new UnsupportedOperationException();
- }
-
- public String getSignature() {
- if (signature == null) {
- StringBuffer buf = new StringBuffer();
- buf.append('(');
- for (int i = 0; i < argTypes.length; ++i)
- buf.append(argTypes[i].getSignature());
- buf.append(')');
- buf.append(returnType.getSignature());
- signature = buf.toString();
- }
- return signature;
- }
-
- public int getTag() { return T_UNKNOWN; }
-
- public String toString() {
- StringBuffer buf = new StringBuffer();
- buf.append('(');
- for (int i = 0; i < argTypes.length; ++i)
- buf.append(argTypes[i].toString());
- buf.append(')');
- buf.append(returnType.toString());
- return buf.toString();
- }
-
- public int getArgsSize() {
- int size = 0;
- for (int i = 0; i < argTypes.length; ++i)
- size += argTypes[i].getSize();
- return size;
- }
-
- public int getProducedStack() {
- return returnType.getSize() - getArgsSize();
- }
-
- public boolean isCompatibleWith(JType other) {
- return false;
- }
- public boolean equals(Object o) {
- if (o instanceof JMethodType)
- return ((JMethodType)o).getSignature().equals(this.getSignature());
- else
- return false;
- }
- public int hashCode() {
- if (signature == null)
- return 0;
- else
- return signature.hashCode();
- }
-}
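
getSignature() above assembles the JVM method descriptor: the argument descriptors between parentheses followed by the return descriptor. A worked example using only types defined elsewhere in this library (int and references take one slot, long takes two, as their uses in JOpcode below assume):

    JMethodType mt = new JMethodType(
        JType.LONG,
        new JType[] { JType.INT, JObjectType.JAVA_LANG_STRING });
    String desc = mt.getSignature();      // "(ILjava/lang/String;)J"
    int args    = mt.getArgsSize();       // 2 (int = 1 slot, reference = 1 slot)
    int pushed  = mt.getProducedStack();  // 2 - 2 = 0 (the long return value occupies 2 slots)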
diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JObjectType.java b/src/fjbg/ch/epfl/lamp/fjbg/JObjectType.java
deleted file mode 100644
index 06db5b115a..0000000000
--- a/src/fjbg/ch/epfl/lamp/fjbg/JObjectType.java
+++ /dev/null
@@ -1,65 +0,0 @@
-/* FJBG -- Fast Java Bytecode Generator
- * Copyright 2002-2013 LAMP/EPFL
- * @author Michel Schinz
- */
-
-package ch.epfl.lamp.fjbg;
-
-/**
- * Types for Java objects.
- *
- * @author Michel Schinz
- * @version 1.0
- */
-
-public class JObjectType extends JReferenceType {
- protected final String name;
- protected String signature = null;
-
- public final static JObjectType JAVA_LANG_OBJECT =
- new JObjectType("java.lang.Object");
- public final static JObjectType JAVA_LANG_STRING =
- new JObjectType("java.lang.String");
- public final static JObjectType CLONEABLE =
- new JObjectType("Cloneable");
- public final static JObjectType JAVA_IO_SERIALIZABLE =
- new JObjectType("java.io.Serializable");
-
- public JObjectType(String name) {
- this.name = name;
- }
-
- public int getSize() { return 1; }
-
- public String getName() { return name; }
-
- public String getSignature() {
- if (signature == null)
- signature = "L" + name.replace('.','/') + ";";
- return signature;
- }
-
- public String getDescriptor() {
- return name.replace('.','/');
- }
-
- public int getTag() { return T_OBJECT; }
-
- public String toString() { return name; }
-
- public boolean isObjectType() { return true; }
-
- public boolean isCompatibleWith(JType other) {
- return other instanceof JObjectType
- || other == JType.REFERENCE;
- }
- public boolean equals(Object o) {
- if (o instanceof JObjectType)
- return ((JObjectType)o).getSignature().equals(this.getSignature());
- else
- return false;
- }
- public int hashCode() {
- return name.hashCode();
- }
-}
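
As the code above shows, getSignature() caches the "L<internal name>;" form while getDescriptor() returns only the slash-separated internal name. A small example (the class name is arbitrary):

    JObjectType t = new JObjectType("scala.Option");
    String sig  = t.getSignature();   // "Lscala/Option;"
    String desc = t.getDescriptor();  // "scala/Option"
    int slots   = t.getSize();        // 1: any reference occupies one stack slot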
diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JOpcode.java b/src/fjbg/ch/epfl/lamp/fjbg/JOpcode.java
deleted file mode 100644
index cc68681a96..0000000000
--- a/src/fjbg/ch/epfl/lamp/fjbg/JOpcode.java
+++ /dev/null
@@ -1,1267 +0,0 @@
-/* FJBG -- Fast Java Bytecode Generator
- * Copyright 2002-2013 LAMP/EPFL
- * @author Michel Schinz
- */
-
-package ch.epfl.lamp.fjbg;
-
-/**
- * Definition of opcodes for the JVM.
- *
- * @author Michel Schinz, Thomas Friedli
- * @version 1.0
- */
-
-public class JOpcode {
- public final String name;
- public final int code;
-
- // The following attributes can be (statically) unknown for some
- // instructions and are therefore not public; their values must be
- // obtained through accessor methods (see JCodeIterator).
- protected final int size;
- protected final JType[] producedDataTypes;
- protected final JType[] consumedDataTypes;
- protected final int jumpKind;
- protected final int successorCount;
-
- protected final static int UNKNOWN = Integer.MIN_VALUE;
-
- protected final static int JMP_NONE = 0;
- protected final static int JMP_NEXT = 1;
- protected final static int JMP_ALWAYS_S2_OFFSET = 2;
- protected final static int JMP_ALWAYS_S4_OFFSET = 3;
- protected final static int JMP_MAYBE_S2_OFFSET = 4;
- protected final static int JMP_TABLE = 5;
- protected final static int JMP_LOOKUP = 6;
-
- protected final static JType[] NO_DATA = new JType[0];
-
- protected final static JType[] INT_TYPE =
- new JType[] { JType.INT };
- protected final static JType[] FLOAT_TYPE =
- new JType[] { JType.FLOAT };
- protected final static JType[] LONG_TYPE =
- new JType[] { JType.LONG };
- protected final static JType[] DOUBLE_TYPE =
- new JType[] { JType.DOUBLE };
- protected final static JType[] OBJECT_REF_TYPE =
- new JType[] { JObjectType.JAVA_LANG_OBJECT };
- protected final static JType[] ARRAY_REF_TYPE =
- new JType[] { new JArrayType(JType.VOID) };
- protected final static JType[] REFERENCE_TYPE =
- new JType[] { JType.REFERENCE };
- protected final static JType[] ADDRESS_TYPE =
- new JType[] { JType.ADDRESS };
- protected final static JType[] UNKNOWN_TYPE =
- new JType[] { JType.UNKNOWN };
-
- /// Instruction codes
- public final static int cNOP = 0;
- public final static int cACONST_NULL = 1;
- public final static int cICONST_M1 = 2;
- public final static int cICONST_0 = 3;
- public final static int cICONST_1 = 4;
- public final static int cICONST_2 = 5;
- public final static int cICONST_3 = 6;
- public final static int cICONST_4 = 7;
- public final static int cICONST_5 = 8;
- public final static int cLCONST_0 = 9;
- public final static int cLCONST_1 = 10;
- public final static int cFCONST_0 = 11;
- public final static int cFCONST_1 = 12;
- public final static int cFCONST_2 = 13;
- public final static int cDCONST_0 = 14;
- public final static int cDCONST_1 = 15;
- public final static int cBIPUSH = 16;
- public final static int cSIPUSH = 17;
- public final static int cLDC = 18;
- public final static int cLDC_W = 19;
- public final static int cLDC2_W = 20;
- public final static int cILOAD = 21;
- public final static int cLLOAD = 22;
- public final static int cFLOAD = 23;
- public final static int cDLOAD = 24;
- public final static int cALOAD = 25;
- public final static int cILOAD_0 = 26;
- public final static int cILOAD_1 = 27;
- public final static int cILOAD_2 = 28;
- public final static int cILOAD_3 = 29;
- public final static int cLLOAD_0 = 30;
- public final static int cLLOAD_1 = 31;
- public final static int cLLOAD_2 = 32;
- public final static int cLLOAD_3 = 33;
- public final static int cFLOAD_0 = 34;
- public final static int cFLOAD_1 = 35;
- public final static int cFLOAD_2 = 36;
- public final static int cFLOAD_3 = 37;
- public final static int cDLOAD_0 = 38;
- public final static int cDLOAD_1 = 39;
- public final static int cDLOAD_2 = 40;
- public final static int cDLOAD_3 = 41;
- public final static int cALOAD_0 = 42;
- public final static int cALOAD_1 = 43;
- public final static int cALOAD_2 = 44;
- public final static int cALOAD_3 = 45;
- public final static int cIALOAD = 46;
- public final static int cLALOAD = 47;
- public final static int cFALOAD = 48;
- public final static int cDALOAD = 49;
- public final static int cAALOAD = 50;
- public final static int cBALOAD = 51;
- public final static int cCALOAD = 52;
- public final static int cSALOAD = 53;
- public final static int cISTORE = 54;
- public final static int cLSTORE = 55;
- public final static int cFSTORE = 56;
- public final static int cDSTORE = 57;
- public final static int cASTORE = 58;
- public final static int cISTORE_0 = 59;
- public final static int cISTORE_1 = 60;
- public final static int cISTORE_2 = 61;
- public final static int cISTORE_3 = 62;
- public final static int cLSTORE_0 = 63;
- public final static int cLSTORE_1 = 64;
- public final static int cLSTORE_2 = 65;
- public final static int cLSTORE_3 = 66;
- public final static int cFSTORE_0 = 67;
- public final static int cFSTORE_1 = 68;
- public final static int cFSTORE_2 = 69;
- public final static int cFSTORE_3 = 70;
- public final static int cDSTORE_0 = 71;
- public final static int cDSTORE_1 = 72;
- public final static int cDSTORE_2 = 73;
- public final static int cDSTORE_3 = 74;
- public final static int cASTORE_0 = 75;
- public final static int cASTORE_1 = 76;
- public final static int cASTORE_2 = 77;
- public final static int cASTORE_3 = 78;
- public final static int cIASTORE = 79;
- public final static int cLASTORE = 80;
- public final static int cFASTORE = 81;
- public final static int cDASTORE = 82;
- public final static int cAASTORE = 83;
- public final static int cBASTORE = 84;
- public final static int cCASTORE = 85;
- public final static int cSASTORE = 86;
- public final static int cPOP = 87;
- public final static int cPOP2 = 88;
- public final static int cDUP = 89;
- public final static int cDUP_X1 = 90;
- public final static int cDUP_X2 = 91;
- public final static int cDUP2 = 92;
- public final static int cDUP2_X1 = 93;
- public final static int cDUP2_X2 = 94;
- public final static int cSWAP = 95;
- public final static int cIADD = 96;
- public final static int cLADD = 97;
- public final static int cFADD = 98;
- public final static int cDADD = 99;
- public final static int cISUB = 100;
- public final static int cLSUB = 101;
- public final static int cFSUB = 102;
- public final static int cDSUB = 103;
- public final static int cIMUL = 104;
- public final static int cLMUL = 105;
- public final static int cFMUL = 106;
- public final static int cDMUL = 107;
- public final static int cIDIV = 108;
- public final static int cLDIV = 109;
- public final static int cFDIV = 110;
- public final static int cDDIV = 111;
- public final static int cIREM = 112;
- public final static int cLREM = 113;
- public final static int cFREM = 114;
- public final static int cDREM = 115;
- public final static int cINEG = 116;
- public final static int cLNEG = 117;
- public final static int cFNEG = 118;
- public final static int cDNEG = 119;
- public final static int cISHL = 120;
- public final static int cLSHL = 121;
- public final static int cISHR = 122;
- public final static int cLSHR = 123;
- public final static int cIUSHR = 124;
- public final static int cLUSHR = 125;
- public final static int cIAND = 126;
- public final static int cLAND = 127;
- public final static int cIOR = 128;
- public final static int cLOR = 129;
- public final static int cIXOR = 130;
- public final static int cLXOR = 131;
- public final static int cIINC = 132;
- public final static int cI2L = 133;
- public final static int cI2F = 134;
- public final static int cI2D = 135;
- public final static int cL2I = 136;
- public final static int cL2F = 137;
- public final static int cL2D = 138;
- public final static int cF2I = 139;
- public final static int cF2L = 140;
- public final static int cF2D = 141;
- public final static int cD2I = 142;
- public final static int cD2L = 143;
- public final static int cD2F = 144;
- public final static int cI2B = 145;
- public final static int cI2C = 146;
- public final static int cI2S = 147;
- public final static int cLCMP = 148;
- public final static int cFCMPL = 149;
- public final static int cFCMPG = 150;
- public final static int cDCMPL = 151;
- public final static int cDCMPG = 152;
- public final static int cIFEQ = 153;
- public final static int cIFNE = 154;
- public final static int cIFLT = 155;
- public final static int cIFGE = 156;
- public final static int cIFGT = 157;
- public final static int cIFLE = 158;
- public final static int cIF_ICMPEQ = 159;
- public final static int cIF_ICMPNE = 160;
- public final static int cIF_ICMPLT = 161;
- public final static int cIF_ICMPGE = 162;
- public final static int cIF_ICMPGT = 163;
- public final static int cIF_ICMPLE = 164;
- public final static int cIF_ACMPEQ = 165;
- public final static int cIF_ACMPNE = 166;
- public final static int cGOTO = 167;
- public final static int cJSR = 168;
- public final static int cRET = 169;
- public final static int cTABLESWITCH = 170;
- public final static int cLOOKUPSWITCH = 171;
- public final static int cIRETURN = 172;
- public final static int cLRETURN = 173;
- public final static int cFRETURN = 174;
- public final static int cDRETURN = 175;
- public final static int cARETURN = 176;
- public final static int cRETURN = 177;
- public final static int cGETSTATIC = 178;
- public final static int cPUTSTATIC = 179;
- public final static int cGETFIELD = 180;
- public final static int cPUTFIELD = 181;
- public final static int cINVOKEVIRTUAL = 182;
- public final static int cINVOKESPECIAL = 183;
- public final static int cINVOKESTATIC = 184;
- public final static int cINVOKEINTERFACE = 185;
- public final static int cNEW = 187;
- public final static int cNEWARRAY = 188;
- public final static int cANEWARRAY = 189;
- public final static int cARRAYLENGTH = 190;
- public final static int cATHROW = 191;
- public final static int cCHECKCAST = 192;
- public final static int cINSTANCEOF = 193;
- public final static int cMONITORENTER = 194;
- public final static int cMONITOREXIT = 195;
- public final static int cWIDE = 196;
- public final static int cMULTIANEWARRAY = 197;
- public final static int cIFNULL = 198;
- public final static int cIFNONNULL = 199;
- public final static int cGOTO_W = 200;
- public final static int cJSR_W = 201;
-
- // Objects representing instructions
- public final static JOpcode NOP =
- new JOpcode("NOP", cNOP, 1, NO_DATA, NO_DATA, JMP_NEXT);
- public final static JOpcode ACONST_NULL = new JOpcode("ACONST_NULL",
- cACONST_NULL,
- 1,
- REFERENCE_TYPE,
- NO_DATA,
- JMP_NEXT);
- public final static JOpcode ICONST_M1 =
- new JOpcode("ICONST_M1", cICONST_M1, 1, INT_TYPE, NO_DATA, JMP_NEXT);
- public final static JOpcode ICONST_0 =
- new JOpcode("ICONST_0", cICONST_0, 1, INT_TYPE, NO_DATA, JMP_NEXT);
- public final static JOpcode ICONST_1 =
- new JOpcode("ICONST_1", cICONST_1, 1, INT_TYPE, NO_DATA, JMP_NEXT);
- public final static JOpcode ICONST_2 =
- new JOpcode("ICONST_2", cICONST_2, 1, INT_TYPE, NO_DATA, JMP_NEXT);
- public final static JOpcode ICONST_3 =
- new JOpcode("ICONST_3", cICONST_3, 1, INT_TYPE, NO_DATA, JMP_NEXT);
- public final static JOpcode ICONST_4 =
- new JOpcode("ICONST_4", cICONST_4, 1, INT_TYPE, NO_DATA, JMP_NEXT);
- public final static JOpcode ICONST_5 =
- new JOpcode("ICONST_5", cICONST_5, 1, INT_TYPE, NO_DATA, JMP_NEXT);
- public final static JOpcode LCONST_0 =
- new JOpcode("LCONST_0", cLCONST_0, 1, LONG_TYPE, NO_DATA, JMP_NEXT);
- public final static JOpcode LCONST_1 =
- new JOpcode("LCONST_1", cLCONST_1, 1, LONG_TYPE, NO_DATA, JMP_NEXT);
- public final static JOpcode FCONST_0 =
- new JOpcode("FCONST_0", cFCONST_0, 1, FLOAT_TYPE, NO_DATA, JMP_NEXT);
- public final static JOpcode FCONST_1 =
- new JOpcode("FCONST_1", cFCONST_1, 1, FLOAT_TYPE, NO_DATA, JMP_NEXT);
- public final static JOpcode FCONST_2 =
- new JOpcode("FCONST_2", cFCONST_2, 1, FLOAT_TYPE, NO_DATA, JMP_NEXT);
- public final static JOpcode DCONST_0 =
- new JOpcode("DCONST_0", cDCONST_0, 1, DOUBLE_TYPE, NO_DATA, JMP_NEXT);
- public final static JOpcode DCONST_1 =
- new JOpcode("DCONST_1", cDCONST_1, 1, DOUBLE_TYPE, NO_DATA, JMP_NEXT);
- public final static JOpcode BIPUSH =
- new JOpcode("BIPUSH", cBIPUSH, 2, INT_TYPE, NO_DATA, JMP_NEXT);
- public final static JOpcode SIPUSH =
- new JOpcode("SIPUSH", cSIPUSH, 3, INT_TYPE, NO_DATA, JMP_NEXT);
- public final static JOpcode LDC =
- new JOpcode("LDC", cLDC, 2, UNKNOWN_TYPE, NO_DATA, JMP_NEXT);
- public final static JOpcode LDC_W =
- new JOpcode("LDC_W", cLDC_W, 3, UNKNOWN_TYPE, NO_DATA, JMP_NEXT);
- public final static JOpcode LDC2_W =
- new JOpcode("LDC2_W", cLDC2_W, 3, UNKNOWN_TYPE, NO_DATA, JMP_NEXT);
- public final static JOpcode ILOAD =
- new JOpcode("ILOAD", cILOAD, 2, INT_TYPE, NO_DATA, JMP_NEXT);
- public final static JOpcode LLOAD =
- new JOpcode("LLOAD", cLLOAD, 2, LONG_TYPE, NO_DATA, JMP_NEXT);
- public final static JOpcode FLOAD =
- new JOpcode("FLOAD", cFLOAD, 2, FLOAT_TYPE, NO_DATA, JMP_NEXT);
- public final static JOpcode DLOAD =
- new JOpcode("DLOAD", cDLOAD, 2, DOUBLE_TYPE, NO_DATA, JMP_NEXT);
- public final static JOpcode ALOAD =
- new JOpcode("ALOAD", cALOAD, 2, REFERENCE_TYPE, NO_DATA, JMP_NEXT);
- public final static JOpcode ILOAD_0 =
- new JOpcode("ILOAD_0", cILOAD_0, 1, INT_TYPE, NO_DATA, JMP_NEXT);
- public final static JOpcode ILOAD_1 =
- new JOpcode("ILOAD_1", cILOAD_1, 1, INT_TYPE, NO_DATA, JMP_NEXT);
- public final static JOpcode ILOAD_2 =
- new JOpcode("ILOAD_2", cILOAD_2, 1, INT_TYPE, NO_DATA, JMP_NEXT);
- public final static JOpcode ILOAD_3 =
- new JOpcode("ILOAD_3", cILOAD_3, 1, INT_TYPE, NO_DATA, JMP_NEXT);
- public final static JOpcode LLOAD_0 =
- new JOpcode("LLOAD_0", cLLOAD_0, 1, LONG_TYPE, NO_DATA, JMP_NEXT);
- public final static JOpcode LLOAD_1 =
- new JOpcode("LLOAD_1", cLLOAD_1, 1, LONG_TYPE, NO_DATA, JMP_NEXT);
- public final static JOpcode LLOAD_2 =
- new JOpcode("LLOAD_2", cLLOAD_2, 1, LONG_TYPE, NO_DATA, JMP_NEXT);
- public final static JOpcode LLOAD_3 =
- new JOpcode("LLOAD_3", cLLOAD_3, 1, LONG_TYPE, NO_DATA, JMP_NEXT);
- public final static JOpcode FLOAD_0 =
- new JOpcode("FLOAD_0", cFLOAD_0, 1, FLOAT_TYPE, NO_DATA, JMP_NEXT);
- public final static JOpcode FLOAD_1 =
- new JOpcode("FLOAD_1", cFLOAD_1, 1, FLOAT_TYPE, NO_DATA, JMP_NEXT);
- public final static JOpcode FLOAD_2 =
- new JOpcode("FLOAD_2", cFLOAD_2, 1, FLOAT_TYPE, NO_DATA, JMP_NEXT);
- public final static JOpcode FLOAD_3 =
- new JOpcode("FLOAD_3", cFLOAD_3, 1, FLOAT_TYPE, NO_DATA, JMP_NEXT);
- public final static JOpcode DLOAD_0 =
- new JOpcode("DLOAD_0", cDLOAD_0, 1, DOUBLE_TYPE, NO_DATA, JMP_NEXT);
- public final static JOpcode DLOAD_1 =
- new JOpcode("DLOAD_1", cDLOAD_1, 1, DOUBLE_TYPE, NO_DATA, JMP_NEXT);
- public final static JOpcode DLOAD_2 =
- new JOpcode("DLOAD_2", cDLOAD_2, 1, DOUBLE_TYPE, NO_DATA, JMP_NEXT);
- public final static JOpcode DLOAD_3 =
- new JOpcode("DLOAD_3", cDLOAD_3, 1, DOUBLE_TYPE, NO_DATA, JMP_NEXT);
- public final static JOpcode ALOAD_0 =
- new JOpcode("ALOAD_0", cALOAD_0, 1, REFERENCE_TYPE, NO_DATA, JMP_NEXT);
- public final static JOpcode ALOAD_1 =
- new JOpcode("ALOAD_1", cALOAD_1, 1, REFERENCE_TYPE, NO_DATA, JMP_NEXT);
- public final static JOpcode ALOAD_2 =
- new JOpcode("ALOAD_2", cALOAD_2, 1, REFERENCE_TYPE, NO_DATA, JMP_NEXT);
- public final static JOpcode ALOAD_3 =
- new JOpcode("ALOAD_3", cALOAD_3, 1, REFERENCE_TYPE, NO_DATA, JMP_NEXT);
- public final static JOpcode IALOAD =
- new JOpcode("IALOAD",
- cIALOAD,
- 1,
- INT_TYPE,
- new JType[] {JType.INT, JArrayType.INT},
- JMP_NEXT);
- public final static JOpcode LALOAD =
- new JOpcode("LALOAD",
- cLALOAD,
- 1,
- LONG_TYPE,
- new JType[] {JType.INT, JArrayType.LONG},
- JMP_NEXT);
- public final static JOpcode FALOAD =
- new JOpcode("FALOAD",
- cFALOAD,
- 1,
- FLOAT_TYPE,
- new JType[] {JType.INT, JArrayType.FLOAT},
- JMP_NEXT);
- public final static JOpcode DALOAD =
- new JOpcode("DALOAD",
- cDALOAD,
- 1,
- DOUBLE_TYPE,
- new JType[] {JType.INT, JArrayType.DOUBLE},
- JMP_NEXT);
- public final static JOpcode AALOAD =
- new JOpcode("AALOAD",
- cAALOAD,
- 1,
- REFERENCE_TYPE,
- new JType[] {JType.INT, JArrayType.REFERENCE},
- JMP_NEXT);
- public final static JOpcode BALOAD =
- new JOpcode("BALOAD",
- cBALOAD,
- 1,
- INT_TYPE,
- new JType[] {JType.INT, new JArrayType(JType.UNKNOWN)},
- JMP_NEXT);
- public final static JOpcode CALOAD =
- new JOpcode("CALOAD",
- cCALOAD,
- 1,
- INT_TYPE,
- new JType[] {JType.INT, JArrayType.CHAR},
- JMP_NEXT);
- public final static JOpcode SALOAD =
- new JOpcode("SALOAD",
- cSALOAD,
- 1,
- INT_TYPE,
- new JType[] {JType.INT, JArrayType.SHORT},
- JMP_NEXT);
- public final static JOpcode ISTORE =
- new JOpcode("ISTORE", cISTORE, 2, NO_DATA, INT_TYPE, JMP_NEXT);
- public final static JOpcode LSTORE =
- new JOpcode("LSTORE", cLSTORE, 2, NO_DATA, LONG_TYPE, JMP_NEXT);
- public final static JOpcode FSTORE =
- new JOpcode("FSTORE", cFSTORE, 2, NO_DATA, FLOAT_TYPE, JMP_NEXT);
- public final static JOpcode DSTORE =
- new JOpcode("DSTORE", cDSTORE, 2, NO_DATA, DOUBLE_TYPE, JMP_NEXT);
- public final static JOpcode ASTORE =
- new JOpcode("ASTORE", cASTORE, 2, NO_DATA, REFERENCE_TYPE, JMP_NEXT);
- public final static JOpcode ISTORE_0 =
- new JOpcode("ISTORE_0", cISTORE_0, 1, NO_DATA, INT_TYPE, JMP_NEXT);
- public final static JOpcode ISTORE_1 =
- new JOpcode("ISTORE_1", cISTORE_1, 1, NO_DATA, INT_TYPE, JMP_NEXT);
- public final static JOpcode ISTORE_2 =
- new JOpcode("ISTORE_2", cISTORE_2, 1, NO_DATA, INT_TYPE, JMP_NEXT);
- public final static JOpcode ISTORE_3 =
- new JOpcode("ISTORE_3", cISTORE_3, 1, NO_DATA, INT_TYPE, JMP_NEXT);
- public final static JOpcode LSTORE_0 =
- new JOpcode("LSTORE_0", cLSTORE_0, 1, NO_DATA, LONG_TYPE, JMP_NEXT);
- public final static JOpcode LSTORE_1 =
- new JOpcode("LSTORE_1", cLSTORE_1, 1, NO_DATA, LONG_TYPE, JMP_NEXT);
- public final static JOpcode LSTORE_2 =
- new JOpcode("LSTORE_2", cLSTORE_2, 1, NO_DATA, LONG_TYPE, JMP_NEXT);
- public final static JOpcode LSTORE_3 =
- new JOpcode("LSTORE_3", cLSTORE_3, 1, NO_DATA, LONG_TYPE, JMP_NEXT);
- public final static JOpcode FSTORE_0 =
- new JOpcode("FSTORE_0", cFSTORE_0, 1, NO_DATA, FLOAT_TYPE, JMP_NEXT);
- public final static JOpcode FSTORE_1 =
- new JOpcode("FSTORE_1", cFSTORE_1, 1, NO_DATA, FLOAT_TYPE, JMP_NEXT);
- public final static JOpcode FSTORE_2 =
- new JOpcode("FSTORE_2", cFSTORE_2, 1, NO_DATA, FLOAT_TYPE, JMP_NEXT);
- public final static JOpcode FSTORE_3 =
- new JOpcode("FSTORE_3", cFSTORE_3, 1, NO_DATA, FLOAT_TYPE, JMP_NEXT);
- public final static JOpcode DSTORE_0 =
- new JOpcode("DSTORE_0", cDSTORE_0, 1, NO_DATA, DOUBLE_TYPE, JMP_NEXT);
- public final static JOpcode DSTORE_1 =
- new JOpcode("DSTORE_1", cDSTORE_1, 1, NO_DATA, DOUBLE_TYPE, JMP_NEXT);
- public final static JOpcode DSTORE_2 =
- new JOpcode("DSTORE_2", cDSTORE_2, 1, NO_DATA, DOUBLE_TYPE, JMP_NEXT);
- public final static JOpcode DSTORE_3 =
- new JOpcode("DSTORE_3", cDSTORE_3, 1, NO_DATA, DOUBLE_TYPE, JMP_NEXT);
- public final static JOpcode ASTORE_0 = new JOpcode("ASTORE_0",
- cASTORE_0,
- 1,
- NO_DATA,
- REFERENCE_TYPE,
- JMP_NEXT);
- public final static JOpcode ASTORE_1 = new JOpcode("ASTORE_1",
- cASTORE_1,
- 1,
- NO_DATA,
- REFERENCE_TYPE,
- JMP_NEXT);
- public final static JOpcode ASTORE_2 = new JOpcode("ASTORE_2",
- cASTORE_2,
- 1,
- NO_DATA,
- REFERENCE_TYPE,
- JMP_NEXT);
- public final static JOpcode ASTORE_3 = new JOpcode("ASTORE_3",
- cASTORE_3,
- 1,
- NO_DATA,
- REFERENCE_TYPE,
- JMP_NEXT);
- public final static JOpcode IASTORE =
- new JOpcode("IASTORE",
- cIASTORE,
- 1,
- NO_DATA,
- new JType[] { JType.INT,
- JType.INT,
- JArrayType.INT},
- JMP_NEXT);
- public final static JOpcode LASTORE =
- new JOpcode("LASTORE",
- cLASTORE,
- 1,
- NO_DATA,
- new JType[] { JType.LONG,
- JType.INT,
- JArrayType.LONG},
- JMP_NEXT);
- public final static JOpcode FASTORE =
- new JOpcode("FASTORE",
- cFASTORE,
- 1,
- NO_DATA,
- new JType[] { JType.FLOAT,
- JType.INT,
- JArrayType.FLOAT},
- JMP_NEXT);
- public final static JOpcode DASTORE =
- new JOpcode("DASTORE",
- cDASTORE,
- 1,
- NO_DATA,
- new JType[] { JType.DOUBLE,
- JType.INT,
- JArrayType.DOUBLE},
- JMP_NEXT);
- public final static JOpcode AASTORE =
- new JOpcode("AASTORE",
- cAASTORE,
- 1,
- NO_DATA,
- new JType[] { JType.REFERENCE,
- JType.INT,
- JArrayType.REFERENCE},
- JMP_NEXT);
- public final static JOpcode BASTORE =
- new JOpcode("BASTORE",
- cBASTORE,
- 1,
- NO_DATA,
- new JType[] { JType.INT,
- JType.INT,
- new JArrayType(JType.UNKNOWN)},
- JMP_NEXT);
- public final static JOpcode CASTORE =
- new JOpcode("CASTORE",
- cCASTORE,
- 1,
- NO_DATA,
- new JType[] { JType.INT,
- JType.INT,
- JArrayType.CHAR},
- JMP_NEXT);
- public final static JOpcode SASTORE =
- new JOpcode("SASTORE",
- cSASTORE,
- 1,
- NO_DATA,
- new JType[] { JType.INT,
- JType.INT,
- JArrayType.SHORT},
- JMP_NEXT);
- public final static JOpcode POP =
- new JOpcode("POP", cPOP, 1, NO_DATA, UNKNOWN_TYPE, JMP_NEXT);
- public final static JOpcode POP2 =
- new JOpcode("POP2", cPOP2, 1, NO_DATA, UNKNOWN_TYPE, JMP_NEXT);
- public final static JOpcode DUP =
- new JOpcode("DUP", cDUP, 1, UNKNOWN_TYPE, UNKNOWN_TYPE, JMP_NEXT);
- public final static JOpcode DUP_X1 = new JOpcode("DUP_X1",
- cDUP_X1,
- 1,
- UNKNOWN_TYPE,
- UNKNOWN_TYPE,
- JMP_NEXT);
- public final static JOpcode DUP_X2 = new JOpcode("DUP_X2",
- cDUP_X2,
- 1,
- UNKNOWN_TYPE,
- UNKNOWN_TYPE,
- JMP_NEXT);
- public final static JOpcode DUP2 =
- new JOpcode("DUP2", cDUP2, 1, UNKNOWN_TYPE, UNKNOWN_TYPE, JMP_NEXT);
- public final static JOpcode DUP2_X1 = new JOpcode("DUP2_X1",
- cDUP2_X1,
- 1,
- UNKNOWN_TYPE,
- UNKNOWN_TYPE,
- JMP_NEXT);
- public final static JOpcode DUP2_X2 = new JOpcode("DUP2_X2",
- cDUP2_X2,
- 1,
- UNKNOWN_TYPE,
- UNKNOWN_TYPE,
- JMP_NEXT);
- public final static JOpcode SWAP =
- new JOpcode("SWAP", cSWAP, 1, UNKNOWN_TYPE, UNKNOWN_TYPE, JMP_NEXT);
- public final static JOpcode IADD =
- new JOpcode("IADD",
- cIADD,
- 1,
- INT_TYPE,
- new JType[] { JType.INT, JType.INT },
- JMP_NEXT);
- public final static JOpcode LADD =
- new JOpcode("LADD",
- cLADD,
- 1,
- LONG_TYPE,
- new JType[] { JType.LONG, JType.LONG },
- JMP_NEXT);
- public final static JOpcode FADD =
- new JOpcode("FADD",
- cFADD,
- 1,
- FLOAT_TYPE,
- new JType[] { JType.FLOAT, JType.FLOAT },
- JMP_NEXT);
- public final static JOpcode DADD =
- new JOpcode("DADD",
- cDADD,
- 1,
- DOUBLE_TYPE,
- new JType[] { JType.DOUBLE, JType.DOUBLE },
- JMP_NEXT);
- public final static JOpcode ISUB =
- new JOpcode("ISUB",
- cISUB,
- 1,
- INT_TYPE,
- new JType[] {JType.INT, JType.INT },
- JMP_NEXT);
- public final static JOpcode LSUB =
- new JOpcode("LSUB",
- cLSUB,
- 1,
- LONG_TYPE,
- new JType[] { JType.LONG, JType.LONG },
- JMP_NEXT);
- public final static JOpcode FSUB =
- new JOpcode("FSUB",
- cFSUB,
- 1,
- FLOAT_TYPE,
- new JType[] { JType.FLOAT, JType.FLOAT },
- JMP_NEXT);
- public final static JOpcode DSUB =
- new JOpcode("DSUB",
- cDSUB,
- 1,
- DOUBLE_TYPE,
- new JType[] { JType.DOUBLE, JType.DOUBLE },
- JMP_NEXT);
- public final static JOpcode IMUL =
- new JOpcode("IMUL",
- cIMUL,
- 1,
- INT_TYPE,
- new JType[] {JType.INT, JType.INT },
- JMP_NEXT);
- public final static JOpcode LMUL =
- new JOpcode("LMUL",
- cLMUL,
- 1,
- LONG_TYPE,
- new JType[] { JType.LONG, JType.LONG },
- JMP_NEXT);
- public final static JOpcode FMUL =
- new JOpcode("FMUL",
- cFMUL,
- 1,
- FLOAT_TYPE,
- new JType[] { JType.FLOAT, JType.FLOAT },
- JMP_NEXT);
- public final static JOpcode DMUL =
- new JOpcode("DMUL",
- cDMUL,
- 1,
- DOUBLE_TYPE,
- new JType[] { JType.DOUBLE, JType.DOUBLE },
- JMP_NEXT);
- public final static JOpcode IDIV =
- new JOpcode("IDIV",
- cIDIV,
- 1,
- INT_TYPE,
- new JType[] {JType.INT, JType.INT },
- JMP_NEXT);
- public final static JOpcode LDIV =
- new JOpcode("LDIV",
- cLDIV,
- 1,
- LONG_TYPE,
- new JType[] { JType.LONG, JType.LONG },
- JMP_NEXT);
- public final static JOpcode FDIV =
- new JOpcode("FDIV",
- cFDIV,
- 1,
- FLOAT_TYPE,
- new JType[] { JType.FLOAT, JType.FLOAT },
- JMP_NEXT);
- public final static JOpcode DDIV =
- new JOpcode("DDIV",
- cDDIV,
- 1,
- DOUBLE_TYPE,
- new JType[] { JType.DOUBLE, JType.DOUBLE },
- JMP_NEXT);
- public final static JOpcode IREM =
- new JOpcode("IREM",
- cIREM,
- 1,
- INT_TYPE,
- new JType[] {JType.INT, JType.INT },
- JMP_NEXT);
- public final static JOpcode LREM =
- new JOpcode("LREM",
- cLREM,
- 1,
- LONG_TYPE,
- new JType[] { JType.LONG, JType.LONG },
- JMP_NEXT);
- public final static JOpcode FREM =
- new JOpcode("FREM",
- cFREM,
- 1,
- FLOAT_TYPE,
- new JType[] { JType.FLOAT, JType.FLOAT },
- JMP_NEXT);
- public final static JOpcode DREM =
- new JOpcode("DREM",
- cDREM,
- 1,
- DOUBLE_TYPE,
- new JType[] { JType.DOUBLE, JType.DOUBLE },
- JMP_NEXT);
- public final static JOpcode INEG =
- new JOpcode("INEG", cINEG, 1, INT_TYPE, INT_TYPE, JMP_NEXT);
- public final static JOpcode LNEG =
- new JOpcode("LNEG", cLNEG, 1, LONG_TYPE, LONG_TYPE, JMP_NEXT);
- public final static JOpcode FNEG =
- new JOpcode("FNEG", cFNEG, 1, FLOAT_TYPE, FLOAT_TYPE, JMP_NEXT);
- public final static JOpcode DNEG =
- new JOpcode("DNEG", cDNEG, 1, DOUBLE_TYPE, DOUBLE_TYPE, JMP_NEXT);
- public final static JOpcode ISHL =
- new JOpcode("ISHL", cISHL,
- 1,
- INT_TYPE,
- new JType[] { JType.INT, JType.INT },
- JMP_NEXT);
- public final static JOpcode LSHL =
- new JOpcode("LSHL",
- cLSHL,
- 1,
- LONG_TYPE,
- new JType [] { JType.INT, JType.LONG },
- JMP_NEXT);
- public final static JOpcode ISHR =
- new JOpcode("ISHR",
- cISHR,
- 1,
- INT_TYPE,
- new JType[] { JType.INT, JType.INT },
- JMP_NEXT);
- public final static JOpcode LSHR =
- new JOpcode("LSHR",
- cLSHR,
- 1,
- LONG_TYPE,
- new JType[] { JType.INT, JType.LONG },
- JMP_NEXT);
- public final static JOpcode IUSHR =
- new JOpcode("IUSHR",
- cIUSHR,
- 1,
- INT_TYPE,
- new JType[] { JType.INT, JType.INT },
- JMP_NEXT);
- public final static JOpcode LUSHR =
- new JOpcode("LUSHR",
- cLUSHR,
- 1,
- LONG_TYPE,
- new JType[] { JType.INT, JType.LONG },
- JMP_NEXT);
- public final static JOpcode IAND =
- new JOpcode("IAND",
- cIAND,
- 1,
- INT_TYPE,
- new JType[] { JType.INT, JType.INT },
- JMP_NEXT);
- public final static JOpcode LAND =
- new JOpcode("LAND",
- cLAND,
- 1,
- LONG_TYPE,
- new JType[] { JType.LONG, JType.LONG },
- JMP_NEXT);
- public final static JOpcode IOR =
- new JOpcode("IOR",
- cIOR,
- 1,
- INT_TYPE,
- new JType[] { JType.INT, JType.INT },
- JMP_NEXT);
- public final static JOpcode LOR =
- new JOpcode("LOR",
- cLOR,
- 1,
- LONG_TYPE,
- new JType[] { JType.LONG, JType.LONG },
- JMP_NEXT);
- public final static JOpcode IXOR =
- new JOpcode("IXOR",
- cIXOR,
- 1,
- INT_TYPE,
- new JType[] { JType.INT, JType.INT },
- JMP_NEXT);
- public final static JOpcode LXOR =
- new JOpcode("LXOR",
- cLXOR,
- 1,
- LONG_TYPE,
- new JType[] { JType.LONG, JType.LONG },
- JMP_NEXT);
- public final static JOpcode IINC =
- new JOpcode("IINC", cIINC, 3, NO_DATA, NO_DATA, JMP_NEXT);
- public final static JOpcode I2L =
- new JOpcode("I2L", cI2L, 1, LONG_TYPE, INT_TYPE, JMP_NEXT);
- public final static JOpcode I2F =
- new JOpcode("I2F", cI2F, 1, FLOAT_TYPE, INT_TYPE, JMP_NEXT);
- public final static JOpcode I2D =
- new JOpcode("I2D", cI2D, 1, DOUBLE_TYPE, INT_TYPE, JMP_NEXT);
- public final static JOpcode L2I =
- new JOpcode("L2I", cL2I, 1, INT_TYPE, LONG_TYPE, JMP_NEXT);
- public final static JOpcode L2F =
- new JOpcode("L2F", cL2F, 1, FLOAT_TYPE, LONG_TYPE, JMP_NEXT);
- public final static JOpcode L2D =
- new JOpcode("L2D", cL2D, 1, DOUBLE_TYPE, LONG_TYPE, JMP_NEXT);
- public final static JOpcode F2I =
- new JOpcode("F2I", cF2I, 1, INT_TYPE, FLOAT_TYPE, JMP_NEXT);
- public final static JOpcode F2L =
- new JOpcode("F2L", cF2L, 1, LONG_TYPE, FLOAT_TYPE, JMP_NEXT);
- public final static JOpcode F2D =
- new JOpcode("F2D", cF2D, 1, DOUBLE_TYPE, FLOAT_TYPE, JMP_NEXT);
- public final static JOpcode D2I =
- new JOpcode("D2I", cD2I, 1, INT_TYPE, DOUBLE_TYPE, JMP_NEXT);
- public final static JOpcode D2L =
- new JOpcode("D2L", cD2L, 1, LONG_TYPE, DOUBLE_TYPE, JMP_NEXT);
- public final static JOpcode D2F =
- new JOpcode("D2F", cD2F, 1, FLOAT_TYPE, DOUBLE_TYPE, JMP_NEXT);
- public final static JOpcode I2B =
- new JOpcode("I2B", cI2B, 1, INT_TYPE, INT_TYPE, JMP_NEXT);
- public final static JOpcode I2C =
- new JOpcode("I2C", cI2C, 1, INT_TYPE, INT_TYPE, JMP_NEXT);
- public final static JOpcode I2S =
- new JOpcode("I2S", cI2S, 1, INT_TYPE, INT_TYPE, JMP_NEXT);
- public final static JOpcode LCMP =
- new JOpcode("LCMP",
- cLCMP,
- 1,
- INT_TYPE,
- new JType[] { JType.LONG, JType.LONG },
- JMP_NEXT);
- public final static JOpcode FCMPL =
- new JOpcode("FCMPL",
- cFCMPL,
- 1,
- INT_TYPE,
- new JType[] { JType.FLOAT, JType.FLOAT },
- JMP_NEXT);
- public final static JOpcode FCMPG =
- new JOpcode("FCMPG",
- cFCMPG,
- 1,
- INT_TYPE,
- new JType[] { JType.FLOAT, JType.FLOAT },
- JMP_NEXT);
- public final static JOpcode DCMPL =
- new JOpcode("DCMPL",
- cDCMPL,
- 1,
- INT_TYPE,
- new JType[] { JType.DOUBLE, JType.DOUBLE },
- JMP_NEXT);
- public final static JOpcode DCMPG =
- new JOpcode("DCMPG",
- cDCMPG,
- 1,
- INT_TYPE,
- new JType[] { JType.DOUBLE, JType.DOUBLE },
- JMP_NEXT);
- public final static JOpcode IFEQ =
- new JOpcode("IFEQ", cIFEQ, 3, NO_DATA, INT_TYPE, JMP_MAYBE_S2_OFFSET);
- public final static JOpcode IFNE =
- new JOpcode("IFNE", cIFNE, 3, NO_DATA, INT_TYPE, JMP_MAYBE_S2_OFFSET);
- public final static JOpcode IFLT =
- new JOpcode("IFLT", cIFLT, 3, NO_DATA, INT_TYPE, JMP_MAYBE_S2_OFFSET);
- public final static JOpcode IFGE =
- new JOpcode("IFGE", cIFGE, 3, NO_DATA, INT_TYPE, JMP_MAYBE_S2_OFFSET);
- public final static JOpcode IFGT =
- new JOpcode("IFGT", cIFGT, 3, NO_DATA, INT_TYPE, JMP_MAYBE_S2_OFFSET);
- public final static JOpcode IFLE =
- new JOpcode("IFLE", cIFLE, 3, NO_DATA, INT_TYPE, JMP_MAYBE_S2_OFFSET);
- public final static JOpcode IF_ICMPEQ =
- new JOpcode("IF_ICMPEQ",
- cIF_ICMPEQ,
- 3,
- NO_DATA,
- new JType[] { JType.INT, JType.INT },
- JMP_MAYBE_S2_OFFSET);
- public final static JOpcode IF_ICMPNE =
- new JOpcode("IF_ICMPNE",
- cIF_ICMPNE,
- 3,
- NO_DATA,
- new JType[] { JType.INT, JType.INT },
- JMP_MAYBE_S2_OFFSET);
- public final static JOpcode IF_ICMPLT =
- new JOpcode("IF_ICMPLT",
- cIF_ICMPLT,
- 3,
- NO_DATA,
- new JType[] { JType.INT, JType.INT },
- JMP_MAYBE_S2_OFFSET);
- public final static JOpcode IF_ICMPGE =
- new JOpcode("IF_ICMPGE",
- cIF_ICMPGE,
- 3,
- NO_DATA,
- new JType[] { JType.INT, JType.INT },
- JMP_MAYBE_S2_OFFSET);
- public final static JOpcode IF_ICMPGT =
- new JOpcode("IF_ICMPGT",
- cIF_ICMPGT,
- 3,
- NO_DATA,
- new JType[] { JType.INT, JType.INT },
- JMP_MAYBE_S2_OFFSET);
- public final static JOpcode IF_ICMPLE =
- new JOpcode("IF_ICMPLE",
- cIF_ICMPLE,
- 3,
- NO_DATA,
- new JType[] { JType.INT, JType.INT },
- JMP_MAYBE_S2_OFFSET);
- public final static JOpcode IF_ACMPEQ =
- new JOpcode("IF_ACMPEQ",
- cIF_ACMPEQ,
- 3,
- NO_DATA,
- new JType[] { JType.REFERENCE, JType.REFERENCE },
- JMP_MAYBE_S2_OFFSET);
- public final static JOpcode IF_ACMPNE =
- new JOpcode("IF_ACMPNE",
- cIF_ACMPNE,
- 3,
- NO_DATA,
- new JType[] { JType.REFERENCE, JType.REFERENCE },
- JMP_MAYBE_S2_OFFSET);
- public final static JOpcode GOTO =
- new JOpcode("GOTO", cGOTO, 3, NO_DATA, NO_DATA, JMP_ALWAYS_S2_OFFSET);
- public final static JOpcode JSR =
- new JOpcode("JSR", cJSR, 3, ADDRESS_TYPE, NO_DATA, JMP_ALWAYS_S2_OFFSET);
- public final static JOpcode RET =
- new JOpcode("RET", cRET, 2, NO_DATA, NO_DATA, JMP_NONE);
- public final static JOpcode TABLESWITCH = new JOpcode("TABLESWITCH",
- cTABLESWITCH,
- UNKNOWN,
- NO_DATA,
- INT_TYPE,
- JMP_TABLE);
- public final static JOpcode LOOKUPSWITCH = new JOpcode("LOOKUPSWITCH",
- cLOOKUPSWITCH,
- UNKNOWN,
- NO_DATA,
- INT_TYPE,
- JMP_LOOKUP);
- public final static JOpcode IRETURN =
- new JOpcode("IRETURN", cIRETURN, 1, NO_DATA, INT_TYPE, JMP_NONE);
- public final static JOpcode LRETURN =
- new JOpcode("LRETURN", cLRETURN, 1, NO_DATA, LONG_TYPE, JMP_NONE);
- public final static JOpcode FRETURN =
- new JOpcode("FRETURN", cFRETURN, 1, NO_DATA, FLOAT_TYPE, JMP_NONE);
- public final static JOpcode DRETURN =
- new JOpcode("DRETURN", cDRETURN, 1, NO_DATA, DOUBLE_TYPE, JMP_NONE);
- public final static JOpcode ARETURN = new JOpcode("ARETURN",
- cARETURN,
- 1,
- NO_DATA,
- OBJECT_REF_TYPE,
- JMP_NONE);
- public final static JOpcode RETURN =
- new JOpcode("RETURN", cRETURN, 1, NO_DATA, NO_DATA, JMP_NONE);
- public final static JOpcode GETSTATIC = new JOpcode("GETSTATIC",
- cGETSTATIC,
- 3,
- UNKNOWN_TYPE,
- NO_DATA,
- JMP_NEXT);
- public final static JOpcode PUTSTATIC = new JOpcode("PUTSTATIC",
- cPUTSTATIC,
- 3,
- NO_DATA,
- UNKNOWN_TYPE,
- JMP_NEXT);
- public final static JOpcode GETFIELD = new JOpcode("GETFIELD",
- cGETFIELD,
- 3,
- UNKNOWN_TYPE,
- OBJECT_REF_TYPE,
- JMP_NEXT);
- public final static JOpcode PUTFIELD =
- new JOpcode("PUTFIELD", cPUTFIELD, 3, NO_DATA, UNKNOWN_TYPE, JMP_NEXT);
- public final static JOpcode INVOKEVIRTUAL = new JOpcode("INVOKEVIRTUAL",
- cINVOKEVIRTUAL,
- 3,
- NO_DATA,
- UNKNOWN_TYPE,
- JMP_NEXT);
- public final static JOpcode INVOKESPECIAL = new JOpcode("INVOKESPECIAL",
- cINVOKESPECIAL,
- 3,
- NO_DATA,
- UNKNOWN_TYPE,
- JMP_NEXT);
- public final static JOpcode INVOKESTATIC = new JOpcode("INVOKESTATIC",
- cINVOKESTATIC,
- 3,
- NO_DATA,
- UNKNOWN_TYPE,
- JMP_NEXT);
- public final static JOpcode INVOKEINTERFACE =
- new JOpcode("INVOKEINTERFACE",
- cINVOKEINTERFACE,
- 5,
- NO_DATA,
- UNKNOWN_TYPE,
- JMP_NEXT);
- public final static JOpcode NEW =
- new JOpcode("NEW", cNEW, 3, OBJECT_REF_TYPE, NO_DATA, JMP_NEXT);
- public final static JOpcode NEWARRAY =
- new JOpcode("NEWARRAY",
- cNEWARRAY,
- 2,
- ARRAY_REF_TYPE,
- INT_TYPE,
- JMP_NEXT);
- public final static JOpcode ANEWARRAY =
- new JOpcode("ANEWARRAY",
- cANEWARRAY,
- 3,
- ARRAY_REF_TYPE,
- INT_TYPE,
- JMP_NEXT);
- public final static JOpcode ARRAYLENGTH = new JOpcode("ARRAYLENGTH",
- cARRAYLENGTH,
- 1,
- INT_TYPE,
- ARRAY_REF_TYPE,
- JMP_NEXT);
- public final static JOpcode ATHROW = new JOpcode("ATHROW",
- cATHROW,
- 1,
- OBJECT_REF_TYPE,
- OBJECT_REF_TYPE,
- JMP_NONE);
- public final static JOpcode CHECKCAST = new JOpcode("CHECKCAST",
- cCHECKCAST,
- 3,
- OBJECT_REF_TYPE,
- OBJECT_REF_TYPE,
- JMP_NEXT);
- public final static JOpcode INSTANCEOF = new JOpcode("INSTANCEOF",
- cINSTANCEOF,
- 3,
- INT_TYPE,
- OBJECT_REF_TYPE,
- JMP_NEXT);
- public final static JOpcode MONITORENTER = new JOpcode("MONITORENTER",
- cMONITORENTER,
- 1,
- NO_DATA,
- OBJECT_REF_TYPE,
- JMP_NEXT);
- public final static JOpcode MONITOREXIT = new JOpcode("MONITOREXIT",
- cMONITOREXIT,
- 1,
- NO_DATA,
- OBJECT_REF_TYPE,
- JMP_NEXT);
- public final static JOpcode WIDE = new JOpcode("WIDE",
- cWIDE,
- UNKNOWN,
- UNKNOWN_TYPE,
- UNKNOWN_TYPE,
- JMP_NEXT);
- public final static JOpcode MULTIANEWARRAY = new JOpcode("MULTIANEWARRAY",
- cMULTIANEWARRAY,
- 4,
- ARRAY_REF_TYPE,
- UNKNOWN_TYPE,
- JMP_NEXT);
- public final static JOpcode IFNULL = new JOpcode("IFNULL",
- cIFNULL,
- 3,
- NO_DATA,
- REFERENCE_TYPE,
- JMP_MAYBE_S2_OFFSET);
- public final static JOpcode IFNONNULL = new JOpcode("IFNONNULL",
- cIFNONNULL,
- 3,
- NO_DATA,
- REFERENCE_TYPE,
- JMP_MAYBE_S2_OFFSET);
- public final static JOpcode GOTO_W = new JOpcode("GOTO_W",
- cGOTO_W,
- 5,
- NO_DATA,
- NO_DATA,
- JMP_ALWAYS_S4_OFFSET);
- public final static JOpcode JSR_W =
- new JOpcode("JSR_W", cJSR_W, 5, ADDRESS_TYPE, NO_DATA, JMP_NEXT);
-
- public final static JOpcode[] OPCODES = {
- NOP, ACONST_NULL, ICONST_M1, ICONST_0, ICONST_1,
- ICONST_2, ICONST_3, ICONST_4, ICONST_5, LCONST_0,
- LCONST_1, FCONST_0, FCONST_1, FCONST_2, DCONST_0,
- DCONST_1, BIPUSH, SIPUSH, LDC, LDC_W,
- LDC2_W, ILOAD, LLOAD, FLOAD, DLOAD,
- ALOAD, ILOAD_0, ILOAD_1, ILOAD_2, ILOAD_3,
- LLOAD_0, LLOAD_1, LLOAD_2, LLOAD_3, FLOAD_0,
- FLOAD_1, FLOAD_2, FLOAD_3, DLOAD_0, DLOAD_1,
- DLOAD_2, DLOAD_3, ALOAD_0, ALOAD_1, ALOAD_2,
- ALOAD_3, IALOAD, LALOAD, FALOAD, DALOAD,
- AALOAD, BALOAD, CALOAD, SALOAD, ISTORE,
- LSTORE, FSTORE, DSTORE, ASTORE, ISTORE_0,
- ISTORE_1, ISTORE_2, ISTORE_3, LSTORE_0, LSTORE_1,
- LSTORE_2, LSTORE_3, FSTORE_0, FSTORE_1, FSTORE_2,
- FSTORE_3, DSTORE_0, DSTORE_1, DSTORE_2, DSTORE_3,
- ASTORE_0, ASTORE_1, ASTORE_2, ASTORE_3, IASTORE,
- LASTORE, FASTORE, DASTORE, AASTORE, BASTORE,
- CASTORE, SASTORE, POP, POP2, DUP,
- DUP_X1, DUP_X2, DUP2, DUP2_X1, DUP2_X2,
- SWAP, IADD, LADD, FADD, DADD,
- ISUB, LSUB, FSUB, DSUB, IMUL,
- LMUL, FMUL, DMUL, IDIV, LDIV,
- FDIV, DDIV, IREM, LREM, FREM,
- DREM, INEG, LNEG, FNEG, DNEG,
- ISHL, LSHL, ISHR, LSHR, IUSHR,
- LUSHR, IAND, LAND, IOR, LOR,
- IXOR, LXOR, IINC, I2L, I2F,
- I2D, L2I, L2F, L2D, F2I,
- F2L, F2D, D2I, D2L, D2F,
- I2B, I2C, I2S, LCMP, FCMPL,
- FCMPG, DCMPL, DCMPG, IFEQ, IFNE,
- IFLT, IFGE, IFGT, IFLE, IF_ICMPEQ,
- IF_ICMPNE, IF_ICMPLT, IF_ICMPGE, IF_ICMPGT, IF_ICMPLE,
- IF_ACMPEQ, IF_ACMPNE, GOTO, JSR, RET,
- TABLESWITCH, LOOKUPSWITCH, IRETURN, LRETURN, FRETURN,
- DRETURN, ARETURN, RETURN, GETSTATIC, PUTSTATIC,
- GETFIELD, PUTFIELD, INVOKEVIRTUAL, INVOKESPECIAL, INVOKESTATIC,
- INVOKEINTERFACE, null, NEW, NEWARRAY, ANEWARRAY,
- ARRAYLENGTH, ATHROW, CHECKCAST, INSTANCEOF, MONITORENTER,
- MONITOREXIT, WIDE, MULTIANEWARRAY, IFNULL, IFNONNULL,
- GOTO_W, JSR_W
- };
-
- protected JOpcode(String name,
- int code,
- int size,
- JType[] producedDataTypes,
- JType[] consumedDataTypes,
- int jumpKind) {
- this.name = name;
- this.code = code;
- this.size = size;
- this.producedDataTypes = producedDataTypes;
- this.consumedDataTypes = consumedDataTypes;
- this.jumpKind = jumpKind;
- switch (jumpKind) {
- case JMP_NONE: successorCount = 0; break;
- case JMP_NEXT: successorCount = 1; break;
- case JMP_ALWAYS_S2_OFFSET: successorCount = 1; break;
- case JMP_ALWAYS_S4_OFFSET: successorCount = 1; break;
- case JMP_MAYBE_S2_OFFSET: successorCount = 2; break;
- case JMP_TABLE: successorCount = UNKNOWN; break;
- case JMP_LOOKUP: successorCount = UNKNOWN; break;
- default: successorCount = UNKNOWN; break;
- }
- }
-
- public String toString() { return name; }
- protected int getSize() { return size; }
- protected JType[] getProducedDataTypes() { return producedDataTypes; }
- protected JType[] getConsumedDataTypes() { return consumedDataTypes; }
-
- protected int getProducedDataSize() {
- if (producedDataTypes != UNKNOWN_TYPE)
- return JType.getTotalSize(producedDataTypes);
- else {
- switch (code) {
- case cLDC: case cLDC_W: case cBALOAD:
- return 1;
- case cLDC2_W: case cDUP: case cSWAP:
- return 2;
- case cDUP_X1:
- return 3;
- case cDUP_X2: case cDUP2:
- return 4;
- case cDUP2_X1:
- return 5;
- case cDUP2_X2:
- return 6;
- default:
- throw new Error(this.toString());
- }
- }
- }
-
- protected int getConsumedDataSize() {
- if (consumedDataTypes != UNKNOWN_TYPE)
- return JType.getTotalSize(consumedDataTypes);
- else {
- switch (code) {
- case cPOP: case cDUP:
- return 1;
- case cPOP2: case cDUP_X1: case cDUP2: case cSWAP:
- return 2;
- case cDUP_X2: case cDUP2_X1:
- return 3;
- case cDUP2_X2:
- return 4;
- default:
- throw new Error(this.toString());
- }
- }
- }
-
- protected int getProducedDataTypesNumber() {
- if (producedDataTypes != UNKNOWN_TYPE)
- return producedDataTypes.length;
- else {
- switch (code) {
- case cLDC: case cLDC_W: case cLDC2_W: case cBALOAD:
- case cGETSTATIC: case cGETFIELD:
- return 1;
- case cDUP: case cSWAP:
- return 2;
- case cDUP_X2: case cDUP2: case cDUP2_X1: case cDUP2_X2:
- return 2;
- case cDUP_X1:
- return 3;
- default:
- throw new Error(this.toString());
- }
- }
- }
-
- protected int getConsumedDataTypesNumber() {
- if (consumedDataTypes != UNKNOWN_TYPE)
- return consumedDataTypes.length;
- else {
- switch (code) {
- case cPOP: case cDUP: case cPUTSTATIC:
- return 1;
- case cPUTFIELD: case cDUP_X1: case cDUP_X2:
- case cDUP2: case cDUP2_X1: case cPOP2: case cSWAP:
- return 2;
- default:
- throw new Error(this.toString());
- }
- }
- }
-}
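
The public OPCODES table above indexes instruction metadata by raw opcode byte; only entry 186 (invokedynamic) is null, since this library predates that instruction. A minimal lookup sketch; because size and the data-type arrays are protected, anything beyond name and code is assumed to go through JCodeIterator:

    int raw = 0x99;                                 // example byte read from a Code array
    JOpcode op = JOpcode.OPCODES[raw & 0xFF];       // null only for index 186
    System.out.println(op.name + " / " + op.code);  // prints "IFEQ / 153"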
diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JOtherAttribute.java b/src/fjbg/ch/epfl/lamp/fjbg/JOtherAttribute.java
deleted file mode 100644
index 50aa9d3636..0000000000
--- a/src/fjbg/ch/epfl/lamp/fjbg/JOtherAttribute.java
+++ /dev/null
@@ -1,77 +0,0 @@
-/* FJBG -- Fast Java Bytecode Generator
- * Copyright 2002-2013 LAMP/EPFL
- * @author Michel Schinz
- */
-
-package ch.epfl.lamp.fjbg;
-
-import java.io.DataInputStream;
-import java.io.DataOutputStream;
-import java.io.IOException;
-
-/**
- * Attributes which are unknown to the JVM (or at least to this library).
- *
- * @author Michel Schinz
- * @version 1.0
- */
-
-public class JOtherAttribute extends JAttribute {
- protected final String name;
- protected final byte[] contents;
- protected final int length;
-
- public JOtherAttribute(FJBGContext context,
- JClass clazz,
- Object owner,
- String name,
- byte[] contents,
- int length) {
- super(context, clazz, name);
- this.name = name;
- this.contents = contents;
- this.length = length;
- }
-
- public JOtherAttribute(FJBGContext context,
- JClass clazz,
- Object owner,
- String name,
- int size,
- DataInputStream stream)
- throws IOException {
- super(context, clazz, name);
- this.name = name;
- this.contents = new byte[size];
- this.length = size;
-
- stream.readFully(contents, 0, length); // read() may return fewer bytes; the payload must be read in full
- }
-
- public String getName() { return name; }
-
- // Follows javap output format for user-defined attributes.
- /*@Override*/ public String toString() {
- StringBuffer buf = new StringBuffer(" ");
- buf.append(name);
- buf.append(": length = 0x");
- buf.append(Integer.toHexString(length).toUpperCase());
- for (int i = 0; i < length; ++i) {
- if (i % 16 == 0) buf.append("\n ");
- buf.append(hexString(contents[i]));
- buf.append(" ");
- }
- buf.append("\n");
- return buf.toString();
- }
-
- protected int getSize() { return length; }
-
- protected void writeContentsTo(DataOutputStream stream) throws IOException {
- stream.write(contents, 0, length);
- }
-
- private static final String hexString(int i) {
- i &= 0xFF; // bytes arrive sign-extended; mask so every value prints as exactly two hex digits
- return ((0 <= i && i < 16) ? "0" : "") + Integer.toHexString(i).toUpperCase();
- }
-}
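
JOtherAttribute above lets a generator carry attributes this library does not model as opaque bytes. A hedged sketch; the attribute name, the placeholder payload, and the assumption that JClass inherits addAttribute from JMember are illustrative only:

    byte[] payload = new byte[] { 0x05, 0x00, 0x00 };   // placeholder bytes
    JOtherAttribute raw =
        new JOtherAttribute(context, clazz, clazz, "ScalaSig", payload, payload.length);
    clazz.addAttribute(raw);
    System.out.print(raw);   // javap-style hex dump of the three bytes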
diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JReferenceType.java b/src/fjbg/ch/epfl/lamp/fjbg/JReferenceType.java
deleted file mode 100644
index 73d1026c04..0000000000
--- a/src/fjbg/ch/epfl/lamp/fjbg/JReferenceType.java
+++ /dev/null
@@ -1,19 +0,0 @@
-/* FJBG -- Fast Java Bytecode Generator
- * Copyright 2002-2013 LAMP/EPFL
- * @author Michel Schinz
- */
-
-package ch.epfl.lamp.fjbg;
-
-/**
- * Types for Java references, i.e. arrays and objects.
- *
- * @author Michel Schinz
- * @version 1.0
- */
-
-abstract public class JReferenceType extends JType {
- public boolean isReferenceType() { return true; }
-
- abstract public String getDescriptor();
-}
diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JSourceFileAttribute.java b/src/fjbg/ch/epfl/lamp/fjbg/JSourceFileAttribute.java
deleted file mode 100644
index 3a17cb2c44..0000000000
--- a/src/fjbg/ch/epfl/lamp/fjbg/JSourceFileAttribute.java
+++ /dev/null
@@ -1,69 +0,0 @@
-/* FJBG -- Fast Java Bytecode Generator
- * Copyright 2002-2013 LAMP/EPFL
- * @author Michel Schinz
- */
-
-package ch.epfl.lamp.fjbg;
-
-import java.io.DataInputStream;
-import java.io.DataOutputStream;
-import java.io.IOException;
-
-/**
- * SourceFile attribute, which can be attached to class files to
- * associate them with their source file.
- *
- * There can be no more than one SourceFile attribute in the attributes table
- * of a given ClassFile structure. See section 4.8.9 of the JVM specification.
- *
- * @author Michel Schinz
- * @version 1.0
- */
-
-public class JSourceFileAttribute extends JAttribute {
- protected final String sourceFileName;
- protected final int sourceFileIndex;
-
- public JSourceFileAttribute(FJBGContext context,
- JClass clazz,
- String sourceFileName) {
- super(context, clazz);
- this.sourceFileName = sourceFileName;
- this.sourceFileIndex = clazz.getConstantPool().addUtf8(sourceFileName);
- }
-
- public JSourceFileAttribute(FJBGContext context,
- JClass clazz,
- Object owner,
- String name,
- int size,
- DataInputStream stream)
- throws IOException {
- super(context, clazz, name);
-
- this.sourceFileIndex = stream.readShort();
- this.sourceFileName = clazz.getConstantPool().lookupUtf8(sourceFileIndex);
-
- assert name.equals(getName());
- }
-
- public String getName() { return "SourceFile"; }
-
- public String getFileName() { return sourceFileName; }
-
- // Follows javap output format for SourceFile attribute.
- /*@Override*/ public String toString() {
- StringBuffer buf = new StringBuffer(" SourceFile: \"");
- buf.append(sourceFileName);
- buf.append("\"\n");
- return buf.toString();
- }
-
- protected int getSize() {
- return 2; // Short.SIZE
- }
-
- protected void writeContentsTo(DataOutputStream stream) throws IOException {
- stream.writeShort(sourceFileIndex);
- }
-}
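
The on-disk layout this class reads and writes is small: a u2 attribute_name_index, a u4 attribute_length that is always 2, and the u2 sourcefile_index written by writeContentsTo above. A standalone sketch of that encoding, with the constant-pool indices treated as caller-supplied placeholders:

    import java.io.ByteArrayOutputStream;
    import java.io.DataOutputStream;
    import java.io.IOException;

    public class SourceFileAttributeLayout {
        // Encodes a complete SourceFile attribute: name index, length, file-name index.
        static byte[] encode(int attributeNameIndex, int sourceFileIndex) throws IOException {
            ByteArrayOutputStream bytes = new ByteArrayOutputStream();
            DataOutputStream out = new DataOutputStream(bytes);
            out.writeShort(attributeNameIndex); // u2: constant-pool index of "SourceFile"
            out.writeInt(2);                    // u4: attribute_length, matches getSize() above
            out.writeShort(sourceFileIndex);    // u2: constant-pool index of the file name
            out.flush();
            return bytes.toByteArray();         // 8 bytes in total
        }
    }
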
diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JStackMapTableAttribute.java b/src/fjbg/ch/epfl/lamp/fjbg/JStackMapTableAttribute.java
deleted file mode 100644
index 72a5484d40..0000000000
--- a/src/fjbg/ch/epfl/lamp/fjbg/JStackMapTableAttribute.java
+++ /dev/null
@@ -1,282 +0,0 @@
-/* FJBG -- Fast Java Bytecode Generator
- * Copyright 2002-2013 LAMP/EPFL
- * @author Michel Schinz
- */
-
-package ch.epfl.lamp.fjbg;
-
-import java.io.DataInputStream;
-import java.io.DataOutputStream;
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Iterator;
-import java.util.List;
-
-/**
- *
- * @author Stephane Micheloud
- * @version 1.0
- */
-
-public class JStackMapTableAttribute extends JAttribute {
- /** Constant pool of the current classfile. */
- private JConstantPool pool;
-
- /** StackMapTable entries */
- protected final List/*<Frame>*/ entries = new ArrayList();
- protected int entriesSize = 0;
- protected boolean usesU2;
-
- public JStackMapTableAttribute(FJBGContext context,
- JClass clazz,
- JCode code) {
- super(context, clazz);
- this.pool = clazz.pool;
-
- assert code.getOwner().getOwner() == clazz;
- }
-
- public JStackMapTableAttribute(FJBGContext context,
- JClass clazz,
- Object owner,
- String name,
- int size,
- DataInputStream stream)
- throws IOException {
- super(context, clazz, name);
- this.pool = clazz.pool;
-
- int count = stream.readShort();
- this.usesU2 = count < 65536;
- for (int i = 0; i < count; ++i)
- this.entries.add(new Frame(stream));
- this.entriesSize = computeSize();
-
- assert name.equals(getName());
- }
-
- public String getName() { return "StackMapTable"; }
-
- // Follows javap output format for StackMapTable attribute.
- /*@Override*/ public String toString() {
- Frame frame = null;
- StringBuffer buf = new StringBuffer(" StackMapTable: number_of_entries = ");
- buf.append(entries.size());
- Iterator it = entries.iterator();
- while (it.hasNext()) {
- frame = (Frame)it.next();
- buf.append("\n frame_type = ");
- buf.append(frame.tag);
- buf.append(" /* ");
- buf.append(getFrameType(frame.tag));
- buf.append(" */");
- if (frame.offsetDelta != -1)
- buf.append("\n offset_delta = "+frame.offsetDelta);
- if (frame.locals != null)
- appendTypeInfoArray(buf, "locals", frame.locals);
- if (frame.stackItems != null)
- appendTypeInfoArray(buf, "stack", frame.stackItems);
- }
- buf.append("\n");
- return buf.toString();
- }
-
- protected int getSize() {
- return entriesSize;
- }
-
- protected void writeContentsTo(DataOutputStream stream) throws IOException {
- stream.writeShort(entriesSize);
- Iterator it = entries.iterator();
- while (it.hasNext()) {
- Frame frame = (Frame)it.next();
- frame.writeContentsTo(stream);
- }
- }
-
- private class TypeInfo {
- final int tag;
- final int poolIndexOrOffset; // tag == 7 => poolIndex, tag = 8 => offset
- private int bytes;
- TypeInfo(DataInputStream stream) throws IOException {
- int size = 1;
- this.tag = stream.readByte();
- if (tag == 7) { // ITEM_Object; // 7
- poolIndexOrOffset = stream.readShort();
- size += 2;
- } else if (tag == 8) { // ITEM_Uninitialized // 8
- poolIndexOrOffset = (usesU2) ? stream.readShort() : stream.readInt();
- size += (usesU2) ? 2 : 4;
- } else
- poolIndexOrOffset = -1;
- this.bytes += size;
- }
- int getSize() { return bytes; }
- void writeContentsTo(DataOutputStream stream) throws IOException {
- stream.writeByte(tag);
- if (tag == 7) { // ITEM_Object; // 7
- stream.writeShort(poolIndexOrOffset);
- } else if (tag == 8) { // ITEM_Uninitialized // 8
- if (usesU2) stream.writeShort(poolIndexOrOffset);
- else stream.writeInt(poolIndexOrOffset);
- }
- }
- /*@Override*/ public String toString() {
- switch (tag) {
- case 0: // ITEM_Top
- return "<top>";
- case 1: // ITEM_Integer
- return "int";
- case 2: // ITEM_Float
- return "float";
- case 3: // ITEM_Double
- return "double";
- case 4: // ITEM_Long
- return "long";
- case 5: // ITEM_Null
- return "null";
- case 6: // ITEM_UninitializedThis
- return "this";
- case 7: // ITEM_Object
- String name = pool.lookupClass(poolIndexOrOffset);
- if (name.startsWith("[")) name = "\""+name+"\"";
- return "class "+name;
- case 8: // ITEM_Uninitialized
- return "<uninitialized>";
- default:
- return String.valueOf(tag);
- }
- }
- }
-
- private class Frame {
- final int tag;
- int offsetDelta = -1;
- TypeInfo[] stackItems = null;
- TypeInfo[] locals = null;
- private int bytes;
- Frame(DataInputStream stream) throws IOException {
- // The stack_map_frame structure consists of a one-byte tag
- // followed by zero or more bytes.
- this.tag = stream.readUnsignedByte();
- if (tag < 64) { // SAME; // 0-63
- //done
- } else if (tag < 128) { // SAME_LOCALS_1_STACK_ITEM; // 64-127
- this.offsetDelta = tag - 64;
- readStackItems(stream, 1);
- } else if (tag < 248) { // reserved for future use.
- assert false : "Tags in the range [128-247] are reserved for future use.";
- } else if (tag < 251) { // CHOP; // 248-250
- int k = 251 - tag;
- readOffsetDelta(stream);
- } else if (tag == 251) { // SAME_FRAME_EXTENDED
- readOffsetDelta(stream);
- } else if (tag < 255) { // APPEND; // 252-254
- readOffsetDelta(stream);
- readLocals(stream, tag - 251);
- } else { // FULL_FRAME; // 255
- readOffsetDelta(stream);
- readLocals(stream);
- readStackItems(stream);
- }
- }
- int getSize() { return bytes; }
- void readOffsetDelta(DataInputStream stream) throws IOException {
- this.offsetDelta = (usesU2) ? stream.readShort() : stream.readInt();
- this.bytes += (usesU2) ? 2 : 4;
- }
- int getOffsetDelta() { return offsetDelta; }
- void readStackItems(DataInputStream stream, int k) throws IOException {
- this.stackItems = new TypeInfo[k];
- for (int i = 0; i < k; ++i) {
- stackItems[i] = new TypeInfo(stream);
- this.bytes += stackItems[i].getSize();
- }
- }
- void readStackItems(DataInputStream stream) throws IOException {
- int k = (usesU2) ? stream.readShort() : stream.readInt();
- this.bytes += (usesU2) ? 2 : 4;
- readStackItems(stream, k);
- }
- void readLocals(DataInputStream stream, int k) throws IOException {
- this.locals = new TypeInfo[k];
- for (int i = 0; i < k; ++i) {
- locals[i] = new TypeInfo(stream);
- this.bytes += locals[i].getSize();
- }
- }
- void readLocals(DataInputStream stream) throws IOException {
- int k = (usesU2) ? stream.readShort() : stream.readInt();
- this.bytes += (usesU2) ? 2 : 4;
- readLocals(stream, k);
- }
- void writeContentsTo(DataOutputStream stream) throws IOException {
- stream.writeByte(tag);
- if (tag < 64) {
- //done
- } else if (tag < 128) { // SAME_LOCALS_1_STACK_ITEM; // 64-127
- assert stackItems.length == 1;
- stackItems[0].writeContentsTo(stream);
- } else if (tag < 248) {
- assert false : "Tags in the range [128-247] are reserved for future use.";
- } else if (tag < 251) {
- if (usesU2) stream.writeShort(offsetDelta);
- else stream.writeInt(offsetDelta);
- } else if (tag == 251) {
- if (usesU2) stream.writeShort(offsetDelta);
- else stream.writeInt(offsetDelta);
- } else if (tag < 255) { // APPEND; // 252-254
- if (usesU2) stream.writeShort(offsetDelta);
- else stream.writeInt(offsetDelta);
- for (int i = 0; i < locals.length; ++i)
- locals[i].writeContentsTo(stream);
- } else {
- if (usesU2) stream.writeShort(offsetDelta);
- else stream.writeInt(offsetDelta);
- for (int i = 0; i < locals.length; ++i)
- locals[i].writeContentsTo(stream);
- for (int i = 0; i < stackItems.length; ++i)
- stackItems[i].writeContentsTo(stream);
- }
- }
- }
-
- private int computeSize() {
- int size = (usesU2) ? 2 : 4; // number of frames
- Iterator it = entries.iterator();
- while (it.hasNext()) {
- Frame frame = (Frame)it.next();
- size += frame.getSize();
- }
- return size;
- }
-
- private static final String getFrameType(int tag) {
- if (tag < 64) return "same";
- else if (tag < 128) return "same locals 1 stack item";
- else if (tag < 248) return "<reserved>";
- else if (tag < 251) return "chop";
- else if (tag == 251) return "same frame extended";
- else if (tag < 255) return "append";
- else return "full frame";
- }
-
- private static StringBuffer appendTypeInfoArray(StringBuffer buf,
- String s, TypeInfo[] a) {
- buf.append("\n ");
- buf.append(s);
- buf.append(" = ");
- if (a.length > 0) {
- buf.append("[ ");
- for (int i = 0; i < a.length; ++i) {
- if (i > 0) buf.append(", ");
- buf.append(a[i]);
- }
- buf.append(" ]");
- }
- else
- buf.append("[]");
- return buf;
- }
-
-}
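
For orientation, the frame_type ranges of the StackMapTable entry format are summarized in the reference sketch below; note that the class-file format assigns tag 247 to same_locals_1_stack_item_frame_extended, which the reader above folds into its reserved range:

    public class StackMapFrameKinds {
        // Maps a stack_map_frame tag to its frame kind, per the class-file format.
        static String kind(int tag) {
            if (tag < 0 || tag > 255) throw new IllegalArgumentException("tag: " + tag);
            if (tag <= 63)  return "same_frame";
            if (tag <= 127) return "same_locals_1_stack_item_frame";
            if (tag <= 246) return "(reserved)";
            if (tag == 247) return "same_locals_1_stack_item_frame_extended";
            if (tag <= 250) return "chop_frame";
            if (tag == 251) return "same_frame_extended";
            if (tag <= 254) return "append_frame";
            return "full_frame"; // 255
        }

        public static void main(String[] args) {
            System.out.println(kind(252)); // append_frame
        }
    }
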
diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JType.java b/src/fjbg/ch/epfl/lamp/fjbg/JType.java
deleted file mode 100644
index 298a2b0565..0000000000
--- a/src/fjbg/ch/epfl/lamp/fjbg/JType.java
+++ /dev/null
@@ -1,316 +0,0 @@
-/* FJBG -- Fast Java Bytecode Generator
- * Copyright 2002-2013 LAMP/EPFL
- * @author Michel Schinz
- */
-
-package ch.epfl.lamp.fjbg;
-
-import java.io.IOException;
-import java.io.StringReader;
-import java.util.ArrayList;
-
-/**
- * Representation of Java types.
- *
- * @version 1.0
- * @author Michel Schinz
- */
-
-abstract public class JType {
- abstract public int getSize();
- abstract public String getSignature();
- abstract public int getTag();
- abstract public String toString();
- abstract public boolean isCompatibleWith(JType other);
-
- public boolean isValueType() { return false; }
- public boolean isObjectType() { return false; }
- public boolean isArrayType() { return false; }
- public boolean isReferenceType() { return false; }
-
- // Tags for types. Taken from BCEL.
- public static final int T_BOOLEAN = 4;
- public static final int T_CHAR = 5;
- public static final int T_FLOAT = 6;
- public static final int T_DOUBLE = 7;
- public static final int T_BYTE = 8;
- public static final int T_SHORT = 9;
- public static final int T_INT = 10;
- public static final int T_LONG = 11;
- public static final int T_VOID = 12; // Non-standard
- public static final int T_ARRAY = 13;
- public static final int T_OBJECT = 14;
- public static final int T_UNKNOWN = 15;
- public static final int T_ADDRESS = 16;
-
- public static final int T_REFERENCE = 17; // type compatible with references
-
- public static final JType[] EMPTY_ARRAY = new JType[0];
-
- protected static JType parseSig(StringReader s) throws IOException {
- int nextChar = s.read();
- if (nextChar == -1) throw new IllegalArgumentException();
-
- switch ((char)nextChar) {
- case 'V' : return VOID;
- case 'Z' : return BOOLEAN;
- case 'B' : return BYTE;
- case 'C' : return CHAR;
- case 'S' : return SHORT;
- case 'I' : return INT;
- case 'F' : return FLOAT;
- case 'J' : return LONG;
- case 'D' : return DOUBLE;
- case 'L': {
- StringBuffer className = new StringBuffer();
- for (;;) {
- nextChar = s.read();
- if (nextChar == -1 || nextChar == ';') break;
- className.append(nextChar == '/' ? ':' : ((char)nextChar));
- }
- if (nextChar != ';') throw new IllegalArgumentException();
- return new JObjectType(className.toString());
- }
- case '[': {
- JType elemType = parseSig(s);
- return new JArrayType(elemType);
- }
- case '(': {
- ArrayList argTps = new ArrayList();
- for (;;) {
- s.mark(1);
- nextChar = s.read();
- if (nextChar == -1 || nextChar == ')') break;
- s.reset();
- argTps.add(parseSig(s));
- }
- if (nextChar != ')') throw new IllegalArgumentException("a");
- JType[] argTpsA = (JType[])argTps.toArray(new JType[argTps.size()]);
- JType returnType = parseSig(s);
- return new JMethodType(returnType, argTpsA);
- }
- default:
- throw new IllegalArgumentException();
- }
- }
-
- /**
- * A signature is a string representing the generic type of a field or
- * method, or generic type information for a class declaration.
- * See section 4.4.4 of the JVM specification.
- */
- public static JType parseSignature(String signature) {
- try {
- StringReader sigReader = new StringReader(signature);
- JType parsed = parseSig(sigReader);
- if (sigReader.read() != -1)
- throw new IllegalArgumentException();
- return parsed;
- } catch (IllegalArgumentException e) {
- throw new IllegalArgumentException("invalid signature " + signature);
- } catch (IOException e) {
- throw new Error(e);
- }
- }
-
- public static int getTotalSize(JType[] types) {
- int size = 0;
- for (int i = 0; i < types.length; ++i)
- size += types[i].getSize();
- return size;
- }
-
- protected JType() {}
-
- public static JType VOID = new JType() {
- public int getSize() { return 0; }
- public String getSignature() { return "V"; }
- public int getTag() { return T_VOID; }
- public String toString() { return "void"; }
- public boolean isCompatibleWith(JType other) {
- throw new UnsupportedOperationException("type VOID is no real "
- + "data type therefore "
- + "cannot be assigned to "
- + other.toString());
- }
- };
-
- public static JType BOOLEAN = new JType() {
- public int getSize() { return 1; }
- public String getSignature() { return "Z"; }
- public int getTag() { return T_BOOLEAN; }
- public String toString() { return "boolean"; }
- public boolean isValueType() { return true; }
- public boolean isCompatibleWith(JType other) {
- return other == BOOLEAN
- || other == INT
- || other == BYTE
- || other == CHAR
- || other == SHORT;
- }
- };
-
- public static JType BYTE = new JType() {
- public int getSize() { return 1; }
- public String getSignature() { return "B"; }
- public int getTag() { return T_BYTE; }
- public String toString() { return "byte"; }
- public boolean isValueType() { return true; }
- public boolean isCompatibleWith(JType other) {
- return other == BOOLEAN
- || other == INT
- || other == BYTE
- || other == CHAR
- || other == SHORT;
- }
- };
-
- public static JType CHAR = new JType() {
- public int getSize() { return 1; }
- public String getSignature() { return "C"; }
- public int getTag() { return T_CHAR; }
- public String toString() { return "char"; }
- public boolean isValueType() { return true; }
- public boolean isCompatibleWith(JType other) {
- return other == BOOLEAN
- || other == INT
- || other == BYTE
- || other == CHAR
- || other == SHORT;
- }
- };
-
- public static JType SHORT = new JType() {
- public int getSize() { return 1; }
- public String getSignature() { return "S"; }
- public int getTag() { return T_SHORT; }
- public String toString() { return "short"; }
- public boolean isValueType() { return true; }
- public boolean isCompatibleWith(JType other) {
- return other == BOOLEAN
- || other == INT
- || other == BYTE
- || other == CHAR
- || other == SHORT;
- }
- };
-
- public static JType INT = new JType() {
- public int getSize() { return 1; }
- public String getSignature() { return "I"; }
- public int getTag() { return T_INT; }
- public String toString() { return "int"; }
- public boolean isValueType() { return true; }
- public boolean isCompatibleWith(JType other) {
- return other == BOOLEAN
- || other == INT
- || other == BYTE
- || other == CHAR
- || other == SHORT;
- }
- };
-
- public static JType FLOAT = new JType() {
- public int getSize() { return 1; }
- public String getSignature() { return "F"; }
- public int getTag() { return T_FLOAT; }
- public String toString() { return "float"; }
- public boolean isValueType() { return true; }
- public boolean isCompatibleWith(JType other) {
- return other == FLOAT;
- }
- };
-
- public static JType LONG = new JType() {
- public int getSize() { return 2; }
- public String getSignature() { return "J"; }
- public int getTag() { return T_LONG; }
- public String toString() { return "long"; }
- public boolean isValueType() { return true; }
- public boolean isCompatibleWith(JType other) {
- return other == LONG;
- }
- };
-
- public static JType DOUBLE = new JType() {
- public int getSize() { return 2; }
- public String getSignature() { return "D"; }
- public int getTag() { return T_DOUBLE; }
- public String toString() { return "double"; }
- public boolean isValueType() { return true; }
- public boolean isCompatibleWith(JType other) {
- return other == DOUBLE;
- }
- };
-
- public static JType REFERENCE = new JType() {
- public int getSize() { return 1; }
- public String getSignature() {
- throw new UnsupportedOperationException("type REFERENCE is no real "
- + "data type and therefore "
- + "has no signature");
- }
- public int getTag() { return T_REFERENCE; }
- public String toString() { return "<reference>"; }
- public boolean isCompatibleWith(JType other) {
- throw new UnsupportedOperationException("type REFERENCE is no real "
- + "data type and therefore "
- + "cannot be assigned to "
- + other.toString());
- }
- };
-
- public static JType ADDRESS = new JType() {
- public int getSize() { return 1; }
- public String getSignature() {
- throw new UnsupportedOperationException("type ADDRESS is no usable "
- + "data type and therefore "
- + "has no signature");
- }
- public int getTag() { return T_ADDRESS; }
- public String toString() { return "<address>"; }
- public boolean isCompatibleWith(JType other) {
- return other == ADDRESS;
- }
- };
-
- public static JType UNKNOWN = new JType() {
- public int getSize() {
- throw new UnsupportedOperationException("type UNKNOWN is no real "
- + "data type and therefore "
- + "has no size");
- }
- public String getSignature() {
- throw new UnsupportedOperationException("type UNKNOWN is no real "
- + "data type and therefore "
- + "has no signature");
- }
- public int getTag() { return T_UNKNOWN; }
- public String toString() { return "<unknown>"; }
- public boolean isCompatibleWith(JType other) {
- throw new UnsupportedOperationException("type UNKNOWN is no real "
- + "data type and therefore "
- + "cannot be assigned to "
- + other.toString());
- }
- };
-
- protected static String tagToString(int tag) {
- switch (tag) {
- case T_BOOLEAN : return "boolean";
- case T_CHAR : return "char";
- case T_FLOAT : return "float";
- case T_DOUBLE : return "double";
- case T_BYTE : return "byte";
- case T_SHORT : return "short";
- case T_INT : return "int";
- case T_LONG : return "long";
- case T_VOID : return "void"; // Non-standard
- case T_ARRAY : return "[]";
- case T_OBJECT : return "Object";
- case T_UNKNOWN : return "<unknown>";
- case T_ADDRESS : return "<address>";
- default: return String.valueOf(tag);
- }
- }
-}
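
parseSig above is a small recursive-descent parser over JVM type descriptors. A condensed, standalone sketch of the same grammar for single (non-method) descriptors follows; method descriptors add the '(' args ')' return form handled above, and this version maps '/' to '.' for readability where the code above maps it to ':':

    import java.io.IOException;
    import java.io.StringReader;

    public class DescriptorParser {
        // Parses one descriptor into a readable type name.
        static String parse(StringReader in) throws IOException {
            int c = in.read();
            switch (c) {
                case 'V': return "void";
                case 'Z': return "boolean";
                case 'B': return "byte";
                case 'C': return "char";
                case 'S': return "short";
                case 'I': return "int";
                case 'F': return "float";
                case 'J': return "long";
                case 'D': return "double";
                case 'L': {
                    StringBuilder name = new StringBuilder();
                    for (int n = in.read(); n != ';' && n != -1; n = in.read())
                        name.append(n == '/' ? '.' : (char) n);
                    return name.toString();
                }
                case '[': return parse(in) + "[]";
                default:  throw new IllegalArgumentException("bad descriptor: " + (char) c);
            }
        }

        public static void main(String[] args) throws IOException {
            System.out.println(parse(new StringReader("[Ljava/lang/Object;"))); // java.lang.Object[]
            System.out.println(parse(new StringReader("J")));                   // long
        }
    }
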
diff --git a/src/fjbg/ch/epfl/lamp/fjbg/Main.java b/src/fjbg/ch/epfl/lamp/fjbg/Main.java
deleted file mode 100644
index 810ee7c400..0000000000
--- a/src/fjbg/ch/epfl/lamp/fjbg/Main.java
+++ /dev/null
@@ -1,131 +0,0 @@
-/* FJBG -- Fast Java Bytecode Generator
- * Copyright 2002-2013 LAMP/EPFL
- * @author Michel Schinz
- */
-
-package ch.epfl.lamp.fjbg;
-
-import java.io.DataInputStream;
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.InputStream;
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.jar.JarFile;
-import java.util.zip.ZipEntry;
-
-/**
- * Main program entry to execute the FJBG reader from the command line.
- *
- * The reader prints out the decoded data in the same output format as
- * javap, the Java bytecode disassembler of the Sun J2SE SDK.
- *
- * @author Stephane Micheloud
- * @version 1.1
- */
-
-public class Main {
- private static final String PRODUCT_STRING = "Fast Java Bytecode Generator";
- private static final String VERSION_STRING = "version 1.1";
-
- private static final int ACTION_USAGE = 0;
- private static final int ACTION_DONE = 1;
- private static final int ACTION_PROCEED = 2;
-
- private static String classPath = ".";
- private static String[] classNames = null;
-
- public static void main(String[] args) {
- switch (parseArgs(args)) {
- case ACTION_USAGE: printUsage(); break;
- case ACTION_PROCEED: processClasses(); break;
- default:
- }
- }
-
- private static void processClasses() {
- FJBGContext fjbgContext = new FJBGContext(49, 0);
- if (classNames.length > 0)
- try {
- for (int i = 0; i < classNames.length; ++i)
- processClass(fjbgContext, classNames[i]);
- } catch (IOException e) {
- System.err.println(e.getMessage());
- }
- else
- System.err.println(
- "No classes were specified on the command line. Try -help.");
- }
-
- private static void processClass(FJBGContext fjbgContext, String className)
- throws IOException {
- InputStream in = getInputStream(className);
- JClass jclass = fjbgContext.JClass(new DataInputStream(in));
- System.out.println(jclass);
- in.close();
- }
-
- private static InputStream getInputStream(String className) throws IOException {
- String name = null;
- String[] paths = classPath.split(File.pathSeparator);
- for (int i = 0; i < paths.length; ++i) {
- File parent = new File(paths[i]);
- if (parent.isDirectory()) {
- name = className.replace('.', File.separatorChar)+".class";
- File f = new File(parent, name);
- if (f.isFile()) return new FileInputStream(f);
- } else if (paths[i].endsWith(".jar")) {
- JarFile f = new JarFile(parent);
- name = className.replace('.', '/')+".class";
- ZipEntry e = f.getEntry(name);
- if (e != null) return f.getInputStream(e);
- }
- }
- throw new IOException("ERROR:Could not find "+className);
- }
-
- private static int parseArgs(String[] args) {
- ArrayList/*<String>*/ classes = new ArrayList();
- String arg = null;
- int action = ACTION_USAGE;
- int i = 0, n = args.length;
- while (i < n) {
- arg = args[i];
- if (arg.equals("-classpath") && (i+1) < n) {
- classPath = args[i+1]; i += 2;
- } else if (arg.equals("-cp") && (i+1) < n) {
- classPath = args[i+1]; i += 2;
- } else if (arg.equals("-help")) {
- i = n+1;
- //} else if (arg.equals("-v")) {
- // verbose = true; i += 1;
- } else if (arg.equals("-version")) {
- System.err.println(PRODUCT_STRING+" "+VERSION_STRING);
- action = ACTION_DONE; i = n+1;
- } else if (arg.startsWith("-")) {
- System.err.println("invalid flag: "+arg);
- i = n+1;
- } else {
- classes.add(arg); i += 1;
- }
- }
- if (i == n && i > 0) {
- classNames = (String[])classes.toArray(new String[classes.size()]);
- action = ACTION_PROCEED;
- }
- return action;
- }
-
- private static void printUsage() {
- System.out.println("Usage: fjbg <options> <classes>");
- System.out.println();
- System.out.println("where possible options include:");
- System.out.println(" -cp <path> Specify where to find user class files");
- System.out.println(" -classpath <path> Specify where to find user class files");
- System.out.println(" -help Print a synopsis of standard options");
- System.out.println(" -version Version information");
- System.out.println();
- System.exit(1);
- }
-}
-
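
The removed entry point above is a thin wrapper over the FJBG reading API. A minimal sketch of the same programmatic use, mirroring processClass, with the class-file version pair (49, 0) taken from Main and the input path as an illustrative value:

    import ch.epfl.lamp.fjbg.FJBGContext;
    import ch.epfl.lamp.fjbg.JClass;
    import java.io.DataInputStream;
    import java.io.FileInputStream;
    import java.io.IOException;

    public class ReadOneClass {
        public static void main(String[] args) throws IOException {
            FJBGContext context = new FJBGContext(49, 0);       // same version pair Main uses
            DataInputStream in =
                new DataInputStream(new FileInputStream("Foo.class")); // illustrative path
            try {
                JClass jclass = context.JClass(in);             // decode the class file
                System.out.println(jclass);                     // javap-like output
            } finally {
                in.close();
            }
        }
    }
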
diff --git a/src/fjbg/ch/epfl/lamp/util/ByteArray.java b/src/fjbg/ch/epfl/lamp/util/ByteArray.java
deleted file mode 100644
index b852e1ac1f..0000000000
--- a/src/fjbg/ch/epfl/lamp/util/ByteArray.java
+++ /dev/null
@@ -1,145 +0,0 @@
-/* FJBG -- Fast Java Bytecode Generator
- * Copyright 2002-2013 LAMP/EPFL
- * @author Michel Schinz
- */
-
-package ch.epfl.lamp.util;
-
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.OutputStream;
-
-/**
- * Array of bytes.
- *
- * @author Michel Schinz
- * @version 1.0
- */
-
-public class ByteArray {
- protected final static int BYTE_BLOCK_BITS = 8;
- protected final static int BYTE_BLOCK_SIZE = 1 << BYTE_BLOCK_BITS;
- protected final static int BYTE_BLOCK_MASK = BYTE_BLOCK_SIZE - 1;
-
- protected byte[][] data = new byte[][] { new byte[BYTE_BLOCK_SIZE] };
- protected int pos = 0; // The next free position.
-
- protected boolean frozen = false;
-
- public ByteArray() { }
-
- public ByteArray(InputStream stream, int size) throws IOException {
- pos = size;
- for (int i = 0; size > 0; ++i) {
- int sizeToRead = Math.min(BYTE_BLOCK_SIZE, size);
- stream.read(data[i], 0, sizeToRead);
-
- size -= sizeToRead;
- if (size > 0) addNewBlock();
- }
- }
-
- public void freeze() { frozen = true; }
-
- public int nextBytePosition() {
- return pos;
- }
-
- public int getSize() {
- return pos;
- }
-
- protected void addNewBlock() {
- int nextBlockPos = pos >>> BYTE_BLOCK_BITS;
- if (nextBlockPos == data.length) {
- byte[][] newData = new byte[data.length * 2][];
- System.arraycopy(data, 0, newData, 0, data.length);
- data = newData;
- }
- assert data[nextBlockPos] == null : pos + " " + nextBlockPos;
- data[nextBlockPos] = new byte[BYTE_BLOCK_SIZE];
- }
-
- protected void addByte(int b) {
- assert !frozen;
-
- if ((pos & BYTE_BLOCK_MASK) == 0 && pos > 0)
- addNewBlock();
- int currPos = pos++;
- data[currPos >>> BYTE_BLOCK_BITS][currPos & BYTE_BLOCK_MASK] = (byte)b;
- }
-
- public void addU1(int i) {
- assert i <= 0xFF : i;
- addByte(i);
- }
-
- public void addU2(int i) {
- assert i <= 0xFFFF : i;
-
- addByte(i >>> 8);
- addByte(i & 0xFF);
- }
-
- public void addU4(int i) {
- addByte(i >>> 24);
- addByte((i >>> 16) & 0xFF);
- addByte((i >>> 8) & 0xFF);
- addByte(i & 0xFF);
- }
-
- public void putByte(int targetPos, int b) {
- assert !frozen;
- assert targetPos < pos : targetPos + " >= " + pos;
-
- data[targetPos >>> BYTE_BLOCK_BITS][targetPos & BYTE_BLOCK_MASK] = (byte)b;
- }
-
- public void putU2(int targetPos, int i) {
- assert i < 0xFFFF : i;
- putByte(targetPos, i >>> 8);
- putByte(targetPos + 1, i & 0xFF);
- }
-
- public void putU4(int targetPos, int i) {
- putByte(targetPos, i >>> 24);
- putByte(targetPos + 1, (i >>> 16) & 0xFF);
- putByte(targetPos + 2, (i >>> 8) & 0xFF);
- putByte(targetPos + 3, i & 0xFF);
- }
-
- public int getU1(int sourcePos) {
- assert sourcePos < pos : sourcePos + " >= " + pos;
- return data[sourcePos >>> BYTE_BLOCK_BITS][sourcePos & BYTE_BLOCK_MASK] & 0xFF;
- }
-
- public int getU2(int sourcePos) {
- return (getU1(sourcePos) << 8) | getU1(sourcePos + 1);
- }
-
- public int getU4(int sourcePos) {
- return (getU2(sourcePos) << 16) | getU2(sourcePos + 2);
- }
-
- public int getS1(int sourcePos) {
- assert sourcePos < pos : sourcePos + " >= " + pos;
- return data[sourcePos >>> BYTE_BLOCK_BITS][sourcePos & BYTE_BLOCK_MASK];
- }
-
- public int getS2(int sourcePos) {
- return (getS1(sourcePos) << 8) | getU1(sourcePos + 1);
- }
-
- public int getS4(int sourcePos) {
- return (getS2(sourcePos) << 16) | getU2(sourcePos + 2);
- }
-
- public void writeTo(OutputStream stream) throws IOException {
- if (!frozen) freeze();
-
- for (int i = 0; i < data.length && data[i] != null; ++i) {
- int len = Math.min(BYTE_BLOCK_SIZE, pos - (i << BYTE_BLOCK_BITS));
- stream.write(data[i], 0, len);
- }
- }
-}
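
The addressing scheme above splits a flat byte position into a 256-byte block index and an offset inside that block using the shift and mask constants. A tiny sketch of that arithmetic:

    public class BlockAddressing {
        static final int BYTE_BLOCK_BITS = 8;
        static final int BYTE_BLOCK_SIZE = 1 << BYTE_BLOCK_BITS; // 256
        static final int BYTE_BLOCK_MASK = BYTE_BLOCK_SIZE - 1;  // 0xFF

        public static void main(String[] args) {
            int pos = 517;
            int block  = pos >>> BYTE_BLOCK_BITS; // 2 -> third 256-byte block
            int offset = pos & BYTE_BLOCK_MASK;   // 5 -> sixth byte in that block
            System.out.println(block + " / " + offset);
        }
    }
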
diff --git a/src/intellij/compiler.iml.SAMPLE b/src/intellij/compiler.iml.SAMPLE
index 696c347b7b..f8b1f31327 100644
--- a/src/intellij/compiler.iml.SAMPLE
+++ b/src/intellij/compiler.iml.SAMPLE
@@ -20,8 +20,6 @@
<orderEntry type="module" module-name="library" />
<orderEntry type="module" module-name="reflect" />
<orderEntry type="module" module-name="asm" />
- <orderEntry type="module" module-name="fjbg" />
- <orderEntry type="module" module-name="msil" />
<orderEntry type="library" name="ant" level="application" />
<orderEntry type="library" name="jline" level="project" />
</component>
diff --git a/src/intellij/msil.iml.SAMPLE b/src/intellij/continuations-library.iml.SAMPLE
index 56f794785f..364cc3dcdb 100644
--- a/src/intellij/msil.iml.SAMPLE
+++ b/src/intellij/continuations-library.iml.SAMPLE
@@ -12,9 +12,8 @@
</component>
<component name="NewModuleRootManager" inherit-compiler-output="true">
<exclude-output />
- <content url="file://$MODULE_DIR$/../msil">
- <sourceFolder url="file://$MODULE_DIR$/../msil" isTestSource="false" />
- <excludeFolder url="file://$MODULE_DIR$/../msil/ch/epfl/lamp/compiler/msil/tests" />
+ <content url="file://$MODULE_DIR$/../continuations/library">
+ <sourceFolder url="file://$MODULE_DIR$/../continuations/library" isTestSource="false" />
</content>
<orderEntry type="inheritedJdk" />
<orderEntry type="sourceFolder" forTests="false" />
diff --git a/src/intellij/continuations-plugin.iml.SAMPLE b/src/intellij/continuations-plugin.iml.SAMPLE
new file mode 100644
index 0000000000..27213374b3
--- /dev/null
+++ b/src/intellij/continuations-plugin.iml.SAMPLE
@@ -0,0 +1,25 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<module type="JAVA_MODULE" version="4">
+ <component name="FacetManager">
+ <facet type="scala" name="Scala">
+ <configuration>
+ <option name="compilerLibraryLevel" value="Project" />
+ <option name="compilerLibraryName" value="compiler-locker" />
+ <option name="maximumHeapSize" value="1536" />
+ <option name="vmOptions" value="-Xms1536m -Xss1m -XX:MaxPermSize=512M -XX:ReservedCodeCacheSize=256m -XX:+CMSClassUnloadingEnabled -XX:+UseCompressedOops -XX:+UseParallelGC" />
+ </configuration>
+ </facet>
+ </component>
+ <component name="NewModuleRootManager" inherit-compiler-output="true">
+ <exclude-output />
+ <content url="file://$MODULE_DIR$/../continuations/plugin">
+ <sourceFolder url="file://$MODULE_DIR$/../continuations/plugin" isTestSource="false" />
+ </content>
+ <orderEntry type="inheritedJdk" />
+ <orderEntry type="sourceFolder" forTests="false" />
+ <orderEntry type="module" module-name="library" />
+ <orderEntry type="module" module-name="compiler" />
+ <orderEntry type="module" module-name="reflect" />
+ </component>
+</module>
+
diff --git a/src/intellij/fjbg.iml.SAMPLE b/src/intellij/fjbg.iml.SAMPLE
deleted file mode 100644
index 03eca69246..0000000000
--- a/src/intellij/fjbg.iml.SAMPLE
+++ /dev/null
@@ -1,12 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<module type="JAVA_MODULE" version="4">
- <component name="NewModuleRootManager" inherit-compiler-output="true">
- <exclude-output />
- <content url="file://$MODULE_DIR$/../fjbg">
- <sourceFolder url="file://$MODULE_DIR$/../fjbg" isTestSource="false" />
- </content>
- <orderEntry type="inheritedJdk" />
- <orderEntry type="sourceFolder" forTests="false" />
- </component>
-</module>
-
diff --git a/src/intellij/interactive.iml.SAMPLE b/src/intellij/interactive.iml.SAMPLE
new file mode 100644
index 0000000000..c6c8ebb606
--- /dev/null
+++ b/src/intellij/interactive.iml.SAMPLE
@@ -0,0 +1,25 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<module type="JAVA_MODULE" version="4">
+ <component name="FacetManager">
+ <facet type="scala" name="Scala">
+ <configuration>
+ <option name="compilerLibraryLevel" value="Project" />
+ <option name="compilerLibraryName" value="compiler-locker" />
+ <option name="maximumHeapSize" value="1536" />
+ <option name="vmOptions" value="-Xms1536m -Xss1m -XX:MaxPermSize=512M -XX:ReservedCodeCacheSize=256m -XX:+CMSClassUnloadingEnabled -XX:+UseCompressedOops -XX:+UseParallelGC" />
+ </configuration>
+ </facet>
+ </component>
+ <component name="NewModuleRootManager" inherit-compiler-output="true">
+ <exclude-output />
+ <content url="file://$MODULE_DIR$/../interactive">
+ <sourceFolder url="file://$MODULE_DIR$/../interactive" isTestSource="false" />
+ </content>
+ <orderEntry type="inheritedJdk" />
+ <orderEntry type="sourceFolder" forTests="false" />
+ <orderEntry type="module" module-name="library" />
+ <orderEntry type="module" module-name="reflect" />
+ <orderEntry type="module" module-name="compiler" />
+ <orderEntry type="module" module-name="scaladoc" />
+ </component>
+</module>
diff --git a/src/intellij/repl.iml.SAMPLE b/src/intellij/repl.iml.SAMPLE
new file mode 100644
index 0000000000..5e11ff1cf6
--- /dev/null
+++ b/src/intellij/repl.iml.SAMPLE
@@ -0,0 +1,25 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<module type="JAVA_MODULE" version="4">
+ <component name="FacetManager">
+ <facet type="scala" name="Scala">
+ <configuration>
+ <option name="compilerLibraryLevel" value="Project" />
+ <option name="compilerLibraryName" value="compiler-locker" />
+ <option name="maximumHeapSize" value="1536" />
+ <option name="vmOptions" value="-Xms1536m -Xss1m -XX:MaxPermSize=512M -XX:ReservedCodeCacheSize=256m -XX:+CMSClassUnloadingEnabled -XX:+UseCompressedOops -XX:+UseParallelGC" />
+ </configuration>
+ </facet>
+ </component>
+ <component name="NewModuleRootManager" inherit-compiler-output="true">
+ <exclude-output />
+ <content url="file://$MODULE_DIR$/../repl">
+ <sourceFolder url="file://$MODULE_DIR$/../repl" isTestSource="false" />
+ </content>
+ <orderEntry type="inheritedJdk" />
+ <orderEntry type="sourceFolder" forTests="false" />
+ <orderEntry type="module" module-name="library" />
+ <orderEntry type="module" module-name="reflect" />
+ <orderEntry type="module" module-name="compiler" />
+ </component>
+</module>
+
diff --git a/src/intellij/scala-lang.ipr.SAMPLE b/src/intellij/scala-lang.ipr.SAMPLE
index 37307c2029..61c813df01 100644
--- a/src/intellij/scala-lang.ipr.SAMPLE
+++ b/src/intellij/scala-lang.ipr.SAMPLE
@@ -198,14 +198,17 @@
<module fileurl="file://$PROJECT_DIR$/actors.iml" filepath="$PROJECT_DIR$/actors.iml" />
<module fileurl="file://$PROJECT_DIR$/asm.iml" filepath="$PROJECT_DIR$/asm.iml" />
<module fileurl="file://$PROJECT_DIR$/compiler.iml" filepath="$PROJECT_DIR$/compiler.iml" />
- <module fileurl="file://$PROJECT_DIR$/fjbg.iml" filepath="$PROJECT_DIR$/fjbg.iml" />
+ <module fileurl="file://$PROJECT_DIR$/continuations-library.iml" filepath="$PROJECT_DIR$/continuations-library.iml" />
+ <module fileurl="file://$PROJECT_DIR$/continuations-plugin.iml" filepath="$PROJECT_DIR$/continuations-plugin.iml" />
<module fileurl="file://$PROJECT_DIR$/forkjoin.iml" filepath="$PROJECT_DIR$/forkjoin.iml" />
+ <module fileurl="file://$PROJECT_DIR$/interactive.iml" filepath="$PROJECT_DIR$/interactive.iml" />
<module fileurl="file://$PROJECT_DIR$/library.iml" filepath="$PROJECT_DIR$/library.iml" />
<module fileurl="file://$PROJECT_DIR$/manual.iml" filepath="$PROJECT_DIR$/manual.iml" />
- <module fileurl="file://$PROJECT_DIR$/msil.iml" filepath="$PROJECT_DIR$/msil.iml" />
<module fileurl="file://$PROJECT_DIR$/partest.iml" filepath="$PROJECT_DIR$/partest.iml" />
<module fileurl="file://$PROJECT_DIR$/reflect.iml" filepath="$PROJECT_DIR$/reflect.iml" />
+ <module fileurl="file://$PROJECT_DIR$/repl.iml" filepath="$PROJECT_DIR$/repl.iml" />
<module fileurl="file://$PROJECT_DIR$/scala.iml" filepath="$PROJECT_DIR$/scala.iml" />
+ <module fileurl="file://$PROJECT_DIR$/scaladoc.iml" filepath="$PROJECT_DIR$/scaladoc.iml" />
<module fileurl="file://$PROJECT_DIR$/scalap.iml" filepath="$PROJECT_DIR$/scalap.iml" />
<module fileurl="file://$PROJECT_DIR$/swing.iml" filepath="$PROJECT_DIR$/swing.iml" />
<module fileurl="file://$PROJECT_DIR$/test.iml" filepath="$PROJECT_DIR$/test.iml" />
@@ -230,7 +233,6 @@
<root url="file://$PROJECT_DIR$/../../build/locker/classes/library" />
<root url="file://$PROJECT_DIR$/../../build/locker/classes/compiler" />
<root url="file://$PROJECT_DIR$/../../build/locker/classes/reflect" />
- <root url="file://$PROJECT_DIR$/../../build/libs/classes/fjbg" />
<root url="file://$PROJECT_DIR$/../../build/asm/classes" />
</CLASSES>
<JAVADOC />
diff --git a/src/intellij/scaladoc.iml.SAMPLE b/src/intellij/scaladoc.iml.SAMPLE
new file mode 100644
index 0000000000..6cc609919c
--- /dev/null
+++ b/src/intellij/scaladoc.iml.SAMPLE
@@ -0,0 +1,24 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<module type="JAVA_MODULE" version="4">
+ <component name="FacetManager">
+ <facet type="scala" name="Scala">
+ <configuration>
+ <option name="compilerLibraryLevel" value="Project" />
+ <option name="compilerLibraryName" value="compiler-locker" />
+ <option name="maximumHeapSize" value="1536" />
+ <option name="vmOptions" value="-Xms1536m -Xss1m -XX:MaxPermSize=512M -XX:ReservedCodeCacheSize=256m -XX:+CMSClassUnloadingEnabled -XX:+UseCompressedOops -XX:+UseParallelGC" />
+ </configuration>
+ </facet>
+ </component>
+ <component name="NewModuleRootManager" inherit-compiler-output="true">
+ <exclude-output />
+ <content url="file://$MODULE_DIR$/../scaladoc">
+ <sourceFolder url="file://$MODULE_DIR$/../scaladoc" isTestSource="false" />
+ </content>
+ <orderEntry type="inheritedJdk" />
+ <orderEntry type="sourceFolder" forTests="false" />
+ <orderEntry type="module" module-name="library" />
+ <orderEntry type="module" module-name="reflect" />
+ <orderEntry type="module" module-name="compiler" />
+ </component>
+</module>
diff --git a/src/intellij/test.iml.SAMPLE b/src/intellij/test.iml.SAMPLE
index 112fec428f..3ce369be05 100644
--- a/src/intellij/test.iml.SAMPLE
+++ b/src/intellij/test.iml.SAMPLE
@@ -12,9 +12,7 @@
<orderEntry type="module" module-name="swing" />
<orderEntry type="module" module-name="partest" />
<orderEntry type="module" module-name="asm" />
- <orderEntry type="module" module-name="fjbg" />
<orderEntry type="module" module-name="forkjoin" />
- <orderEntry type="module" module-name="msil" />
</component>
</module>
diff --git a/src/compiler/scala/tools/nsc/interactive/CompilerControl.scala b/src/interactive/scala/tools/nsc/interactive/CompilerControl.scala
index af82957a2e..d036a6e853 100644
--- a/src/compiler/scala/tools/nsc/interactive/CompilerControl.scala
+++ b/src/interactive/scala/tools/nsc/interactive/CompilerControl.scala
@@ -7,8 +7,6 @@ package interactive
import scala.util.control.ControlThrowable
import scala.tools.nsc.io.AbstractFile
-import scala.tools.nsc.symtab._
-import scala.tools.nsc.ast._
import scala.tools.nsc.util.FailedInterrupt
import scala.tools.nsc.util.EmptyAction
import scala.tools.nsc.util.WorkScheduler
@@ -45,8 +43,6 @@ import scala.tools.nsc.util.InterruptReq
*/
trait CompilerControl { self: Global =>
- import syntaxAnalyzer.UnitParser
-
type Response[T] = scala.tools.nsc.interactive.Response[T]
/** The scheduler by which client and compiler communicate
@@ -120,7 +116,7 @@ trait CompilerControl { self: Global =>
case ri: ReloadItem if ri.sources == sources => Some(ri)
case _ => None
}
- superseeded.foreach(_.response.set())
+ superseeded.foreach(_.response.set(()))
postWorkItem(new ReloadItem(sources, response))
}
@@ -139,7 +135,6 @@ trait CompilerControl { self: Global =>
/** Sets sync var `response` to the fully attributed & typechecked tree contained in `source`.
* @pre `source` needs to be loaded.
- *
* @note Deprecated because of race conditions in the typechecker when the background compiler
* is interrupted while typing the same `source`.
* @see SI-6578
@@ -241,9 +236,6 @@ trait CompilerControl { self: Global =>
* prints its output and all defined values in a comment column.
*
* @param source The source file to be analyzed
- * @param keepLoaded If set to `true`, source file will be kept as a loaded unit afterwards.
- * If keepLoaded is `false` the operation is run at low priority, only after
- * everything is brought up to date in a regular type checker run.
* @param response The response.
*/
@deprecated("SI-6458: Instrumentation logic will be moved out of the compiler.","2.10.0")
@@ -269,7 +261,7 @@ trait CompilerControl { self: Global =>
* compiler thread.
*/
def parseTree(source: SourceFile): Tree = {
- new UnitParser(new CompilationUnit(source)).parse()
+ newUnitParser(new CompilationUnit(source)).parse()
}
/** Asks for a computation to be done quickly on the presentation compiler thread */
@@ -303,6 +295,14 @@ trait CompilerControl { self: Global =>
val tpe: Type
val accessible: Boolean
def implicitlyAdded = false
+
+ private def accessible_s = if (accessible) "" else "[inaccessible] "
+ def forceInfoString = {
+ definitions.fullyInitializeSymbol(sym)
+ definitions.fullyInitializeType(tpe)
+ infoString
+ }
+ def infoString = s"$accessible_s${sym.defStringSeenAs(tpe)}"
}
case class TypeMember(
@@ -345,7 +345,7 @@ trait CompilerControl { self: Global =>
response raise new MissingResponse
}
- case class AskTypeAtItem(val pos: Position, response: Response[Tree]) extends WorkItem {
+ case class AskTypeAtItem(pos: Position, response: Response[Tree]) extends WorkItem {
def apply() = self.getTypedTreeAt(pos, response)
override def toString = "typeat "+pos.source+" "+pos.show
@@ -353,7 +353,7 @@ trait CompilerControl { self: Global =>
response raise new MissingResponse
}
- case class AskTypeItem(val source: SourceFile, val forceReload: Boolean, response: Response[Tree]) extends WorkItem {
+ case class AskTypeItem(source: SourceFile, forceReload: Boolean, response: Response[Tree]) extends WorkItem {
def apply() = self.getTypedTree(source, forceReload, response)
override def toString = "typecheck"
@@ -361,7 +361,7 @@ trait CompilerControl { self: Global =>
response raise new MissingResponse
}
- case class AskTypeCompletionItem(val pos: Position, response: Response[List[Member]]) extends WorkItem {
+ case class AskTypeCompletionItem(pos: Position, response: Response[List[Member]]) extends WorkItem {
def apply() = self.getTypeCompletion(pos, response)
override def toString = "type completion "+pos.source+" "+pos.show
@@ -369,7 +369,7 @@ trait CompilerControl { self: Global =>
response raise new MissingResponse
}
- case class AskScopeCompletionItem(val pos: Position, response: Response[List[Member]]) extends WorkItem {
+ case class AskScopeCompletionItem(pos: Position, response: Response[List[Member]]) extends WorkItem {
def apply() = self.getScopeCompletion(pos, response)
override def toString = "scope completion "+pos.source+" "+pos.show
@@ -387,7 +387,7 @@ trait CompilerControl { self: Global =>
def raiseMissing() = ()
}
- case class AskLinkPosItem(val sym: Symbol, val source: SourceFile, response: Response[Position]) extends WorkItem {
+ case class AskLinkPosItem(sym: Symbol, source: SourceFile, response: Response[Position]) extends WorkItem {
def apply() = self.getLinkPos(sym, source, response)
override def toString = "linkpos "+sym+" in "+source
@@ -395,7 +395,7 @@ trait CompilerControl { self: Global =>
response raise new MissingResponse
}
- case class AskDocCommentItem(val sym: Symbol, val source: SourceFile, val site: Symbol, val fragments: List[(Symbol,SourceFile)], response: Response[(String, String, Position)]) extends WorkItem {
+ case class AskDocCommentItem(sym: Symbol, source: SourceFile, site: Symbol, fragments: List[(Symbol,SourceFile)], response: Response[(String, String, Position)]) extends WorkItem {
def apply() = self.getDocComment(sym, source, site, fragments, response)
override def toString = "doc comment "+sym+" in "+source+" with fragments:"+fragments.mkString("(", ",", ")")
@@ -403,7 +403,7 @@ trait CompilerControl { self: Global =>
response raise new MissingResponse
}
- case class AskLoadedTypedItem(val source: SourceFile, response: Response[Tree]) extends WorkItem {
+ case class AskLoadedTypedItem(source: SourceFile, response: Response[Tree]) extends WorkItem {
def apply() = self.waitLoadedTyped(source, response, this.onCompilerThread)
override def toString = "wait loaded & typed "+source
@@ -411,7 +411,7 @@ trait CompilerControl { self: Global =>
response raise new MissingResponse
}
- case class AskParsedEnteredItem(val source: SourceFile, val keepLoaded: Boolean, response: Response[Tree]) extends WorkItem {
+ case class AskParsedEnteredItem(source: SourceFile, keepLoaded: Boolean, response: Response[Tree]) extends WorkItem {
def apply() = self.getParsedEntered(source, keepLoaded, response, this.onCompilerThread)
override def toString = "getParsedEntered "+source+", keepLoaded = "+keepLoaded
@@ -420,7 +420,7 @@ trait CompilerControl { self: Global =>
}
@deprecated("SI-6458: Instrumentation logic will be moved out of the compiler.","2.10.0")
- case class AskInstrumentedItem(val source: SourceFile, line: Int, response: Response[(String, Array[Char])]) extends WorkItem {
+ case class AskInstrumentedItem(source: SourceFile, line: Int, response: Response[(String, Array[Char])]) extends WorkItem {
def apply() = self.getInstrumented(source, line, response)
override def toString = "getInstrumented "+source
diff --git a/src/compiler/scala/tools/nsc/interactive/ContextTrees.scala b/src/interactive/scala/tools/nsc/interactive/ContextTrees.scala
index b2568e34bd..93ef4c4d6c 100644
--- a/src/compiler/scala/tools/nsc/interactive/ContextTrees.scala
+++ b/src/interactive/scala/tools/nsc/interactive/ContextTrees.scala
@@ -6,7 +6,6 @@ package scala.tools.nsc
package interactive
import scala.collection.mutable.ArrayBuffer
-import scala.reflect.internal.util.Position
trait ContextTrees { self: Global =>
diff --git a/src/compiler/scala/tools/nsc/interactive/Global.scala b/src/interactive/scala/tools/nsc/interactive/Global.scala
index 84670750d7..99f2cd4056 100644
--- a/src/compiler/scala/tools/nsc/interactive/Global.scala
+++ b/src/interactive/scala/tools/nsc/interactive/Global.scala
@@ -8,31 +8,93 @@ package interactive
import java.io.{ PrintWriter, StringWriter, FileReader, FileWriter }
import scala.collection.mutable
import mutable.{LinkedHashMap, SynchronizedMap, HashSet, SynchronizedSet}
-import scala.concurrent.SyncVar
import scala.util.control.ControlThrowable
import scala.tools.nsc.io.{ AbstractFile, LogReplay, Logger, NullLogger, Replayer }
-import scala.tools.nsc.util.{ WorkScheduler, MultiHashMap }
-import scala.reflect.internal.util.{ SourceFile, BatchSourceFile, Position, RangePosition, NoPosition }
+import scala.tools.nsc.util.MultiHashMap
+import scala.reflect.internal.util.{ SourceFile, BatchSourceFile, Position, NoPosition }
import scala.tools.nsc.reporters._
import scala.tools.nsc.symtab._
-import scala.tools.nsc.ast._
-import scala.tools.nsc.io.Pickler._
-import scala.tools.nsc.typechecker.DivergentImplicit
-import scala.annotation.tailrec
+import scala.tools.nsc.doc.ScaladocAnalyzer
+import scala.tools.nsc.typechecker.Analyzer
import symtab.Flags.{ACCESSOR, PARAMACCESSOR}
-import scala.annotation.elidable
+import scala.annotation.{ elidable, tailrec }
import scala.language.implicitConversions
+trait InteractiveScaladocAnalyzer extends InteractiveAnalyzer with ScaladocAnalyzer {
+ val global : Global
+ override def newTyper(context: Context) = new Typer(context) with InteractiveTyper with ScaladocTyper {
+ override def canAdaptConstantTypeToLiteral = false
+ }
+}
+
+trait InteractiveAnalyzer extends Analyzer {
+ val global : Global
+ import global._
+
+ override def newTyper(context: Context): InteractiveTyper = new Typer(context) with InteractiveTyper
+ override def newNamer(context: Context): InteractiveNamer = new Namer(context) with InteractiveNamer
+ override protected def newPatternMatching = false
+
+ trait InteractiveTyper extends Typer {
+ override def canAdaptConstantTypeToLiteral = false
+ override def canTranslateEmptyListToNil = false
+ override def missingSelectErrorTree(tree: Tree, qual: Tree, name: Name): Tree = tree match {
+ case Select(_, _) => treeCopy.Select(tree, qual, name)
+ case SelectFromTypeTree(_, _) => treeCopy.SelectFromTypeTree(tree, qual, name)
+ }
+ }
+
+ trait InteractiveNamer extends Namer {
+ override def saveDefaultGetter(meth: Symbol, default: Symbol) {
+ // save the default getters as attachments in the method symbol. if compiling the
+ // same local block several times (which can happen in interactive mode) we might
+ // otherwise not find the default symbol, because the second time it the method
+ // symbol will be re-entered in the scope but the default parameter will not.
+ meth.attachments.get[DefaultsOfLocalMethodAttachment] match {
+ case Some(att) => att.defaultGetters += default
+ case None => meth.updateAttachment(new DefaultsOfLocalMethodAttachment(default))
+ }
+ }
+ // this logic is needed in case typer was interrupted half
+ // way through and then comes back to do the tree again. In
+ // that case the definitions that were already attributed as
+ // well as any default parameters of such methods need to be
+ // re-entered in the current scope.
+ override def enterExistingSym(sym: Symbol): Context = {
+ if (sym != null && sym.owner.isTerm) {
+ enterIfNotThere(sym)
+ if (sym.isLazy)
+ sym.lazyAccessor andAlso enterIfNotThere
+
+ for (defAtt <- sym.attachments.get[DefaultsOfLocalMethodAttachment])
+ defAtt.defaultGetters foreach enterIfNotThere
+ }
+ super.enterExistingSym(sym)
+ }
+ override def enterIfNotThere(sym: Symbol) {
+ val scope = context.scope
+ @tailrec def search(e: ScopeEntry) {
+ if ((e eq null) || (e.owner ne scope))
+ scope enter sym
+ else if (e.sym ne sym) // otherwise, aborts since we found sym
+ search(e.tail)
+ }
+ search(scope lookupEntry sym.name)
+ }
+ }
+}
+
+
/** The main class of the presentation compiler in an interactive environment such as an IDE
*/
-class Global(settings: Settings, _reporter: Reporter, projectName: String = "") extends {
+class Global(settings: Settings, _reporter: Reporter, projectName: String = "") extends {
/* Is the compiler initializing? Early def, so that the field is true during the
* execution of the super constructor.
*/
private var initializing = true
+ override val useOffsetPositions = false
} with scala.tools.nsc.Global(settings, _reporter)
with CompilerControl
- with RangePositions
with ContextTrees
with RichCompilationUnits
with ScratchPadMaker
@@ -72,8 +134,26 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "")
@inline final def informIDE(msg: => String) =
if (verboseIDE) println("[%s][%s]".format(projectName, msg))
+ // don't keep the original owner in presentation compiler runs
+ // (the map will grow indefinitely, and the only use case is the backend)
+ override protected def saveOriginalOwner(sym: Symbol) { }
+ override protected def originalEnclosingMethod(sym: Symbol) =
+ abort("originalOwner is not kept in presentation compiler runs.")
+
override def forInteractive = true
+ override def newAsSeenFromMap(pre: Type, clazz: Symbol): AsSeenFromMap =
+ new InteractiveAsSeenFromMap(pre, clazz)
+
+ class InteractiveAsSeenFromMap(pre: Type, clazz: Symbol) extends AsSeenFromMap(pre, clazz) {
+ /** The method formerly known as 'instParamsRelaxed' goes here if it's still necessary,
+ * which it is currently supposed it is not.
+ *
+ * If it is, change AsSeenFromMap method correspondingTypeArgument to call an overridable
+ * method rather than aborting in the failure case.
+ */
+ }
+
/** A map of all loaded files to the rich compilation units that correspond to them.
*/
val unitOfFile = new LinkedHashMap[AbstractFile, RichCompilationUnit] with
@@ -131,6 +211,10 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "")
}
}
+ override lazy val analyzer = new {
+ val global: Global.this.type = Global.this
+ } with InteractiveAnalyzer
+
private def cleanAllResponses() {
cleanResponses(waitLoadedTypeResponses)
cleanResponses(getParsedEnteredResponses)
@@ -285,7 +369,7 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "")
* top-level idents. Therefore, we can detect top-level symbols that have a name
* different from their source file
*/
- override lazy val loaders = new BrowsingLoaders {
+ override lazy val loaders: SymbolLoaders { val global: Global.this.type } = new BrowsingLoaders {
val global: Global.this.type = Global.this
}
@@ -363,14 +447,14 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "")
}
// don't forget to service interrupt requests
- val iqs = scheduler.dequeueAllInterrupts(_.execute())
+ scheduler.dequeueAllInterrupts(_.execute())
debugLog("ShutdownReq: cleaning work queue (%d items)".format(units.size))
debugLog("Cleanup up responses (%d loadedType pending, %d parsedEntered pending)"
.format(waitLoadedTypeResponses.size, getParsedEnteredResponses.size))
checkNoResponsesOutstanding()
- log.flush();
+ log.flush()
scheduler = new NoWorkScheduler
throw ShutdownReq
}
@@ -401,41 +485,6 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "")
if (typerRun != currentTyperRun) demandNewCompilerRun()
}
- def debugInfo(source : SourceFile, start : Int, length : Int): String = {
- println("DEBUG INFO "+source+"/"+start+"/"+length)
- val end = start+length
- val pos = rangePos(source, start, start, end)
-
- val tree = locateTree(pos)
- val sw = new StringWriter
- val pw = new PrintWriter(sw)
- newTreePrinter(pw).print(tree)
- pw.flush
-
- val typed = new Response[Tree]
- askTypeAt(pos, typed)
- val typ = typed.get.left.toOption match {
- case Some(tree) =>
- val sw = new StringWriter
- val pw = new PrintWriter(sw)
- newTreePrinter(pw).print(tree)
- pw.flush
- sw.toString
- case None => "<None>"
- }
-
- val completionResponse = new Response[List[Member]]
- askTypeCompletion(pos, completionResponse)
- val completion = completionResponse.get.left.toOption match {
- case Some(members) =>
- members mkString "\n"
- case None => "<None>"
- }
-
- source.content.view.drop(start).take(length).mkString+" : "+source.path+" ("+start+", "+end+
- ")\n\nlocateTree:\n"+sw.toString+"\n\naskTypeAt:\n"+typ+"\n\ncompletion:\n"+completion
- }
-
// ----------------- The Background Runner Thread -----------------------
private var threadId = 0
@@ -497,7 +546,7 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "")
for (s <- allSources; if !ignoredFiles(s.file); unit <- getUnit(s)) {
try {
if (!unit.isUpToDate)
- if (unit.problems.isEmpty || !settings.YpresentationStrict.value)
+ if (unit.problems.isEmpty || !settings.YpresentationStrict)
typeCheck(unit)
else debugLog("%s has syntax errors. Skipped typechecking".format(unit))
else debugLog("already up to date: "+unit)
@@ -794,10 +843,10 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "")
try {
val tp1 = pre.memberType(alt) onTypeError NoType
val tp2 = adaptToNewRunMap(sym.tpe) substSym (originalTypeParams, sym.owner.typeParams)
- matchesType(tp1, tp2, false) || {
+ matchesType(tp1, tp2, alwaysMatchSimple = false) || {
debugLog(s"findMirrorSymbol matchesType($tp1, $tp2) failed")
val tp3 = adaptToNewRunMap(sym.tpe) substSym (originalTypeParams, alt.owner.typeParams)
- matchesType(tp1, tp3, false) || {
+ matchesType(tp1, tp3, alwaysMatchSimple = false) || {
debugLog(s"findMirrorSymbol fallback matchesType($tp1, $tp3) failed")
false
}
@@ -876,14 +925,14 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "")
}
def stabilizedType(tree: Tree): Type = tree match {
- case Ident(_) if tree.symbol.isStable =>
+ case Ident(_) if treeInfo.admitsTypeSelection(tree) =>
singleType(NoPrefix, tree.symbol)
- case Select(qual, _) if qual.tpe != null && tree.symbol.isStable =>
+ case Select(qual, _) if treeInfo.admitsTypeSelection(tree) =>
singleType(qual.tpe, tree.symbol)
case Import(expr, selectors) =>
tree.symbol.info match {
case analyzer.ImportType(expr) => expr match {
- case s@Select(qual, name) => singleType(qual.tpe, s.symbol)
+ case s@Select(qual, name) if treeInfo.admitsTypeSelection(expr) => singleType(qual.tpe, s.symbol)
case i : Ident => i.tpe
case _ => tree.tpe
}
@@ -900,8 +949,6 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "")
respond(response) { scopeMembers(pos) }
}
- private val Dollar = newTermName("$")
-
private class Members[M <: Member] extends LinkedHashMap[Name, Set[M]] {
override def default(key: Name) = Set()
@@ -917,7 +964,7 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "")
def add(sym: Symbol, pre: Type, implicitlyAdded: Boolean)(toMember: (Symbol, Type) => M) {
if ((sym.isGetter || sym.isSetter) && sym.accessed != NoSymbol) {
add(sym.accessed, pre, implicitlyAdded)(toMember)
- } else if (!sym.name.decodedName.containsName(Dollar) && !sym.isSynthetic && sym.hasRawInfo) {
+ } else if (!sym.name.decodedName.containsName("$") && !sym.isSynthetic && sym.hasRawInfo) {
val symtpe = pre.memberType(sym) onTypeError ErrorType
matching(sym, symtpe, this(sym.name)) match {
case Some(m) =>
@@ -947,8 +994,8 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "")
val locals = new Members[ScopeMember]
val enclosing = new Members[ScopeMember]
def addScopeMember(sym: Symbol, pre: Type, viaImport: Tree) =
- locals.add(sym, pre, false) { (s, st) =>
- new ScopeMember(s, st, context.isAccessible(s, pre, false), viaImport)
+ locals.add(sym, pre, implicitlyAdded = false) { (s, st) =>
+ new ScopeMember(s, st, context.isAccessible(s, pre, superAccess = false), viaImport)
}
def localsToEnclosing() = {
enclosing.addNonShadowed(locals)
@@ -988,23 +1035,21 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "")
}
private def typeMembers(pos: Position): Stream[List[TypeMember]] = {
- var tree = typedTreeAt(pos)
-
- // if tree consists of just x. or x.fo where fo is not yet a full member name
- // ignore the selection and look in just x.
- tree match {
- case Select(qual, name) if tree.tpe == ErrorType => tree = qual
- case _ =>
+ // Choosing which tree will tell us the type members at the given position:
+ // If pos leads to an Import, type the expr
+ // If pos leads to a Select, type the qualifier as long as it is not erroneous
+ // (this implies discarding the possibly incomplete name in the Select node)
+ // Otherwise, type the tree found at 'pos' directly.
+ val tree0 = typedTreeAt(pos) match {
+ case sel @ Select(qual, _) if sel.tpe == ErrorType => qual
+ case Import(expr, _) => expr
+ case t => t
}
-
val context = doLocateContext(pos)
-
- if (tree.tpe == null)
- // TODO: guard with try/catch to deal with ill-typed qualifiers.
- tree = analyzer.newTyper(context).typedQualifier(tree)
+ // TODO: guard with try/catch to deal with ill-typed qualifiers.
+ val tree = if (tree0.tpe eq null) analyzer newTyper context typedQualifier tree0 else tree0
debugLog("typeMembers at "+tree+" "+tree.tpe)
-
val superAccess = tree.isInstanceOf[Super]
val members = new Members[TypeMember]
@@ -1040,17 +1085,17 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "")
for (sym <- ownerTpe.members)
addTypeMember(sym, pre, sym.owner != ownerTpe.typeSymbol, NoSymbol)
members.allMembers #:: {
- //print("\nadd pimped")
+ //print("\nadd enrichment")
val applicableViews: List[SearchResult] =
if (ownerTpe.isErroneous) List()
else new ImplicitSearch(
- tree, functionType(List(ownerTpe), AnyClass.tpe), isView = true,
+ tree, functionType(List(ownerTpe), AnyTpe), isView = true,
context0 = context.makeImplicit(reportAmbiguousErrors = false)).allImplicits
for (view <- applicableViews) {
val vtree = viewApply(view)
val vpre = stabilizedType(vtree)
for (sym <- vtree.tpe.members) {
- addTypeMember(sym, vpre, false, view.tree.symbol)
+ addTypeMember(sym, vpre, inherited = false, view.tree.symbol)
}
}
//println()
@@ -1063,7 +1108,7 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "")
getUnit(source) match {
case Some(unit) =>
if (unit.isUpToDate) {
- debugLog("already typed");
+ debugLog("already typed")
response set unit.body
} else if (ignoredFiles(source.file)) {
response.raise(lastException.getOrElse(CancelException))
@@ -1110,7 +1155,7 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "")
}
@deprecated("SI-6458: Instrumentation logic will be moved out of the compiler.","2.10.0")
- def getInstrumented(source: SourceFile, line: Int, response: Response[(String, Array[Char])]) =
+ def getInstrumented(source: SourceFile, line: Int, response: Response[(String, Array[Char])]) {
try {
interruptsEnabled = false
respond(response) {
@@ -1119,18 +1164,10 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "")
} finally {
interruptsEnabled = true
}
+ }
// ---------------- Helper classes ---------------------------
- /** A transformer that replaces tree `from` with tree `to` in a given tree */
- class TreeReplacer(from: Tree, to: Tree) extends Transformer {
- override def transform(t: Tree): Tree = {
- if (t == from) to
- else if ((t.pos includes from.pos) || t.pos.isTransparent) super.transform(t)
- else t
- }
- }
-
/** The typer run */
class TyperRun extends Run {
// units is always empty
@@ -1150,7 +1187,7 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "")
* @return true iff typechecked correctly
*/
private def applyPhase(phase: Phase, unit: CompilationUnit) {
- atPhase(phase) { phase.asInstanceOf[GlobalPhase] applyPhase unit }
+ enteringPhase(phase) { phase.asInstanceOf[GlobalPhase] applyPhase unit }
}
}
@@ -1173,14 +1210,6 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "")
case ex: TypeError =>
debugLog("type error caught: "+ex)
alt
- case ex: DivergentImplicit =>
- if (settings.Xdivergence211.value) {
- debugLog("this shouldn't happen. DivergentImplicit exception has been thrown with -Xdivergence211 turned on: "+ex)
- alt
- } else {
- debugLog("divergent implicit caught: "+ex)
- alt
- }
}
}
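
Editor's note: the hunks above convert positional boolean arguments (matchesType, Members.add, isAccessible) to named arguments. A minimal, self-contained sketch of that call-site style, using a made-up method rather than the compiler's own:

    // Hypothetical method; only the named-boolean-argument style is the point.
    object NamedArgsDemo {
      def lookup(name: String, ignoreCase: Boolean, includeDeprecated: Boolean): Option[String] =
        if (ignoreCase && name.equalsIgnoreCase("foo")) Some("foo")
        else if (name == "foo") Some("foo")
        else if (includeDeprecated && name == "bar") Some("bar (deprecated)")
        else None

      def main(args: Array[String]): Unit = {
        // lookup("FOO", true, false) compiles but is opaque; naming the flags documents intent,
        // just as matchesType(tp1, tp2, alwaysMatchSimple = false) does in the patch above.
        println(lookup("FOO", ignoreCase = true, includeDeprecated = false))
      }
    }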
diff --git a/src/compiler/scala/tools/nsc/interactive/InteractiveReporter.scala b/src/interactive/scala/tools/nsc/interactive/InteractiveReporter.scala
index 013b152e96..013b152e96 100644
--- a/src/compiler/scala/tools/nsc/interactive/InteractiveReporter.scala
+++ b/src/interactive/scala/tools/nsc/interactive/InteractiveReporter.scala
diff --git a/src/interactive/scala/tools/nsc/interactive/Main.scala b/src/interactive/scala/tools/nsc/interactive/Main.scala
new file mode 100644
index 0000000000..c838606f02
--- /dev/null
+++ b/src/interactive/scala/tools/nsc/interactive/Main.scala
@@ -0,0 +1,34 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author Martin Odersky
+ */
+
+package scala.tools
+package nsc
+package interactive
+
+/** The main class for NSC, a compiler for the programming
+ * language Scala.
+ */
+object Main extends nsc.MainClass {
+ override def processSettingsHook(): Boolean = {
+ if (this.settings.Yidedebug) {
+ this.settings.Xprintpos.value = true
+ this.settings.Yrangepos.value = true
+ val compiler = new interactive.Global(this.settings, this.reporter)
+ import compiler.{ reporter => _, _ }
+
+ val sfs = command.files map getSourceFile
+ val reloaded = new interactive.Response[Unit]
+ askReload(sfs, reloaded)
+
+ reloaded.get.right.toOption match {
+ case Some(ex) => reporter.cancelled = true // Causes exit code to be non-0
+ case None => reporter.reset() // Causes other compiler errors to be ignored
+ }
+ askShutdown
+ false
+ }
+ else true
+ }
+}
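
Editor's note: the new Main.scala drives the interactive compiler through the ask/Response protocol. A small sketch of that protocol in isolation, assuming scala-compiler 2.10.x/2.11.x on the classpath; "Example.scala" is a placeholder path expected to exist:

    import scala.tools.nsc.Settings
    import scala.tools.nsc.reporters.ConsoleReporter
    import scala.tools.nsc.interactive.{ Global, Response }

    object AskReloadSketch {
      def main(args: Array[String]): Unit = {
        val settings = new Settings()
        settings.usejavacp.value = true
        val compiler = new Global(settings, new ConsoleReporter(settings))

        val source   = compiler.getSourceFile("Example.scala")   // placeholder file
        val reloaded = new Response[Unit]
        compiler.askReload(List(source), reloaded)

        // Response.get blocks until the request is answered; Left is success, Right an exception.
        reloaded.get match {
          case Left(_)   => println("reload finished")
          case Right(ex) => println("reload failed: " + ex)
        }
        compiler.askShutdown()
      }
    }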
diff --git a/src/compiler/scala/tools/nsc/interactive/Picklers.scala b/src/interactive/scala/tools/nsc/interactive/Picklers.scala
index 2b389158c3..900a06333d 100644
--- a/src/compiler/scala/tools/nsc/interactive/Picklers.scala
+++ b/src/interactive/scala/tools/nsc/interactive/Picklers.scala
@@ -6,12 +6,10 @@ package scala.tools.nsc
package interactive
import util.InterruptReq
-import scala.reflect.internal.util.{SourceFile, BatchSourceFile}
-import io.{AbstractFile, PlainFile}
-
+import scala.reflect.internal.util.{ SourceFile, BatchSourceFile }
+import io.{ AbstractFile, PlainFile, Pickler, CondPickler }
import util.EmptyAction
-import scala.reflect.internal.util.{Position, RangePosition, NoPosition, OffsetPosition, TransparentPosition}
-import io.{Pickler, CondPickler}
+import scala.reflect.internal.util.{ RangePosition, OffsetPosition, TransparentPosition }
import io.Pickler._
import scala.collection.mutable
import mutable.ListBuffer
@@ -98,7 +96,7 @@ trait Picklers { self: Global =>
if (!sym.isRoot) {
ownerNames(sym.owner, buf)
buf += (if (sym.isModuleClass) sym.sourceModule else sym).name
- if (!sym.isType && !sym.isStable) {
+ if (!sym.isType && !sym.isStable) { // TODO: what's the reasoning behind this condition!?
val sym1 = sym.owner.info.decl(sym.name)
if (sym1.isOverloaded) {
val index = sym1.alternatives.indexOf(sym)
diff --git a/src/compiler/scala/tools/nsc/interactive/PresentationCompilerThread.scala b/src/interactive/scala/tools/nsc/interactive/PresentationCompilerThread.scala
index a2d8e5d49a..a2d8e5d49a 100644
--- a/src/compiler/scala/tools/nsc/interactive/PresentationCompilerThread.scala
+++ b/src/interactive/scala/tools/nsc/interactive/PresentationCompilerThread.scala
diff --git a/src/compiler/scala/tools/nsc/interactive/REPL.scala b/src/interactive/scala/tools/nsc/interactive/REPL.scala
index 7b89d5b0aa..33981771ec 100644
--- a/src/compiler/scala/tools/nsc/interactive/REPL.scala
+++ b/src/interactive/scala/tools/nsc/interactive/REPL.scala
@@ -2,18 +2,15 @@
* Copyright 2009-2013 Typesafe/Scala Solutions and LAMP/EPFL
* @author Martin Odersky
*/
-package scala.tools.nsc
+package scala
+package tools.nsc
package interactive
-import scala.concurrent.SyncVar
import scala.reflect.internal.util._
-import scala.tools.nsc.symtab._
-import scala.tools.nsc.ast._
import scala.tools.nsc.reporters._
import scala.tools.nsc.io._
import scala.tools.nsc.scratchpad.SourceInserter
-import scala.tools.nsc.interpreter.AbstractFileClassLoader
-import java.io.{File, FileWriter}
+import java.io.FileWriter
/** Interface of interactive compiler to a client such as an IDE
*/
@@ -36,7 +33,7 @@ object REPL {
val settings = new Settings(replError)
reporter = new ConsoleReporter(settings)
val command = new CompilerCommand(args.toList, settings)
- if (command.settings.version.value)
+ if (command.settings.version)
reporter.echo(versionMsg)
else {
try {
@@ -54,22 +51,22 @@ object REPL {
}
} catch {
case ex @ FatalError(msg) =>
- if (true || command.settings.debug.value) // !!!
- ex.printStackTrace();
- reporter.error(null, "fatal error: " + msg)
+ if (true || command.settings.debug) // !!!
+ ex.printStackTrace()
+ reporter.error(null, "fatal error: " + msg)
}
}
}
def main(args: Array[String]) {
process(args)
- /*sys.*/exit(if (reporter.hasErrors) 1 else 0)// Don't use sys yet as this has to run on 2.8.2 also.
+ sys.exit(if (reporter.hasErrors) 1 else 0)
}
def loop(action: (String) => Unit) {
Console.print(prompt)
try {
- val line = Console.readLine
+ val line = Console.readLine()
if (line.length() > 0) {
action(line)
}
@@ -111,7 +108,7 @@ object REPL {
}
def doStructure(file: String) {
- comp.askParsedEntered(toSourceFile(file), false, structureResult)
+ comp.askParsedEntered(toSourceFile(file), keepLoaded = false, structureResult)
show(structureResult)
}
@@ -168,7 +165,7 @@ object REPL {
show(reloadResult)
case "reloadAndAskType" :: file :: millis :: Nil =>
comp.askReload(List(toSourceFile(file)), reloadResult)
- Thread.sleep(millis.toInt)
+ Thread.sleep(millis.toLong)
println("ask type now")
comp.askLoadedTyped(toSourceFile(file), typedResult)
typedResult.get
@@ -186,7 +183,7 @@ object REPL {
println(instrument(arguments, line.toInt))
case List("quit") =>
comp.askShutdown()
- exit(1) // Don't use sys yet as this has to run on 2.8.2 also.
+ sys.exit(1)
case List("structure", file) =>
doStructure(file)
case _ =>
diff --git a/src/interactive/scala/tools/nsc/interactive/RangePositions.scala b/src/interactive/scala/tools/nsc/interactive/RangePositions.scala
new file mode 100644
index 0000000000..410f919daa
--- /dev/null
+++ b/src/interactive/scala/tools/nsc/interactive/RangePositions.scala
@@ -0,0 +1,14 @@
+/* NSC -- new Scala compiler
+ * Copyright 2009-2013 Typesafe/Scala Solutions and LAMP/EPFL
+ * @author Martin Odersky
+ */
+
+package scala.tools.nsc
+package interactive
+
+@deprecated("Use scala.reflect.internal.Positions", "2.11.0")
+trait RangePositions extends scala.reflect.internal.Positions with ast.Trees with ast.Positions {
+ self: scala.tools.nsc.Global =>
+
+ override val useOffsetPositions = false
+}
diff --git a/src/compiler/scala/tools/nsc/interactive/Response.scala b/src/interactive/scala/tools/nsc/interactive/Response.scala
index f36f769ec9..f36f769ec9 100644
--- a/src/compiler/scala/tools/nsc/interactive/Response.scala
+++ b/src/interactive/scala/tools/nsc/interactive/Response.scala
diff --git a/src/compiler/scala/tools/nsc/interactive/RichCompilationUnits.scala b/src/interactive/scala/tools/nsc/interactive/RichCompilationUnits.scala
index b83c2cd095..b83c2cd095 100644
--- a/src/compiler/scala/tools/nsc/interactive/RichCompilationUnits.scala
+++ b/src/interactive/scala/tools/nsc/interactive/RichCompilationUnits.scala
diff --git a/src/compiler/scala/tools/nsc/interactive/ScratchPadMaker.scala b/src/interactive/scala/tools/nsc/interactive/ScratchPadMaker.scala
index 7f0265bf4f..ae70a74b60 100644
--- a/src/compiler/scala/tools/nsc/interactive/ScratchPadMaker.scala
+++ b/src/interactive/scala/tools/nsc/interactive/ScratchPadMaker.scala
@@ -1,4 +1,5 @@
-package scala.tools.nsc
+package scala
+package tools.nsc
package interactive
import scala.reflect.internal.util.{SourceFile, BatchSourceFile, RangePosition}
@@ -163,7 +164,7 @@ trait ScratchPadMaker { self: Global =>
while (scanner.token != EOF) {
startOffset += scanner.offset
token += scanner.token
- scanner.nextToken
+ scanner.nextToken()
endOffset += scanner.lastOffset
}
@@ -191,7 +192,7 @@ trait ScratchPadMaker { self: Global =>
* prints its output and all defined values in a comment column.
*/
protected def instrument(source: SourceFile, line: Int): (String, Array[Char]) = {
- val tree = typedTree(source, true)
+ val tree = typedTree(source, forceReload = true)
val endOffset = if (line < 0) source.length else source.lineToOffset(line + 1)
val patcher = new Patcher(source.content, new LexicalStructure(source), endOffset)
patcher.traverse(tree)
diff --git a/src/compiler/scala/tools/nsc/interactive/tests/InteractiveTest.scala b/src/interactive/scala/tools/nsc/interactive/tests/InteractiveTest.scala
index 597b9012ce..f30d896fb7 100644
--- a/src/compiler/scala/tools/nsc/interactive/tests/InteractiveTest.scala
+++ b/src/interactive/scala/tools/nsc/interactive/tests/InteractiveTest.scala
@@ -7,14 +7,6 @@ package interactive
package tests
import core._
-
-import java.io.File.pathSeparatorChar
-import java.io.File.separatorChar
-
-import scala.annotation.migration
-import scala.reflect.internal.util.Position
-import scala.reflect.internal.util.SourceFile
-
import scala.collection.mutable.ListBuffer
/** A base class for writing interactive compiler tests.
@@ -82,7 +74,7 @@ abstract class InteractiveTest
/** Test's entry point */
def main(args: Array[String]) {
try execute()
- finally shutdown()
+ finally askShutdown()
}
protected def execute(): Unit = {
@@ -109,6 +101,7 @@ abstract class InteractiveTest
}
/** Perform n random tests with random changes. */
+ /****
private def randomTests(n: Int, files: Array[SourceFile]) {
val tester = new Tester(n, files, settings) {
override val compiler = self.compiler
@@ -116,14 +109,5 @@ abstract class InteractiveTest
}
tester.run()
}
-
- /** shutdown the presentation compiler. */
- protected def shutdown() {
- askShutdown()
-
- // this is actually needed to force exit on test completion.
- // Note: May be a bug on either the testing framework or (less likely)
- // the presentation compiler
- sys.exit(0)
- }
+ ****/
}
diff --git a/src/compiler/scala/tools/nsc/interactive/tests/InteractiveTestSettings.scala b/src/interactive/scala/tools/nsc/interactive/tests/InteractiveTestSettings.scala
index 4d85ab9d88..ad5c61b2b0 100644
--- a/src/compiler/scala/tools/nsc/interactive/tests/InteractiveTestSettings.scala
+++ b/src/interactive/scala/tools/nsc/interactive/tests/InteractiveTestSettings.scala
@@ -25,7 +25,6 @@ trait InteractiveTestSettings extends TestSettings with PresentationCompilerInst
* test.
*/
override protected def prepareSettings(settings: Settings) {
- import java.io.File._
def adjustPaths(paths: settings.PathSetting*) {
for (p <- paths if argsString.contains(p.name)) p.value = p.value.map {
case '/' => separatorChar
@@ -45,10 +44,10 @@ trait InteractiveTestSettings extends TestSettings with PresentationCompilerInst
case _ => ()
}
- // Make the --sourcepath path provided in the .flags file (if any) relative to the test's base directory
+ // Make the --sourcepath path provided in the .flags file (if any) relative to the test's base directory
if(settings.sourcepath.isSetByUser)
settings.sourcepath.value = (baseDir / Path(settings.sourcepath.value)).path
-
+
adjustPaths(settings.bootclasspath, settings.classpath, settings.javabootclasspath, settings.sourcepath)
}
@@ -67,4 +66,4 @@ trait InteractiveTestSettings extends TestSettings with PresentationCompilerInst
reporter.println("\targsString: %s".format(argsString))
super.printClassPath(reporter)
}
-}
\ No newline at end of file
+}
diff --git a/src/compiler/scala/tools/nsc/interactive/tests/Tester.scala b/src/interactive/scala/tools/nsc/interactive/tests/Tester.scala
index 26aabbd3e6..4390d5dc54 100644
--- a/src/compiler/scala/tools/nsc/interactive/tests/Tester.scala
+++ b/src/interactive/scala/tools/nsc/interactive/tests/Tester.scala
@@ -2,7 +2,8 @@
* Copyright 2009-2013 Typesafe/Scala Solutions and LAMP/EPFL
* @author Martin Odersky
*/
-package scala.tools.nsc
+package scala
+package tools.nsc
package interactive
package tests
@@ -17,7 +18,7 @@ class Tester(ntests: Int, inputs: Array[SourceFile], settings: Settings) {
val compiler = new Global(settings, reporter)
def askAndListen[T, U](msg: String, arg: T, op: (T, Response[U]) => Unit) {
- if (settings.verbose.value) print(msg+" "+arg+": ")
+ if (settings.verbose) print(msg+" "+arg+": ")
val TIMEOUT = 10 // ms
val limit = System.currentTimeMillis() + randomDelayMillis
val res = new Response[U]
@@ -25,10 +26,10 @@ class Tester(ntests: Int, inputs: Array[SourceFile], settings: Settings) {
while (!res.isComplete && !res.isCancelled) {
if (System.currentTimeMillis() > limit) {
print("c"); res.cancel()
- } else res.get(TIMEOUT) match {
+ } else res.get(TIMEOUT.toLong) match {
case Some(Left(t)) =>
/**/
- if (settings.verbose.value) println(t)
+ if (settings.verbose) println(t)
case Some(Right(ex)) =>
ex.printStackTrace()
println(ex)
@@ -199,7 +200,7 @@ class Tester(ntests: Int, inputs: Array[SourceFile], settings: Settings) {
object Tester {
def main(args: Array[String]) {
val settings = new Settings()
- val (_, filenames) = settings.processArguments(args.toList.tail, true)
+ val (_, filenames) = settings.processArguments(args.toList.tail, processAll = true)
println("filenames = "+filenames)
val files = filenames.toArray map (str => new BatchSourceFile(AbstractFile.getFile(str)): SourceFile)
new Tester(args(0).toInt, files, settings).run()
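
Editor's note: askAndListen above polls a Response with a timeout and cancels it once a deadline passes. A sketch of that polling pattern, reusing only the Response operations visible in the hunk (isComplete, isCancelled, cancel, get(timeout)); the helper itself is not part of the patch:

    import scala.tools.nsc.interactive.Response

    object PollSketch {
      // Returns the answer if it arrives before deadlineMillis, otherwise cancels the request.
      def await[T](res: Response[T], deadlineMillis: Long, stepMillis: Long = 10L): Option[T] = {
        while (!res.isComplete && !res.isCancelled) {
          if (System.currentTimeMillis() > deadlineMillis) res.cancel()
          else res.get(stepMillis) match {
            case Some(Left(value)) => return Some(value)
            case Some(Right(ex))   => ex.printStackTrace(); return None
            case None              => // not answered yet, keep polling
          }
        }
        None
      }
    }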
diff --git a/src/compiler/scala/tools/nsc/interactive/tests/core/AskCommand.scala b/src/interactive/scala/tools/nsc/interactive/tests/core/AskCommand.scala
index 8d446cbbf8..8d446cbbf8 100644
--- a/src/compiler/scala/tools/nsc/interactive/tests/core/AskCommand.scala
+++ b/src/interactive/scala/tools/nsc/interactive/tests/core/AskCommand.scala
diff --git a/src/compiler/scala/tools/nsc/interactive/tests/core/CoreTestDefs.scala b/src/interactive/scala/tools/nsc/interactive/tests/core/CoreTestDefs.scala
index c8e6b6ccce..e28bf20745 100644
--- a/src/compiler/scala/tools/nsc/interactive/tests/core/CoreTestDefs.scala
+++ b/src/interactive/scala/tools/nsc/interactive/tests/core/CoreTestDefs.scala
@@ -3,7 +3,6 @@ package interactive
package tests.core
import scala.reflect.internal.util.Position
-import scala.tools.nsc.interactive.tests.core._
/** Set of core test definitions that are executed for each test run. */
private[tests] trait CoreTestDefs
@@ -17,21 +16,18 @@ private[tests] trait CoreTestDefs
extends PresentationCompilerTestDef
with AskCompletionAt {
- def memberPrinter(member: compiler.Member): String =
- "[accessible: %5s] ".format(member.accessible) + "`" + (member.sym.toString() + member.tpe.toString()).trim() + "`"
-
override def runTest() {
askAllSources(CompletionMarker) { pos =>
askCompletionAt(pos)
} { (pos, members) =>
withResponseDelimiter {
- reporter.println("[response] aksTypeCompletion at " + format(pos))
+ reporter.println("[response] askCompletionAt " + format(pos))
// we skip getClass because it changed signature between 1.5 and 1.6, so there is no
// universal check file that we can provide for this to work
reporter.println("retrieved %d members".format(members.size))
compiler ask { () =>
- val filtered = members.filterNot(member => member.sym.name.toString == "getClass" || member.sym.isConstructor)
- reporter.println(filtered.map(memberPrinter).sortBy(_.toString()).mkString("\n"))
+ val filtered = members.filterNot(member => (member.sym.name string_== "getClass") || member.sym.isConstructor)
+ reporter println (filtered.map(_.forceInfoString).sorted mkString "\n")
}
}
}
@@ -49,7 +45,7 @@ private[tests] trait CoreTestDefs
askTypeAt(pos)
} { (pos, tree) =>
withResponseDelimiter {
- reporter.println("[response] askTypeAt at " + format(pos))
+ reporter.println("[response] askTypeAt " + format(pos))
compiler.ask(() => reporter.println(tree))
}
}
@@ -77,7 +73,8 @@ private[tests] trait CoreTestDefs
// askHyperlinkPos for `Int` at (73,19) pi.scala --> class Int in package scala has null sourceFile!
val treePath = if (tree.symbol.sourceFile ne null) tree.symbol.sourceFile.path else null
val treeName = if (tree.symbol.sourceFile ne null) tree.symbol.sourceFile.name else null
- val sourceFile = sourceFiles.find(_.path == treePath) match {
+
+ sourceFiles.find(_.path == treePath) match {
case Some(source) =>
compiler.askLinkPos(tree.symbol, source, r)
r.get match {
@@ -97,4 +94,4 @@ private[tests] trait CoreTestDefs
}
}
}
-}
\ No newline at end of file
+}
diff --git a/src/compiler/scala/tools/nsc/interactive/tests/core/PresentationCompilerInstance.scala b/src/interactive/scala/tools/nsc/interactive/tests/core/PresentationCompilerInstance.scala
index f304eda753..9a2abd5139 100644
--- a/src/compiler/scala/tools/nsc/interactive/tests/core/PresentationCompilerInstance.scala
+++ b/src/interactive/scala/tools/nsc/interactive/tests/core/PresentationCompilerInstance.scala
@@ -3,7 +3,6 @@ package interactive
package tests.core
import reporters.{Reporter => CompilerReporter}
-import scala.reflect.internal.util.Position
/** Trait encapsulating the creation of a presentation compiler's instance.*/
private[tests] trait PresentationCompilerInstance extends TestSettings {
@@ -14,11 +13,16 @@ private[tests] trait PresentationCompilerInstance extends TestSettings {
override def compiler = PresentationCompilerInstance.this.compiler
}
+ private class ScaladocEnabledGlobal extends Global(settings, compilerReporter) {
+ override lazy val analyzer = new {
+ val global: ScaladocEnabledGlobal.this.type = ScaladocEnabledGlobal.this
+ } with InteractiveScaladocAnalyzer
+ }
+
protected lazy val compiler: Global = {
prepareSettings(settings)
- new Global(settings, compilerReporter) {
- override def forScaladoc = withDocComments
- }
+ if (withDocComments) new ScaladocEnabledGlobal
+ else new Global(settings, compilerReporter)
}
/**
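
Editor's note: ScaladocEnabledGlobal above swaps in a different analyzer by pinning the component's global member through an early-definition refinement. A stand-alone sketch of that composition pattern; CompilerLike, AnalyzerComponent and MyAnalyzer are hypothetical stand-ins for Global and InteractiveScaladocAnalyzer:

    trait CompilerLike {
      trait AnalyzerComponent {
        val global: CompilerLike
        def describe: String = "analyzer wired to " + global
      }
    }

    class MyCompiler extends CompilerLike {
      trait MyAnalyzer extends AnalyzerComponent
      // Same shape as: new { val global: ScaladocEnabledGlobal.this.type = ... } with InteractiveScaladocAnalyzer
      lazy val analyzer = new { val global: MyCompiler.this.type = MyCompiler.this } with MyAnalyzer
    }

    object CakeSketch {
      def main(args: Array[String]): Unit =
        println((new MyCompiler).analyzer.describe)
    }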
diff --git a/src/compiler/scala/tools/nsc/interactive/tests/core/PresentationCompilerRequestsWorkingMode.scala b/src/interactive/scala/tools/nsc/interactive/tests/core/PresentationCompilerRequestsWorkingMode.scala
index b5ae5f2d75..b5ae5f2d75 100644
--- a/src/compiler/scala/tools/nsc/interactive/tests/core/PresentationCompilerRequestsWorkingMode.scala
+++ b/src/interactive/scala/tools/nsc/interactive/tests/core/PresentationCompilerRequestsWorkingMode.scala
diff --git a/src/compiler/scala/tools/nsc/interactive/tests/core/PresentationCompilerTestDef.scala b/src/interactive/scala/tools/nsc/interactive/tests/core/PresentationCompilerTestDef.scala
index 9cf2aa4fe4..4d5b4e1129 100644
--- a/src/compiler/scala/tools/nsc/interactive/tests/core/PresentationCompilerTestDef.scala
+++ b/src/interactive/scala/tools/nsc/interactive/tests/core/PresentationCompilerTestDef.scala
@@ -1,6 +1,5 @@
package scala.tools.nsc.interactive.tests.core
-import scala.tools.nsc.interactive.Global
import scala.reflect.internal.util.Position
trait PresentationCompilerTestDef {
@@ -16,4 +15,4 @@ trait PresentationCompilerTestDef {
protected def format(pos: Position): String =
(if(pos.isDefined) "(%d,%d)".format(pos.line, pos.column) else "<no position>")
-}
\ No newline at end of file
+}
diff --git a/src/compiler/scala/tools/nsc/interactive/tests/core/Reporter.scala b/src/interactive/scala/tools/nsc/interactive/tests/core/Reporter.scala
index 631504cda5..631504cda5 100644
--- a/src/compiler/scala/tools/nsc/interactive/tests/core/Reporter.scala
+++ b/src/interactive/scala/tools/nsc/interactive/tests/core/Reporter.scala
diff --git a/src/compiler/scala/tools/nsc/interactive/tests/core/SourcesCollector.scala b/src/interactive/scala/tools/nsc/interactive/tests/core/SourcesCollector.scala
index e80b741a8d..40cfc111a1 100644
--- a/src/compiler/scala/tools/nsc/interactive/tests/core/SourcesCollector.scala
+++ b/src/interactive/scala/tools/nsc/interactive/tests/core/SourcesCollector.scala
@@ -4,7 +4,6 @@ import scala.reflect.internal.util.{SourceFile,BatchSourceFile}
import scala.tools.nsc.io.{AbstractFile,Path}
private[tests] object SourcesCollector {
- import Path._
type SourceFilter = Path => Boolean
/**
@@ -12,11 +11,10 @@ private[tests] object SourcesCollector {
* With the default `filter` only .scala and .java files are collected.
* */
def apply(base: Path, filter: SourceFilter): Array[SourceFile] = {
- assert(base.isDirectory)
+ assert(base.isDirectory, base + " is not a directory")
base.walk.filter(filter).map(source).toList.toArray.sortBy(_.file.name)
}
private def source(file: Path): SourceFile = source(AbstractFile.getFile(file.toFile))
- private def source(filename: String): SourceFile = source(AbstractFile.getFile(filename))
private def source(file: AbstractFile): SourceFile = new BatchSourceFile(file)
-}
\ No newline at end of file
+}
diff --git a/src/compiler/scala/tools/nsc/interactive/tests/core/TestMarker.scala b/src/interactive/scala/tools/nsc/interactive/tests/core/TestMarker.scala
index ba1722382b..a5c228a549 100644
--- a/src/compiler/scala/tools/nsc/interactive/tests/core/TestMarker.scala
+++ b/src/interactive/scala/tools/nsc/interactive/tests/core/TestMarker.scala
@@ -16,7 +16,7 @@ object TestMarker {
}
}
-abstract case class TestMarker(val marker: String) {
+abstract case class TestMarker(marker: String) {
TestMarker.checkForDuplicate(this)
}
diff --git a/src/compiler/scala/tools/nsc/interactive/tests/core/TestResources.scala b/src/interactive/scala/tools/nsc/interactive/tests/core/TestResources.scala
index 887c3cf29b..887c3cf29b 100644
--- a/src/compiler/scala/tools/nsc/interactive/tests/core/TestResources.scala
+++ b/src/interactive/scala/tools/nsc/interactive/tests/core/TestResources.scala
diff --git a/src/compiler/scala/tools/nsc/interactive/tests/core/TestSettings.scala b/src/interactive/scala/tools/nsc/interactive/tests/core/TestSettings.scala
index 681204172b..681204172b 100644
--- a/src/compiler/scala/tools/nsc/interactive/tests/core/TestSettings.scala
+++ b/src/interactive/scala/tools/nsc/interactive/tests/core/TestSettings.scala
diff --git a/src/jline/build.sbt b/src/jline/build.sbt
index 4fc3bab28a..873f7574f1 100644
--- a/src/jline/build.sbt
+++ b/src/jline/build.sbt
@@ -4,9 +4,9 @@ name := "jline"
organization := "org.scala-lang"
-version := "2.10.0-SNAPSHOT"
+version := "2.11.0-SNAPSHOT"
-scalaVersion := "2.9.0-1"
+scalaVersion := "2.10.1"
// Only need these because of weird testing jline issues.
retrieveManaged := true
@@ -14,11 +14,11 @@ retrieveManaged := true
parallelExecution in Test := false
libraryDependencies ++= Seq(
- "org.fusesource.jansi" % "jansi" % "1.4",
- "com.novocode" % "junit-interface" % "0.7" % "test->default"
+ "org.fusesource.jansi" % "jansi" % "1.10",
+ "com.novocode" % "junit-interface" % "0.9" % "test->default"
)
-javacOptions ++= Seq("-target", "1.5")
+javacOptions ++= Seq("-source", "1.5", "-target", "1.5")
proguardOptions ++= Seq(
"-dontshrink",
diff --git a/src/jline/manual-test.sh b/src/jline/manual-test.sh
index aa5131c903..744e1756e8 100755
--- a/src/jline/manual-test.sh
+++ b/src/jline/manual-test.sh
@@ -3,6 +3,7 @@
# Apparently the jline bundled with sbt interferes with testing some
# changes: for instance after changing the keybindings I kept seeing
# failures until I realized what was happening and bypassed sbt, like this.
+CP=lib_managed/jars/com.novocode/junit-interface/junit-interface-0.9.jar:lib_managed/jars/junit/junit-dep/junit-dep-4.8.2.jar:lib_managed/jars/org.fusesource.jansi/jansi/jansi-1.10.jar:lib_managed/jars/org.hamcrest/hamcrest-core/hamcrest-core-1.1.jar:lib_managed/jars/org.scala-tools.testing/test-interface/test-interface-0.5.jar:target/scala-2.10/test-classes:target/scala-2.10/jline_2.10-2.11.0-SNAPSHOT.min.jar
-java -cp lib_managed/jar/com.novocode/junit-interface/junit-interface-0.5.jar:lib_managed/jar/junit/junit/junit-4.8.1.jar:lib_managed/jar/org.fusesource.jansi/jansi/jansi-1.4.jar:lib_managed/jar/org.scala-tools.testing/test-interface/test-interface-0.5.jar:target/scala-2.9.0.1/test-classes:target/scala-2.9.0.1/jline_2.9.0-1-2.10.0-SNAPSHOT.jar \
-org.junit.runner.JUnitCore scala.tools.jline.console.EditLineTest
+sbt proguard
+java -cp $CP org.junit.runner.JUnitCore scala.tools.jline.console.EditLineTest
diff --git a/src/jline/project/build.properties b/src/jline/project/build.properties
new file mode 100644
index 0000000000..9b860e23c5
--- /dev/null
+++ b/src/jline/project/build.properties
@@ -0,0 +1 @@
+sbt.version=0.12.3
diff --git a/src/jline/project/plugins.sbt b/src/jline/project/plugins.sbt
new file mode 100644
index 0000000000..9c13de92d8
--- /dev/null
+++ b/src/jline/project/plugins.sbt
@@ -0,0 +1,3 @@
+resolvers += Resolver.url("sbt-plugin-releases-scalasbt", url("http://repo.scala-sbt.org/scalasbt/sbt-plugin-releases/"))(Resolver.ivyStylePatterns)
+
+addSbtPlugin("org.scala-sbt" % "xsbt-proguard-plugin" % "0.1.3")
diff --git a/src/jline/project/plugins/build.sbt b/src/jline/project/plugins/build.sbt
deleted file mode 100644
index 0e0f27b1eb..0000000000
--- a/src/jline/project/plugins/build.sbt
+++ /dev/null
@@ -1,5 +0,0 @@
-resolvers += "Proguard plugin repo" at "http://siasia.github.com/maven2"
-
-libraryDependencies <<= (libraryDependencies, appConfiguration) { (deps, app) =>
- deps :+ "com.github.siasia" %% "xsbt-proguard-plugin" % app.provider.id.version
-}
diff --git a/src/jline/src/main/java/scala/tools/jline/console/ConsoleReader.java b/src/jline/src/main/java/scala/tools/jline/console/ConsoleReader.java
index 9df42708bb..a375b84a5c 100644
--- a/src/jline/src/main/java/scala/tools/jline/console/ConsoleReader.java
+++ b/src/jline/src/main/java/scala/tools/jline/console/ConsoleReader.java
@@ -1712,7 +1712,7 @@ public class ConsoleReader
}
/**
- * Output a platform-dependant newline.
+ * Output a platform-dependent newline.
*/
public final void println() throws IOException {
print(CR);
diff --git a/src/library-aux/scala/Any.scala b/src/library-aux/scala/Any.scala
index 07a9ffa8d5..1c25989c30 100644
--- a/src/library-aux/scala/Any.scala
+++ b/src/library-aux/scala/Any.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library-aux/scala/AnyRef.scala b/src/library-aux/scala/AnyRef.scala
index 7d8b9f9e76..362fbcf0f5 100644
--- a/src/library-aux/scala/AnyRef.scala
+++ b/src/library-aux/scala/AnyRef.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library-aux/scala/Nothing.scala b/src/library-aux/scala/Nothing.scala
index eed6066039..57f6fac3f9 100644
--- a/src/library-aux/scala/Nothing.scala
+++ b/src/library-aux/scala/Nothing.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library-aux/scala/Null.scala b/src/library-aux/scala/Null.scala
index 7455e78ae7..931beb2d1a 100644
--- a/src/library-aux/scala/Null.scala
+++ b/src/library-aux/scala/Null.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2010, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/AnyVal.scala b/src/library/scala/AnyVal.scala
index 0d6ba2454c..9def6cb054 100644
--- a/src/library/scala/AnyVal.scala
+++ b/src/library/scala/AnyVal.scala
@@ -52,6 +52,6 @@ package scala
* as well as in [[http://docs.scala-lang.org/sips/pending/value-classes.html SIP-15: Value Classes]],
* the Scala Improvement Proposal.
*/
-abstract class AnyVal extends Any with NotNull {
+abstract class AnyVal extends Any {
def getClass(): Class[_ <: AnyVal] = null
}
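
Editor's note: the hunk above drops NotNull from AnyVal, and the surrounding scaladoc points at SIP-15 value classes as the user-facing way to extend AnyVal. A minimal value-class sketch, not part of the patch; the names are invented:

    // A value class: one val parameter, no fields of its own, usually unboxed at runtime.
    class Meters(val value: Double) extends AnyVal {
      def +(other: Meters): Meters = new Meters(value + other.value)
      override def toString = value.toString + " m"
    }

    object MetersDemo {
      def main(args: Array[String]): Unit = {
        val d = new Meters(1.5) + new Meters(2.5)
        println(d)   // prints "4.0 m"; boxing only happens here because println takes Any
      }
    }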
diff --git a/src/library/scala/Application.scala b/src/library/scala/Application.scala
deleted file mode 100644
index e7db0d2db8..0000000000
--- a/src/library/scala/Application.scala
+++ /dev/null
@@ -1,79 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala
-
-import scala.compat.Platform.currentTime
-
-/** The `Application` trait can be used to quickly turn objects
- * into executable programs, but is ''not recommended''.
- * Here is an example:
- * {{{
- * object Main extends Application {
- * Console.println("Hello World!")
- * }
- * }}}
- * Here, object `Main` inherits the `main` method of `Application`.
- * The body of the `Main` object defines the main program. This technique
- * does not work if the main program depends on command-line arguments
- * (which are not accessible with the technique presented here).
- *
- * It is possible to time the execution of objects that inherit from class
- * `Application` by setting the global `scala.time`
- * property. Here is an example for benchmarking object `Main`:
- * {{{
- * java -Dscala.time Main
- * }}}
- * In practice the `Application` trait has a number of serious pitfalls:
- *
- * - Threaded code that references the object will block until static
- * initialization is complete. However, because the entire execution
- * of an `object` extending `Application` takes place during
- * static initialization, concurrent code will ''always'' deadlock if
- * it must synchronize with the enclosing object.
- * - As described above, there is no way to obtain the
- * command-line arguments because all code in body of an `object`
- * extending `Application` is run as part of the static initialization
- * which occurs before `Application`'s `main` method
- * even begins execution.
- * - Static initializers are run only once during program execution, and
- * JVM authors usually assume their execution to be relatively short.
- * Therefore, certain JVM configurations may become confused, or simply
- * fail to optimize or JIT the code in the body of an `object` extending
- * `Application`. This can lead to a significant performance degradation.
- *
- * It is recommended to use the [[scala.App]] trait instead.
- * {{{
- * object Main {
- * def main(args: Array[String]) {
- * //..
- * }
- * }
- * }}}
- *
- * @author Matthias Zenger
- * @version 1.0, 10/09/2003
- */
-@deprecated("use App instead", "2.9.0")
-trait Application {
-
- /** The time when the execution of this program started,
- * in milliseconds since 1 January 1970 UTC. */
- val executionStart: Long = currentTime
-
- /** The default main method.
- *
- * @param args the arguments passed to the main method
- */
- def main(args: Array[String]) {
- if (util.Properties propIsSet "scala.time") {
- val total = currentTime - executionStart
- Console.println("[total " + total + "ms]")
- }
- }
-}
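
Editor's note: the deleted scaladoc recommends scala.App over Application. A before/after sketch of that migration, with hypothetical object names:

    // Before (deprecated since 2.9.0, removed by this patch):
    //   object HelloOld extends Application { println("Hello World!") }

    // After: App still runs the object body as the program, but not inside a static initializer,
    // and it exposes the command-line arguments as `args`.
    object HelloNew extends App {
      println("Hello World!")
      if (args.nonEmpty) println("first argument: " + args(0))
    }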
diff --git a/src/library/scala/Array.scala b/src/library/scala/Array.scala
index b9f51803ec..6ab82d998e 100644
--- a/src/library/scala/Array.scala
+++ b/src/library/scala/Array.scala
@@ -240,9 +240,9 @@ object Array extends FallbackArrayBuilding {
*/
def concat[T: ClassTag](xss: Array[T]*): Array[T] = {
val b = newBuilder[T]
- b.sizeHint(xss.map(_.size).sum)
+ b.sizeHint(xss.map(_.length).sum)
for (xs <- xss) b ++= xs
- b.result
+ b.result()
}
/** Returns an array that contains the results of some element computation a number
@@ -267,7 +267,7 @@ object Array extends FallbackArrayBuilding {
b += elem
i += 1
}
- b.result
+ b.result()
}
/** Returns a two-dimensional array that contains the results of some element
@@ -331,7 +331,7 @@ object Array extends FallbackArrayBuilding {
b += f(i)
i += 1
}
- b.result
+ b.result()
}
/** Returns a two-dimensional array containing values of a given function
@@ -399,14 +399,14 @@ object Array extends FallbackArrayBuilding {
def range(start: Int, end: Int, step: Int): Array[Int] = {
if (step == 0) throw new IllegalArgumentException("zero step")
val b = newBuilder[Int]
- b.sizeHint(immutable.Range.count(start, end, step, false))
+ b.sizeHint(immutable.Range.count(start, end, step, isInclusive = false))
var i = start
while (if (step < 0) end < i else i < end) {
b += i
i += step
}
- b.result
+ b.result()
}
/** Returns an array containing repeated applications of a function to a start value.
@@ -431,7 +431,7 @@ object Array extends FallbackArrayBuilding {
b += acc
}
}
- b.result
+ b.result()
}
/** Called in a pattern match like `{ case Array(x,y,z) => println('3 elements')}`.
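
Editor's note: the hunks above switch Array.concat and friends to explicit result() calls alongside sizeHint. A sketch of that builder pattern outside the standard library; concat2 is a made-up helper:

    import scala.reflect.ClassTag

    object ConcatSketch {
      def concat2[T: ClassTag](xs: Array[T], ys: Array[T]): Array[T] = {
        val b = Array.newBuilder[T]
        b.sizeHint(xs.length + ys.length)  // pre-size the builder, as Array.concat does above
        b ++= xs
        b ++= ys
        b.result()                         // written with () because it is not a pure accessor
      }

      def main(args: Array[String]): Unit =
        println(concat2(Array(1, 2), Array(3, 4)).mkString(","))   // 1,2,3,4
    }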
diff --git a/src/library/scala/Boolean.scala b/src/library/scala/Boolean.scala
index 440e546f19..ddd11257c6 100644
--- a/src/library/scala/Boolean.scala
+++ b/src/library/scala/Boolean.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -116,6 +116,8 @@ object Boolean extends AnyValCompanion {
/** Transform a value type into a boxed reference type.
*
+ * Runtime implementation determined by `scala.runtime.BoxesRunTime.boxToBoolean`. See [[https://github.com/scala/scala src/library/scala/runtime/BoxesRunTime.java]].
+ *
* @param x the Boolean to be boxed
* @return a java.lang.Boolean offering `x` as its underlying value.
*/
@@ -125,6 +127,8 @@ object Boolean extends AnyValCompanion {
* method is not typesafe: it accepts any Object, but will throw
* an exception if the argument is not a java.lang.Boolean.
*
+ * Runtime implementation determined by `scala.runtime.BoxesRunTime.unboxToBoolean`. See [[https://github.com/scala/scala src/library/scala/runtime/BoxesRunTime.java]].
+ *
* @param x the java.lang.Boolean to be unboxed.
* @throws ClassCastException if the argument is not a java.lang.Boolean
* @return the Boolean resulting from calling booleanValue() on `x`
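
Editor's note: the added scaladoc says boxing and unboxing are implemented by scala.runtime.BoxesRunTime. A tiny sketch of the corresponding companion-object entry points; not part of the patch:

    object BoxingSketch {
      def main(args: Array[String]): Unit = {
        val boxed: java.lang.Boolean = Boolean.box(true)      // delegates to BoxesRunTime.boxToBoolean
        val unboxed: Boolean         = Boolean.unbox(boxed)   // delegates to BoxesRunTime.unboxToBoolean
        println(boxed.getClass.getName)  // java.lang.Boolean
        println(unboxed)                 // true
      }
    }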
diff --git a/src/library/scala/Byte.scala b/src/library/scala/Byte.scala
index df0d2c73b1..2510e859c0 100644
--- a/src/library/scala/Byte.scala
+++ b/src/library/scala/Byte.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -606,6 +606,8 @@ object Byte extends AnyValCompanion {
/** Transform a value type into a boxed reference type.
*
+ * Runtime implementation determined by `scala.runtime.BoxesRunTime.boxToByte`. See [[https://github.com/scala/scala src/library/scala/runtime/BoxesRunTime.java]].
+ *
* @param x the Byte to be boxed
* @return a java.lang.Byte offering `x` as its underlying value.
*/
@@ -615,6 +617,8 @@ object Byte extends AnyValCompanion {
* method is not typesafe: it accepts any Object, but will throw
* an exception if the argument is not a java.lang.Byte.
*
+ * Runtime implementation determined by `scala.runtime.BoxesRunTime.unboxToByte`. See [[https://github.com/scala/scala src/library/scala/runtime/BoxesRunTime.java]].
+ *
* @param x the java.lang.Byte to be unboxed.
* @throws ClassCastException if the argument is not a java.lang.Byte
* @return the Byte resulting from calling byteValue() on `x`
diff --git a/src/library/scala/Char.scala b/src/library/scala/Char.scala
index 1fa0c0d9e8..1c9a2ba44f 100644
--- a/src/library/scala/Char.scala
+++ b/src/library/scala/Char.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -606,6 +606,8 @@ object Char extends AnyValCompanion {
/** Transform a value type into a boxed reference type.
*
+ * Runtime implementation determined by `scala.runtime.BoxesRunTime.boxToCharacter`. See [[https://github.com/scala/scala src/library/scala/runtime/BoxesRunTime.java]].
+ *
* @param x the Char to be boxed
* @return a java.lang.Character offering `x` as its underlying value.
*/
@@ -615,6 +617,8 @@ object Char extends AnyValCompanion {
* method is not typesafe: it accepts any Object, but will throw
* an exception if the argument is not a java.lang.Character.
*
+ * Runtime implementation determined by `scala.runtime.BoxesRunTime.unboxToChar`. See [[https://github.com/scala/scala src/library/scala/runtime/BoxesRunTime.java]].
+ *
* @param x the java.lang.Character to be unboxed.
* @throws ClassCastException if the argument is not a java.lang.Character
* @return the Char resulting from calling charValue() on `x`
diff --git a/src/library/scala/Console.scala b/src/library/scala/Console.scala
index 5b015502ea..275d7629ee 100644
--- a/src/library/scala/Console.scala
+++ b/src/library/scala/Console.scala
@@ -6,16 +6,12 @@
** |/ **
\* */
-
-
package scala
-import java.io.{BufferedReader, InputStream, InputStreamReader,
- IOException, OutputStream, PrintStream, Reader}
-import java.text.MessageFormat
+import java.io.{ BufferedReader, InputStream, InputStreamReader, OutputStream, PrintStream, Reader }
+import scala.io.{ AnsiColor, ReadStdin }
import scala.util.DynamicVariable
-
/** Implements functionality for
* printing Scala values on the terminal as well as reading specific values.
* Also defines constants for marking up text on ANSI terminals.
@@ -23,60 +19,16 @@ import scala.util.DynamicVariable
* @author Matthias Zenger
* @version 1.0, 03/09/2003
*/
-object Console {
-
- /** Foreground color for ANSI black */
- final val BLACK = "\033[30m"
- /** Foreground color for ANSI red */
- final val RED = "\033[31m"
- /** Foreground color for ANSI green */
- final val GREEN = "\033[32m"
- /** Foreground color for ANSI yellow */
- final val YELLOW = "\033[33m"
- /** Foreground color for ANSI blue */
- final val BLUE = "\033[34m"
- /** Foreground color for ANSI magenta */
- final val MAGENTA = "\033[35m"
- /** Foreground color for ANSI cyan */
- final val CYAN = "\033[36m"
- /** Foreground color for ANSI white */
- final val WHITE = "\033[37m"
-
- /** Background color for ANSI black */
- final val BLACK_B = "\033[40m"
- /** Background color for ANSI red */
- final val RED_B = "\033[41m"
- /** Background color for ANSI green */
- final val GREEN_B = "\033[42m"
- /** Background color for ANSI yellow */
- final val YELLOW_B = "\033[43m"
- /** Background color for ANSI blue */
- final val BLUE_B = "\033[44m"
- /** Background color for ANSI magenta */
- final val MAGENTA_B = "\033[45m"
- /** Background color for ANSI cyan */
- final val CYAN_B = "\033[46m"
- /** Background color for ANSI white */
- final val WHITE_B = "\033[47m"
-
- /** Reset ANSI styles */
- final val RESET = "\033[0m"
- /** ANSI bold */
- final val BOLD = "\033[1m"
- /** ANSI underlines */
- final val UNDERLINED = "\033[4m"
- /** ANSI blink */
- final val BLINK = "\033[5m"
- /** ANSI reversed */
- final val REVERSED = "\033[7m"
- /** ANSI invisible */
- final val INVISIBLE = "\033[8m"
-
+object Console extends DeprecatedConsole with AnsiColor {
private val outVar = new DynamicVariable[PrintStream](java.lang.System.out)
private val errVar = new DynamicVariable[PrintStream](java.lang.System.err)
- private val inVar = new DynamicVariable[BufferedReader](
+ private val inVar = new DynamicVariable[BufferedReader](
new BufferedReader(new InputStreamReader(java.lang.System.in)))
+ protected def setOutDirect(out: PrintStream): Unit = outVar.value = out
+ protected def setErrDirect(err: PrintStream): Unit = errVar.value = err
+ protected def setInDirect(in: BufferedReader): Unit = inVar.value = in
+
/** The default output, can be overridden by `setOut` */
def out = outVar.value
/** The default error, can be overridden by `setErr` */
@@ -84,12 +36,6 @@ object Console {
/** The default input, can be overridden by `setIn` */
def in = inVar.value
- /** Sets the default output stream.
- *
- * @param out the new output stream.
- */
- def setOut(out: PrintStream) { outVar.value = out }
-
/** Sets the default output stream for the duration
* of execution of one thunk.
*
@@ -106,13 +52,6 @@ object Console {
def withOut[T](out: PrintStream)(thunk: =>T): T =
outVar.withValue(out)(thunk)
- /** Sets the default output stream.
- *
- * @param out the new output stream.
- */
- def setOut(out: OutputStream): Unit =
- setOut(new PrintStream(out))
-
/** Sets the default output stream for the duration
* of execution of one thunk.
*
@@ -125,13 +64,6 @@ object Console {
def withOut[T](out: OutputStream)(thunk: =>T): T =
withOut(new PrintStream(out))(thunk)
-
- /** Sets the default error stream.
- *
- * @param err the new error stream.
- */
- def setErr(err: PrintStream) { errVar.value = err }
-
/** Set the default error stream for the duration
* of execution of one thunk.
* @example {{{
@@ -147,13 +79,6 @@ object Console {
def withErr[T](err: PrintStream)(thunk: =>T): T =
errVar.withValue(err)(thunk)
- /** Sets the default error stream.
- *
- * @param err the new error stream.
- */
- def setErr(err: OutputStream): Unit =
- setErr(new PrintStream(err))
-
/** Sets the default error stream for the duration
* of execution of one thunk.
*
@@ -166,15 +91,6 @@ object Console {
def withErr[T](err: OutputStream)(thunk: =>T): T =
withErr(new PrintStream(err))(thunk)
-
- /** Sets the default input stream.
- *
- * @param reader specifies the new input stream.
- */
- def setIn(reader: Reader) {
- inVar.value = new BufferedReader(reader)
- }
-
/** Sets the default input stream for the duration
* of execution of one thunk.
*
@@ -195,14 +111,6 @@ object Console {
def withIn[T](reader: Reader)(thunk: =>T): T =
inVar.withValue(new BufferedReader(reader))(thunk)
- /** Sets the default input stream.
- *
- * @param in the new input stream.
- */
- def setIn(in: InputStream) {
- setIn(new InputStreamReader(in))
- }
-
/** Sets the default input stream for the duration
* of execution of one thunk.
*
@@ -251,218 +159,64 @@ object Console {
* @throws java.lang.IllegalArgumentException if there was a problem with the format string or arguments
*/
def printf(text: String, args: Any*) { out.print(text format (args : _*)) }
+}
- /** Read a full line from the default input. Returns `null` if the end of the
- * input stream has been reached.
- *
- * @return the string read from the terminal or null if the end of stream was reached.
- */
- def readLine(): String = in.readLine()
-
- /** Print formatted text to the default output and read a full line from the default input.
- * Returns `null` if the end of the input stream has been reached.
- *
- * @param text the format of the text to print out, as in `printf`.
- * @param args the parameters used to instantiate the format, as in `printf`.
- * @return the string read from the default input
- */
- def readLine(text: String, args: Any*): String = {
- printf(text, args: _*)
- readLine()
- }
-
- /** Reads a boolean value from an entire line of the default input.
- * Has a fairly liberal interpretation of the input.
- *
- * @return the boolean value read, or false if it couldn't be converted to a boolean
- * @throws java.io.EOFException if the end of the input stream has been reached.
- */
- def readBoolean(): Boolean = {
- val s = readLine()
- if (s == null)
- throw new java.io.EOFException("Console has reached end of input")
- else
- s.toLowerCase() match {
- case "true" => true
- case "t" => true
- case "yes" => true
- case "y" => true
- case _ => false
- }
- }
-
- /** Reads a byte value from an entire line of the default input.
- *
- * @return the Byte that was read
- * @throws java.io.EOFException if the end of the
- * input stream has been reached.
- * @throws java.lang.NumberFormatException if the value couldn't be converted to a Byte
- */
- def readByte(): Byte = {
- val s = readLine()
- if (s == null)
- throw new java.io.EOFException("Console has reached end of input")
- else
- s.toByte
- }
-
- /** Reads a short value from an entire line of the default input.
- *
- * @return the short that was read
- * @throws java.io.EOFException if the end of the
- * input stream has been reached.
- * @throws java.lang.NumberFormatException if the value couldn't be converted to a Short
- */
- def readShort(): Short = {
- val s = readLine()
- if (s == null)
- throw new java.io.EOFException("Console has reached end of input")
- else
- s.toShort
- }
-
- /** Reads a char value from an entire line of the default input.
- *
- * @return the Char that was read
- * @throws java.io.EOFException if the end of the
- * input stream has been reached.
- * @throws java.lang.StringIndexOutOfBoundsException if the line read from default input was empty
- */
- def readChar(): Char = {
- val s = readLine()
- if (s == null)
- throw new java.io.EOFException("Console has reached end of input")
- else
- s charAt 0
- }
-
- /** Reads an int value from an entire line of the default input.
- *
- * @return the Int that was read
- * @throws java.io.EOFException if the end of the
- * input stream has been reached.
- * @throws java.lang.NumberFormatException if the value couldn't be converted to an Int
- */
- def readInt(): Int = {
- val s = readLine()
- if (s == null)
- throw new java.io.EOFException("Console has reached end of input")
- else
- s.toInt
- }
-
- /** Reads an long value from an entire line of the default input.
- *
- * @return the Long that was read
- * @throws java.io.EOFException if the end of the
- * input stream has been reached.
- * @throws java.lang.NumberFormatException if the value couldn't be converted to a Long
- */
- def readLong(): Long = {
- val s = readLine()
- if (s == null)
- throw new java.io.EOFException("Console has reached end of input")
- else
- s.toLong
- }
+private[scala] abstract class DeprecatedConsole {
+ self: Console.type =>
+
+ /** Internal usage only. */
+ protected def setOutDirect(out: PrintStream): Unit
+ protected def setErrDirect(err: PrintStream): Unit
+ protected def setInDirect(in: BufferedReader): Unit
+
+ @deprecated("Use the method in scala.io.ReadStdin", "2.11.0") def readBoolean(): Boolean = ReadStdin.readBoolean()
+ @deprecated("Use the method in scala.io.ReadStdin", "2.11.0") def readByte(): Byte = ReadStdin.readByte()
+ @deprecated("Use the method in scala.io.ReadStdin", "2.11.0") def readChar(): Char = ReadStdin.readChar()
+ @deprecated("Use the method in scala.io.ReadStdin", "2.11.0") def readDouble(): Double = ReadStdin.readDouble()
+ @deprecated("Use the method in scala.io.ReadStdin", "2.11.0") def readFloat(): Float = ReadStdin.readFloat()
+ @deprecated("Use the method in scala.io.ReadStdin", "2.11.0") def readInt(): Int = ReadStdin.readInt()
+ @deprecated("Use the method in scala.io.ReadStdin", "2.11.0") def readLine(): String = ReadStdin.readLine()
+ @deprecated("Use the method in scala.io.ReadStdin", "2.11.0") def readLine(text: String, args: Any*): String = ReadStdin.readLine(text, args: _*)
+ @deprecated("Use the method in scala.io.ReadStdin", "2.11.0") def readLong(): Long = ReadStdin.readLong()
+ @deprecated("Use the method in scala.io.ReadStdin", "2.11.0") def readShort(): Short = ReadStdin.readShort()
+ @deprecated("Use the method in scala.io.ReadStdin", "2.11.0") def readf(format: String): List[Any] = ReadStdin.readf(format)
+ @deprecated("Use the method in scala.io.ReadStdin", "2.11.0") def readf1(format: String): Any = ReadStdin.readf1(format)
+ @deprecated("Use the method in scala.io.ReadStdin", "2.11.0") def readf2(format: String): (Any, Any) = ReadStdin.readf2(format)
+ @deprecated("Use the method in scala.io.ReadStdin", "2.11.0") def readf3(format: String): (Any, Any, Any) = ReadStdin.readf3(format)
- /** Reads a float value from an entire line of the default input.
- * @return the Float that was read.
- * @throws java.io.EOFException if the end of the
- * input stream has been reached.
- * @throws java.lang.NumberFormatException if the value couldn't be converted to a Float
+ /** Sets the default output stream.
*
+ * @param out the new output stream.
*/
- def readFloat(): Float = {
- val s = readLine()
- if (s == null)
- throw new java.io.EOFException("Console has reached end of input")
- else
- s.toFloat
- }
+ @deprecated("Use withOut", "2.11.0") def setOut(out: PrintStream): Unit = setOutDirect(out)
- /** Reads a double value from an entire line of the default input.
+ /** Sets the default output stream.
*
- * @return the Double that was read.
- * @throws java.io.EOFException if the end of the
- * input stream has been reached.
- * @throws java.lang.NumberFormatException if the value couldn't be converted to a Float
+ * @param out the new output stream.
*/
- def readDouble(): Double = {
- val s = readLine()
- if (s == null)
- throw new java.io.EOFException("Console has reached end of input")
- else
- s.toDouble
- }
+ @deprecated("Use withOut", "2.11.0") def setOut(out: OutputStream): Unit = setOutDirect(new PrintStream(out))
- /** Reads in some structured input (from the default input), specified by
- * a format specifier. See class `java.text.MessageFormat` for details of
- * the format specification.
+ /** Sets the default error stream.
*
- * @param format the format of the input.
- * @return a list of all extracted values.
- * @throws java.io.EOFException if the end of the input stream has been
- * reached.
+ * @param err the new error stream.
*/
- def readf(format: String): List[Any] = {
- val s = readLine()
- if (s == null)
- throw new java.io.EOFException("Console has reached end of input")
- else
- textComponents(new MessageFormat(format).parse(s))
- }
+ @deprecated("Use withErr", "2.11.0") def setErr(err: PrintStream): Unit = setErrDirect(err)
- /** Reads in some structured input (from the default input), specified by
- * a format specifier, returning only the first value extracted, according
- * to the format specification.
+ /** Sets the default error stream.
*
- * @param format format string, as accepted by `readf`.
- * @return The first value that was extracted from the input
+ * @param err the new error stream.
*/
- def readf1(format: String): Any = readf(format).head
+ @deprecated("Use withErr", "2.11.0") def setErr(err: OutputStream): Unit = setErrDirect(new PrintStream(err))
- /** Reads in some structured input (from the default input), specified
- * by a format specifier, returning only the first two values extracted,
- * according to the format specification.
+ /** Sets the default input stream.
*
- * @param format format string, as accepted by `readf`.
- * @return A [[scala.Tuple2]] containing the first two values extracted
+ * @param reader specifies the new input stream.
*/
- def readf2(format: String): (Any, Any) = {
- val res = readf(format)
- (res.head, res.tail.head)
- }
+ @deprecated("Use withIn", "2.11.0") def setIn(reader: Reader): Unit = setInDirect(new BufferedReader(reader))
- /** Reads in some structured input (from the default input), specified
- * by a format specifier, returning only the first three values extracted,
- * according to the format specification.
+ /** Sets the default input stream.
*
- * @param format format string, as accepted by `readf`.
- * @return A [[scala.Tuple3]] containing the first three values extracted
+ * @param in the new input stream.
*/
- def readf3(format: String): (Any, Any, Any) = {
- val res = readf(format)
- (res.head, res.tail.head, res.tail.tail.head)
- }
-
- private def textComponents(a: Array[AnyRef]): List[Any] = {
- var i: Int = a.length - 1
- var res: List[Any] = Nil
- while (i >= 0) {
- res = (a(i) match {
- case x: java.lang.Boolean => x.booleanValue()
- case x: java.lang.Byte => x.byteValue()
- case x: java.lang.Short => x.shortValue()
- case x: java.lang.Character => x.charValue()
- case x: java.lang.Integer => x.intValue()
- case x: java.lang.Long => x.longValue()
- case x: java.lang.Float => x.floatValue()
- case x: java.lang.Double => x.doubleValue()
- case x => x
- }) :: res;
- i -= 1
- }
- res
- }
+ @deprecated("Use withIn", "2.11.0") def setIn(in: InputStream): Unit = setInDirect(new BufferedReader(new InputStreamReader(in)))
}
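
Editor's note: the rewrite above splits Console three ways: the ANSI constants now come from scala.io.AnsiColor, the read* methods are deprecated in favour of scala.io.ReadStdin, and setOut/setErr/setIn give way to the scoped with* methods. A usage sketch assuming a 2.11 library; the ReadStdin call is left commented out because it needs an interactive terminal:

    import java.io.ByteArrayOutputStream

    object ConsoleSketch {
      def main(args: Array[String]): Unit = {
        // Scoped redirection instead of the deprecated Console.setOut:
        val buffer = new ByteArrayOutputStream()
        Console.withOut(buffer) {
          println(Console.GREEN + "captured" + Console.RESET)   // GREEN/RESET now come via AnsiColor
        }
        print(buffer.toString)

        // Reading now lives in scala.io.ReadStdin, as the deprecation messages above say:
        // val name = scala.io.ReadStdin.readLine("name? ")
        // println("hello, " + name)
      }
    }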
diff --git a/src/library/scala/Double.scala b/src/library/scala/Double.scala
index f058d7c26b..ce081bbec1 100644
--- a/src/library/scala/Double.scala
+++ b/src/library/scala/Double.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -382,6 +382,8 @@ object Double extends AnyValCompanion {
/** Transform a value type into a boxed reference type.
*
+ * Runtime implementation determined by `scala.runtime.BoxesRunTime.boxToDouble`. See [[https://github.com/scala/scala src/library/scala/runtime/BoxesRunTime.java]].
+ *
* @param x the Double to be boxed
* @return a java.lang.Double offering `x` as its underlying value.
*/
@@ -391,6 +393,8 @@ object Double extends AnyValCompanion {
* method is not typesafe: it accepts any Object, but will throw
* an exception if the argument is not a java.lang.Double.
*
+ * Runtime implementation determined by `scala.runtime.BoxesRunTime.unboxToDouble`. See [[https://github.com/scala/scala src/library/scala/runtime/BoxesRunTime.java]].
+ *
* @param x the java.lang.Double to be unboxed.
* @throws ClassCastException if the argument is not a java.lang.Double
* @return the Double resulting from calling doubleValue() on `x`
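
The new scaladoc above records that boxing and unboxing bottom out in scala.runtime.BoxesRunTime. A small round-trip sketch through the companion's box/unbox entry points; the literal is arbitrary. Float, Int, Long and Short gain the same note further down, and the pattern is identical.

    object BoxingRoundTrip {
      def main(args: Array[String]): Unit = {
        val boxed: java.lang.Double = Double.box(3.14)    // BoxesRunTime.boxToDouble under the hood
        val unboxed: Double         = Double.unbox(boxed) // BoxesRunTime.unboxToDouble under the hood
        assert(unboxed == 3.14)
        // Unboxing anything other than a java.lang.Double throws ClassCastException,
        // as the @throws clause above states.
      }
    }
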
diff --git a/src/library/scala/Enumeration.scala b/src/library/scala/Enumeration.scala
index 47d7840e27..59be0cdfa3 100644
--- a/src/library/scala/Enumeration.scala
+++ b/src/library/scala/Enumeration.scala
@@ -56,14 +56,6 @@ abstract class Enumeration (initial: Int) extends Serializable {
def this() = this(0)
- @deprecated("Names should be specified individually or discovered via reflection", "2.10.0")
- def this(initial: Int, names: String*) = {
- this(initial)
- this.nextName = names.iterator
- }
- @deprecated("Names should be specified individually or discovered via reflection", "2.10.0")
- def this(names: String*) = this(0, names: _*)
-
/* Note that `readResolve` cannot be private, since otherwise
the JVM does not invoke it when deserializing subclasses. */
protected def readResolve(): AnyRef = thisenum.getClass.getField(MODULE_INSTANCE_NAME).get(null)
@@ -71,7 +63,7 @@ abstract class Enumeration (initial: Int) extends Serializable {
/** The name of this enumeration.
*/
override def toString =
- ((getClass.getName stripSuffix MODULE_SUFFIX_STRING split '.').last split
+ ((getClass.getName stripSuffix MODULE_SUFFIX_STRING split '.').last split
Pattern.quote(NAME_JOIN_STRING)).last
/** The mapping from the integer used to identify values to the actual
@@ -103,7 +95,7 @@ abstract class Enumeration (initial: Int) extends Serializable {
protected var nextName: Iterator[String] = _
private def nextNameOrNull =
- if (nextName != null && nextName.hasNext) nextName.next else null
+ if (nextName != null && nextName.hasNext) nextName.next() else null
/** The highest integer amongst those used to identify values in this
* enumeration. */
@@ -126,7 +118,7 @@ abstract class Enumeration (initial: Int) extends Serializable {
*
* @param s an `Enumeration` name
* @return the `Value` of this `Enumeration` if its name matches `s`
- * @throws java.util.NoSuchElementException if no `Value` with a matching
+ * @throws NoSuchElementException if no `Value` with a matching
* name is in this `Enumeration`
*/
final def withName(s: String): Value = values.find(_.toString == s).get
@@ -262,7 +254,8 @@ abstract class Enumeration (initial: Int) extends Serializable {
def contains(v: Value) = nnIds contains (v.id - bottomId)
def + (value: Value) = new ValueSet(nnIds + (value.id - bottomId))
def - (value: Value) = new ValueSet(nnIds - (value.id - bottomId))
- def iterator = nnIds.iterator map (id => thisenum.apply(id + bottomId))
+ def iterator = nnIds.iterator map (id => thisenum.apply(bottomId + id))
+ override def keysIteratorFrom(start: Value) = nnIds keysIteratorFrom start.id map (id => thisenum.apply(bottomId + id))
override def stringPrefix = thisenum + ".ValueSet"
/** Creates a bit mask for the zero-adjusted ids in this set as a
* new array of longs */
@@ -284,7 +277,7 @@ abstract class Enumeration (initial: Int) extends Serializable {
def newBuilder: mutable.Builder[Value, ValueSet] = new mutable.Builder[Value, ValueSet] {
private[this] val b = new mutable.BitSet
def += (x: Value) = { b += (x.id - bottomId); this }
- def clear() = b.clear
+ def clear() = b.clear()
def result() = new ValueSet(b.toImmutable)
}
/** The implicit builder for value sets */
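
The Enumeration hunks above drop the deprecated string-list constructors, adjust the documented withName exception, and add keysIteratorFrom to ValueSet. A small sketch exercising withName and the new iterator; the WeekDay enumeration is illustrative.

    object WeekDay extends Enumeration {
      val Mon, Tue, Wed, Thu, Fri, Sat, Sun = Value
    }

    object EnumerationDemo {
      def main(args: Array[String]): Unit = {
        // withName throws NoSuchElementException when no value matches the string.
        val wed = WeekDay.withName("Wed")

        // keysIteratorFrom starts the traversal at the given value's id.
        println(WeekDay.values.keysIteratorFrom(wed).toList) // List(Wed, Thu, Fri, Sat, Sun)
      }
    }
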
diff --git a/src/library/scala/Float.scala b/src/library/scala/Float.scala
index d942acec23..4ff2d509b8 100644
--- a/src/library/scala/Float.scala
+++ b/src/library/scala/Float.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -382,6 +382,8 @@ object Float extends AnyValCompanion {
/** Transform a value type into a boxed reference type.
*
+ * Runtime implementation determined by `scala.runtime.BoxesRunTime.boxToFloat`. See [[https://github.com/scala/scala src/library/scala/runtime/BoxesRunTime.java]].
+ *
* @param x the Float to be boxed
* @return a java.lang.Float offering `x` as its underlying value.
*/
@@ -391,6 +393,8 @@ object Float extends AnyValCompanion {
* method is not typesafe: it accepts any Object, but will throw
* an exception if the argument is not a java.lang.Float.
*
+ * Runtime implementation determined by `scala.runtime.BoxesRunTime.unboxToFloat`. See [[https://github.com/scala/scala src/library/scala/runtime/BoxesRunTime.java]].
+ *
* @param x the java.lang.Float to be unboxed.
* @throws ClassCastException if the argument is not a java.lang.Float
* @return the Float resulting from calling floatValue() on `x`
diff --git a/src/library/scala/Function0.scala b/src/library/scala/Function0.scala
index 5f87b38057..54cba021e0 100644
--- a/src/library/scala/Function0.scala
+++ b/src/library/scala/Function0.scala
@@ -1,12 +1,12 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
// GENERATED CODE: DO NOT EDIT.
-// genprod generated these sources at: Tue Aug 07 11:54:44 CEST 2012
+// genprod generated these sources at: Sun Mar 24 14:14:12 CET 2013
package scala
diff --git a/src/library/scala/Function1.scala b/src/library/scala/Function1.scala
index 22393c65dd..2e3de54c5a 100644
--- a/src/library/scala/Function1.scala
+++ b/src/library/scala/Function1.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/Function10.scala b/src/library/scala/Function10.scala
index f7e5d414f2..7789970a44 100644
--- a/src/library/scala/Function10.scala
+++ b/src/library/scala/Function10.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/Function11.scala b/src/library/scala/Function11.scala
index 53742bf733..d4276f3fd1 100644
--- a/src/library/scala/Function11.scala
+++ b/src/library/scala/Function11.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/Function12.scala b/src/library/scala/Function12.scala
index e349d9017d..dfa8bcfce6 100644
--- a/src/library/scala/Function12.scala
+++ b/src/library/scala/Function12.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/Function13.scala b/src/library/scala/Function13.scala
index 10ec64b87a..5404c208bf 100644
--- a/src/library/scala/Function13.scala
+++ b/src/library/scala/Function13.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/Function14.scala b/src/library/scala/Function14.scala
index 82dd409223..3145290bcf 100644
--- a/src/library/scala/Function14.scala
+++ b/src/library/scala/Function14.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/Function15.scala b/src/library/scala/Function15.scala
index be5fbeeca1..309ef53e71 100644
--- a/src/library/scala/Function15.scala
+++ b/src/library/scala/Function15.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/Function16.scala b/src/library/scala/Function16.scala
index 7a185b369c..c4cb107e87 100644
--- a/src/library/scala/Function16.scala
+++ b/src/library/scala/Function16.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/Function17.scala b/src/library/scala/Function17.scala
index 94e0000802..005ae2ab79 100644
--- a/src/library/scala/Function17.scala
+++ b/src/library/scala/Function17.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/Function18.scala b/src/library/scala/Function18.scala
index a3ee6776ba..371630dae3 100644
--- a/src/library/scala/Function18.scala
+++ b/src/library/scala/Function18.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/Function19.scala b/src/library/scala/Function19.scala
index 038dcbb778..95c60a467e 100644
--- a/src/library/scala/Function19.scala
+++ b/src/library/scala/Function19.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/Function2.scala b/src/library/scala/Function2.scala
index 0794a4048a..5690adb56a 100644
--- a/src/library/scala/Function2.scala
+++ b/src/library/scala/Function2.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/Function20.scala b/src/library/scala/Function20.scala
index 727684d6d5..a93f999d44 100644
--- a/src/library/scala/Function20.scala
+++ b/src/library/scala/Function20.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/Function21.scala b/src/library/scala/Function21.scala
index 2441278be8..7ebbb06798 100644
--- a/src/library/scala/Function21.scala
+++ b/src/library/scala/Function21.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/Function22.scala b/src/library/scala/Function22.scala
index 1f70b190a6..e5a3d83fb9 100644
--- a/src/library/scala/Function22.scala
+++ b/src/library/scala/Function22.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/Function3.scala b/src/library/scala/Function3.scala
index bbbde82056..850290d244 100644
--- a/src/library/scala/Function3.scala
+++ b/src/library/scala/Function3.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/Function4.scala b/src/library/scala/Function4.scala
index f100860a97..c9ac6df32e 100644
--- a/src/library/scala/Function4.scala
+++ b/src/library/scala/Function4.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/Function5.scala b/src/library/scala/Function5.scala
index cba9b6ce52..360a460306 100644
--- a/src/library/scala/Function5.scala
+++ b/src/library/scala/Function5.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/Function6.scala b/src/library/scala/Function6.scala
index 0b8addf7de..d30877e765 100644
--- a/src/library/scala/Function6.scala
+++ b/src/library/scala/Function6.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/Function7.scala b/src/library/scala/Function7.scala
index 2098658fa9..b19caf2b50 100644
--- a/src/library/scala/Function7.scala
+++ b/src/library/scala/Function7.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/Function8.scala b/src/library/scala/Function8.scala
index 08a480dce5..3aff0b034c 100644
--- a/src/library/scala/Function8.scala
+++ b/src/library/scala/Function8.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/Function9.scala b/src/library/scala/Function9.scala
index 2e35f7949c..f80ccf48f9 100644
--- a/src/library/scala/Function9.scala
+++ b/src/library/scala/Function9.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/Int.scala b/src/library/scala/Int.scala
index ae36413469..6a27195b10 100644
--- a/src/library/scala/Int.scala
+++ b/src/library/scala/Int.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -606,6 +606,8 @@ object Int extends AnyValCompanion {
/** Transform a value type into a boxed reference type.
*
+ * Runtime implementation determined by `scala.runtime.BoxesRunTime.boxToInteger`. See [[https://github.com/scala/scala src/library/scala/runtime/BoxesRunTime.java]].
+ *
* @param x the Int to be boxed
* @return a java.lang.Integer offering `x` as its underlying value.
*/
@@ -615,6 +617,8 @@ object Int extends AnyValCompanion {
* method is not typesafe: it accepts any Object, but will throw
* an exception if the argument is not a java.lang.Integer.
*
+ * Runtime implementation determined by `scala.runtime.BoxesRunTime.unboxToInt`. See [[https://github.com/scala/scala src/library/scala/runtime/BoxesRunTime.java]].
+ *
* @param x the java.lang.Integer to be unboxed.
* @throws ClassCastException if the argument is not a java.lang.Integer
* @return the Int resulting from calling intValue() on `x`
diff --git a/src/library/scala/Long.scala b/src/library/scala/Long.scala
index 4ee9383c2a..4d369ae010 100644
--- a/src/library/scala/Long.scala
+++ b/src/library/scala/Long.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -606,6 +606,8 @@ object Long extends AnyValCompanion {
/** Transform a value type into a boxed reference type.
*
+ * Runtime implementation determined by `scala.runtime.BoxesRunTime.boxToLong`. See [[https://github.com/scala/scala src/library/scala/runtime/BoxesRunTime.java]].
+ *
* @param x the Long to be boxed
* @return a java.lang.Long offering `x` as its underlying value.
*/
@@ -615,6 +617,8 @@ object Long extends AnyValCompanion {
* method is not typesafe: it accepts any Object, but will throw
* an exception if the argument is not a java.lang.Long.
*
+ * Runtime implementation determined by `scala.runtime.BoxesRunTime.unboxToLong`. See [[https://github.com/scala/scala src/library/scala/runtime/BoxesRunTime.java]].
+ *
* @param x the java.lang.Long to be unboxed.
* @throws ClassCastException if the argument is not a java.lang.Long
* @return the Long resulting from calling longValue() on `x`
diff --git a/src/library/scala/LowPriorityImplicits.scala b/src/library/scala/LowPriorityImplicits.scala
deleted file mode 100644
index bf6e494c11..0000000000
--- a/src/library/scala/LowPriorityImplicits.scala
+++ /dev/null
@@ -1,95 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala
-
-import scala.collection.{ mutable, immutable, generic }
-import mutable.WrappedArray
-import immutable.WrappedString
-import generic.CanBuildFrom
-import scala.language.implicitConversions
-
-/** The `LowPriorityImplicits` class provides implicit values that
- * are valid in all Scala compilation units without explicit qualification,
- * but that are partially overridden by higher-priority conversions in object
- * `Predef`.
- *
- * @author Martin Odersky
- * @since 2.8
- */
-class LowPriorityImplicits {
- /** We prefer the java.lang.* boxed types to these wrappers in
- * any potential conflicts. Conflicts do exist because the wrappers
- * need to implement ScalaNumber in order to have a symmetric equals
- * method, but that implies implementing java.lang.Number as well.
- *
- * Note - these are inlined because they are value classes, but
- * the call to xxxWrapper is not eliminated even though it does nothing.
- * Even inlined, every call site does a no-op retrieval of Predef's MODULE$
- * because maybe loading Predef has side effects!
- */
- @inline implicit def byteWrapper(x: Byte) = new runtime.RichByte(x)
- @inline implicit def shortWrapper(x: Short) = new runtime.RichShort(x)
- @inline implicit def intWrapper(x: Int) = new runtime.RichInt(x)
- @inline implicit def charWrapper(c: Char) = new runtime.RichChar(c)
- @inline implicit def longWrapper(x: Long) = new runtime.RichLong(x)
- @inline implicit def floatWrapper(x: Float) = new runtime.RichFloat(x)
- @inline implicit def doubleWrapper(x: Double) = new runtime.RichDouble(x)
- @inline implicit def booleanWrapper(x: Boolean) = new runtime.RichBoolean(x)
-
- // These eight implicits exist solely to exclude Null from the domain of
- // the boxed types, so that e.g. "var x: Int = null" is a compile time
- // error rather than a delayed null pointer exception by way of the
- // conversion from java.lang.Integer. If defined in the same file as
- // Integer2int, they would have higher priority because Null is a subtype
- // of Integer. We balance that out and create conflict by moving the
- // definition into the superclass.
- //
- // Caution: do not adjust tightrope tension without safety goggles in place.
- implicit def Byte2byteNullConflict(x: Null): Byte = sys.error("value error")
- implicit def Short2shortNullConflict(x: Null): Short = sys.error("value error")
- implicit def Character2charNullConflict(x: Null): Char = sys.error("value error")
- implicit def Integer2intNullConflict(x: Null): Int = sys.error("value error")
- implicit def Long2longNullConflict(x: Null): Long = sys.error("value error")
- implicit def Float2floatNullConflict(x: Null): Float = sys.error("value error")
- implicit def Double2doubleNullConflict(x: Null): Double = sys.error("value error")
- implicit def Boolean2booleanNullConflict(x: Null): Boolean = sys.error("value error")
-
- implicit def genericWrapArray[T](xs: Array[T]): WrappedArray[T] =
- if (xs eq null) null
- else WrappedArray.make(xs)
-
- // Since the JVM thinks arrays are covariant, one 0-length Array[AnyRef]
- // is as good as another for all T <: AnyRef. Instead of creating 100,000,000
- // unique ones by way of this implicit, let's share one.
- implicit def wrapRefArray[T <: AnyRef](xs: Array[T]): WrappedArray[T] = {
- if (xs eq null) null
- else if (xs.length == 0) WrappedArray.empty[T]
- else new WrappedArray.ofRef[T](xs)
- }
-
- implicit def wrapIntArray(xs: Array[Int]): WrappedArray[Int] = if (xs ne null) new WrappedArray.ofInt(xs) else null
- implicit def wrapDoubleArray(xs: Array[Double]): WrappedArray[Double] = if (xs ne null) new WrappedArray.ofDouble(xs) else null
- implicit def wrapLongArray(xs: Array[Long]): WrappedArray[Long] = if (xs ne null) new WrappedArray.ofLong(xs) else null
- implicit def wrapFloatArray(xs: Array[Float]): WrappedArray[Float] = if (xs ne null) new WrappedArray.ofFloat(xs) else null
- implicit def wrapCharArray(xs: Array[Char]): WrappedArray[Char] = if (xs ne null) new WrappedArray.ofChar(xs) else null
- implicit def wrapByteArray(xs: Array[Byte]): WrappedArray[Byte] = if (xs ne null) new WrappedArray.ofByte(xs) else null
- implicit def wrapShortArray(xs: Array[Short]): WrappedArray[Short] = if (xs ne null) new WrappedArray.ofShort(xs) else null
- implicit def wrapBooleanArray(xs: Array[Boolean]): WrappedArray[Boolean] = if (xs ne null) new WrappedArray.ofBoolean(xs) else null
- implicit def wrapUnitArray(xs: Array[Unit]): WrappedArray[Unit] = if (xs ne null) new WrappedArray.ofUnit(xs) else null
-
- implicit def wrapString(s: String): WrappedString = if (s ne null) new WrappedString(s) else null
- implicit def unwrapString(ws: WrappedString): String = if (ws ne null) ws.self else null
-
- implicit def fallbackStringCanBuildFrom[T]: CanBuildFrom[String, T, immutable.IndexedSeq[T]] =
- new CanBuildFrom[String, T, immutable.IndexedSeq[T]] {
- def apply(from: String) = immutable.IndexedSeq.newBuilder[T]
- def apply() = immutable.IndexedSeq.newBuilder[T]
- }
-}
-
diff --git a/src/library/scala/NotNull.scala b/src/library/scala/NotNull.scala
index f87416b49d..3cbe9ed4ac 100644
--- a/src/library/scala/NotNull.scala
+++ b/src/library/scala/NotNull.scala
@@ -12,4 +12,6 @@ package scala
* A marker trait for things that are not allowed to be null
* @since 2.5
*/
+
+@deprecated("This trait will be removed", "2.11.0")
trait NotNull extends Any {}
diff --git a/src/library/scala/Option.scala b/src/library/scala/Option.scala
index 3873df99e9..4b071166c7 100644
--- a/src/library/scala/Option.scala
+++ b/src/library/scala/Option.scala
@@ -209,6 +209,15 @@ sealed abstract class Option[+A] extends Product with Serializable {
def withFilter(q: A => Boolean): WithFilter = new WithFilter(x => p(x) && q(x))
}
+ /** Tests whether the option contains a given value as an element.
+ *
+ * @param elem the element to test.
+ * @return `true` if the option has an element that is equal (as
+ * determined by `==`) to `elem`, `false` otherwise.
+ */
+ final def contains[A1 >: A](elem: A1): Boolean =
+ !isEmpty && this.get == elem
+
/** Returns true if this option is nonempty '''and''' the predicate
* $p returns true when applied to this $option's value.
* Otherwise, returns false.
@@ -247,7 +256,7 @@ sealed abstract class Option[+A] extends Product with Serializable {
* value (if possible), or $none.
*/
@inline final def collect[B](pf: PartialFunction[A, B]): Option[B] =
- if (!isEmpty && pf.isDefinedAt(this.get)) Some(pf(this.get)) else None
+ if (!isEmpty) pf.lift(this.get) else None
/** Returns this $option if it is nonempty,
* otherwise return the result of evaluating `alternative`.
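
The Option hunk above adds contains and reroutes collect through PartialFunction#lift, so the partial function is consulted once per value rather than via a separate isDefinedAt/apply pair. A small sketch of both; the values are arbitrary.

    object OptionDemo {
      def main(args: Array[String]): Unit = {
        val some: Option[Int] = Some(2)
        val none: Option[Int] = None

        // contains: equality (==) against the wrapped value, always false on None.
        assert(some.contains(2))
        assert(!none.contains(2))

        // collect: defined cases map the value, undefined cases yield None.
        assert(some.collect { case n if n % 2 == 0 => n / 2 } == Some(1))
        assert(none.collect { case n => n } == None)
      }
    }
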
diff --git a/src/library/scala/Predef.scala b/src/library/scala/Predef.scala
index 9bb57877d9..569157de20 100644
--- a/src/library/scala/Predef.scala
+++ b/src/library/scala/Predef.scala
@@ -15,6 +15,7 @@ import generic.CanBuildFrom
import scala.annotation.{ elidable, implicitNotFound }
import scala.annotation.elidable.ASSERTION
import scala.language.{implicitConversions, existentials}
+import scala.io.ReadStdin
/** The `Predef` object provides definitions that are accessible in all Scala
* compilation units without explicit qualification.
@@ -68,7 +69,7 @@ import scala.language.{implicitConversions, existentials}
* Short value to a Long value as required, and to add additional higher-order
* functions to Array values. These are described in more detail in the documentation of [[scala.Array]].
*/
-object Predef extends LowPriorityImplicits {
+object Predef extends LowPriorityImplicits with DeprecatedPredef {
/**
* Retrieve the runtime representation of a class type. `classOf[T]` is equivalent to
* the class literal `T.class` in Java.
@@ -101,19 +102,19 @@ object Predef extends LowPriorityImplicits {
// Manifest types, companions, and incantations for summoning
@annotation.implicitNotFound(msg = "No ClassManifest available for ${T}.")
- @deprecated("Use scala.reflect.ClassTag instead", "2.10.0")
+ @deprecated("Use `scala.reflect.ClassTag` instead", "2.10.0")
type ClassManifest[T] = scala.reflect.ClassManifest[T]
// TODO undeprecated until Scala reflection becomes non-experimental
// @deprecated("This notion doesn't have a corresponding concept in 2.10, because scala.reflect.runtime.universe.TypeTag can capture arbitrary types. Use type tags instead of manifests, and there will be no need in opt manifests.", "2.10.0")
type OptManifest[T] = scala.reflect.OptManifest[T]
@annotation.implicitNotFound(msg = "No Manifest available for ${T}.")
// TODO undeprecated until Scala reflection becomes non-experimental
- // @deprecated("Use scala.reflect.ClassTag (to capture erasures) or scala.reflect.runtime.universe.TypeTag (to capture types) or both instead", "2.10.0")
+ // @deprecated("Use `scala.reflect.ClassTag` (to capture erasures) or scala.reflect.runtime.universe.TypeTag (to capture types) or both instead", "2.10.0")
type Manifest[T] = scala.reflect.Manifest[T]
- @deprecated("Use scala.reflect.ClassTag instead", "2.10.0")
+ @deprecated("Use `scala.reflect.ClassTag` instead", "2.10.0")
val ClassManifest = scala.reflect.ClassManifest
// TODO undeprecated until Scala reflection becomes non-experimental
- // @deprecated("Use scala.reflect.ClassTag (to capture erasures) or scala.reflect.runtime.universe.TypeTag (to capture types) or both instead", "2.10.0")
+ // @deprecated("Use `scala.reflect.ClassTag` (to capture erasures) or scala.reflect.runtime.universe.TypeTag (to capture types) or both instead", "2.10.0")
val Manifest = scala.reflect.Manifest
// TODO undeprecated until Scala reflection becomes non-experimental
// @deprecated("This notion doesn't have a corresponding concept in 2.10, because scala.reflect.runtime.universe.TypeTag can capture arbitrary types. Use type tags instead of manifests, and there will be no need in opt manifests.", "2.10.0")
@@ -136,22 +137,14 @@ object Predef extends LowPriorityImplicits {
// Apparently needed for the xml library
val $scope = scala.xml.TopScope
- // Deprecated
+ // errors and asserts -------------------------------------------------
+ // !!! Remove this when possible - ideally for 2.11.
+ // We are stuck with it a while longer because sbt's compiler interface
+ // still calls it as of 0.12.2.
@deprecated("Use `sys.error(message)` instead", "2.9.0")
def error(message: String): Nothing = sys.error(message)
- @deprecated("Use `sys.exit()` instead", "2.9.0")
- def exit(): Nothing = sys.exit()
-
- @deprecated("Use `sys.exit(status)` instead", "2.9.0")
- def exit(status: Int): Nothing = sys.exit(status)
-
- @deprecated("Use `formatString.format(args: _*)` or `arg.formatted(formatString)` instead", "2.9.0")
- def format(text: String, xs: Any*) = augmentString(text).format(xs: _*)
-
- // errors and asserts -------------------------------------------------
-
/** Tests an expression, throwing an `AssertionError` if false.
* Calls to this method will not be generated if `-Xelide-below`
* is at least `ASSERTION`.
@@ -233,19 +226,6 @@ object Predef extends LowPriorityImplicits {
throw new IllegalArgumentException("requirement failed: "+ message)
}
- final class Ensuring[A](val __resultOfEnsuring: A) extends AnyVal {
- // `__resultOfEnsuring` must be a public val to allow inlining.
- // See comments in ArrowAssoc for more.
- @deprecated("Use `__resultOfEnsuring` instead", "2.10.0")
- def x = __resultOfEnsuring
-
- def ensuring(cond: Boolean): A = { assert(cond); __resultOfEnsuring }
- def ensuring(cond: Boolean, msg: => Any): A = { assert(cond, msg); __resultOfEnsuring }
- def ensuring(cond: A => Boolean): A = { assert(cond(__resultOfEnsuring)); __resultOfEnsuring }
- def ensuring(cond: A => Boolean, msg: => Any): A = { assert(cond(__resultOfEnsuring), msg); __resultOfEnsuring }
- }
- @inline implicit def any2Ensuring[A](x: A): Ensuring[A] = new Ensuring(x)
-
/** `???` can be used for marking methods that remain to be implemented.
* @throws A `NotImplementedError`
*/
@@ -265,20 +245,58 @@ object Predef extends LowPriorityImplicits {
def unapply[A, B, C](x: Tuple3[A, B, C]): Option[Tuple3[A, B, C]] = Some(x)
}
- final class ArrowAssoc[A](val __leftOfArrow: A) extends AnyVal {
- // `__leftOfArrow` must be a public val to allow inlining. The val
- // used to be called `x`, but now goes by `__leftOfArrow`, as that
- // reduces the chances of a user's writing `foo.__leftOfArrow` and
- // being confused why they get an ambiguous implicit conversion
- // error. (`foo.x` used to produce this error since both
- // any2Ensuring and any2ArrowAssoc pimped an `x` onto everything)
- @deprecated("Use `__leftOfArrow` instead", "2.10.0")
- def x = __leftOfArrow
+ // implicit classes -----------------------------------------------------
+ implicit final class ArrowAssoc[A](val __leftOfArrow: A) extends AnyVal {
@inline def -> [B](y: B): Tuple2[A, B] = Tuple2(__leftOfArrow, y)
def →[B](y: B): Tuple2[A, B] = ->(y)
}
- @inline implicit def any2ArrowAssoc[A](x: A): ArrowAssoc[A] = new ArrowAssoc(x)
+
+ implicit final class Ensuring[A](val __resultOfEnsuring: A) extends AnyVal {
+ def ensuring(cond: Boolean): A = { assert(cond); __resultOfEnsuring }
+ def ensuring(cond: Boolean, msg: => Any): A = { assert(cond, msg); __resultOfEnsuring }
+ def ensuring(cond: A => Boolean): A = { assert(cond(__resultOfEnsuring)); __resultOfEnsuring }
+ def ensuring(cond: A => Boolean, msg: => Any): A = { assert(cond(__resultOfEnsuring), msg); __resultOfEnsuring }
+ }
+
+ implicit final class StringFormat[A](val __stringToFormat: A) extends AnyVal {
+ /** Returns string formatted according to given `format` string.
+ * Format strings are as for `String.format`
+ * (@see java.lang.String.format).
+ */
+ @inline def formatted(fmtstr: String): String = fmtstr format __stringToFormat
+ }
+
+ implicit final class StringAdd[A](val __thingToAdd: A) extends AnyVal {
+ def +(other: String) = String.valueOf(__thingToAdd) + other
+ }
+
+ implicit final class RichException(val __throwableToEnrich: Throwable) extends AnyVal {
+ import scala.compat.Platform.EOL
+ @deprecated("Use Throwable#getStackTrace", "2.11.0") def getStackTraceString = __throwableToEnrich.getStackTrace().mkString("", EOL, EOL)
+ }
+
+ implicit final class SeqCharSequence(val __sequenceOfChars: scala.collection.IndexedSeq[Char]) extends CharSequence {
+ def length: Int = __sequenceOfChars.length
+ def charAt(index: Int): Char = __sequenceOfChars(index)
+ def subSequence(start: Int, end: Int): CharSequence = new SeqCharSequence(__sequenceOfChars.slice(start, end))
+ override def toString = __sequenceOfChars mkString ""
+ }
+
+ implicit final class ArrayCharSequence(val __arrayOfChars: Array[Char]) extends CharSequence {
+ def length: Int = __arrayOfChars.length
+ def charAt(index: Int): Char = __arrayOfChars(index)
+ def subSequence(start: Int, end: Int): CharSequence = new runtime.ArrayCharSequence(__arrayOfChars, start, end)
+ override def toString = __arrayOfChars mkString ""
+ }
+
+ implicit val StringCanBuildFrom: CanBuildFrom[String, Char, String] = new CanBuildFrom[String, Char, String] {
+ def apply(from: String) = apply()
+ def apply() = mutable.StringBuilder.newBuilder
+ }
+
+ @inline implicit def augmentString(x: String): StringOps = new StringOps(x)
+ @inline implicit def unaugmentString(x: StringOps): String = x.repr
// printing and reading -----------------------------------------------
@@ -287,28 +305,10 @@ object Predef extends LowPriorityImplicits {
def println(x: Any) = Console.println(x)
def printf(text: String, xs: Any*) = Console.print(text.format(xs: _*))
- def readLine(): String = Console.readLine()
- def readLine(text: String, args: Any*) = Console.readLine(text, args: _*)
- def readBoolean() = Console.readBoolean()
- def readByte() = Console.readByte()
- def readShort() = Console.readShort()
- def readChar() = Console.readChar()
- def readInt() = Console.readInt()
- def readLong() = Console.readLong()
- def readFloat() = Console.readFloat()
- def readDouble() = Console.readDouble()
- def readf(format: String) = Console.readf(format)
- def readf1(format: String) = Console.readf1(format)
- def readf2(format: String) = Console.readf2(format)
- def readf3(format: String) = Console.readf3(format)
-
// views --------------------------------------------------------------
- implicit def exceptionWrapper(exc: Throwable) = new runtime.RichException(exc)
implicit def tuple2ToZippedOps[T1, T2](x: (T1, T2)) = new runtime.Tuple2Zipped.Ops(x)
implicit def tuple3ToZippedOps[T1, T2, T3](x: (T1, T2, T3)) = new runtime.Tuple3Zipped.Ops(x)
- implicit def seqToCharSequence(xs: scala.collection.IndexedSeq[Char]): CharSequence = new runtime.SeqCharSequence(xs)
- implicit def arrayToCharSequence(xs: Array[Char]): CharSequence = new runtime.ArrayCharSequence(xs, 0, xs.length)
implicit def genericArrayOps[T](xs: Array[T]): ArrayOps[T] = (xs match {
case x: Array[AnyRef] => refArrayOps[AnyRef](x)
@@ -335,33 +335,6 @@ object Predef extends LowPriorityImplicits {
implicit def shortArrayOps(xs: Array[Short]): ArrayOps[Short] = new ArrayOps.ofShort(xs)
implicit def unitArrayOps(xs: Array[Unit]): ArrayOps[Unit] = new ArrayOps.ofUnit(xs)
- // Primitive Widenings --------------------------------------------------------------
-
- @deprecated("Use `.toShort` for explicit conversion and `Byte.byte2short` for implicit conversion", "2.10.0") def byte2short(x: Byte): Short = x.toShort
- @deprecated("Use `.toInt` for explicit conversion and `Byte.byte2int` for implicit conversion", "2.10.0") def byte2int(x: Byte): Int = x.toInt
- @deprecated("Use `.toLong` for explicit conversion and `Byte.byte2long for implicit conversion", "2.10.0") def byte2long(x: Byte): Long = x.toLong
- @deprecated("Use `.toFloat` for explicit conversion and `Byte.byte2float` for implicit conversion", "2.10.0") def byte2float(x: Byte): Float = x.toFloat
- @deprecated("Use `.toDouble` for explicit conversion and `Byte.byte2double` for implicit conversion", "2.10.0") def byte2double(x: Byte): Double = x.toDouble
-
- @deprecated("Use `.toInt` for explicit conversion and `Short.short2int` for implicit conversion", "2.10.0") def short2int(x: Short): Int = x.toInt
- @deprecated("Use `.toLong` for explicit conversion and `Short.short2long` for implicit conversion", "2.10.0") def short2long(x: Short): Long = x.toLong
- @deprecated("Use `.toFloat` for explicit conversion and `Short.short2float` for implicit conversion", "2.10.0") def short2float(x: Short): Float = x.toFloat
- @deprecated("Use `.toDouble` for explicit conversion and `Short.short2double` for implicit conversion", "2.10.0") def short2double(x: Short): Double = x.toDouble
-
- @deprecated("Use `.toInt` for explicit conversion and `Char.char2int` for implicit conversion", "2.10.0") def char2int(x: Char): Int = x.toInt
- @deprecated("Use `.toLong` for explicit conversion and `Char.char2long` for implicit conversion", "2.10.0") def char2long(x: Char): Long = x.toLong
- @deprecated("Use `.toFloat` for explicit conversion and `Char.char2float` for implicit conversion", "2.10.0") def char2float(x: Char): Float = x.toFloat
- @deprecated("Use `.toDouble` for explicit conversion and `Char.char2double` for implicit conversion", "2.10.0") def char2double(x: Char): Double = x.toDouble
-
- @deprecated("Use `.toLong` for explicit conversion and `Int.int2long` for implicit conversion", "2.10.0") def int2long(x: Int): Long = x.toLong
- @deprecated("Use `.toFloat` for explicit conversion and `Int.int2float` for implicit conversion", "2.10.0") def int2float(x: Int): Float = x.toFloat
- @deprecated("Use `.toDouble` for explicit conversion and `Int.int2double` for implicit conversion", "2.10.0") def int2double(x: Int): Double = x.toDouble
-
- @deprecated("Use `.toFloat` for explicit conversion and `Long.long2float` for implicit conversion", "2.10.0") def long2float(x: Long): Float = x.toFloat
- @deprecated("Use `.toDouble` for explicit conversion and `Long.long2double` for implicit conversion", "2.10.0") def long2double(x: Long): Double = x.toDouble
-
- @deprecated("Use `.toDouble` for explicit conversion and `Float.float2double` for implicit conversion", "2.10.0") def float2double(x: Float): Double = x.toDouble
-
// "Autoboxing" and "Autounboxing" ---------------------------------------------------
implicit def byte2Byte(x: Byte) = java.lang.Byte.valueOf(x)
@@ -395,21 +368,6 @@ object Predef extends LowPriorityImplicits {
implicit def Double2double(x: java.lang.Double): Double = x.doubleValue
implicit def Boolean2boolean(x: java.lang.Boolean): Boolean = x.booleanValue
- // Strings and CharSequences --------------------------------------------------------------
-
- @inline implicit def any2stringfmt(x: Any) = new runtime.StringFormat(x)
- @inline implicit def augmentString(x: String): StringOps = new StringOps(x)
- implicit def any2stringadd(x: Any) = new runtime.StringAdd(x)
- implicit def unaugmentString(x: StringOps): String = x.repr
-
- @deprecated("Use `StringCanBuildFrom`", "2.10.0")
- def stringCanBuildFrom: CanBuildFrom[String, Char, String] = StringCanBuildFrom
-
- implicit val StringCanBuildFrom: CanBuildFrom[String, Char, String] = new CanBuildFrom[String, Char, String] {
- def apply(from: String) = apply()
- def apply() = mutable.StringBuilder.newBuilder
- }
-
// Type Constraints --------------------------------------------------------------
/**
@@ -460,3 +418,116 @@ object Predef extends LowPriorityImplicits {
implicit def dummyImplicit: DummyImplicit = new DummyImplicit
}
}
+
+private[scala] trait DeprecatedPredef {
+ self: Predef.type =>
+
+ // Deprecated stubs for any who may have been calling these methods directly.
+ @deprecated("Use `ArrowAssoc`", "2.11.0") def any2ArrowAssoc[A](x: A): ArrowAssoc[A] = new ArrowAssoc(x)
+ @deprecated("Use `Ensuring`", "2.11.0") def any2Ensuring[A](x: A): Ensuring[A] = new Ensuring(x)
+ @deprecated("Use `StringFormat`", "2.11.0") def any2stringfmt(x: Any): StringFormat[Any] = new StringFormat(x)
+ @deprecated("Use String interpolation", "2.11.0") def any2stringadd(x: Any): StringAdd[Any] = new StringAdd(x)
+ @deprecated("Use `Throwable` directly", "2.11.0") def exceptionWrapper(exc: Throwable) = new RichException(exc)
+ @deprecated("Use `SeqCharSequence`", "2.11.0") def seqToCharSequence(xs: scala.collection.IndexedSeq[Char]): CharSequence = new SeqCharSequence(xs)
+ @deprecated("Use `ArrayCharSequence`", "2.11.0") def arrayToCharSequence(xs: Array[Char]): CharSequence = new ArrayCharSequence(xs)
+
+ @deprecated("Use the method in `scala.io.ReadStdin`", "2.11.0") def readLine(): String = ReadStdin.readLine()
+ @deprecated("Use the method in `scala.io.ReadStdin`", "2.11.0") def readLine(text: String, args: Any*) = ReadStdin.readLine(text, args: _*)
+ @deprecated("Use the method in `scala.io.ReadStdin`", "2.11.0") def readBoolean() = ReadStdin.readBoolean()
+ @deprecated("Use the method in `scala.io.ReadStdin`", "2.11.0") def readByte() = ReadStdin.readByte()
+ @deprecated("Use the method in `scala.io.ReadStdin`", "2.11.0") def readShort() = ReadStdin.readShort()
+ @deprecated("Use the method in `scala.io.ReadStdin`", "2.11.0") def readChar() = ReadStdin.readChar()
+ @deprecated("Use the method in `scala.io.ReadStdin`", "2.11.0") def readInt() = ReadStdin.readInt()
+ @deprecated("Use the method in `scala.io.ReadStdin`", "2.11.0") def readLong() = ReadStdin.readLong()
+ @deprecated("Use the method in `scala.io.ReadStdin`", "2.11.0") def readFloat() = ReadStdin.readFloat()
+ @deprecated("Use the method in `scala.io.ReadStdin`", "2.11.0") def readDouble() = ReadStdin.readDouble()
+ @deprecated("Use the method in `scala.io.ReadStdin`", "2.11.0") def readf(format: String) = ReadStdin.readf(format)
+ @deprecated("Use the method in `scala.io.ReadStdin`", "2.11.0") def readf1(format: String) = ReadStdin.readf1(format)
+ @deprecated("Use the method in `scala.io.ReadStdin`", "2.11.0") def readf2(format: String) = ReadStdin.readf2(format)
+ @deprecated("Use the method in `scala.io.ReadStdin`", "2.11.0") def readf3(format: String) = ReadStdin.readf3(format)
+}
+
+/** The `LowPriorityImplicits` class provides implicit values that
+* are valid in all Scala compilation units without explicit qualification,
+* but that are partially overridden by higher-priority conversions in object
+* `Predef`.
+*
+* @author Martin Odersky
+* @since 2.8
+*/
+// SI-7335 Parents of Predef are defined in the same compilation unit to avoid
+// cyclic reference errors compiling the standard library *without* a previously
+// compiled copy on the classpath.
+private[scala] abstract class LowPriorityImplicits {
+ import mutable.WrappedArray
+ import immutable.WrappedString
+
+ /** We prefer the java.lang.* boxed types to these wrappers in
+ * any potential conflicts. Conflicts do exist because the wrappers
+ * need to implement ScalaNumber in order to have a symmetric equals
+ * method, but that implies implementing java.lang.Number as well.
+ *
+ * Note - these are inlined because they are value classes, but
+ * the call to xxxWrapper is not eliminated even though it does nothing.
+ * Even inlined, every call site does a no-op retrieval of Predef's MODULE$
+ * because maybe loading Predef has side effects!
+ */
+ @inline implicit def byteWrapper(x: Byte) = new runtime.RichByte(x)
+ @inline implicit def shortWrapper(x: Short) = new runtime.RichShort(x)
+ @inline implicit def intWrapper(x: Int) = new runtime.RichInt(x)
+ @inline implicit def charWrapper(c: Char) = new runtime.RichChar(c)
+ @inline implicit def longWrapper(x: Long) = new runtime.RichLong(x)
+ @inline implicit def floatWrapper(x: Float) = new runtime.RichFloat(x)
+ @inline implicit def doubleWrapper(x: Double) = new runtime.RichDouble(x)
+ @inline implicit def booleanWrapper(x: Boolean) = new runtime.RichBoolean(x)
+
+ // These eight implicits exist solely to exclude Null from the domain of
+ // the boxed types, so that e.g. "var x: Int = null" is a compile time
+ // error rather than a delayed null pointer exception by way of the
+ // conversion from java.lang.Integer. If defined in the same template as
+ // Integer2int, they would have higher priority because Null is a subtype
+ // of Integer. We balance that out and create conflict by moving the
+ // definition into the superclass.
+ //
+ // Caution: do not adjust tightrope tension without safety goggles in place.
+ implicit def Byte2byteNullConflict(x: Null): Byte = sys.error("value error")
+ implicit def Short2shortNullConflict(x: Null): Short = sys.error("value error")
+ implicit def Character2charNullConflict(x: Null): Char = sys.error("value error")
+ implicit def Integer2intNullConflict(x: Null): Int = sys.error("value error")
+ implicit def Long2longNullConflict(x: Null): Long = sys.error("value error")
+ implicit def Float2floatNullConflict(x: Null): Float = sys.error("value error")
+ implicit def Double2doubleNullConflict(x: Null): Double = sys.error("value error")
+ implicit def Boolean2booleanNullConflict(x: Null): Boolean = sys.error("value error")
+
+ implicit def genericWrapArray[T](xs: Array[T]): WrappedArray[T] =
+ if (xs eq null) null
+ else WrappedArray.make(xs)
+
+ // Since the JVM thinks arrays are covariant, one 0-length Array[AnyRef]
+ // is as good as another for all T <: AnyRef. Instead of creating 100,000,000
+ // unique ones by way of this implicit, let's share one.
+ implicit def wrapRefArray[T <: AnyRef](xs: Array[T]): WrappedArray[T] = {
+ if (xs eq null) null
+ else if (xs.length == 0) WrappedArray.empty[T]
+ else new WrappedArray.ofRef[T](xs)
+ }
+
+ implicit def wrapIntArray(xs: Array[Int]): WrappedArray[Int] = if (xs ne null) new WrappedArray.ofInt(xs) else null
+ implicit def wrapDoubleArray(xs: Array[Double]): WrappedArray[Double] = if (xs ne null) new WrappedArray.ofDouble(xs) else null
+ implicit def wrapLongArray(xs: Array[Long]): WrappedArray[Long] = if (xs ne null) new WrappedArray.ofLong(xs) else null
+ implicit def wrapFloatArray(xs: Array[Float]): WrappedArray[Float] = if (xs ne null) new WrappedArray.ofFloat(xs) else null
+ implicit def wrapCharArray(xs: Array[Char]): WrappedArray[Char] = if (xs ne null) new WrappedArray.ofChar(xs) else null
+ implicit def wrapByteArray(xs: Array[Byte]): WrappedArray[Byte] = if (xs ne null) new WrappedArray.ofByte(xs) else null
+ implicit def wrapShortArray(xs: Array[Short]): WrappedArray[Short] = if (xs ne null) new WrappedArray.ofShort(xs) else null
+ implicit def wrapBooleanArray(xs: Array[Boolean]): WrappedArray[Boolean] = if (xs ne null) new WrappedArray.ofBoolean(xs) else null
+ implicit def wrapUnitArray(xs: Array[Unit]): WrappedArray[Unit] = if (xs ne null) new WrappedArray.ofUnit(xs) else null
+
+ implicit def wrapString(s: String): WrappedString = if (s ne null) new WrappedString(s) else null
+ implicit def unwrapString(ws: WrappedString): String = if (ws ne null) ws.self else null
+
+ implicit def fallbackStringCanBuildFrom[T]: CanBuildFrom[String, T, immutable.IndexedSeq[T]] =
+ new CanBuildFrom[String, T, immutable.IndexedSeq[T]] {
+ def apply(from: String) = immutable.IndexedSeq.newBuilder[T]
+ def apply() = immutable.IndexedSeq.newBuilder[T]
+ }
+}
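
Taken together, the Predef rewrite above replaces the old any2ArrowAssoc/any2Ensuring/any2stringfmt conversion methods with implicit value classes, parks the removed forwarders in DeprecatedPredef, and pulls LowPriorityImplicits into the same compilation unit (SI-7335). A minimal sketch of the implicit-class call sites as they look to user code; none of it touches the deprecated stubs.

    object PredefImplicitClasses {
      def main(args: Array[String]): Unit = {
        // ArrowAssoc: `->` builds a Tuple2, now via an implicit value class
        // rather than a conversion method.
        val pair: (String, Int) = "answer" -> 42

        // Ensuring: attach a postcondition to any expression.
        val doubled = (pair._2 * 2).ensuring(_ > 0, "expected a positive result")

        // StringFormat: `formatted` delegates to String.format.
        println(doubled.formatted("doubled = %d"))

        // SeqCharSequence: an IndexedSeq[Char] can stand in for a CharSequence.
        val cs: CharSequence = Vector('a', 'b', 'c')
        println(cs.charAt(1)) // b
      }
    }
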
diff --git a/src/library/scala/Product.scala b/src/library/scala/Product.scala
index 2c6838f6b3..0798587772 100644
--- a/src/library/scala/Product.scala
+++ b/src/library/scala/Product.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/Product1.scala b/src/library/scala/Product1.scala
index edd095c5c6..dbc34ba66a 100644
--- a/src/library/scala/Product1.scala
+++ b/src/library/scala/Product1.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/Product10.scala b/src/library/scala/Product10.scala
index 8daefde699..70de79d49a 100644
--- a/src/library/scala/Product10.scala
+++ b/src/library/scala/Product10.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/Product11.scala b/src/library/scala/Product11.scala
index 90b4e8013e..1bb79ac017 100644
--- a/src/library/scala/Product11.scala
+++ b/src/library/scala/Product11.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/Product12.scala b/src/library/scala/Product12.scala
index d5997ea05a..d7e1e1b05c 100644
--- a/src/library/scala/Product12.scala
+++ b/src/library/scala/Product12.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/Product13.scala b/src/library/scala/Product13.scala
index db8e0f3722..8571b45a40 100644
--- a/src/library/scala/Product13.scala
+++ b/src/library/scala/Product13.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/Product14.scala b/src/library/scala/Product14.scala
index 113c07e8c4..a2f5140370 100644
--- a/src/library/scala/Product14.scala
+++ b/src/library/scala/Product14.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/Product15.scala b/src/library/scala/Product15.scala
index a6ad9c7594..1c6ad0011c 100644
--- a/src/library/scala/Product15.scala
+++ b/src/library/scala/Product15.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/Product16.scala b/src/library/scala/Product16.scala
index cbf47ece94..f03b0b34a2 100644
--- a/src/library/scala/Product16.scala
+++ b/src/library/scala/Product16.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/Product17.scala b/src/library/scala/Product17.scala
index f56836bfd8..72df1b496a 100644
--- a/src/library/scala/Product17.scala
+++ b/src/library/scala/Product17.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/Product18.scala b/src/library/scala/Product18.scala
index 5b86bcff65..0402f90a01 100644
--- a/src/library/scala/Product18.scala
+++ b/src/library/scala/Product18.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/Product19.scala b/src/library/scala/Product19.scala
index ed4bf36c93..b9770db47b 100644
--- a/src/library/scala/Product19.scala
+++ b/src/library/scala/Product19.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/Product2.scala b/src/library/scala/Product2.scala
index e27e54eff9..a43a4a285c 100644
--- a/src/library/scala/Product2.scala
+++ b/src/library/scala/Product2.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/Product20.scala b/src/library/scala/Product20.scala
index 47437a20af..7b0df201ec 100644
--- a/src/library/scala/Product20.scala
+++ b/src/library/scala/Product20.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/Product21.scala b/src/library/scala/Product21.scala
index 319d2725c0..f81347aac0 100644
--- a/src/library/scala/Product21.scala
+++ b/src/library/scala/Product21.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/Product22.scala b/src/library/scala/Product22.scala
index 6ab3737acd..7a25891c6e 100644
--- a/src/library/scala/Product22.scala
+++ b/src/library/scala/Product22.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/Product3.scala b/src/library/scala/Product3.scala
index 1cfbd7956b..9976240935 100644
--- a/src/library/scala/Product3.scala
+++ b/src/library/scala/Product3.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/Product4.scala b/src/library/scala/Product4.scala
index 843571fd60..d6c1543390 100644
--- a/src/library/scala/Product4.scala
+++ b/src/library/scala/Product4.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/Product5.scala b/src/library/scala/Product5.scala
index df73bba3dd..5f1b11a30d 100644
--- a/src/library/scala/Product5.scala
+++ b/src/library/scala/Product5.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/Product6.scala b/src/library/scala/Product6.scala
index 36906ca54e..efd9408d73 100644
--- a/src/library/scala/Product6.scala
+++ b/src/library/scala/Product6.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/Product7.scala b/src/library/scala/Product7.scala
index e7b2c13ad1..fab0a997a1 100644
--- a/src/library/scala/Product7.scala
+++ b/src/library/scala/Product7.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/Product8.scala b/src/library/scala/Product8.scala
index 916e57ec39..41391f7050 100644
--- a/src/library/scala/Product8.scala
+++ b/src/library/scala/Product8.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/Product9.scala b/src/library/scala/Product9.scala
index d5e72edc0b..e22538e1ee 100644
--- a/src/library/scala/Product9.scala
+++ b/src/library/scala/Product9.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/SerialVersionUID.scala b/src/library/scala/SerialVersionUID.scala
index 1f7d047060..77094f0bbf 100644
--- a/src/library/scala/SerialVersionUID.scala
+++ b/src/library/scala/SerialVersionUID.scala
@@ -12,4 +12,4 @@ package scala
* Annotation for specifying the `static SerialVersionUID` field
* of a serializable class.
*/
-class SerialVersionUID(uid: Long) extends scala.annotation.StaticAnnotation
+class SerialVersionUID(value: Long) extends scala.annotation.ClassfileAnnotation
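The hunk above turns SerialVersionUID into a classfile annotation whose single parameter is named value, so the constant ends up in the generated class file. A minimal usage sketch (class name and uid are hypothetical):

    // Pins the serialVersionUID emitted for this class.
    @SerialVersionUID(42L)
    class Account(val id: Long) extends Serializable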
diff --git a/src/library/scala/Short.scala b/src/library/scala/Short.scala
index 35c5fe3ff0..4f91c51550 100644
--- a/src/library/scala/Short.scala
+++ b/src/library/scala/Short.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -606,6 +606,8 @@ object Short extends AnyValCompanion {
/** Transform a value type into a boxed reference type.
*
+ * Runtime implementation determined by `scala.runtime.BoxesRunTime.boxToShort`. See [[https://github.com/scala/scala src/library/scala/runtime/BoxesRunTime.java]].
+ *
* @param x the Short to be boxed
* @return a java.lang.Short offering `x` as its underlying value.
*/
@@ -615,6 +617,8 @@ object Short extends AnyValCompanion {
* method is not typesafe: it accepts any Object, but will throw
* an exception if the argument is not a java.lang.Short.
*
+ * Runtime implementation determined by `scala.runtime.BoxesRunTime.unboxToShort`. See [[https://github.com/scala/scala src/library/scala/runtime/BoxesRunTime.java]].
+ *
* @param x the java.lang.Short to be unboxed.
* @throws ClassCastException if the argument is not a java.lang.Short
* @return the Short resulting from calling shortValue() on `x`
diff --git a/src/library/scala/Specializable.scala b/src/library/scala/Specializable.scala
index c7a6091a65..137598c28d 100644
--- a/src/library/scala/Specializable.scala
+++ b/src/library/scala/Specializable.scala
@@ -11,7 +11,7 @@ package scala
/** A common supertype for companions of specializable types.
* Should not be extended in user code.
*/
-trait Specializable extends SpecializableCompanion
+trait Specializable
object Specializable {
// No type parameter in @specialized annotation.
diff --git a/src/library/scala/StringContext.scala b/src/library/scala/StringContext.scala
index 1b5fd6c8d3..42fb2f36e8 100644
--- a/src/library/scala/StringContext.scala
+++ b/src/library/scala/StringContext.scala
@@ -162,7 +162,7 @@ case class StringContext(parts: String*) {
*/
// The implementation is hardwired to `scala.tools.reflect.MacroImplementations.macro_StringInterpolation_f`
// Using the mechanism implemented in `scala.tools.reflect.FastTrack`
- def f(args: Any*): String = ??? // macro
+ def f(args: Any*): String = macro ???
}
object StringContext {
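With the change above, f is declared with the macro keyword and its body is supplied by the compiler (per the comment in the hunk) instead of being a ??? placeholder. A typical call to the interpolator it backs (values illustrative):

    val name = "James"
    val height = 1.9d
    f"$name%s is $height%2.2f meters tall"   // "James is 1.90 meters tall"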
diff --git a/src/library/scala/Tuple1.scala b/src/library/scala/Tuple1.scala
index 02fdd0cba5..5898b63e21 100644
--- a/src/library/scala/Tuple1.scala
+++ b/src/library/scala/Tuple1.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -15,6 +15,7 @@ package scala
* @constructor Create a new tuple with 1 elements.
* @param _1 Element 1 of this Tuple1
*/
+@deprecatedInheritance("Tuples will be made final in a future version.", "2.11.0")
case class Tuple1[@specialized(Int, Long, Double) +T1](_1: T1)
extends Product1[T1]
{
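The @deprecatedInheritance annotation added here (and on the other tuple arities below) means user code that subclasses a tuple now draws a deprecation warning. A hypothetical subclass that would trigger it:

    // Still compiles, but warns that inheriting from Tuple2 is deprecated.
    class LabelledPair[A, B](val label: String, a: A, b: B) extends Tuple2[A, B](a, b)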
diff --git a/src/library/scala/Tuple10.scala b/src/library/scala/Tuple10.scala
index ba2a02a8b2..2b0239561d 100644
--- a/src/library/scala/Tuple10.scala
+++ b/src/library/scala/Tuple10.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -24,6 +24,7 @@ package scala
* @param _9 Element 9 of this Tuple10
* @param _10 Element 10 of this Tuple10
*/
+@deprecatedInheritance("Tuples will be made final in a future version.", "2.11.0")
case class Tuple10[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10](_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10)
extends Product10[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10]
{
diff --git a/src/library/scala/Tuple11.scala b/src/library/scala/Tuple11.scala
index 7f51d172d4..0d5294d547 100644
--- a/src/library/scala/Tuple11.scala
+++ b/src/library/scala/Tuple11.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -25,6 +25,7 @@ package scala
* @param _10 Element 10 of this Tuple11
* @param _11 Element 11 of this Tuple11
*/
+@deprecatedInheritance("Tuples will be made final in a future version.", "2.11.0")
case class Tuple11[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11](_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11)
extends Product11[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11]
{
diff --git a/src/library/scala/Tuple12.scala b/src/library/scala/Tuple12.scala
index 4bbc6a0eab..d36c8275c1 100644
--- a/src/library/scala/Tuple12.scala
+++ b/src/library/scala/Tuple12.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -26,6 +26,7 @@ package scala
* @param _11 Element 11 of this Tuple12
* @param _12 Element 12 of this Tuple12
*/
+@deprecatedInheritance("Tuples will be made final in a future version.", "2.11.0")
case class Tuple12[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12](_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12)
extends Product12[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12]
{
diff --git a/src/library/scala/Tuple13.scala b/src/library/scala/Tuple13.scala
index 77bd59bf2e..edc37456fe 100644
--- a/src/library/scala/Tuple13.scala
+++ b/src/library/scala/Tuple13.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -27,6 +27,7 @@ package scala
* @param _12 Element 12 of this Tuple13
* @param _13 Element 13 of this Tuple13
*/
+@deprecatedInheritance("Tuples will be made final in a future version.", "2.11.0")
case class Tuple13[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +T13](_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12, _13: T13)
extends Product13[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13]
{
diff --git a/src/library/scala/Tuple14.scala b/src/library/scala/Tuple14.scala
index bf7a4ce016..9896e736c9 100644
--- a/src/library/scala/Tuple14.scala
+++ b/src/library/scala/Tuple14.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -28,6 +28,7 @@ package scala
* @param _13 Element 13 of this Tuple14
* @param _14 Element 14 of this Tuple14
*/
+@deprecatedInheritance("Tuples will be made final in a future version.", "2.11.0")
case class Tuple14[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +T13, +T14](_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12, _13: T13, _14: T14)
extends Product14[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14]
{
diff --git a/src/library/scala/Tuple15.scala b/src/library/scala/Tuple15.scala
index 582c359bc6..45cd4f751f 100644
--- a/src/library/scala/Tuple15.scala
+++ b/src/library/scala/Tuple15.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -29,6 +29,7 @@ package scala
* @param _14 Element 14 of this Tuple15
* @param _15 Element 15 of this Tuple15
*/
+@deprecatedInheritance("Tuples will be made final in a future version.", "2.11.0")
case class Tuple15[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +T13, +T14, +T15](_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12, _13: T13, _14: T14, _15: T15)
extends Product15[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15]
{
diff --git a/src/library/scala/Tuple16.scala b/src/library/scala/Tuple16.scala
index a1e9a790ff..2e370a5b31 100644
--- a/src/library/scala/Tuple16.scala
+++ b/src/library/scala/Tuple16.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -30,6 +30,7 @@ package scala
* @param _15 Element 15 of this Tuple16
* @param _16 Element 16 of this Tuple16
*/
+@deprecatedInheritance("Tuples will be made final in a future version.", "2.11.0")
case class Tuple16[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +T13, +T14, +T15, +T16](_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12, _13: T13, _14: T14, _15: T15, _16: T16)
extends Product16[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16]
{
diff --git a/src/library/scala/Tuple17.scala b/src/library/scala/Tuple17.scala
index f531766c18..2242a15fda 100644
--- a/src/library/scala/Tuple17.scala
+++ b/src/library/scala/Tuple17.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -31,6 +31,7 @@ package scala
* @param _16 Element 16 of this Tuple17
* @param _17 Element 17 of this Tuple17
*/
+@deprecatedInheritance("Tuples will be made final in a future version.", "2.11.0")
case class Tuple17[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +T13, +T14, +T15, +T16, +T17](_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12, _13: T13, _14: T14, _15: T15, _16: T16, _17: T17)
extends Product17[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17]
{
diff --git a/src/library/scala/Tuple18.scala b/src/library/scala/Tuple18.scala
index a96db25e4b..68f245c6ce 100644
--- a/src/library/scala/Tuple18.scala
+++ b/src/library/scala/Tuple18.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -32,6 +32,7 @@ package scala
* @param _17 Element 17 of this Tuple18
* @param _18 Element 18 of this Tuple18
*/
+@deprecatedInheritance("Tuples will be made final in a future version.", "2.11.0")
case class Tuple18[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +T13, +T14, +T15, +T16, +T17, +T18](_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12, _13: T13, _14: T14, _15: T15, _16: T16, _17: T17, _18: T18)
extends Product18[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18]
{
diff --git a/src/library/scala/Tuple19.scala b/src/library/scala/Tuple19.scala
index 718280d68a..a8a49549fb 100644
--- a/src/library/scala/Tuple19.scala
+++ b/src/library/scala/Tuple19.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -33,6 +33,7 @@ package scala
* @param _18 Element 18 of this Tuple19
* @param _19 Element 19 of this Tuple19
*/
+@deprecatedInheritance("Tuples will be made final in a future version.", "2.11.0")
case class Tuple19[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +T13, +T14, +T15, +T16, +T17, +T18, +T19](_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12, _13: T13, _14: T14, _15: T15, _16: T16, _17: T17, _18: T18, _19: T19)
extends Product19[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19]
{
diff --git a/src/library/scala/Tuple2.scala b/src/library/scala/Tuple2.scala
index 35d5a441c8..9ea1469c5c 100644
--- a/src/library/scala/Tuple2.scala
+++ b/src/library/scala/Tuple2.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -16,6 +16,7 @@ package scala
* @param _1 Element 1 of this Tuple2
* @param _2 Element 2 of this Tuple2
*/
+@deprecatedInheritance("Tuples will be made final in a future version.", "2.11.0")
case class Tuple2[@specialized(Int, Long, Double, Char, Boolean/*, AnyRef*/) +T1, @specialized(Int, Long, Double, Char, Boolean/*, AnyRef*/) +T2](_1: T1, _2: T2)
extends Product2[T1, T2]
{
diff --git a/src/library/scala/Tuple20.scala b/src/library/scala/Tuple20.scala
index 4a44c0bb89..0118d382ab 100644
--- a/src/library/scala/Tuple20.scala
+++ b/src/library/scala/Tuple20.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -34,6 +34,7 @@ package scala
* @param _19 Element 19 of this Tuple20
* @param _20 Element 20 of this Tuple20
*/
+@deprecatedInheritance("Tuples will be made final in a future version.", "2.11.0")
case class Tuple20[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +T13, +T14, +T15, +T16, +T17, +T18, +T19, +T20](_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12, _13: T13, _14: T14, _15: T15, _16: T16, _17: T17, _18: T18, _19: T19, _20: T20)
extends Product20[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20]
{
diff --git a/src/library/scala/Tuple21.scala b/src/library/scala/Tuple21.scala
index 580a169e39..ceae94af41 100644
--- a/src/library/scala/Tuple21.scala
+++ b/src/library/scala/Tuple21.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -35,6 +35,7 @@ package scala
* @param _20 Element 20 of this Tuple21
* @param _21 Element 21 of this Tuple21
*/
+@deprecatedInheritance("Tuples will be made final in a future version.", "2.11.0")
case class Tuple21[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +T13, +T14, +T15, +T16, +T17, +T18, +T19, +T20, +T21](_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12, _13: T13, _14: T14, _15: T15, _16: T16, _17: T17, _18: T18, _19: T19, _20: T20, _21: T21)
extends Product21[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21]
{
diff --git a/src/library/scala/Tuple22.scala b/src/library/scala/Tuple22.scala
index fd3392ddea..ecd567a710 100644
--- a/src/library/scala/Tuple22.scala
+++ b/src/library/scala/Tuple22.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -36,6 +36,7 @@ package scala
* @param _21 Element 21 of this Tuple22
* @param _22 Element 22 of this Tuple22
*/
+@deprecatedInheritance("Tuples will be made final in a future version.", "2.11.0")
case class Tuple22[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +T13, +T14, +T15, +T16, +T17, +T18, +T19, +T20, +T21, +T22](_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12, _13: T13, _14: T14, _15: T15, _16: T16, _17: T17, _18: T18, _19: T19, _20: T20, _21: T21, _22: T22)
extends Product22[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22]
{
diff --git a/src/library/scala/Tuple3.scala b/src/library/scala/Tuple3.scala
index 5ed13602e3..6e71d3ae8c 100644
--- a/src/library/scala/Tuple3.scala
+++ b/src/library/scala/Tuple3.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -17,6 +17,7 @@ package scala
* @param _2 Element 2 of this Tuple3
* @param _3 Element 3 of this Tuple3
*/
+@deprecatedInheritance("Tuples will be made final in a future version.", "2.11.0")
case class Tuple3[+T1, +T2, +T3](_1: T1, _2: T2, _3: T3)
extends Product3[T1, T2, T3]
{
diff --git a/src/library/scala/Tuple4.scala b/src/library/scala/Tuple4.scala
index a859078bcf..4c84cfc674 100644
--- a/src/library/scala/Tuple4.scala
+++ b/src/library/scala/Tuple4.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -18,6 +18,7 @@ package scala
* @param _3 Element 3 of this Tuple4
* @param _4 Element 4 of this Tuple4
*/
+@deprecatedInheritance("Tuples will be made final in a future version.", "2.11.0")
case class Tuple4[+T1, +T2, +T3, +T4](_1: T1, _2: T2, _3: T3, _4: T4)
extends Product4[T1, T2, T3, T4]
{
diff --git a/src/library/scala/Tuple5.scala b/src/library/scala/Tuple5.scala
index 1edfb673ee..fe8e853f12 100644
--- a/src/library/scala/Tuple5.scala
+++ b/src/library/scala/Tuple5.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -19,6 +19,7 @@ package scala
* @param _4 Element 4 of this Tuple5
* @param _5 Element 5 of this Tuple5
*/
+@deprecatedInheritance("Tuples will be made final in a future version.", "2.11.0")
case class Tuple5[+T1, +T2, +T3, +T4, +T5](_1: T1, _2: T2, _3: T3, _4: T4, _5: T5)
extends Product5[T1, T2, T3, T4, T5]
{
diff --git a/src/library/scala/Tuple6.scala b/src/library/scala/Tuple6.scala
index 5b74937e58..6bf1c73d4b 100644
--- a/src/library/scala/Tuple6.scala
+++ b/src/library/scala/Tuple6.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -20,6 +20,7 @@ package scala
* @param _5 Element 5 of this Tuple6
* @param _6 Element 6 of this Tuple6
*/
+@deprecatedInheritance("Tuples will be made final in a future version.", "2.11.0")
case class Tuple6[+T1, +T2, +T3, +T4, +T5, +T6](_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6)
extends Product6[T1, T2, T3, T4, T5, T6]
{
diff --git a/src/library/scala/Tuple7.scala b/src/library/scala/Tuple7.scala
index a7f572e9f0..ea42709cb7 100644
--- a/src/library/scala/Tuple7.scala
+++ b/src/library/scala/Tuple7.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -21,6 +21,7 @@ package scala
* @param _6 Element 6 of this Tuple7
* @param _7 Element 7 of this Tuple7
*/
+@deprecatedInheritance("Tuples will be made final in a future version.", "2.11.0")
case class Tuple7[+T1, +T2, +T3, +T4, +T5, +T6, +T7](_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7)
extends Product7[T1, T2, T3, T4, T5, T6, T7]
{
diff --git a/src/library/scala/Tuple8.scala b/src/library/scala/Tuple8.scala
index 9bb427d689..c24f9454e0 100644
--- a/src/library/scala/Tuple8.scala
+++ b/src/library/scala/Tuple8.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -22,6 +22,7 @@ package scala
* @param _7 Element 7 of this Tuple8
* @param _8 Element 8 of this Tuple8
*/
+@deprecatedInheritance("Tuples will be made final in a future version.", "2.11.0")
case class Tuple8[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8](_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8)
extends Product8[T1, T2, T3, T4, T5, T6, T7, T8]
{
diff --git a/src/library/scala/Tuple9.scala b/src/library/scala/Tuple9.scala
index 4d50539e0c..ed02b30df2 100644
--- a/src/library/scala/Tuple9.scala
+++ b/src/library/scala/Tuple9.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -23,6 +23,7 @@ package scala
* @param _8 Element 8 of this Tuple9
* @param _9 Element 9 of this Tuple9
*/
+@deprecatedInheritance("Tuples will be made final in a future version.", "2.11.0")
case class Tuple9[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9](_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9)
extends Product9[T1, T2, T3, T4, T5, T6, T7, T8, T9]
{
diff --git a/src/library/scala/UninitializedFieldError.scala b/src/library/scala/UninitializedFieldError.scala
index 10c6cccf15..0dfba2a187 100644
--- a/src/library/scala/UninitializedFieldError.scala
+++ b/src/library/scala/UninitializedFieldError.scala
@@ -18,8 +18,6 @@ package scala
*
* @since 2.7
*/
-final case class UninitializedFieldError(msg: String)
- extends RuntimeException(msg) {
- def this(obj: Any) =
- this(if (null != obj) obj.toString() else "null")
+final case class UninitializedFieldError(msg: String) extends RuntimeException(msg) {
+ def this(obj: Any) = this("" + obj)
}
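The rewritten auxiliary constructor relies on string concatenation, which already renders null as "null", so the explicit null check it replaces was redundant. A quick check (argument values illustrative):

    new UninitializedFieldError(42: Any).getMessage     // "42"
    new UninitializedFieldError(null: Any).getMessage   // "null"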
diff --git a/src/library/scala/Unit.scala b/src/library/scala/Unit.scala
index dc67e60314..0e59a184d1 100644
--- a/src/library/scala/Unit.scala
+++ b/src/library/scala/Unit.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/annotation/serializable.scala b/src/library/scala/annotation/serializable.scala
deleted file mode 100644
index 1e1aff19d3..0000000000
--- a/src/library/scala/annotation/serializable.scala
+++ /dev/null
@@ -1,15 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala.annotation
-
-/**
- * An annotation that designates the class to which it is applied as serializable
- */
-@deprecated("instead of `@serializable class C`, use `class C extends Serializable`", "2.9.0")
-class serializable extends scala.annotation.StaticAnnotation
diff --git a/src/library/scala/annotation/target/package.scala b/src/library/scala/annotation/target/package.scala
deleted file mode 100644
index ac2836c0a8..0000000000
--- a/src/library/scala/annotation/target/package.scala
+++ /dev/null
@@ -1,29 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala.annotation
-
-package object target {
- @deprecated("Use `@scala.annotation.meta.beanGetter` instead", "2.10.0")
- type beanGetter = scala.annotation.meta.beanGetter
-
- @deprecated("Use `@scala.annotation.meta.beanSetter` instead", "2.10.0")
- type beanSetter = scala.annotation.meta.beanSetter
-
- @deprecated("Use `@scala.annotation.meta.field` instead", "2.10.0")
- type field = scala.annotation.meta.field
-
- @deprecated("Use `@scala.annotation.meta.getter` instead", "2.10.0")
- type getter = scala.annotation.meta.getter
-
- @deprecated("Use `@scala.annotation.meta.param` instead", "2.10.0")
- type param = scala.annotation.meta.param
-
- @deprecated("Use `@scala.annotation.meta.setter` instead", "2.10.0")
- type setter = scala.annotation.meta.setter
-}
diff --git a/src/library/scala/beans/ScalaBeanInfo.scala b/src/library/scala/beans/ScalaBeanInfo.scala
index 3a95335d71..ac8fa263d7 100644
--- a/src/library/scala/beans/ScalaBeanInfo.scala
+++ b/src/library/scala/beans/ScalaBeanInfo.scala
@@ -27,7 +27,7 @@ abstract class ScalaBeanInfo(clazz: java.lang.Class[_],
for (m <- clazz.getMethods if methods.exists(_ == m.getName))
yield new MethodDescriptor(m)
- init
+ init()
override def getPropertyDescriptors() = pd
override def getMethodDescriptors() = md
@@ -35,10 +35,10 @@ abstract class ScalaBeanInfo(clazz: java.lang.Class[_],
// override def getAdditionalBeanInfo() = Array(Introspector getBeanInfo clazz.getSuperclass)
private def init() {
- var i = 0;
+ var i = 0
while (i < props.length) {
pd(i/3) = new PropertyDescriptor(props(i), clazz, props(i+1), props(i+2))
- i = i + 3;
+ i = i + 3
}
}
diff --git a/src/library/scala/collection/BitSet.scala b/src/library/scala/collection/BitSet.scala
index 6985563da2..e255e96140 100644
--- a/src/library/scala/collection/BitSet.scala
+++ b/src/library/scala/collection/BitSet.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
import generic._
diff --git a/src/library/scala/collection/BitSetLike.scala b/src/library/scala/collection/BitSetLike.scala
index 4a1c0beaa6..f11f3757a6 100644
--- a/src/library/scala/collection/BitSetLike.scala
+++ b/src/library/scala/collection/BitSetLike.scala
@@ -8,10 +8,10 @@
-package scala.collection
+package scala
+package collection
import BitSetLike._
-import generic._
import mutable.StringBuilder
/** A template trait for bitsets.
@@ -70,6 +70,8 @@ trait BitSetLike[+This <: BitSetLike[This] with SortedSet[Int]] extends SortedSe
s
}
+ override def isEmpty: Boolean = 0 until nwords forall (i => word(i) == 0)
+
implicit def ordering: Ordering[Int] = Ordering.Int
def rangeImpl(from: Option[Int], until: Option[Int]): This = {
@@ -99,22 +101,27 @@ trait BitSetLike[+This <: BitSetLike[This] with SortedSet[Int]] extends SortedSe
fromBitMaskNoCopy(a)
}
- def iterator: Iterator[Int] = new AbstractIterator[Int] {
- private var current = 0
+ def iterator: Iterator[Int] = iteratorFrom(0)
+
+ override def keysIteratorFrom(start: Int) = new AbstractIterator[Int] {
+ private var current = start
private val end = nwords * WordLength
def hasNext: Boolean = {
- while (current < end && !self.contains(current)) current += 1
- current < end
+ while (current != end && !self.contains(current)) current += 1
+ current != end
}
def next(): Int =
if (hasNext) { val r = current; current += 1; r }
- else Iterator.empty.next
+ else Iterator.empty.next()
}
override def foreach[B](f: Int => B) {
for (i <- 0 until nwords) {
val w = word(i)
- for (j <- i * WordLength until (i + 1) * WordLength) {
+ /* NOTE: `until` instead of `to` will not work here because
+ the maximum value of `(i + 1) * WordLength` could be
+ `Int.MaxValue + 1` (i.e. `Int.MinValue`). */
+ for (j <- i * WordLength to (i + 1) * WordLength - 1) {
if ((w & (1L << j)) != 0L) f(j)
}
}
@@ -194,11 +201,15 @@ trait BitSetLike[+This <: BitSetLike[This] with SortedSet[Int]] extends SortedSe
override def addString(sb: StringBuilder, start: String, sep: String, end: String) = {
sb append start
var pre = ""
- for (i <- 0 until nwords * WordLength)
+ val max = nwords * WordLength
+ var i = 0
+ while(i != max) {
if (contains(i)) {
sb append pre append i
pre = sep
}
+ i += 1
+ }
sb append end
}
@@ -209,6 +220,7 @@ trait BitSetLike[+This <: BitSetLike[This] with SortedSet[Int]] extends SortedSe
object BitSetLike {
private[collection] val LogWL = 6
private val WordLength = 64
+ private[collection] val MaxSize = (Int.MaxValue >> LogWL) + 1
private[collection] def updateArray(elems: Array[Long], idx: Int, w: Long): Array[Long] = {
var len = elems.length
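A small arithmetic check of the NOTE added to foreach above, taking the largest word count the new MaxSize constant allows:

    val WordLength = 64
    val nwords     = (Int.MaxValue >> 6) + 1      // 33554432, i.e. MaxSize
    val i          = nwords - 1                   // index of the last word
    (i + 1) * WordLength                          // overflows to Int.MinValue
    (i + 1) * WordLength - 1                      // wraps back to Int.MaxValue
    // so `until (i + 1) * WordLength` would be an empty range for the last word,
    // while `to (i + 1) * WordLength - 1` still covers it.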
diff --git a/src/library/scala/collection/BufferedIterator.scala b/src/library/scala/collection/BufferedIterator.scala
index 741bca4e46..e6e97d584c 100644
--- a/src/library/scala/collection/BufferedIterator.scala
+++ b/src/library/scala/collection/BufferedIterator.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
/** Buffered iterators are iterators which provide a method `head`
* that inspects the next element without discarding it.
diff --git a/src/library/scala/collection/CustomParallelizable.scala b/src/library/scala/collection/CustomParallelizable.scala
index 53fe32b89f..cbeb28d643 100644
--- a/src/library/scala/collection/CustomParallelizable.scala
+++ b/src/library/scala/collection/CustomParallelizable.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
import parallel.Combiner
diff --git a/src/library/scala/collection/DefaultMap.scala b/src/library/scala/collection/DefaultMap.scala
index 5c91183891..8afda7cfcf 100644
--- a/src/library/scala/collection/DefaultMap.scala
+++ b/src/library/scala/collection/DefaultMap.scala
@@ -6,11 +6,8 @@
** |/ **
\* */
-
-
-package scala.collection
-
-import generic._
+package scala
+package collection
/** A default map which implements the `+` and `-` methods of maps.
*
@@ -27,14 +24,14 @@ import generic._
* @since 2.8
*/
trait DefaultMap[A, +B] extends Map[A, B] { self =>
-
+
/** A default implementation which creates a new immutable map.
*/
override def +[B1 >: B](kv: (A, B1)): Map[A, B1] = {
val b = Map.newBuilder[A, B1]
b ++= this
b += ((kv._1, kv._2))
- b.result
+ b.result()
}
/** A default implementation which creates a new immutable map.
@@ -42,6 +39,6 @@ trait DefaultMap[A, +B] extends Map[A, B] { self =>
override def - (key: A): Map[A, B] = {
val b = newBuilder
b ++= this filter (key != _._1)
- b.result
+ b.result()
}
}
diff --git a/src/library/scala/collection/GenIterable.scala b/src/library/scala/collection/GenIterable.scala
index b4e7a14ade..6fd4158726 100644
--- a/src/library/scala/collection/GenIterable.scala
+++ b/src/library/scala/collection/GenIterable.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
import generic._
diff --git a/src/library/scala/collection/GenIterableLike.scala b/src/library/scala/collection/GenIterableLike.scala
index 2ba9a7283d..1dbb54ddc7 100644
--- a/src/library/scala/collection/GenIterableLike.scala
+++ b/src/library/scala/collection/GenIterableLike.scala
@@ -6,9 +6,10 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
-import generic.{ CanBuildFrom => CBF, _ }
+import generic.{ CanBuildFrom => CBF }
/** A template trait for all iterable collections which may possibly
* have their operations implemented in parallel.
diff --git a/src/library/scala/collection/GenIterableView.scala b/src/library/scala/collection/GenIterableView.scala
index ca0332e9ad..cd052ddf79 100644
--- a/src/library/scala/collection/GenIterableView.scala
+++ b/src/library/scala/collection/GenIterableView.scala
@@ -6,13 +6,7 @@
** |/ **
\* */
-package scala.collection
-
-
-import generic._
-
-
+package scala
+package collection
trait GenIterableView[+A, +Coll] extends GenIterableViewLike[A, Coll, GenIterableView[A, Coll]] { }
-
-
diff --git a/src/library/scala/collection/GenIterableViewLike.scala b/src/library/scala/collection/GenIterableViewLike.scala
index 4e4ceb4cea..b519e99ae5 100644
--- a/src/library/scala/collection/GenIterableViewLike.scala
+++ b/src/library/scala/collection/GenIterableViewLike.scala
@@ -6,14 +6,8 @@
** |/ **
\* */
-package scala.collection
-
-
-
-import generic._
-import TraversableView.NoBuilder
-
-
+package scala
+package collection
trait GenIterableViewLike[+A,
+Coll,
diff --git a/src/library/scala/collection/GenMap.scala b/src/library/scala/collection/GenMap.scala
index f7b2ae4d70..3d7427981d 100644
--- a/src/library/scala/collection/GenMap.scala
+++ b/src/library/scala/collection/GenMap.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
import generic._
diff --git a/src/library/scala/collection/GenMapLike.scala b/src/library/scala/collection/GenMapLike.scala
index 367377a59c..4e7d359251 100644
--- a/src/library/scala/collection/GenMapLike.scala
+++ b/src/library/scala/collection/GenMapLike.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
/** A trait for all maps upon which operations may be
* implemented in parallel.
diff --git a/src/library/scala/collection/GenSeq.scala b/src/library/scala/collection/GenSeq.scala
index 4c5488d7e2..480562cab5 100644
--- a/src/library/scala/collection/GenSeq.scala
+++ b/src/library/scala/collection/GenSeq.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
import generic._
diff --git a/src/library/scala/collection/GenSeqLike.scala b/src/library/scala/collection/GenSeqLike.scala
index 78d63348c0..27b75c0491 100644
--- a/src/library/scala/collection/GenSeqLike.scala
+++ b/src/library/scala/collection/GenSeqLike.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
import generic._
diff --git a/src/library/scala/collection/GenSeqView.scala b/src/library/scala/collection/GenSeqView.scala
index 92c8b779e9..0a214832ad 100644
--- a/src/library/scala/collection/GenSeqView.scala
+++ b/src/library/scala/collection/GenSeqView.scala
@@ -6,13 +6,7 @@
** |/ **
\* */
-package scala.collection
-
-
-import generic._
-
-
+package scala
+package collection
trait GenSeqView[+A, +Coll] extends GenSeqViewLike[A, Coll, GenSeqView[A, Coll]] { }
-
-
diff --git a/src/library/scala/collection/GenSeqViewLike.scala b/src/library/scala/collection/GenSeqViewLike.scala
index 51600218ad..d3af953f72 100644
--- a/src/library/scala/collection/GenSeqViewLike.scala
+++ b/src/library/scala/collection/GenSeqViewLike.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
diff --git a/src/library/scala/collection/GenSet.scala b/src/library/scala/collection/GenSet.scala
index 832177b128..2467860095 100644
--- a/src/library/scala/collection/GenSet.scala
+++ b/src/library/scala/collection/GenSet.scala
@@ -7,7 +7,8 @@
\* */
-package scala.collection
+package scala
+package collection
import generic._
diff --git a/src/library/scala/collection/GenSetLike.scala b/src/library/scala/collection/GenSetLike.scala
index f22a7c8f09..c5355e58ec 100644
--- a/src/library/scala/collection/GenSetLike.scala
+++ b/src/library/scala/collection/GenSetLike.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
/** A template trait for sets which may possibly
diff --git a/src/library/scala/collection/GenTraversable.scala b/src/library/scala/collection/GenTraversable.scala
index 3db2dd77a9..b700f49cf6 100644
--- a/src/library/scala/collection/GenTraversable.scala
+++ b/src/library/scala/collection/GenTraversable.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
import generic._
diff --git a/src/library/scala/collection/GenTraversableLike.scala b/src/library/scala/collection/GenTraversableLike.scala
index 46134c921e..f4aa063d8a 100644
--- a/src/library/scala/collection/GenTraversableLike.scala
+++ b/src/library/scala/collection/GenTraversableLike.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
import generic._
@@ -238,7 +239,7 @@ trait GenTraversableLike[+A, +Repr] extends Any with GenTraversableOnce[A] with
* // lettersOf will return a Set[Char], not a Seq
* def lettersOf(words: Seq[String]) = words.toSet flatMap (word => word.toSeq)
*
- * // xs will be a an Iterable[Int]
+ * // xs will be an Iterable[Int]
* val xs = Map("a" -> List(11,111), "b" -> List(22,222)).flatMap(_._2)
*
* // ys will be a Map[Int, Int]
diff --git a/src/library/scala/collection/GenTraversableOnce.scala b/src/library/scala/collection/GenTraversableOnce.scala
index 093db2a972..d966c7324b 100644
--- a/src/library/scala/collection/GenTraversableOnce.scala
+++ b/src/library/scala/collection/GenTraversableOnce.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
import scala.reflect.ClassTag
import scala.collection.generic.CanBuildFrom
@@ -119,19 +120,6 @@ trait GenTraversableOnce[+A] extends Any {
*/
def fold[A1 >: A](z: A1)(op: (A1, A1) => A1): A1
- /** A syntactic sugar for out of order folding. See `fold`.
- *
- * Example:
- * {{{
- * scala> val a = LinkedList(1,2,3,4)
- * a: scala.collection.mutable.LinkedList[Int] = LinkedList(1, 2, 3, 4)
- *
- * scala> val b = (a /:\ 5)(_+_)
- * b: Int = 15
- * }}}*/
- @deprecated("use fold instead", "2.10.0")
- def /:\[A1 >: A](z: A1)(op: (A1, A1) => A1): A1 = fold(z)(op)
-
/** Applies a binary operator to a start value and all elements of this $coll,
* going left to right.
*
@@ -261,11 +249,12 @@ trait GenTraversableOnce[+A] extends Any {
* @tparam B the type of accumulated results
* @param z the initial value for the accumulated result of the partition - this
* will typically be the neutral element for the `seqop` operator (e.g.
- * `Nil` for list concatenation or `0` for summation)
+ * `Nil` for list concatenation or `0` for summation) and may be evaluated
+ * more than once
* @param seqop an operator used to accumulate results within a partition
* @param combop an associative operator used to combine results from different partitions
*/
- def aggregate[B](z: B)(seqop: (B, A) => B, combop: (B, B) => B): B
+ def aggregate[B](z: =>B)(seqop: (B, A) => B, combop: (B, B) => B): B
/** Applies a binary operator to all elements of this $coll, going right to left.
* $willNotTerminateInf
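Two things change in GenTraversableOnce above: the deprecated /:\ alias is removed (plain fold is the replacement), and aggregate now takes its start value by name, so a parallel implementation may evaluate it once per partition. Illustrative calls:

    List(1, 2, 3, 4).fold(5)(_ + _)                                // 15, replaces (xs /:\ 5)(_ + _)
    List("a", "bb", "ccc").par.aggregate(0)(_ + _.length, _ + _)   // 6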
diff --git a/src/library/scala/collection/GenTraversableView.scala b/src/library/scala/collection/GenTraversableView.scala
index cceb06882e..7d9a6e9777 100644
--- a/src/library/scala/collection/GenTraversableView.scala
+++ b/src/library/scala/collection/GenTraversableView.scala
@@ -6,13 +6,7 @@
** |/ **
\* */
-package scala.collection
-
-
-import generic._
-
-
+package scala
+package collection
trait GenTraversableView[+A, +Coll] extends GenTraversableViewLike[A, Coll, GenTraversableView[A, Coll]] { }
-
-
diff --git a/src/library/scala/collection/GenTraversableViewLike.scala b/src/library/scala/collection/GenTraversableViewLike.scala
index 77fe0802bf..dde18a7a32 100644
--- a/src/library/scala/collection/GenTraversableViewLike.scala
+++ b/src/library/scala/collection/GenTraversableViewLike.scala
@@ -6,13 +6,12 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
import generic._
import mutable.{ Builder, ArrayBuffer }
-import TraversableView.NoBuilder
-
trait GenTraversableViewLike[+A,
+Coll,
diff --git a/src/library/scala/collection/IndexedSeq.scala b/src/library/scala/collection/IndexedSeq.scala
index 2de0043c96..1a33026101 100644
--- a/src/library/scala/collection/IndexedSeq.scala
+++ b/src/library/scala/collection/IndexedSeq.scala
@@ -6,9 +6,8 @@
** |/ **
\* */
-
-
-package scala.collection
+package scala
+package collection
import generic._
import mutable.Builder
@@ -28,14 +27,13 @@ trait IndexedSeq[+A] extends Seq[A]
* @define coll indexed sequence
* @define Coll `IndexedSeq`
*/
-object IndexedSeq extends SeqFactory[IndexedSeq] {
+object IndexedSeq extends IndexedSeqFactory[IndexedSeq] {
// A single CBF which can be checked against to identify
// an indexed collection type.
- override lazy val ReusableCBF: GenericCanBuildFrom[Nothing] = new GenericCanBuildFrom[Nothing] {
+ override val ReusableCBF: GenericCanBuildFrom[Nothing] = new GenericCanBuildFrom[Nothing] {
override def apply() = newBuilder[Nothing]
}
def newBuilder[A]: Builder[A, IndexedSeq[A]] = immutable.IndexedSeq.newBuilder[A]
implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, IndexedSeq[A]] =
ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]]
}
-
diff --git a/src/library/scala/collection/IndexedSeqLike.scala b/src/library/scala/collection/IndexedSeqLike.scala
index 9d0e9cbaea..18c9175ee1 100644
--- a/src/library/scala/collection/IndexedSeqLike.scala
+++ b/src/library/scala/collection/IndexedSeqLike.scala
@@ -6,9 +6,9 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
-import generic._
import mutable.ArrayBuffer
import scala.annotation.tailrec
@@ -53,7 +53,6 @@ trait IndexedSeqLike[+A, +Repr] extends Any with SeqLike[A, Repr] {
// pre: start >= 0, end <= self.length
@SerialVersionUID(1756321872811029277L)
protected class Elements(start: Int, end: Int) extends AbstractIterator[A] with BufferedIterator[A] with Serializable {
- private def initialSize = if (end <= start) 0 else end - start
private var index = start
private def available = (end - index) max 0
@@ -61,7 +60,7 @@ trait IndexedSeqLike[+A, +Repr] extends Any with SeqLike[A, Repr] {
def next(): A = {
if (index >= end)
- Iterator.empty.next
+ Iterator.empty.next()
val x = self(index)
index += 1
@@ -70,7 +69,7 @@ trait IndexedSeqLike[+A, +Repr] extends Any with SeqLike[A, Repr] {
def head = {
if (index >= end)
- Iterator.empty.next
+ Iterator.empty.next()
self(index)
}
diff --git a/src/library/scala/collection/IndexedSeqOptimized.scala b/src/library/scala/collection/IndexedSeqOptimized.scala
index 09c4b14ba0..ade04e4de8 100755
--- a/src/library/scala/collection/IndexedSeqOptimized.scala
+++ b/src/library/scala/collection/IndexedSeqOptimized.scala
@@ -33,11 +33,17 @@ trait IndexedSeqOptimized[+A, +Repr] extends Any with IndexedSeqLike[A, Repr] {
while (i < len) { f(this(i)); i += 1 }
}
+ private def prefixLengthImpl(p: A => Boolean, expectTrue: Boolean): Int = {
+ var i = 0
+ while (i < length && p(apply(i)) == expectTrue) i += 1
+ i
+ }
+
override /*IterableLike*/
- def forall(p: A => Boolean): Boolean = prefixLength(p(_)) == length
+ def forall(p: A => Boolean): Boolean = prefixLengthImpl(p, expectTrue = true) == length
override /*IterableLike*/
- def exists(p: A => Boolean): Boolean = prefixLength(!p(_)) != length
+ def exists(p: A => Boolean): Boolean = prefixLengthImpl(p, expectTrue = false) != length
override /*IterableLike*/
def find(p: A => Boolean): Option[A] = {
@@ -82,7 +88,7 @@ trait IndexedSeqOptimized[+A, +Repr] extends Any with IndexedSeqLike[A, Repr] {
b += ((this(i), that(i).asInstanceOf[B]))
i += 1
}
- b.result
+ b.result()
case _ =>
super.zip[A1, B, That](that)(bf)
}
@@ -97,7 +103,7 @@ trait IndexedSeqOptimized[+A, +Repr] extends Any with IndexedSeqLike[A, Repr] {
b += ((this(i), i))
i += 1
}
- b.result
+ b.result()
}
override /*IterableLike*/
@@ -113,7 +119,7 @@ trait IndexedSeqOptimized[+A, +Repr] extends Any with IndexedSeqLike[A, Repr] {
b += self(i)
i += 1
}
- b.result
+ b.result()
}
override /*IterableLike*/
@@ -214,7 +220,7 @@ trait IndexedSeqOptimized[+A, +Repr] extends Any with IndexedSeqLike[A, Repr] {
i -= 1
b += this(i)
}
- b.result
+ b.result()
}
override /*SeqLike*/
@@ -225,7 +231,7 @@ trait IndexedSeqOptimized[+A, +Repr] extends Any with IndexedSeqLike[A, Repr] {
if (0 < i) {
i -= 1
self(i)
- } else Iterator.empty.next
+ } else Iterator.empty.next()
}
override /*SeqLike*/
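The prefixLengthImpl helper above lets forall and exists share one index scan and avoid wrapping the predicate in a second closure (p(_) / !p(_)). The observable behaviour is meant to be unchanged, e.g. via ArrayOps:

    Array(2, 4, 6).forall(_ % 2 == 0)   // true: the scan reaches the end
    Array(2, 4, 5).exists(_ % 2 == 1)   // true: the scan stops at index 2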
diff --git a/src/library/scala/collection/Iterable.scala b/src/library/scala/collection/Iterable.scala
index 5b73d720a8..973efc447e 100644
--- a/src/library/scala/collection/Iterable.scala
+++ b/src/library/scala/collection/Iterable.scala
@@ -8,10 +8,10 @@
-package scala.collection
+package scala
+package collection
import generic._
-import scala.util.control.Breaks._
import mutable.Builder
/** A base trait for iterable collections.
diff --git a/src/library/scala/collection/IterableLike.scala b/src/library/scala/collection/IterableLike.scala
index 540bd84b79..b043d1f2a6 100644
--- a/src/library/scala/collection/IterableLike.scala
+++ b/src/library/scala/collection/IterableLike.scala
@@ -88,13 +88,13 @@ self =>
override /*TraversableLike*/ def toIterator: Iterator[A] =
iterator
override /*TraversableLike*/ def head: A =
- iterator.next
+ iterator.next()
override /*TraversableLike*/ def slice(from: Int, until: Int): Repr = {
val lo = math.max(from, 0)
val elems = until - lo
val b = newBuilder
- if (elems <= 0) b.result
+ if (elems <= 0) b.result()
else {
b.sizeHintBounded(elems, this)
var i = 0
@@ -103,14 +103,14 @@ self =>
b += it.next
i += 1
}
- b.result
+ b.result()
}
}
override /*TraversableLike*/ def take(n: Int): Repr = {
val b = newBuilder
- if (n <= 0) b.result
+ if (n <= 0) b.result()
else {
b.sizeHintBounded(n, this)
var i = 0
@@ -119,7 +119,7 @@ self =>
b += it.next
i += 1
}
- b.result
+ b.result()
}
}
@@ -130,21 +130,21 @@ self =>
var i = 0
val it = iterator
while (i < n && it.hasNext) {
- it.next
+ it.next()
i += 1
}
- (b ++= it).result
+ (b ++= it).result()
}
override /*TraversableLike*/ def takeWhile(p: A => Boolean): Repr = {
val b = newBuilder
val it = iterator
while (it.hasNext) {
- val x = it.next
- if (!p(x)) return b.result
+ val x = it.next()
+ if (!p(x)) return b.result()
b += x
}
- b.result
+ b.result()
}
/** Partitions elements in fixed size ${coll}s.
@@ -158,7 +158,7 @@ self =>
for (xs <- iterator grouped size) yield {
val b = newBuilder
b ++= xs
- b.result
+ b.result()
}
/** Groups elements in fixed size blocks by passing a "sliding window"
@@ -187,7 +187,7 @@ self =>
for (xs <- iterator.sliding(size, step)) yield {
val b = newBuilder
b ++= xs
- b.result
+ b.result()
}
/** Selects last ''n'' elements.
@@ -203,11 +203,11 @@ self =>
val lead = this.iterator drop n
var go = false
for (x <- this.seq) {
- if (lead.hasNext) lead.next
+ if (lead.hasNext) lead.next()
else go = true
if (go) b += x
}
- b.result
+ b.result()
}
/** Selects all elements except last ''n'' ones.
@@ -224,9 +224,9 @@ self =>
val it = iterator
while (lead.hasNext) {
b += it.next
- lead.next
+ lead.next()
}
- b.result
+ b.result()
}
override /*TraversableLike*/ def copyToArray[B >: A](xs: Array[B], start: Int, len: Int) {
@@ -234,7 +234,7 @@ self =>
val end = (start + len) min xs.length
val it = iterator
while (i < end && it.hasNext) {
- xs(i) = it.next
+ xs(i) = it.next()
i += 1
}
}
@@ -244,8 +244,8 @@ self =>
val these = this.iterator
val those = that.iterator
while (these.hasNext && those.hasNext)
- b += ((these.next, those.next))
- b.result
+ b += ((these.next(), those.next()))
+ b.result()
}
def zipAll[B, A1 >: A, That](that: GenIterable[B], thisElem: A1, thatElem: B)(implicit bf: CanBuildFrom[Repr, (A1, B), That]): That = {
@@ -253,12 +253,12 @@ self =>
val these = this.iterator
val those = that.iterator
while (these.hasNext && those.hasNext)
- b += ((these.next, those.next))
+ b += ((these.next(), those.next()))
while (these.hasNext)
- b += ((these.next, thatElem))
+ b += ((these.next(), thatElem))
while (those.hasNext)
- b += ((thisElem, those.next))
- b.result
+ b += ((thisElem, those.next()))
+ b.result()
}
def zipWithIndex[A1 >: A, That](implicit bf: CanBuildFrom[Repr, (A1, Int), That]): That = {
@@ -268,7 +268,7 @@ self =>
b += ((x, i))
i +=1
}
- b.result
+ b.result()
}
def sameElements[B >: A](that: GenIterable[B]): Boolean = {
diff --git a/src/library/scala/collection/IterableProxy.scala b/src/library/scala/collection/IterableProxy.scala
index 2d041928cc..3a0e2ab115 100644
--- a/src/library/scala/collection/IterableProxy.scala
+++ b/src/library/scala/collection/IterableProxy.scala
@@ -6,9 +6,8 @@
** |/ **
\* */
-package scala.collection
-
-import generic._
+package scala
+package collection
/** This trait implements a proxy for iterable objects. It forwards all calls
* to a different iterable object.
diff --git a/src/library/scala/collection/IterableProxyLike.scala b/src/library/scala/collection/IterableProxyLike.scala
index 6968a54399..9b8f6f3742 100644
--- a/src/library/scala/collection/IterableProxyLike.scala
+++ b/src/library/scala/collection/IterableProxyLike.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
import generic._
import mutable.Buffer
diff --git a/src/library/scala/collection/IterableView.scala b/src/library/scala/collection/IterableView.scala
index 985556e0d4..1d631739aa 100644
--- a/src/library/scala/collection/IterableView.scala
+++ b/src/library/scala/collection/IterableView.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
import generic._
import TraversableView.NoBuilder
diff --git a/src/library/scala/collection/IterableViewLike.scala b/src/library/scala/collection/IterableViewLike.scala
index 3a81a3422f..236bfd154c 100644
--- a/src/library/scala/collection/IterableViewLike.scala
+++ b/src/library/scala/collection/IterableViewLike.scala
@@ -6,10 +6,10 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
import generic._
-import TraversableView.NoBuilder
import immutable.Stream
import scala.language.implicitConversions
diff --git a/src/library/scala/collection/Iterator.scala b/src/library/scala/collection/Iterator.scala
index 2bb5bd1df9..72a23a0dd0 100644
--- a/src/library/scala/collection/Iterator.scala
+++ b/src/library/scala/collection/Iterator.scala
@@ -161,6 +161,41 @@ object Iterator {
def hasNext = true
def next = elem
}
+
+ /** Avoid stack overflows when applying ++ to lots of iterators by
+ * flattening the unevaluated iterators out into a vector of closures.
+ */
+ private[scala] final class ConcatIterator[+A](initial: Vector[() => Iterator[A]]) extends Iterator[A] {
+ // current set to null when all iterators are exhausted
+ private[this] var current: Iterator[A] = Iterator.empty
+ private[this] var queue: Vector[() => Iterator[A]] = initial
+ // Advance current to the next non-empty iterator
+ private[this] def advance(): Boolean = {
+ if (queue.isEmpty) {
+ current = null
+ false
+ }
+ else {
+ current = queue.head()
+ queue = queue.tail
+ current.hasNext || advance()
+ }
+ }
+ def hasNext = (current ne null) && (current.hasNext || advance())
+ def next() = if (hasNext) current.next else Iterator.empty.next
+
+ override def ++[B >: A](that: => GenTraversableOnce[B]): Iterator[B] =
+ new ConcatIterator(queue :+ (() => that.toIterator))
+ }
+
+ private[scala] final class JoinIterator[+A](lhs: Iterator[A], that: => GenTraversableOnce[A]) extends Iterator[A] {
+ private[this] lazy val rhs: Iterator[A] = that.toIterator
+ def hasNext = lhs.hasNext || rhs.hasNext
+ def next = if (lhs.hasNext) lhs.next else rhs.next
+
+ override def ++[B >: A](that: => GenTraversableOnce[B]) =
+ new ConcatIterator(Vector(() => this, () => that.toIterator))
+ }
}
import Iterator.empty
@@ -338,24 +373,7 @@ trait Iterator[+A] extends TraversableOnce[A] {
* @usecase def ++(that: => Iterator[A]): Iterator[A]
* @inheritdoc
*/
- def ++[B >: A](that: => GenTraversableOnce[B]): Iterator[B] = new AbstractIterator[B] {
- // optimize a little bit to prevent n log n behavior.
- private var cur : Iterator[B] = self
- private var selfExhausted : Boolean = false
- // since that is by-name, make sure it's only referenced once -
- // if "val it = that" is inside the block, then hasNext on an empty
- // iterator will continually reevaluate it. (ticket #3269)
- lazy val it = that.toIterator
- // the eq check is to avoid an infinite loop on "x ++ x"
- def hasNext = cur.hasNext || (!selfExhausted && {
- it.hasNext && {
- cur = it
- selfExhausted = true
- true
- }
- })
- def next() = { hasNext; cur.next() }
- }
+ def ++[B >: A](that: => GenTraversableOnce[B]): Iterator[B] = new Iterator.JoinIterator(self, that)
/** Creates a new iterator by applying a function to all values produced by this iterator
* and concatenating the results.
@@ -368,7 +386,7 @@ trait Iterator[+A] extends TraversableOnce[A] {
def flatMap[B](f: A => GenTraversableOnce[B]): Iterator[B] = new AbstractIterator[B] {
private var cur: Iterator[B] = empty
def hasNext: Boolean =
- cur.hasNext || self.hasNext && { cur = f(self.next).toIterator; hasNext }
+ cur.hasNext || self.hasNext && { cur = f(self.next()).toIterator; hasNext }
def next(): B = (if (hasNext) cur else empty).next()
}
@@ -408,7 +426,7 @@ trait Iterator[+A] extends TraversableOnce[A] {
def corresponds[B](that: GenTraversableOnce[B])(p: (A, B) => Boolean): Boolean = {
val that0 = that.toIterator
while (hasNext && that0.hasNext)
- if (!p(next, that0.next)) return false
+ if (!p(next(), that0.next())) return false
hasNext == that0.hasNext
}
@@ -555,14 +573,13 @@ trait Iterator[+A] extends TraversableOnce[A] {
def span(p: A => Boolean): (Iterator[A], Iterator[A]) = {
val self = buffered
- /**
+ /*
* Giving a name to following iterator (as opposed to trailing) because
* anonymous class is represented as a structural type that trailing
* iterator is referring (the finish() method) and thus triggering
* handling of structural calls. It's not what's intended here.
*/
class Leading extends AbstractIterator[A] {
- private var isDone = false
val lookahead = new mutable.Queue[A]
def advance() = {
self.hasNext && p(self.head) && {
@@ -572,7 +589,6 @@ trait Iterator[+A] extends TraversableOnce[A] {
}
def finish() = {
while (advance()) ()
- isDone = true
}
def hasNext = lookahead.nonEmpty || advance()
def next() = {
@@ -632,7 +648,7 @@ trait Iterator[+A] extends TraversableOnce[A] {
*/
def zip[B](that: Iterator[B]): Iterator[(A, B)] = new AbstractIterator[(A, B)] {
def hasNext = self.hasNext && that.hasNext
- def next = (self.next, that.next)
+ def next = (self.next(), that.next())
}
/** Appends an element value to this iterator until a given target length is reached.
@@ -652,9 +668,9 @@ trait Iterator[+A] extends TraversableOnce[A] {
def hasNext = self.hasNext || count < len
def next = {
count += 1
- if (self.hasNext) self.next
+ if (self.hasNext) self.next()
else if (count <= len) elem
- else empty.next
+ else empty.next()
}
}
@@ -669,7 +685,7 @@ trait Iterator[+A] extends TraversableOnce[A] {
var idx = 0
def hasNext = self.hasNext
def next = {
- val ret = (self.next, idx)
+ val ret = (self.next(), idx)
idx += 1
ret
}
@@ -1054,12 +1070,12 @@ trait Iterator[+A] extends TraversableOnce[A] {
val e = self.next()
gap enqueue e
e
- } else gap.dequeue
+ } else gap.dequeue()
}
// to verify partnerhood we use reference equality on gap because
// type testing does not discriminate based on origin.
private def compareGap(queue: scala.collection.mutable.Queue[A]) = gap eq queue
- override def hashCode = gap.hashCode
+ override def hashCode = gap.hashCode()
override def equals(other: Any) = other match {
case x: Partner => x.compareGap(gap) && gap.isEmpty
case _ => super.equals(other)
@@ -1118,6 +1134,7 @@ trait Iterator[+A] extends TraversableOnce[A] {
xs(i) = next()
i += 1
}
+ // TODO: return i - start so the caller knows how many values were read?
}
/** Tests if another iterator produces the same values as this one.
@@ -1140,7 +1157,7 @@ trait Iterator[+A] extends TraversableOnce[A] {
def toTraversable: Traversable[A] = toStream
def toIterator: Iterator[A] = self
def toStream: Stream[A] =
- if (self.hasNext) Stream.cons(self.next, self.toStream)
+ if (self.hasNext) Stream.cons(self.next(), self.toStream)
else Stream.empty[A]
diff --git a/src/library/scala/collection/JavaConversions.scala b/src/library/scala/collection/JavaConversions.scala
index 59d4259c70..7bfa60771f 100644
--- a/src/library/scala/collection/JavaConversions.scala
+++ b/src/library/scala/collection/JavaConversions.scala
@@ -6,9 +6,9 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
-import java.{ lang => jl, util => ju }, java.util.{ concurrent => juc }
import convert._
/** A collection of implicit conversions supporting interoperability between
@@ -22,7 +22,6 @@ import convert._
* scala.collection.mutable.Buffer <=> java.util.List
* scala.collection.mutable.Set <=> java.util.Set
* scala.collection.mutable.Map <=> java.util.{ Map, Dictionary }
- * scala.collection.mutable.ConcurrentMap (deprecated since 2.10) <=> java.util.concurrent.ConcurrentMap
* scala.collection.concurrent.Map <=> java.util.concurrent.ConcurrentMap
*}}}
* In all cases, converting from a source type to a target type and back
@@ -50,83 +49,4 @@ import convert._
* @author Martin Odersky
* @since 2.8
*/
-object JavaConversions extends WrapAsScala with WrapAsJava {
- @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") type ConcurrentMapWrapper[A, B] = Wrappers.ConcurrentMapWrapper[A, B]
- @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") type DictionaryWrapper[A, B] = Wrappers.DictionaryWrapper[A, B]
- @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") type IterableWrapper[A] = Wrappers.IterableWrapper[A]
- @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") type IteratorWrapper[A] = Wrappers.IteratorWrapper[A]
- @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") type JCollectionWrapper[A] = Wrappers.JCollectionWrapper[A]
- @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") type JConcurrentMapWrapper[A, B] = Wrappers.JConcurrentMapWrapper[A, B]
- @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") type JDictionaryWrapper[A, B] = Wrappers.JDictionaryWrapper[A, B]
- @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") type JEnumerationWrapper[A] = Wrappers.JEnumerationWrapper[A]
- @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") type JIterableWrapper[A] = Wrappers.JIterableWrapper[A]
- @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") type JIteratorWrapper[A] = Wrappers.JIteratorWrapper[A]
- @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") type JListWrapper[A] = Wrappers.JListWrapper[A]
- @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") type JMapWrapper[A, B] = Wrappers.JMapWrapper[A, B]
- @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") type JPropertiesWrapper = Wrappers.JPropertiesWrapper
- @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") type JSetWrapper[A] = Wrappers.JSetWrapper[A]
- @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") type MapWrapper[A, B] = Wrappers.MapWrapper[A, B]
- @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") type MutableBufferWrapper[A] = Wrappers.MutableBufferWrapper[A]
- @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") type MutableMapWrapper[A, B] = Wrappers.MutableMapWrapper[A, B]
- @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") type MutableSeqWrapper[A] = Wrappers.MutableSeqWrapper[A]
- @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") type MutableSetWrapper[A] = Wrappers.MutableSetWrapper[A]
- @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") type SeqWrapper[A] = Wrappers.SeqWrapper[A]
- @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") type SetWrapper[A] = Wrappers.SetWrapper[A]
- @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") type ToIteratorWrapper[A] = Wrappers.ToIteratorWrapper[A]
- @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") val DictionaryWrapper = Wrappers.DictionaryWrapper
- @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") val IterableWrapper = Wrappers.IterableWrapper
- @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") val IteratorWrapper = Wrappers.IteratorWrapper
- @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") val JCollectionWrapper = Wrappers.JCollectionWrapper
- @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") val JConcurrentMapWrapper = Wrappers.JConcurrentMapWrapper
- @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") val JDictionaryWrapper = Wrappers.JDictionaryWrapper
- @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") val JEnumerationWrapper = Wrappers.JEnumerationWrapper
- @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") val JIterableWrapper = Wrappers.JIterableWrapper
- @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") val JIteratorWrapper = Wrappers.JIteratorWrapper
- @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") val JListWrapper = Wrappers.JListWrapper
- @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") val JMapWrapper = Wrappers.JMapWrapper
- @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") val JPropertiesWrapper = Wrappers.JPropertiesWrapper
- @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") val JSetWrapper = Wrappers.JSetWrapper
- @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") val MutableBufferWrapper = Wrappers.MutableBufferWrapper
- @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") val MutableMapWrapper = Wrappers.MutableMapWrapper
- @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") val MutableSeqWrapper = Wrappers.MutableSeqWrapper
- @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") val MutableSetWrapper = Wrappers.MutableSetWrapper
- @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") val SeqWrapper = Wrappers.SeqWrapper
-
- // Note to implementors: the cavalcade of deprecated methods herein should
- // serve as a warning to any who follow: don't overload implicit methods.
-
- @deprecated("use bufferAsJavaList instead", "2.9.0")
- def asJavaList[A](b : mutable.Buffer[A]): ju.List[A] = bufferAsJavaList[A](b)
-
- @deprecated("use mutableSeqAsJavaList instead", "2.9.0")
- def asJavaList[A](b : mutable.Seq[A]): ju.List[A] = mutableSeqAsJavaList[A](b)
-
- @deprecated("use seqAsJavaList instead", "2.9.0")
- def asJavaList[A](b : Seq[A]): ju.List[A] = seqAsJavaList[A](b)
-
- @deprecated("use mutableSetAsJavaSet instead", "2.9.0")
- def asJavaSet[A](s : mutable.Set[A]): ju.Set[A] = mutableSetAsJavaSet[A](s)
-
- @deprecated("use setAsJavaSet instead", "2.9.0")
- def asJavaSet[A](s: Set[A]): ju.Set[A] = setAsJavaSet[A](s)
-
- @deprecated("use mutableMapAsJavaMap instead", "2.9.0")
- def asJavaMap[A, B](m : mutable.Map[A, B]): ju.Map[A, B] = mutableMapAsJavaMap[A, B](m)
-
- @deprecated("use mapAsJavaMap instead", "2.9.0")
- def asJavaMap[A, B](m : Map[A, B]): ju.Map[A, B] = mapAsJavaMap[A, B](m)
-
- @deprecated("use iterableAsScalaIterable instead", "2.9.0")
- def asScalaIterable[A](i : jl.Iterable[A]): Iterable[A] = iterableAsScalaIterable[A](i)
-
- @deprecated("use collectionAsScalaIterable instead", "2.9.0")
- def asScalaIterable[A](i : ju.Collection[A]): Iterable[A] = collectionAsScalaIterable[A](i)
-
- @deprecated("use mapAsScalaMap instead", "2.9.0")
- def asScalaMap[A, B](m: ju.Map[A, B]): mutable.Map[A, B] = mapAsScalaMap[A, B](m)
-
- @deprecated("use propertiesAsScalaMap instead", "2.9.0")
- def asScalaMap(p: ju.Properties): mutable.Map[String, String] = propertiesAsScalaMap(p)
-}
-
-
+object JavaConversions extends WrapAsScala with WrapAsJava
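What remains of JavaConversions are the implicit views inherited from WrapAsScala and WrapAsJava; a small usage sketch (the collections used here are illustrative):

    import scala.collection.JavaConversions._

    val jlist = new java.util.ArrayList[String]()
    jlist.add("a")
    jlist.add("b")
    // The java.util.List is implicitly wrapped as a mutable.Buffer, so Scala
    // collection operations apply to it directly:
    val doubled = jlist.map(_ * 2)   // Buffer("aa", "bb")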
diff --git a/src/library/scala/collection/JavaConverters.scala b/src/library/scala/collection/JavaConverters.scala
index ab3ac8925c..a4fa58b13c 100755
--- a/src/library/scala/collection/JavaConverters.scala
+++ b/src/library/scala/collection/JavaConverters.scala
@@ -6,16 +6,15 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
-import java.{ lang => jl, util => ju }, java.util.{ concurrent => juc }
import convert._
// TODO: I cleaned all this documentation up in JavaConversions, but the
// documentation in here is basically the pre-cleaned-up version with minor
// additions. Would be nice to have in one place.
-
/** A collection of decorators that allow converting between
* Scala and Java collections using `asScala` and `asJava` methods.
*
@@ -26,7 +25,7 @@ import convert._
* - `scala.collection.mutable.Buffer` <=> `java.util.List`
* - `scala.collection.mutable.Set` <=> `java.util.Set`
* - `scala.collection.mutable.Map` <=> `java.util.Map`
- * - `scala.collection.mutable.ConcurrentMap` <=> `java.util.concurrent.ConcurrentMap`
+ * - `scala.collection.concurrent.Map` <=> `java.util.concurrent.ConcurrentMap`
*
* In all cases, converting from a source type to a target type and back
* again will return the original source object, e.g.
@@ -56,48 +55,4 @@ import convert._
* @author Martin Odersky
* @since 2.8.1
*/
-object JavaConverters extends DecorateAsJava with DecorateAsScala {
- @deprecated("Don't access these decorators directly.", "2.10.0")
- type AsJava[A] = Decorators.AsJava[A]
- @deprecated("Don't access these decorators directly.", "2.10.0")
- type AsScala[A] = Decorators.AsScala[A]
- @deprecated("Don't access these decorators directly.", "2.10.0")
- type AsJavaCollection[A] = Decorators.AsJavaCollection[A]
- @deprecated("Don't access these decorators directly.", "2.10.0")
- type AsJavaEnumeration[A] = Decorators.AsJavaEnumeration[A]
- @deprecated("Don't access these decorators directly.", "2.10.0")
- type AsJavaDictionary[A, B] = Decorators.AsJavaDictionary[A, B]
-
- @deprecated("Use bufferAsJavaListConverter instead", "2.9.0")
- def asJavaListConverter[A](b : mutable.Buffer[A]): AsJava[ju.List[A]] = bufferAsJavaListConverter(b)
-
- @deprecated("Use mutableSeqAsJavaListConverter instead", "2.9.0")
- def asJavaListConverter[A](b : mutable.Seq[A]): AsJava[ju.List[A]] = mutableSeqAsJavaListConverter(b)
-
- @deprecated("Use seqAsJavaListConverter instead", "2.9.0")
- def asJavaListConverter[A](b : Seq[A]): AsJava[ju.List[A]] = seqAsJavaListConverter(b)
-
- @deprecated("Use mutableSetAsJavaSetConverter instead", "2.9.0")
- def asJavaSetConverter[A](s : mutable.Set[A]): AsJava[ju.Set[A]] = mutableSetAsJavaSetConverter(s)
-
- @deprecated("Use setAsJavaSetConverter instead", "2.9.0")
- def asJavaSetConverter[A](s : Set[A]): AsJava[ju.Set[A]] = setAsJavaSetConverter(s)
-
- @deprecated("use mutableMapAsJavaMapConverter instead", "2.9.0")
- def asJavaMapConverter[A, B](m : mutable.Map[A, B]): AsJava[ju.Map[A, B]] = mutableMapAsJavaMapConverter(m)
-
- @deprecated("Use mapAsJavaMapConverter instead", "2.9.0")
- def asJavaMapConverter[A, B](m : Map[A, B]): AsJava[ju.Map[A, B]] = mapAsJavaMapConverter(m)
-
- @deprecated("Use iterableAsScalaIterableConverter instead", "2.9.0")
- def asScalaIterableConverter[A](i : jl.Iterable[A]): AsScala[Iterable[A]] = iterableAsScalaIterableConverter(i)
-
- @deprecated("Use collectionAsScalaIterableConverter instead", "2.9.0")
- def asScalaIterableConverter[A](i : ju.Collection[A]): AsScala[Iterable[A]] = collectionAsScalaIterableConverter(i)
-
- @deprecated("Use mapAsScalaMapConverter instead", "2.9.0")
- def asScalaMapConverter[A, B](m : ju.Map[A, B]): AsScala[mutable.Map[A, B]] = mapAsScalaMapConverter(m)
-
- @deprecated("Use propertiesAsScalaMapConverter instead", "2.9.0")
- def asScalaMapConverter(p: ju.Properties): AsScala[mutable.Map[String, String]] = propertiesAsScalaMapConverter(p)
-}
+object JavaConverters extends DecorateAsJava with DecorateAsScala
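The decorator methods now live only in DecorateAsJava and DecorateAsScala; a brief sketch of how they are typically used, including the round-trip guarantee stated in the scaladoc above:

    import scala.collection.JavaConverters._
    import scala.collection.mutable

    val buf = mutable.Buffer(1, 2, 3)
    val jlist: java.util.List[Int] = buf.asJava   // explicit conversion via the asJava decorator
    // Converting back returns the original wrapped object:
    assert(jlist.asScala eq buf)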
diff --git a/src/library/scala/collection/LinearSeq.scala b/src/library/scala/collection/LinearSeq.scala
index e52a1936fe..1e4975a0a7 100644
--- a/src/library/scala/collection/LinearSeq.scala
+++ b/src/library/scala/collection/LinearSeq.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
import generic._
import mutable.Builder
diff --git a/src/library/scala/collection/LinearSeqLike.scala b/src/library/scala/collection/LinearSeqLike.scala
index 78108a9c0f..ff7985bf0d 100644
--- a/src/library/scala/collection/LinearSeqLike.scala
+++ b/src/library/scala/collection/LinearSeqLike.scala
@@ -6,13 +6,10 @@
** |/ **
\* */
+package scala
+package collection
-package scala.collection
-
-import generic._
-import mutable.ListBuffer
import immutable.List
-import scala.util.control.Breaks._
import scala.annotation.tailrec
/** A template trait for linear sequences of type `LinearSeq[A]`.
@@ -59,14 +56,14 @@ trait LinearSeqLike[+A, +Repr <: LinearSeqLike[A, Repr]] extends SeqLike[A, Repr
def next(): A =
if (hasNext) {
val result = these.head; these = these.tail; result
- } else Iterator.empty.next
+ } else Iterator.empty.next()
/** Have to clear `these` so the iterator is exhausted like
* it would be without the optimization.
*/
override def toList: List[A] = {
val xs = these.toList
- these = newBuilder.result
+ these = newBuilder.result()
xs
}
}
diff --git a/src/library/scala/collection/LinearSeqOptimized.scala b/src/library/scala/collection/LinearSeqOptimized.scala
index 81cccea519..21bfedf5de 100755
--- a/src/library/scala/collection/LinearSeqOptimized.scala
+++ b/src/library/scala/collection/LinearSeqOptimized.scala
@@ -6,12 +6,11 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
-import generic._
import mutable.ListBuffer
import immutable.List
-import scala.util.control.Breaks._
import scala.annotation.tailrec
/** A template trait for linear sequences of type `LinearSeq[A]` which optimizes
@@ -84,7 +83,7 @@ trait LinearSeqOptimized[+A, +Repr <: LinearSeqOptimized[A, Repr]] extends Linea
}
override /*SeqLike*/
- def contains(elem: Any): Boolean = {
+ def contains[A1 >: A](elem: A1): Boolean = {
var these = this
while (!these.isEmpty) {
if (these.head == elem) return true
@@ -152,7 +151,7 @@ trait LinearSeqOptimized[+A, +Repr <: LinearSeqOptimized[A, Repr]] extends Linea
b += these.head
these = these.tail
}
- b.result
+ b.result()
}
override /*TraversableLike*/
@@ -187,7 +186,7 @@ trait LinearSeqOptimized[+A, +Repr <: LinearSeqOptimized[A, Repr]] extends Linea
these = these.tail
lead = lead.tail
}
- b.result
+ b.result()
}
override /*IterableLike*/
@@ -195,7 +194,7 @@ trait LinearSeqOptimized[+A, +Repr <: LinearSeqOptimized[A, Repr]] extends Linea
var these: Repr = repr
var count = from max 0
if (until <= count)
- return newBuilder.result
+ return newBuilder.result()
val b = newBuilder
var sliceElems = until - count
@@ -208,7 +207,7 @@ trait LinearSeqOptimized[+A, +Repr <: LinearSeqOptimized[A, Repr]] extends Linea
b += these.head
these = these.tail
}
- b.result
+ b.result()
}
override /*IterableLike*/
@@ -219,7 +218,7 @@ trait LinearSeqOptimized[+A, +Repr <: LinearSeqOptimized[A, Repr]] extends Linea
b += these.head
these = these.tail
}
- b.result
+ b.result()
}
override /*TraversableLike*/
@@ -230,7 +229,7 @@ trait LinearSeqOptimized[+A, +Repr <: LinearSeqOptimized[A, Repr]] extends Linea
b += these.head
these = these.tail
}
- (b.result, these)
+ (b.result(), these)
}
override /*IterableLike*/
diff --git a/src/library/scala/collection/Map.scala b/src/library/scala/collection/Map.scala
index 18ad20a855..f37c0993d4 100644
--- a/src/library/scala/collection/Map.scala
+++ b/src/library/scala/collection/Map.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
import generic._
diff --git a/src/library/scala/collection/MapLike.scala b/src/library/scala/collection/MapLike.scala
index 93d02a435c..5ec7d5c615 100644
--- a/src/library/scala/collection/MapLike.scala
+++ b/src/library/scala/collection/MapLike.scala
@@ -6,8 +6,8 @@
** |/ **
\* */
-
-package scala.collection
+package scala
+package collection
import generic._
import mutable.{ Builder, MapBuilder }
@@ -181,7 +181,7 @@ self =>
def keysIterator: Iterator[A] = new AbstractIterator[A] {
val iter = self.iterator
def hasNext = iter.hasNext
- def next() = iter.next._1
+ def next() = iter.next()._1
}
/** Collects all keys of this map in an iterable collection.
@@ -213,7 +213,7 @@ self =>
def valuesIterator: Iterator[B] = new AbstractIterator[B] {
val iter = self.iterator
def hasNext = iter.hasNext
- def next() = iter.next._2
+ def next() = iter.next()._2
}
/** Defines the default value computation for the map,
diff --git a/src/library/scala/collection/MapProxy.scala b/src/library/scala/collection/MapProxy.scala
index e85d306e6f..941c1f5a4a 100644
--- a/src/library/scala/collection/MapProxy.scala
+++ b/src/library/scala/collection/MapProxy.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
/** This is a simple wrapper class for [[scala.collection.Map]].
* It is most useful for assembling customized map abstractions
diff --git a/src/library/scala/collection/MapProxyLike.scala b/src/library/scala/collection/MapProxyLike.scala
index 44b39f65da..44481131aa 100644
--- a/src/library/scala/collection/MapProxyLike.scala
+++ b/src/library/scala/collection/MapProxyLike.scala
@@ -6,9 +6,8 @@
** |/ **
\* */
-package scala.collection
-
-import generic._
+package scala
+package collection
// Methods could be printed by cat MapLike.scala | egrep '^ (override )?def'
diff --git a/src/library/scala/collection/Parallel.scala b/src/library/scala/collection/Parallel.scala
index 6731f74bea..174e3ab75e 100644
--- a/src/library/scala/collection/Parallel.scala
+++ b/src/library/scala/collection/Parallel.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
/** A marker trait for collections which have their operations parallelised.
*
diff --git a/src/library/scala/collection/Parallelizable.scala b/src/library/scala/collection/Parallelizable.scala
index d97c44abc0..b737752458 100644
--- a/src/library/scala/collection/Parallelizable.scala
+++ b/src/library/scala/collection/Parallelizable.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
import parallel.Combiner
@@ -39,7 +40,7 @@ trait Parallelizable[+A, +ParRepr <: Parallel] extends Any {
def par: ParRepr = {
val cb = parCombiner
for (x <- seq) cb += x
- cb.result
+ cb.result()
}
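As context for the hunk above, the default `par` simply copies the sequential elements into `parCombiner`; a sketch from the caller's side, assuming a collection such as List that does not override `par`:

    // List has no custom par, so .par goes through the default implementation
    // above, copying the elements into the parallel combiner.
    val pxs = List(1, 2, 3, 4).par
    assert(pxs.sum == 10)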
/** The default `par` implementation uses the combiner provided by this method
diff --git a/src/library/scala/collection/Searching.scala b/src/library/scala/collection/Searching.scala
new file mode 100644
index 0000000000..fec4bbf502
--- /dev/null
+++ b/src/library/scala/collection/Searching.scala
@@ -0,0 +1,118 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala
+package collection
+
+import scala.language.implicitConversions
+import scala.annotation.tailrec
+import scala.collection.generic.IsSeqLike
+import scala.math.Ordering
+
+/** A collection of wrappers that provide sequence classes with search functionality.
+ *
+ * Example usage:
+ * {{{
+ * import scala.collection.Searching._
+ * val l = List(1, 2, 3, 4, 5)
+ * l.search(3)
+ * // == Found(2)
+ * }}}
+ */
+object Searching {
+ sealed abstract class SearchResult {
+ def insertionPoint: Int
+ }
+
+ case class Found(foundIndex: Int) extends SearchResult {
+ override def insertionPoint = foundIndex
+ }
+ case class InsertionPoint(insertionPoint: Int) extends SearchResult
+
+ class SearchImpl[A, Repr](val coll: SeqLike[A, Repr]) {
+ /** Search the sorted sequence for a specific element. If the sequence is an
+ * `IndexedSeq`, a binary search is used. Otherwise, a linear search is used.
+ *
+ * The sequence should be sorted with the same `Ordering` before calling; otherwise,
+ * the results are undefined.
+ *
+ * @see [[scala.collection.IndexedSeq]]
+ * @see [[scala.math.Ordering]]
+ * @see [[scala.collection.SeqLike]], method `sorted`
+ *
+ * @param elem the element to find.
+ * @param ord the ordering to be used to compare elements.
+ *
+ * @return a `Found` value containing the index corresponding to the element in the
+ * sequence, or the `InsertionPoint` where the element would be inserted if
+ * the element is not in the sequence.
+ */
+ final def search[B >: A](elem: B)(implicit ord: Ordering[B]): SearchResult =
+ coll match {
+ case _: IndexedSeq[A] => binarySearch(elem, 0, coll.length)(ord)
+ case _ => linearSearch(coll.view, elem, 0)(ord)
+ }
+
+ /** Search within an interval in the sorted sequence for a specific element. If the
+ * sequence is an IndexedSeq, a binary search is used. Otherwise, a linear search
+ * is used.
+ *
+ * The sequence should be sorted with the same `Ordering` before calling; otherwise,
+ * the results are undefined.
+ *
+ * @see [[scala.collection.IndexedSeq]]
+ * @see [[scala.math.Ordering]]
+ * @see [[scala.collection.SeqLike]], method `sorted`
+ *
+ * @param elem the element to find.
+ * @param from the index where the search starts.
+ * @param to the index following where the search ends.
+ * @param ord the ordering to be used to compare elements.
+ *
+ * @return a `Found` value containing the index corresponding to the element in the
+ * sequence, or the `InsertionPoint` where the element would be inserted if
+ * the element is not in the sequence.
+ */
+ final def search[B >: A](elem: B, from: Int, to: Int)
+ (implicit ord: Ordering[B]): SearchResult =
+ coll match {
+ case _: IndexedSeq[A] => binarySearch(elem, from, to)(ord)
+ case _ => linearSearch(coll.view(from, to), elem, from)(ord)
+ }
+
+ @tailrec
+ private def binarySearch[B >: A](elem: B, from: Int, to: Int)
+ (implicit ord: Ordering[B]): SearchResult = {
+ // `from` is inclusive and `to` exclusive; an empty range is the insertion point
+ if (to == from) InsertionPoint(from) else {
+ val idx = from+(to-from)/2
+ math.signum(ord.compare(elem, coll(idx))) match {
+ case -1 => binarySearch(elem, from, idx)(ord)
+ case 1 => binarySearch(elem, idx+1, to)(ord)
+ case _ => Found(idx)
+ }
+ }
+ }
+
+ private def linearSearch[B >: A](c: SeqView[A, Repr], elem: B, offset: Int)
+ (implicit ord: Ordering[B]): SearchResult = {
+ var idx = offset
+ val it = c.iterator
+ while (it.hasNext) {
+ val cur = it.next()
+ if (ord.equiv(elem, cur)) return Found(idx)
+ else if (ord.lt(elem, cur)) return InsertionPoint(idx)
+ idx += 1
+ }
+ InsertionPoint(idx)
+ }
+
+ }
+
+ implicit def search[Repr, A](coll: Repr)
+ (implicit fr: IsSeqLike[Repr]): SearchImpl[fr.A, Repr] = new SearchImpl(fr.conversion(coll))
+}
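A short usage sketch of the API defined above:

    import scala.collection.Searching._

    List(1, 2, 3, 4, 5).search(3)     // Found(2), linear search on a plain Seq
    Vector(1, 2, 3, 4, 5).search(3)   // Found(2), binary search on an IndexedSeq
    Vector(1, 2, 4, 5).search(3)      // InsertionPoint where 3 would belong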
diff --git a/src/library/scala/collection/Seq.scala b/src/library/scala/collection/Seq.scala
index 33e66c0874..b21acdd9b7 100644
--- a/src/library/scala/collection/Seq.scala
+++ b/src/library/scala/collection/Seq.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
import generic._
import mutable.Builder
diff --git a/src/library/scala/collection/SeqExtractors.scala b/src/library/scala/collection/SeqExtractors.scala
index 20ea7f54b7..2398313c77 100644
--- a/src/library/scala/collection/SeqExtractors.scala
+++ b/src/library/scala/collection/SeqExtractors.scala
@@ -1,4 +1,5 @@
-package scala.collection
+package scala
+package collection
/** An extractor used to head/tail deconstruct sequences. */
object +: {
diff --git a/src/library/scala/collection/SeqLike.scala b/src/library/scala/collection/SeqLike.scala
index 1be0dba29f..c02ea98914 100644
--- a/src/library/scala/collection/SeqLike.scala
+++ b/src/library/scala/collection/SeqLike.scala
@@ -107,7 +107,7 @@ trait SeqLike[+A, +Repr] extends Any with IterableLike[A, Repr] with GenSeqLike[
def segmentLength(p: A => Boolean, from: Int): Int = {
var i = 0
- var it = iterator.drop(from)
+ val it = iterator.drop(from)
while (it.hasNext && p(it.next()))
i += 1
i
@@ -115,7 +115,7 @@ trait SeqLike[+A, +Repr] extends Any with IterableLike[A, Repr] with GenSeqLike[
def indexWhere(p: A => Boolean, from: Int): Int = {
var i = from
- var it = iterator.drop(from)
+ val it = iterator.drop(from)
while (it.hasNext) {
if (p(it.next())) return i
else i += 1
@@ -127,7 +127,7 @@ trait SeqLike[+A, +Repr] extends Any with IterableLike[A, Repr] with GenSeqLike[
def lastIndexWhere(p: A => Boolean, end: Int): Int = {
var i = length - 1
val it = reverseIterator
- while (it.hasNext && { val elem = it.next; (i > end || !p(elem)) }) i -= 1
+ while (it.hasNext && { val elem = it.next(); (i > end || !p(elem)) }) i -= 1
i
}
@@ -156,10 +156,10 @@ trait SeqLike[+A, +Repr] extends Any with IterableLike[A, Repr] with GenSeqLike[
def hasNext = _hasNext
def next(): Repr = {
if (!hasNext)
- Iterator.empty.next
+ Iterator.empty.next()
val forcedElms = new mutable.ArrayBuffer[A](elms.size) ++= elms
- val result = (self.newBuilder ++= forcedElms).result
+ val result = (self.newBuilder ++= forcedElms).result()
var i = idxs.length - 2
while(i >= 0 && idxs(i) >= idxs(i+1))
i -= 1
@@ -181,10 +181,10 @@ trait SeqLike[+A, +Repr] extends Any with IterableLike[A, Repr] with GenSeqLike[
result
}
private def swap(i: Int, j: Int) {
- var tmpI = idxs(i)
+ val tmpI = idxs(i)
idxs(i) = idxs(j)
idxs(j) = tmpI
- var tmpE = elms(i)
+ val tmpE = elms(i)
elms(i) = elms(j)
elms(j) = tmpE
}
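For reference, the iterator touched here is SeqLike's permutations iterator, which enumerates distinct arrangements lazily; for example:

    "aab".permutations.toList        // List("aab", "aba", "baa") -- duplicates collapsed
    List(1, 2, 3).permutations.size  // 6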
@@ -208,15 +208,15 @@ trait SeqLike[+A, +Repr] extends Any with IterableLike[A, Repr] with GenSeqLike[
def hasNext = _hasNext
def next(): Repr = {
if (!hasNext)
- Iterator.empty.next
+ Iterator.empty.next()
- /** Calculate this result. */
+ /* Calculate this result. */
val buf = self.newBuilder
for(k <- 0 until nums.length; j <- 0 until nums(k))
buf += elms(offs(k)+j)
- val res = buf.result
+ val res = buf.result()
- /** Prepare for the next call to next. */
+ /* Prepare for the next call to next. */
var idx = nums.length - 1
while (idx >= 0 && nums(idx) == cnts(idx))
idx -= 1
@@ -268,7 +268,7 @@ trait SeqLike[+A, +Repr] extends Any with IterableLike[A, Repr] with GenSeqLike[
b.sizeHint(this)
for (x <- xs)
b += x
- b.result
+ b.result()
}
def reverseMap[B, That](f: A => B)(implicit bf: CanBuildFrom[Repr, B, That]): That = {
@@ -279,7 +279,7 @@ trait SeqLike[+A, +Repr] extends Any with IterableLike[A, Repr] with GenSeqLike[
for (x <- xs)
b += f(x)
- b.result
+ b.result()
}
/** An iterator yielding elements in reversed order.
@@ -335,7 +335,7 @@ trait SeqLike[+A, +Repr] extends Any with IterableLike[A, Repr] with GenSeqLike[
if (from > l) -1
else if (tl < 1) clippedFrom
else if (l < tl) -1
- else SeqLike.kmpSearch(thisCollection, clippedFrom, l, that.seq, 0, tl, true)
+ else SeqLike.kmpSearch(thisCollection, clippedFrom, l, that.seq, 0, tl, forward = true)
}
else {
var i = from
@@ -372,7 +372,7 @@ trait SeqLike[+A, +Repr] extends Any with IterableLike[A, Repr] with GenSeqLike[
if (end < 0) -1
else if (tl < 1) clippedL
else if (l < tl) -1
- else SeqLike.kmpSearch(thisCollection, 0, clippedL+tl, that.seq, 0, tl, false)
+ else SeqLike.kmpSearch(thisCollection, 0, clippedL+tl, that.seq, 0, tl, forward = false)
}
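These KMP searches back `indexOfSlice` and friends; spelling out the `forward` argument is purely cosmetic and the behaviour is unchanged, e.g.:

    val xs = List(1, 2, 3, 2, 3)
    xs.indexOfSlice(List(2, 3))       // 1  (first occurrence, forward search)
    xs.lastIndexOfSlice(List(2, 3))   // 3  (last occurrence, backward search)
    xs.containsSlice(List(3, 2))      // true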
/** Tests whether this $coll contains a given sequence as a slice.
@@ -390,7 +390,7 @@ trait SeqLike[+A, +Repr] extends Any with IterableLike[A, Repr] with GenSeqLike[
* @return `true` if this $coll has an element that is equal (as
* determined by `==`) to `elem`, `false` otherwise.
*/
- def contains(elem: Any): Boolean = exists (_ == elem)
+ def contains[A1 >: A](elem: A1): Boolean = exists (_ == elem)
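A quick check, not from the patch, that the widened signature stays source compatible: an argument of an unrelated type simply infers `A1` as a common supertype such as `Any`.

    List(1, 2, 3).contains(2)     // true
    List(1, 2, 3).contains("2")   // still compiles (A1 inferred as Any); evaluates to false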
/** Produces a new sequence which contains all elements of this $coll and also all elements of
* a given sequence. `xs union ys` is equivalent to `xs ++ ys`.
@@ -442,7 +442,7 @@ trait SeqLike[+A, +Repr] extends Any with IterableLike[A, Repr] with GenSeqLike[
for (x <- this)
if (occ(x) == 0) b += x
else occ(x) -= 1
- b.result
+ b.result()
}
/** Computes the multiset intersection between this $coll and another sequence.
@@ -473,7 +473,7 @@ trait SeqLike[+A, +Repr] extends Any with IterableLike[A, Repr] with GenSeqLike[
b += x
occ(x) -= 1
}
- b.result
+ b.result()
}
private def occCounts[B](sq: Seq[B]): mutable.Map[B, Int] = {
@@ -496,7 +496,7 @@ trait SeqLike[+A, +Repr] extends Any with IterableLike[A, Repr] with GenSeqLike[
seen += x
}
}
- b.result
+ b.result()
}
def patch[B >: A, That](from: Int, patch: GenSeq[B], replaced: Int)(implicit bf: CanBuildFrom[Repr, B, That]): That = {
@@ -505,7 +505,7 @@ trait SeqLike[+A, +Repr] extends Any with IterableLike[A, Repr] with GenSeqLike[
b ++= toCollection(prefix)
b ++= patch.seq
b ++= toCollection(rest).view drop replaced
- b.result
+ b.result()
}
def updated[B >: A, That](index: Int, elem: B)(implicit bf: CanBuildFrom[Repr, B, That]): That = {
@@ -514,21 +514,21 @@ trait SeqLike[+A, +Repr] extends Any with IterableLike[A, Repr] with GenSeqLike[
b ++= toCollection(prefix)
b += elem
b ++= toCollection(rest).view.tail
- b.result
+ b.result()
}
def +:[B >: A, That](elem: B)(implicit bf: CanBuildFrom[Repr, B, That]): That = {
val b = bf(repr)
b += elem
b ++= thisCollection
- b.result
+ b.result()
}
def :+[B >: A, That](elem: B)(implicit bf: CanBuildFrom[Repr, B, That]): That = {
val b = bf(repr)
b ++= thisCollection
b += elem
- b.result
+ b.result()
}
def padTo[B >: A, That](len: Int, elem: B)(implicit bf: CanBuildFrom[Repr, B, That]): That = {
@@ -540,14 +540,14 @@ trait SeqLike[+A, +Repr] extends Any with IterableLike[A, Repr] with GenSeqLike[
b += elem
diff -= 1
}
- b.result
+ b.result()
}
def corresponds[B](that: GenSeq[B])(p: (A,B) => Boolean): Boolean = {
val i = this.iterator
val j = that.iterator
while (i.hasNext && j.hasNext)
- if (!p(i.next, j.next))
+ if (!p(i.next(), j.next()))
return false
!i.hasNext && !j.hasNext
@@ -616,7 +616,7 @@ trait SeqLike[+A, +Repr] extends Any with IterableLike[A, Repr] with GenSeqLike[
val b = newBuilder
b.sizeHint(len)
for (x <- arr) b += x
- b.result
+ b.result()
}
/** Converts this $coll to a sequence.
@@ -682,7 +682,7 @@ object SeqLike {
val wit = W.iterator.drop(n0)
var i = if (forward) 0 else (n1-n0-1)
while (i != done) {
- Warr(i) = wit.next.asInstanceOf[AnyRef]
+ Warr(i) = wit.next().asInstanceOf[AnyRef]
i += delta
}
@@ -778,15 +778,15 @@ object SeqLike {
case _ =>
// We had better not index into S directly!
val iter = S.iterator.drop(m0)
- val Wopt = kmpOptimizeWord(W, n0, n1, true)
+ val Wopt = kmpOptimizeWord(W, n0, n1, forward = true)
val T = kmpJumpTable(Wopt, n1-n0)
- var cache = new Array[AnyRef](n1-n0) // Ring buffer--need a quick way to do a look-behind
+ val cache = new Array[AnyRef](n1-n0) // Ring buffer--need a quick way to do a look-behind
var largest = 0
var i, m = 0
var answer = -1
while (m+m0+n1-n0 <= m1) {
while (i+m >= largest) {
- cache(largest%(n1-n0)) = iter.next.asInstanceOf[AnyRef]
+ cache(largest%(n1-n0)) = iter.next().asInstanceOf[AnyRef]
largest += 1
}
if (Wopt(i) == cache((i+m)%(n1-n0))) {
@@ -851,7 +851,7 @@ object SeqLike {
else if (s1 - s0 < t1 - t0) -1 // Source is too short to find target
else {
// Nontrivial search
- val ans = kmpSearch(source, s0, s1, target, t0, t1, true)
+ val ans = kmpSearch(source, s0, s1, target, t0, t1, forward = true)
if (ans < 0) ans else ans - math.min(slen, sourceOffset)
}
}
@@ -883,7 +883,7 @@ object SeqLike {
else if (fixed_s1 - s0 < t1 - t0) -1 // Source is too short to find target
else {
// Nontrivial search
- val ans = kmpSearch(source, s0, fixed_s1, target, t0, t1, false)
+ val ans = kmpSearch(source, s0, fixed_s1, target, t0, t1, forward = false)
if (ans < 0) ans else ans - s0
}
}
diff --git a/src/library/scala/collection/SeqProxy.scala b/src/library/scala/collection/SeqProxy.scala
index 1f8dc4aad1..9c5424a3a6 100644
--- a/src/library/scala/collection/SeqProxy.scala
+++ b/src/library/scala/collection/SeqProxy.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
/** This trait implements a proxy for sequence objects. It forwards
* all calls to a different sequence object.
diff --git a/src/library/scala/collection/SeqProxyLike.scala b/src/library/scala/collection/SeqProxyLike.scala
index 5e8030d1e4..161b23d3f8 100644
--- a/src/library/scala/collection/SeqProxyLike.scala
+++ b/src/library/scala/collection/SeqProxyLike.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
import generic._
@@ -50,7 +51,7 @@ trait SeqProxyLike[+A, +Repr <: SeqLike[A, Repr] with Seq[A]] extends SeqLike[A,
override def lastIndexOfSlice[B >: A](that: GenSeq[B]): Int = self.lastIndexOfSlice(that)
override def lastIndexOfSlice[B >: A](that: GenSeq[B], end: Int): Int = self.lastIndexOfSlice(that, end)
override def containsSlice[B](that: GenSeq[B]): Boolean = self.indexOfSlice(that) != -1
- override def contains(elem: Any): Boolean = self.contains(elem)
+ override def contains[A1 >: A](elem: A1): Boolean = self.contains(elem)
override def union[B >: A, That](that: GenSeq[B])(implicit bf: CanBuildFrom[Repr, B, That]): That = self.union(that)(bf)
override def diff[B >: A](that: GenSeq[B]): Repr = self.diff(that)
override def intersect[B >: A](that: GenSeq[B]): Repr = self.intersect(that)
diff --git a/src/library/scala/collection/SeqView.scala b/src/library/scala/collection/SeqView.scala
index c26124cf6f..40dfd50212 100644
--- a/src/library/scala/collection/SeqView.scala
+++ b/src/library/scala/collection/SeqView.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
import generic._
import TraversableView.NoBuilder
diff --git a/src/library/scala/collection/SeqViewLike.scala b/src/library/scala/collection/SeqViewLike.scala
index 5f2bf902b1..1194cd7199 100644
--- a/src/library/scala/collection/SeqViewLike.scala
+++ b/src/library/scala/collection/SeqViewLike.scala
@@ -6,11 +6,11 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
import generic._
import Seq.fill
-import TraversableView.NoBuilder
/** A template trait for non-strict views of sequences.
* $seqViewInfo
diff --git a/src/library/scala/collection/Sequentializable.scala.disabled b/src/library/scala/collection/Sequentializable.scala.disabled
deleted file mode 100644
index df457671a6..0000000000
--- a/src/library/scala/collection/Sequentializable.scala.disabled
+++ /dev/null
@@ -1,10 +0,0 @@
-package scala.collection
-
-
-
-
-trait Sequentializable[+T, +Repr] {
-
- def seq: Repr
-
-}
diff --git a/src/library/scala/collection/Set.scala b/src/library/scala/collection/Set.scala
index c304323d28..46d5dfa056 100644
--- a/src/library/scala/collection/Set.scala
+++ b/src/library/scala/collection/Set.scala
@@ -6,8 +6,8 @@
** |/ **
\* */
-
-package scala.collection
+package scala
+package collection
import generic._
diff --git a/src/library/scala/collection/SetLike.scala b/src/library/scala/collection/SetLike.scala
index a6ebcc0e20..0c5c7e0b29 100644
--- a/src/library/scala/collection/SetLike.scala
+++ b/src/library/scala/collection/SetLike.scala
@@ -6,8 +6,8 @@
** |/ **
\* */
-
-package scala.collection
+package scala
+package collection
import generic._
import mutable.{ Builder, SetBuilder }
@@ -180,14 +180,14 @@ self =>
def hasNext = len <= elms.size || itr.hasNext
def next = {
if (!itr.hasNext) {
- if (len > elms.size) Iterator.empty.next
+ if (len > elms.size) Iterator.empty.next()
else {
itr = new SubsetsItr(elms, len)
len += 1
}
}
- itr.next
+ itr.next()
}
}
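The iterator adjusted above drives `subsets`; for illustration:

    Set(1, 2, 3).subsets.size        // 8, from the empty set through the full set
    Set(1, 2, 3).subsets(2).toList   // List(Set(1, 2), Set(1, 3), Set(2, 3))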
@@ -205,11 +205,11 @@ self =>
def hasNext = _hasNext
def next(): This = {
- if (!hasNext) Iterator.empty.next
+ if (!hasNext) Iterator.empty.next()
val buf = self.newBuilder
idxs.slice(0, len) foreach (idx => buf += elms(idx))
- val result = buf.result
+ val result = buf.result()
var i = len - 1
while (i >= 0 && idxs(i) == idxs(i+1)-1) i -= 1
diff --git a/src/library/scala/collection/SetProxy.scala b/src/library/scala/collection/SetProxy.scala
index 08075a7121..f9f38f148a 100644
--- a/src/library/scala/collection/SetProxy.scala
+++ b/src/library/scala/collection/SetProxy.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
/** This is a simple wrapper class for [[scala.collection.Set]].
* It is most useful for assembling customized set abstractions
diff --git a/src/library/scala/collection/SetProxyLike.scala b/src/library/scala/collection/SetProxyLike.scala
index 5196f39917..ac3d34dbab 100644
--- a/src/library/scala/collection/SetProxyLike.scala
+++ b/src/library/scala/collection/SetProxyLike.scala
@@ -6,10 +6,8 @@
** |/ **
\* */
-
-package scala.collection
-
-import generic._
+package scala
+package collection
// Methods could be printed by cat SetLike.scala | egrep '^ (override )?def'
diff --git a/src/library/scala/collection/SortedMap.scala b/src/library/scala/collection/SortedMap.scala
index c81c16e8bb..0705a1e9e0 100644
--- a/src/library/scala/collection/SortedMap.scala
+++ b/src/library/scala/collection/SortedMap.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
import generic._
import mutable.Builder
@@ -40,13 +41,13 @@ object SortedMap extends SortedMapFactory[SortedMap] {
val b = SortedMap.newBuilder[A, B1]
b ++= this
b += ((kv._1, kv._2))
- b.result
+ b.result()
}
override def - (key: A): SortedMap[A, B] = {
val b = newBuilder
for (kv <- this; if kv._1 != key) b += kv
- b.result
+ b.result()
}
}
diff --git a/src/library/scala/collection/SortedMapLike.scala b/src/library/scala/collection/SortedMapLike.scala
index 57ad3497c7..3fc8b0dadc 100644
--- a/src/library/scala/collection/SortedMapLike.scala
+++ b/src/library/scala/collection/SortedMapLike.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
import generic._
@@ -42,6 +43,7 @@ self =>
val map = self.rangeImpl(from, until)
new map.DefaultKeySortedSet
}
+ override def keysIteratorFrom(start: A) = self.keysIteratorFrom(start)
}
/** Add a key/value pair to this map.
@@ -68,7 +70,7 @@ self =>
* @param elems the remaining elements to add.
*/
override def + [B1 >: B] (elem1: (A, B1), elem2: (A, B1), elems: (A, B1) *): SortedMap[A, B1] = {
- var m = this + elem1 + elem2;
+ var m = this + elem1 + elem2
for (e <- elems) m = m + e
m
}
@@ -76,11 +78,17 @@ self =>
override def filterKeys(p: A => Boolean): SortedMap[A, B] = new FilteredKeys(p) with SortedMap.Default[A, B] {
implicit def ordering: Ordering[A] = self.ordering
override def rangeImpl(from : Option[A], until : Option[A]): SortedMap[A, B] = self.rangeImpl(from, until).filterKeys(p)
+ override def iteratorFrom(start: A) = self iteratorFrom start filter {case (k, _) => p(k)}
+ override def keysIteratorFrom(start: A) = self keysIteratorFrom start filter p
+ override def valuesIteratorFrom(start: A) = self iteratorFrom start collect {case (k,v) if p(k) => v}
}
override def mapValues[C](f: B => C): SortedMap[A, C] = new MappedValues(f) with SortedMap.Default[A, C] {
implicit def ordering: Ordering[A] = self.ordering
override def rangeImpl(from : Option[A], until : Option[A]): SortedMap[A, C] = self.rangeImpl(from, until).mapValues(f)
+ override def iteratorFrom(start: A) = (self iteratorFrom start) map {case (k,v) => (k, f(v))}
+ override def keysIteratorFrom(start: A) = self keysIteratorFrom start
+ override def valuesIteratorFrom(start: A) = self valuesIteratorFrom start map f
}
/** Adds a number of elements provided by a traversable object
@@ -91,6 +99,28 @@ self =>
override def ++[B1 >: B](xs: GenTraversableOnce[(A, B1)]): SortedMap[A, B1] =
((repr: SortedMap[A, B1]) /: xs.seq) (_ + _)
+ /**
+ * Creates an iterator over all the key/value pairs
+ * contained in this map having a key greater than or
+ * equal to `start` according to the ordering of
+ * this map. x.iteratorFrom(y) is equivalent
+ * to but often more efficient than x.from(y).iterator.
+ *
+ * @param start The lower bound (inclusive)
+ * on the keys to be returned
+ */
+ def iteratorFrom(start: A): Iterator[(A, B)]
+ /**
+ * Creates an iterator over all the values contained in this
+ * map that are associated with a key greater than or equal to `start`
+ * according to the ordering of this map. x.valuesIteratorFrom(y) is
+ * equivalent to but often more efficient than
+ * x.from(y).valuesIterator.
+ *
+ * @param start The lower bound (inclusive)
+ * on the keys to be returned
+ */
+ def valuesIteratorFrom(start: A): Iterator[B]
}
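A usage sketch of the new bounded iterators, assuming a concrete sorted map such as immutable.TreeMap:

    import scala.collection.immutable.TreeMap

    val m = TreeMap(1 -> "a", 2 -> "b", 3 -> "c")
    m.iteratorFrom(2).toList         // List((2,b), (3,c)), without building an intermediate map
    m.valuesIteratorFrom(2).toList   // List(b, c)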
diff --git a/src/library/scala/collection/SortedSet.scala b/src/library/scala/collection/SortedSet.scala
index 2d5d4fb55e..43189d2e8c 100644
--- a/src/library/scala/collection/SortedSet.scala
+++ b/src/library/scala/collection/SortedSet.scala
@@ -7,7 +7,8 @@
\* */
-package scala.collection
+package scala
+package collection
import generic._
/** A sorted set.
diff --git a/src/library/scala/collection/SortedSetLike.scala b/src/library/scala/collection/SortedSetLike.scala
index 71b45c72ff..eb2ac38c59 100644
--- a/src/library/scala/collection/SortedSetLike.scala
+++ b/src/library/scala/collection/SortedSetLike.scala
@@ -7,7 +7,8 @@
\* */
-package scala.collection
+package scala
+package collection
import generic._
/** A template for sets which are sorted.
@@ -40,4 +41,14 @@ self =>
case that: SortedSet[_] if that.ordering == ordering => that.hasAll(this.iterator)
case that => super.subsetOf(that)
}
+
+ /**
+ * Creates an iterator that contains all values from this collection
+ * greater than or equal to `start` according to the ordering of
+ * this collection. x.iteratorFrom(y) is equivalent to but will usually
+ * be more efficient than x.from(y).iterator
+ *
+ * @param start The lower-bound (inclusive) of the iterator
+ */
+ def iteratorFrom(start: A): Iterator[A] = keysIteratorFrom(start)
}
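And the set-side counterpart, e.g. with immutable.TreeSet:

    import scala.collection.immutable.TreeSet

    TreeSet(1, 3, 5, 7).iteratorFrom(3).toList   // List(3, 5, 7)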
diff --git a/src/library/scala/collection/Traversable.scala b/src/library/scala/collection/Traversable.scala
index 36ef230a42..61d9a42f04 100644
--- a/src/library/scala/collection/Traversable.scala
+++ b/src/library/scala/collection/Traversable.scala
@@ -6,12 +6,11 @@
** |/ **
\* */
-
-
-package scala.collection
+package scala
+package collection
import generic._
-import mutable.{Builder, Buffer, ArrayBuffer, ListBuffer}
+import mutable.Builder
import scala.util.control.Breaks
/** A trait for traversable collections.
diff --git a/src/library/scala/collection/TraversableLike.scala b/src/library/scala/collection/TraversableLike.scala
index 5f193eb211..00f4de82cd 100644
--- a/src/library/scala/collection/TraversableLike.scala
+++ b/src/library/scala/collection/TraversableLike.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
import generic._
import mutable.{ Builder }
@@ -77,7 +78,7 @@ trait TraversableLike[+A, +Repr] extends Any
import Traversable.breaks._
/** The type implementing this traversable */
- protected type Self = Repr
+ protected[this] type Self = Repr
/** The collection of type $coll underlying this `TraversableLike` object.
* By default this is implemented as the `TraversableLike` object itself,
@@ -86,7 +87,7 @@ trait TraversableLike[+A, +Repr] extends Any
def repr: Repr = this.asInstanceOf[Repr]
final def isTraversableAgain: Boolean = true
-
+
/** The underlying collection seen as an instance of `$Coll`.
* By default this is implemented as the current collection object itself,
* but this can be overridden.
@@ -174,7 +175,7 @@ trait TraversableLike[+A, +Repr] extends Any
*
* @usecase def ++:[B](that: TraversableOnce[B]): $Coll[B]
* @inheritdoc
- *
+ *
* Example:
* {{{
* scala> val x = List(1)
@@ -252,18 +253,21 @@ trait TraversableLike[+A, +Repr] extends Any
b.result
}
+ private def filterImpl(p: A => Boolean, isFlipped: Boolean): Repr = {
+ val b = newBuilder
+ for (x <- this)
+ if (p(x) != isFlipped) b += x
+
+ b.result
+ }
+
/** Selects all elements of this $coll which satisfy a predicate.
*
* @param p the predicate used to test elements.
* @return a new $coll consisting of all elements of this $coll that satisfy the given
* predicate `p`. The order of the elements is preserved.
*/
- def filter(p: A => Boolean): Repr = {
- val b = newBuilder
- for (x <- this)
- if (p(x)) b += x
- b.result
- }
+ def filter(p: A => Boolean): Repr = filterImpl(p, isFlipped = false)
/** Selects all elements of this $coll which do not satisfy a predicate.
*
@@ -271,11 +275,11 @@ trait TraversableLike[+A, +Repr] extends Any
* @return a new $coll consisting of all elements of this $coll that do not satisfy the given
* predicate `p`. The order of the elements is preserved.
*/
- def filterNot(p: A => Boolean): Repr = filter(!p(_))
+ def filterNot(p: A => Boolean): Repr = filterImpl(p, isFlipped = true)
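Since `filter` and `filterNot` now share `filterImpl` and differ only in the `isFlipped` flag, `filterNot` no longer wraps `p` in a negating closure; observable behaviour is unchanged:

    val xs = List(1, 2, 3, 4)
    xs.filter(_ % 2 == 0)      // List(2, 4)  -- keeps x when p(x) != false
    xs.filterNot(_ % 2 == 0)   // List(1, 3)  -- keeps x when p(x) != true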
def collect[B, That](pf: PartialFunction[A, B])(implicit bf: CanBuildFrom[Repr, B, That]): That = {
val b = bf(repr)
- for (x <- this) if (pf.isDefinedAt(x)) b += pf(x)
+ foreach(pf.runWith(b += _))
b.result
}
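`runWith` fuses the old `isDefinedAt`/`apply` pair into a single evaluation of the partial function per element; roughly:

    // pf.runWith(action): A => Boolean applies `action` to pf(x) when pf matches x
    // and reports whether it matched, so each element is pattern-matched only once.
    val pf: PartialFunction[Any, Int] = { case n: Int => n * 10 }
    val buf = scala.collection.mutable.ListBuffer[Int]()
    List(1, "a", 2).foreach(pf.runWith(buf += _))
    assert(buf.toList == List(10, 20))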
diff --git a/src/library/scala/collection/TraversableOnce.scala b/src/library/scala/collection/TraversableOnce.scala
index a448ac2c09..526c36dda7 100644
--- a/src/library/scala/collection/TraversableOnce.scala
+++ b/src/library/scala/collection/TraversableOnce.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
import mutable.{ Buffer, Builder, ListBuffer, ArrayBuffer }
import generic.CanBuildFrom
@@ -128,10 +129,8 @@ trait TraversableOnce[+A] extends Any with GenTraversableOnce[A] {
* @example `Seq("a", 1, 5L).collectFirst({ case x: Int => x*10 }) = Some(10)`
*/
def collectFirst[B](pf: PartialFunction[A, B]): Option[B] = {
- for (x <- self.toIterator) { // make sure to use an iterator or `seq`
- if (pf isDefinedAt x)
- return Some(pf(x))
- }
+ // make sure to use an iterator or `seq`
+ self.toIterator.foreach(pf.runWith(b => return Some(b)))
None
}
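The non-local `return` inside the action passed to `runWith` is what makes `collectFirst` stop at the first matching element; behaviour matches the scaladoc example above:

    Seq("a", 1, 5L).collectFirst { case x: Int => x * 10 }   // Some(10)
    Seq("a", "b").collectFirst { case x: Int => x }          // None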
@@ -198,7 +197,7 @@ trait TraversableOnce[+A] extends Any with GenTraversableOnce[A] {
def fold[A1 >: A](z: A1)(op: (A1, A1) => A1): A1 = foldLeft(z)(op)
- def aggregate[B](z: B)(seqop: (B, A) => B, combop: (B, B) => B): B = foldLeft(z)(seqop)
+ def aggregate[B](z: =>B)(seqop: (B, A) => B, combop: (B, B) => B): B = foldLeft(z)(seqop)
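With `z` now by-name, a parallel implementation can presumably re-evaluate the start value per partition rather than share a single instance; for a sequential collection it still behaves like a foldLeft, with `combop` only used to merge partitions. A sketch:

    val (total, count) = List(1, 2, 3, 4).aggregate((0, 0))(
      (acc, x) => (acc._1 + x, acc._2 + 1),     // seqop: fold one element into the accumulator
      (l, r)   => (l._1 + r._1, l._2 + r._2))   // combop: merge two partial results
    assert((total, count) == (10, 4))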
def sum[B >: A](implicit num: Numeric[B]): B = foldLeft(num.zero)(num.plus)
@@ -271,7 +270,7 @@ trait TraversableOnce[+A] extends Any with GenTraversableOnce[A] {
def to[Col[_]](implicit cbf: CanBuildFrom[Nothing, A, Col[A @uV]]): Col[A @uV] = {
val b = cbf()
b ++= seq
- b.result
+ b.result()
}
def toMap[T, U](implicit ev: A <:< (T, U)): immutable.Map[T, U] = {
@@ -279,7 +278,7 @@ trait TraversableOnce[+A] extends Any with GenTraversableOnce[A] {
for (x <- self)
b += x
- b.result
+ b.result()
}
def mkString(start: String, sep: String, end: String): String =
@@ -380,27 +379,22 @@ trait TraversableOnce[+A] extends Any with GenTraversableOnce[A] {
object TraversableOnce {
- @deprecated("use OnceCanBuildFrom instead", "2.10.0")
- def traversableOnceCanBuildFrom[T] = new OnceCanBuildFrom[T]
- @deprecated("use MonadOps instead", "2.10.0")
- def wrapTraversableOnce[A](trav: TraversableOnce[A]) = new MonadOps(trav)
-
implicit def alternateImplicit[A](trav: TraversableOnce[A]) = new ForceImplicitAmbiguity
implicit def flattenTraversableOnce[A, CC[_]](travs: TraversableOnce[CC[A]])(implicit ev: CC[A] => TraversableOnce[A]) =
new FlattenOps[A](travs map ev)
/* Functionality reused in Iterator.CanBuildFrom */
- private[collection] abstract class BufferedCanBuildFrom[A, Coll[X] <: TraversableOnce[X]] extends generic.CanBuildFrom[Coll[_], A, Coll[A]] {
- def bufferToColl[B](buff: ArrayBuffer[B]): Coll[B]
- def traversableToColl[B](t: GenTraversable[B]): Coll[B]
+ private[collection] abstract class BufferedCanBuildFrom[A, CC[X] <: TraversableOnce[X]] extends generic.CanBuildFrom[CC[_], A, CC[A]] {
+ def bufferToColl[B](buff: ArrayBuffer[B]): CC[B]
+ def traversableToColl[B](t: GenTraversable[B]): CC[B]
- def newIterator: Builder[A, Coll[A]] = new ArrayBuffer[A] mapResult bufferToColl
+ def newIterator: Builder[A, CC[A]] = new ArrayBuffer[A] mapResult bufferToColl
/** Creates a new builder on request of a collection.
* @param from the collection requesting the builder to be created.
* @return the result of invoking the `genericBuilder` method on `from`.
*/
- def apply(from: Coll[_]): Builder[A, Coll[A]] = from match {
+ def apply(from: CC[_]): Builder[A, CC[A]] = from match {
case xs: generic.GenericTraversableTemplate[_, _] => xs.genericBuilder.asInstanceOf[Builder[A, Traversable[A]]] mapResult {
case res => traversableToColl(res.asInstanceOf[GenTraversable[A]])
}
@@ -429,7 +423,7 @@ object TraversableOnce {
def flatten: Iterator[A] = new AbstractIterator[A] {
val its = travs.toIterator
private var it: Iterator[A] = Iterator.empty
- def hasNext: Boolean = it.hasNext || its.hasNext && { it = its.next.toIterator; hasNext }
+ def hasNext: Boolean = it.hasNext || its.hasNext && { it = its.next().toIterator; hasNext }
def next(): A = if (hasNext) it.next() else Iterator.empty.next()
}
}
diff --git a/src/library/scala/collection/TraversableProxy.scala b/src/library/scala/collection/TraversableProxy.scala
index 568298a9d9..65936da0e4 100644
--- a/src/library/scala/collection/TraversableProxy.scala
+++ b/src/library/scala/collection/TraversableProxy.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
// Methods could be printed by cat TraversableLike.scala | egrep '^ (override )?def'
diff --git a/src/library/scala/collection/TraversableProxyLike.scala b/src/library/scala/collection/TraversableProxyLike.scala
index 8896cd1b0f..77d651c5f2 100644
--- a/src/library/scala/collection/TraversableProxyLike.scala
+++ b/src/library/scala/collection/TraversableProxyLike.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
import generic._
import mutable.{Buffer, StringBuilder}
diff --git a/src/library/scala/collection/TraversableView.scala b/src/library/scala/collection/TraversableView.scala
index cce6b72257..bbb5bde464 100644
--- a/src/library/scala/collection/TraversableView.scala
+++ b/src/library/scala/collection/TraversableView.scala
@@ -6,11 +6,11 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
import generic._
import mutable.Builder
-import TraversableView.NoBuilder
/** A base trait for non-strict views of traversable collections.
* $traversableViewInfo
diff --git a/src/library/scala/collection/TraversableViewLike.scala b/src/library/scala/collection/TraversableViewLike.scala
index 14f865c2f0..c507e000ee 100644
--- a/src/library/scala/collection/TraversableViewLike.scala
+++ b/src/library/scala/collection/TraversableViewLike.scala
@@ -6,11 +6,11 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
import generic._
import mutable.{ Builder, ArrayBuffer }
-import TraversableView.NoBuilder
import scala.annotation.migration
import scala.language.implicitConversions
@@ -59,7 +59,7 @@ trait ViewMkString[+A] {
* $viewInfo
*
* All views for traversable collections are defined by creating a new `foreach` method.
- *
+ *
* @author Martin Odersky
* @version 2.8
* @since 2.8
@@ -162,7 +162,7 @@ trait TraversableViewLike[+A,
// if (b.isInstanceOf[NoBuilder[_]]) newFlatMapped(f).asInstanceOf[That]
// else super.flatMap[B, That](f)(bf)
}
- override def flatten[B](implicit asTraversable: A => /*<:<!!!*/ GenTraversableOnce[B]) =
+ override def flatten[B](implicit asTraversable: A => /*<:<!!!*/ GenTraversableOnce[B]) =
newFlatMapped(asTraversable)
private[this] implicit def asThis(xs: Transformed[A]): This = xs.asInstanceOf[This]
diff --git a/src/library/scala/collection/concurrent/BasicNode.java b/src/library/scala/collection/concurrent/BasicNode.java
index a65d84bbf8..97b8870036 100644
--- a/src/library/scala/collection/concurrent/BasicNode.java
+++ b/src/library/scala/collection/concurrent/BasicNode.java
@@ -8,13 +8,8 @@
package scala.collection.concurrent;
-
-
-
-
-
public abstract class BasicNode {
-
+
public abstract String string(int lev);
-
-} \ No newline at end of file
+
+}
diff --git a/src/library/scala/collection/concurrent/CNodeBase.java b/src/library/scala/collection/concurrent/CNodeBase.java
index d6eb29c8df..2fce971b2b 100644
--- a/src/library/scala/collection/concurrent/CNodeBase.java
+++ b/src/library/scala/collection/concurrent/CNodeBase.java
@@ -8,28 +8,26 @@
package scala.collection.concurrent;
-
-
import java.util.concurrent.atomic.AtomicIntegerFieldUpdater;
+abstract class CNodeBase<K, V> extends MainNode<K, V> {
+ @SuppressWarnings("rawtypes")
+ public static final AtomicIntegerFieldUpdater<CNodeBase> updater =
+ AtomicIntegerFieldUpdater.newUpdater(CNodeBase.class, "csize");
-abstract class CNodeBase<K, V> extends MainNode<K, V> {
-
- public static final AtomicIntegerFieldUpdater<CNodeBase> updater = AtomicIntegerFieldUpdater.newUpdater(CNodeBase.class, "csize");
-
public volatile int csize = -1;
-
+
public boolean CAS_SIZE(int oldval, int nval) {
return updater.compareAndSet(this, oldval, nval);
}
-
+
public void WRITE_SIZE(int nval) {
updater.set(this, nval);
}
-
+
public int READ_SIZE() {
return updater.get(this);
}
-
+
} \ No newline at end of file
diff --git a/src/library/scala/collection/concurrent/Gen.java b/src/library/scala/collection/concurrent/Gen.java
index 331eeca16b..6019884683 100644
--- a/src/library/scala/collection/concurrent/Gen.java
+++ b/src/library/scala/collection/concurrent/Gen.java
@@ -8,11 +8,4 @@
package scala.collection.concurrent;
-
-
-
-
-
-final class Gen {
-}
-
+final class Gen {}
diff --git a/src/library/scala/collection/concurrent/INodeBase.java b/src/library/scala/collection/concurrent/INodeBase.java
index cbe404edf6..2f2d203287 100644
--- a/src/library/scala/collection/concurrent/INodeBase.java
+++ b/src/library/scala/collection/concurrent/INodeBase.java
@@ -8,28 +8,26 @@
package scala.collection.concurrent;
-
-
import java.util.concurrent.atomic.AtomicReferenceFieldUpdater;
+abstract class INodeBase<K, V> extends BasicNode {
+ @SuppressWarnings("rawtypes")
+ public static final AtomicReferenceFieldUpdater<INodeBase, MainNode> updater =
+ AtomicReferenceFieldUpdater.newUpdater(INodeBase.class, MainNode.class, "mainnode");
-abstract class INodeBase<K, V> extends BasicNode {
-
- public static final AtomicReferenceFieldUpdater<INodeBase, MainNode> updater = AtomicReferenceFieldUpdater.newUpdater(INodeBase.class, MainNode.class, "mainnode");
-
public static final Object RESTART = new Object();
-
+
public volatile MainNode<K, V> mainnode = null;
-
+
public final Gen gen;
-
+
public INodeBase(Gen generation) {
gen = generation;
}
-
+
public BasicNode prev() {
return null;
}
-
+
} \ No newline at end of file
diff --git a/src/library/scala/collection/concurrent/MainNode.java b/src/library/scala/collection/concurrent/MainNode.java
index ffe535742e..adb9b59a3d 100644
--- a/src/library/scala/collection/concurrent/MainNode.java
+++ b/src/library/scala/collection/concurrent/MainNode.java
@@ -8,33 +8,32 @@
package scala.collection.concurrent;
-
-
import java.util.concurrent.atomic.AtomicReferenceFieldUpdater;
+abstract class MainNode<K, V> extends BasicNode {
+ @SuppressWarnings("rawtypes")
+ public static final AtomicReferenceFieldUpdater<MainNode, MainNode> updater =
+ AtomicReferenceFieldUpdater.newUpdater(MainNode.class, MainNode.class, "prev");
-abstract class MainNode<K, V> extends BasicNode {
-
- public static final AtomicReferenceFieldUpdater<MainNode, MainNode> updater = AtomicReferenceFieldUpdater.newUpdater(MainNode.class, MainNode.class, "prev");
-
public volatile MainNode<K, V> prev = null;
-
+
public abstract int cachedSize(Object ct);
-
+
public boolean CAS_PREV(MainNode<K, V> oldval, MainNode<K, V> nval) {
return updater.compareAndSet(this, oldval, nval);
}
-
+
public void WRITE_PREV(MainNode<K, V> nval) {
updater.set(this, nval);
}
-
+
// do we need this? unclear in the javadocs...
// apparently not - volatile reads are supposed to be safe
// irregardless of whether there are concurrent ARFU updates
+ @Deprecated @SuppressWarnings("unchecked")
public MainNode<K, V> READ_PREV() {
return updater.get(this);
}
-
+
} \ No newline at end of file
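
The CNodeBase, INodeBase and MainNode hunks above mostly normalise whitespace and add @SuppressWarnings, but they all revolve around the same idiom: a volatile field paired with a static Atomic*FieldUpdater so that the CAS_*/WRITE_*/READ_* helpers can update nodes lock-free without allocating an AtomicInteger or AtomicReference per node. A minimal Scala sketch of the same retry-until-CAS-succeeds discipline, using a plain AtomicReference instead of a field updater (the CasCell name and transform method are illustrative, not from the library):

    import java.util.concurrent.atomic.AtomicReference
    import scala.annotation.tailrec

    // Illustrative only: the library uses Atomic*FieldUpdater on a volatile field
    // to avoid one wrapper object per node; an AtomicReference shows the same
    // compare-and-set retry pattern more directly.
    final class CasCell[A](initial: A) {
      private val state = new AtomicReference[A](initial)

      def read: A = state.get                      // volatile read

      @tailrec
      def transform(f: A => A): A = {
        val old  = state.get
        val next = f(old)
        if (state.compareAndSet(old, next)) next   // nobody raced us: done
        else transform(f)                          // lost the race: retry on the fresh value
      }
    }
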
diff --git a/src/library/scala/collection/concurrent/Map.scala b/src/library/scala/collection/concurrent/Map.scala
index b2276ce5aa..02e5dd01f5 100644
--- a/src/library/scala/collection/concurrent/Map.scala
+++ b/src/library/scala/collection/concurrent/Map.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection.concurrent
+package scala
+package collection.concurrent
/** A template trait for mutable maps that allow concurrent access.
*
diff --git a/src/library/scala/collection/concurrent/TrieMap.scala b/src/library/scala/collection/concurrent/TrieMap.scala
index 6c11c5bcb5..0b5ceeb1c7 100644
--- a/src/library/scala/collection/concurrent/TrieMap.scala
+++ b/src/library/scala/collection/concurrent/TrieMap.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
package concurrent
import java.util.concurrent.atomic._
@@ -41,7 +42,7 @@ private[collection] final class INode[K, V](bn: MainNode[K, V], g: Gen) extends
@tailrec private def GCAS_Complete(m: MainNode[K, V], ct: TrieMap[K, V]): MainNode[K, V] = if (m eq null) null else {
// complete the GCAS
val prev = /*READ*/m.prev
- val ctr = ct.readRoot(true)
+ val ctr = ct.readRoot(abort = true)
prev match {
case null =>
@@ -250,7 +251,7 @@ private[collection] final class INode[K, V](bn: MainNode[K, V], g: Gen) extends
if (ct.isReadOnly || (startgen eq in.gen)) in.rec_lookup(k, hc, lev + 5, this, startgen, ct)
else {
if (GCAS(cn, cn.renewed(startgen, ct), ct)) rec_lookup(k, hc, lev, parent, startgen, ct)
- else return RESTART // used to be throw RestartException
+ else RESTART // used to be throw RestartException
}
case sn: SNode[K, V] => // 2) singleton node
if (sn.hc == hc && equal(sn.k, k, ct)) sn.v.asInstanceOf[AnyRef]
@@ -428,16 +429,16 @@ extends MainNode[K, V] with KVNode[K, V] {
}
-private[collection] final class LNode[K, V](final val listmap: ImmutableListMap[K, V])
+private[collection] final class LNode[K, V](final val listmap: immutable.ListMap[K, V])
extends MainNode[K, V] {
- def this(k: K, v: V) = this(ImmutableListMap(k -> v))
- def this(k1: K, v1: V, k2: K, v2: V) = this(ImmutableListMap(k1 -> v1, k2 -> v2))
+ def this(k: K, v: V) = this(immutable.ListMap(k -> v))
+ def this(k1: K, v1: V, k2: K, v2: V) = this(immutable.ListMap(k1 -> v1, k2 -> v2))
def inserted(k: K, v: V) = new LNode(listmap + ((k, v)))
def removed(k: K, ct: TrieMap[K, V]): MainNode[K, V] = {
val updmap = listmap - k
if (updmap.size > 1) new LNode(updmap)
else {
- val (k, v) = updmap.iterator.next
+ val (k, v) = updmap.iterator.next()
new TNode(k, v, ct.computeHash(k)) // create it tombed so that it gets compressed on subsequent accesses
}
}
@@ -545,7 +546,7 @@ private[collection] final class CNode[K, V](val bitmap: Int, val array: Array[Ba
// removed (those existing when the op began)
// - if there are only null-i-nodes below, returns null
def toCompressed(ct: TrieMap[K, V], lev: Int, gen: Gen) = {
- var bmp = bitmap
+ val bmp = bitmap
var i = 0
val arr = array
val tmparray = new Array[BasicNode](arr.length)
@@ -723,7 +724,7 @@ extends scala.collection.concurrent.Map[K, V]
private def RDCSS_ROOT(ov: INode[K, V], expectedmain: MainNode[K, V], nv: INode[K, V]): Boolean = {
val desc = RDCSS_Descriptor(ov, expectedmain, nv)
if (CAS_ROOT(ov, desc)) {
- RDCSS_Complete(false)
+ RDCSS_Complete(abort = false)
/*READ*/desc.committed
} else false
}
@@ -920,8 +921,8 @@ object TrieMap extends MutableMapFactory[TrieMap] {
private[collection] class TrieMapIterator[K, V](var level: Int, private var ct: TrieMap[K, V], mustInit: Boolean = true) extends Iterator[(K, V)] {
- private var stack = new Array[Array[BasicNode]](7)
- private var stackpos = new Array[Int](7)
+ private val stack = new Array[Array[BasicNode]](7)
+ private val stackpos = new Array[Int](7)
private var depth = -1
private var subiter: Iterator[(K, V)] = null
private var current: KVNode[K, V] = null
@@ -1027,7 +1028,7 @@ private[collection] class TrieMapIterator[K, V](var level: Int, private var ct:
val (arr1, arr2) = stack(d).drop(stackpos(d) + 1).splitAt(rem / 2)
stack(d) = arr1
stackpos(d) = -1
- val it = newIterator(level + 1, ct, false)
+ val it = newIterator(level + 1, ct, _mustInit = false)
it.stack(0) = arr2
it.stackpos(0) = -1
it.depth = 0
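
The TrieMap hunks are mostly hygiene: named boolean arguments, val over var, dropping a redundant return, spelling out immutable.ListMap. They sit inside the operations that back the public concurrent map API, so here is a small usage sketch of scala.collection.concurrent.TrieMap as these internals expose it (expected results in comments):

    import scala.collection.concurrent.TrieMap

    val counts = TrieMap.empty[String, Int]
    counts.put("a", 1)            // None: no previous binding
    counts.putIfAbsent("a", 99)   // Some(1): key already bound, old value kept
    counts.replace("a", 1, 2)     // true: atomic compare-and-replace
    counts.remove("a", 1)         // false: the current value is 2, not 1

    // snapshot() is constant-time and can be iterated while writers keep going
    val frozen = counts.snapshot()
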
diff --git a/src/library/scala/collection/convert/DecorateAsJava.scala b/src/library/scala/collection/convert/DecorateAsJava.scala
index 87bcae3923..498bdc5943 100644
--- a/src/library/scala/collection/convert/DecorateAsJava.scala
+++ b/src/library/scala/collection/convert/DecorateAsJava.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
package convert
import java.{ lang => jl, util => ju }, java.util.{ concurrent => juc }
@@ -25,7 +26,7 @@ import scala.language.implicitConversions
* - `scala.collection.mutable.Buffer` <=> `java.util.List`
* - `scala.collection.mutable.Set` <=> `java.util.Set`
* - `scala.collection.mutable.Map` <=> `java.util.Map`
- * - `scala.collection.mutable.ConcurrentMap` <=> `java.util.concurrent.ConcurrentMap`
+ * - `scala.collection.mutable.concurrent.Map` <=> `java.util.concurrent.ConcurrentMap`
*
* In all cases, converting from a source type to a target type and back
* again will return the original source object, e.g.
@@ -279,26 +280,6 @@ trait DecorateAsJava {
/**
* Adds an `asJava` method that implicitly converts a Scala mutable
- * `ConcurrentMap` to a Java `ConcurrentMap`.
- *
- * The returned Java `ConcurrentMap` is backed by the provided Scala
- * `ConcurrentMap` and any side-effects of using it via the Java interface
- * will be visible via the Scala interface and vice versa.
- *
- * If the Scala `ConcurrentMap` was previously obtained from an implicit or
- * explicit call of `asConcurrentMap(java.util.concurrect.ConcurrentMap)`
- * then the original Java `ConcurrentMap` will be returned.
- *
- * @param m The `ConcurrentMap` to be converted.
- * @return An object with an `asJava` method that returns a Java
- * `ConcurrentMap` view of the argument.
- */
- @deprecated("Use `concurrent.Map` instead of `ConcurrentMap`.", "2.10.0")
- implicit def asJavaConcurrentMapConverter[A, B](m: mutable.ConcurrentMap[A, B]): AsJava[juc.ConcurrentMap[A, B]] =
- new AsJava(asJavaConcurrentMap(m))
-
- /**
- * Adds an `asJava` method that implicitly converts a Scala mutable
* `concurrent.Map` to a Java `ConcurrentMap`.
*
* The returned Java `ConcurrentMap` is backed by the provided Scala
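
With the deprecated mutable.ConcurrentMap decorator removed, concurrent.Map is the only Scala type DecorateAsJava still converts to java.util.concurrent.ConcurrentMap. A sketch of that remaining decorator used through scala.collection.JavaConverters (which mixes this trait in); the round trip in the assert is the contract spelled out in this file's scaladoc:

    import java.util.concurrent.{ ConcurrentMap => JConcurrentMap }
    import scala.collection.JavaConverters._
    import scala.collection.concurrent.TrieMap

    val scalaMap: scala.collection.concurrent.Map[String, Int] = TrieMap("a" -> 1)
    val javaView: JConcurrentMap[String, Int] = scalaMap.asJava

    javaView.putIfAbsent("b", 2)            // visible through scalaMap too: same backing data
    assert(javaView.asScala eq scalaMap)    // converting back unwraps to the original object
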
diff --git a/src/library/scala/collection/convert/DecorateAsScala.scala b/src/library/scala/collection/convert/DecorateAsScala.scala
index 94847a76e3..c724831c54 100644
--- a/src/library/scala/collection/convert/DecorateAsScala.scala
+++ b/src/library/scala/collection/convert/DecorateAsScala.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
package convert
import java.{ lang => jl, util => ju }, java.util.{ concurrent => juc }
@@ -144,25 +145,6 @@ trait DecorateAsScala {
/**
* Adds an `asScala` method that implicitly converts a Java `ConcurrentMap`
- * to a Scala mutable `ConcurrentMap`. The returned Scala `ConcurrentMap` is
- * backed by the provided Java `ConcurrentMap` and any side-effects of using
- * it via the Scala interface will be visible via the Java interface and
- * vice versa.
- *
- * If the Java `ConcurrentMap` was previously obtained from an implicit or
- * explicit call of `asConcurrentMap(scala.collection.mutable.ConcurrentMap)`
- * then the original Scala `ConcurrentMap` will be returned.
- *
- * @param m The `ConcurrentMap` to be converted.
- * @return An object with an `asScala` method that returns a Scala mutable
- * `ConcurrentMap` view of the argument.
- */
- @deprecated("Use `mapAsScalaConcurrentMapConverter` instead, and use `concurrent.Map` instead of `ConcurrentMap`.", "2.10.0")
- def asScalaConcurrentMapConverter[A, B](m: juc.ConcurrentMap[A, B]): AsScala[mutable.ConcurrentMap[A, B]] =
- new AsScala(asScalaConcurrentMap(m))
-
- /**
- * Adds an `asScala` method that implicitly converts a Java `ConcurrentMap`
* to a Scala mutable `concurrent.Map`. The returned Scala `concurrent.Map` is
* backed by the provided Java `ConcurrentMap` and any side-effects of using
* it via the Scala interface will be visible via the Java interface and
diff --git a/src/library/scala/collection/convert/Decorators.scala b/src/library/scala/collection/convert/Decorators.scala
index e2c46c1e4f..d232fa04e1 100644
--- a/src/library/scala/collection/convert/Decorators.scala
+++ b/src/library/scala/collection/convert/Decorators.scala
@@ -6,10 +6,11 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
package convert
-import java.{ lang => jl, util => ju }, java.util.{ concurrent => juc }
+import java.{ util => ju }
private[collection] trait Decorators {
/** Generic class containing the `asJava` converter method */
diff --git a/src/library/scala/collection/convert/WrapAsJava.scala b/src/library/scala/collection/convert/WrapAsJava.scala
index 5e6126a7cf..e75a0e2981 100644
--- a/src/library/scala/collection/convert/WrapAsJava.scala
+++ b/src/library/scala/collection/convert/WrapAsJava.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
package convert
import java.{ lang => jl, util => ju }, java.util.{ concurrent => juc }
@@ -235,27 +236,6 @@ trait WrapAsJava {
}
/**
- * Implicitly converts a Scala mutable `ConcurrentMap` to a Java
- * `ConcurrentMap`.
- *
- * The returned Java `ConcurrentMap` is backed by the provided Scala
- * `ConcurrentMap` and any side-effects of using it via the Java interface
- * will be visible via the Scala interface and vice versa.
- *
- * If the Scala `ConcurrentMap` was previously obtained from an implicit or
- * explicit call of `asScalaConcurrentMap(java.util.concurrect.ConcurrentMap)`
- * then the original Java ConcurrentMap will be returned.
- *
- * @param m The `ConcurrentMap` to be converted.
- * @return A Java `ConcurrentMap` view of the argument.
- */
- @deprecated("Use `concurrent.Map` instead of `ConcurrentMap`.", "2.10.0")
- implicit def asJavaConcurrentMap[A, B](m: mutable.ConcurrentMap[A, B]): juc.ConcurrentMap[A, B] = m match {
- case JConcurrentMapDeprecatedWrapper(wrapped) => wrapped
- case _ => new ConcurrentMapDeprecatedWrapper(m)
- }
-
- /**
* Implicitly converts a Scala mutable `concurrent.Map` to a Java
* `ConcurrentMap`.
*
diff --git a/src/library/scala/collection/convert/WrapAsScala.scala b/src/library/scala/collection/convert/WrapAsScala.scala
index ffcca62291..d4ab451b0d 100644
--- a/src/library/scala/collection/convert/WrapAsScala.scala
+++ b/src/library/scala/collection/convert/WrapAsScala.scala
@@ -6,36 +6,14 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
package convert
import java.{ lang => jl, util => ju }, java.util.{ concurrent => juc }
import scala.language.implicitConversions
-trait LowPriorityWrapAsScala {
- this: WrapAsScala =>
-
- import Wrappers._
-
- /**
- * Implicitly converts a Java ConcurrentMap to a Scala mutable ConcurrentMap.
- * The returned Scala ConcurrentMap is backed by the provided Java
- * ConcurrentMap and any side-effects of using it via the Scala interface will
- * be visible via the Java interface and vice versa.
- *
- * If the Java ConcurrentMap was previously obtained from an implicit or
- * explicit call of `asConcurrentMap(scala.collection.mutable.ConcurrentMap)`
- * then the original Scala ConcurrentMap will be returned.
- *
- * @param m The ConcurrentMap to be converted.
- * @return A Scala mutable ConcurrentMap view of the argument.
- */
- @deprecated("Use `mapAsScalaConcurrentMap` instead, and use `concurrent.Map` instead of `ConcurrentMap`.", "2.10.0")
- implicit def mapAsScalaDeprecatedConcurrentMap[A, B](m: juc.ConcurrentMap[A, B]): mutable.ConcurrentMap[A, B] =
- asScalaConcurrentMap(m)
-}
-
-trait WrapAsScala extends LowPriorityWrapAsScala {
+trait WrapAsScala {
import Wrappers._
/**
* Implicitly converts a Java `Iterator` to a Scala `Iterator`.
@@ -178,25 +156,6 @@ trait WrapAsScala extends LowPriorityWrapAsScala {
* @param m The ConcurrentMap to be converted.
* @return A Scala mutable ConcurrentMap view of the argument.
*/
- @deprecated("Use `mapAsScalaConcurrentMap` instead, and use `concurrent.Map` instead of `ConcurrentMap`.", "2.10.0")
- def asScalaConcurrentMap[A, B](m: juc.ConcurrentMap[A, B]): mutable.ConcurrentMap[A, B] = m match {
- case cmw: ConcurrentMapDeprecatedWrapper[a, b] => cmw.underlying
- case _ => new JConcurrentMapDeprecatedWrapper(m)
- }
-
- /**
- * Implicitly converts a Java ConcurrentMap to a Scala mutable ConcurrentMap.
- * The returned Scala ConcurrentMap is backed by the provided Java
- * ConcurrentMap and any side-effects of using it via the Scala interface will
- * be visible via the Java interface and vice versa.
- *
- * If the Java ConcurrentMap was previously obtained from an implicit or
- * explicit call of `asConcurrentMap(scala.collection.mutable.ConcurrentMap)`
- * then the original Scala ConcurrentMap will be returned.
- *
- * @param m The ConcurrentMap to be converted.
- * @return A Scala mutable ConcurrentMap view of the argument.
- */
implicit def mapAsScalaConcurrentMap[A, B](m: juc.ConcurrentMap[A, B]): concurrent.Map[A, B] = m match {
case cmw: ConcurrentMapWrapper[a, b] => cmw.underlying
case _ => new JConcurrentMapWrapper(m)
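
On the wrapping side, the low-priority trait is gone entirely, leaving mapAsScalaConcurrentMap as the single conversion from juc.ConcurrentMap, and it now yields a concurrent.Map rather than the deprecated mutable.ConcurrentMap. A sketch calling it explicitly via scala.collection.JavaConversions (importing the whole object makes the same conversion implicit):

    import java.util.concurrent.ConcurrentHashMap
    import scala.collection.JavaConversions.mapAsScalaConcurrentMap
    import scala.collection.concurrent

    val jmap = new ConcurrentHashMap[String, Int]()
    val smap: concurrent.Map[String, Int] = mapAsScalaConcurrentMap(jmap)

    smap.putIfAbsent("a", 1)    // None: inserted, and visible through jmap as well
    jmap.put("b", 2)
    smap.get("b")               // Some(2): both views share the same underlying map
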
diff --git a/src/library/scala/collection/convert/Wrappers.scala b/src/library/scala/collection/convert/Wrappers.scala
index 20add3365d..4410ddc7d8 100644
--- a/src/library/scala/collection/convert/Wrappers.scala
+++ b/src/library/scala/collection/convert/Wrappers.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
package convert
import java.{ lang => jl, util => ju }, java.util.{ concurrent => juc }
@@ -27,9 +28,9 @@ private[collection] trait Wrappers {
case class IteratorWrapper[A](underlying: Iterator[A]) extends ju.Iterator[A] with ju.Enumeration[A] {
def hasNext = underlying.hasNext
- def next() = underlying.next
+ def next() = underlying.next()
def hasMoreElements = underlying.hasNext
- def nextElement() = underlying.next
+ def nextElement() = underlying.next()
def remove() = throw new UnsupportedOperationException
}
@@ -81,7 +82,7 @@ private[collection] trait Wrappers {
override def remove(i: Int) = underlying remove i
}
- case class JListWrapper[A](val underlying: ju.List[A]) extends mutable.AbstractBuffer[A] with mutable.Buffer[A] {
+ case class JListWrapper[A](underlying: ju.List[A]) extends mutable.AbstractBuffer[A] with mutable.Buffer[A] {
def length = underlying.size
override def isEmpty = underlying.isEmpty
override def iterator: Iterator[A] = underlying.iterator
@@ -108,7 +109,7 @@ private[collection] trait Wrappers {
val ui = underlying.iterator
var prev: Option[A] = None
def hasNext = ui.hasNext
- def next = { val e = ui.next; prev = Some(e); e }
+ def next = { val e = ui.next(); prev = Some(e); e }
def remove = prev match {
case Some(e) =>
underlying match {
@@ -180,7 +181,7 @@ private[collection] trait Wrappers {
def hasNext = ui.hasNext
def next() = {
- val (k, v) = ui.next
+ val (k, v) = ui.next()
prev = Some(k)
new ju.Map.Entry[A, B] {
import scala.util.hashing.byteswap32
@@ -272,32 +273,10 @@ private[collection] trait Wrappers {
override def empty: Repr = null.asInstanceOf[Repr]
}
- case class JMapWrapper[A, B](val underlying : ju.Map[A, B]) extends mutable.AbstractMap[A, B] with JMapWrapperLike[A, B, JMapWrapper[A, B]] {
+ case class JMapWrapper[A, B](underlying : ju.Map[A, B]) extends mutable.AbstractMap[A, B] with JMapWrapperLike[A, B, JMapWrapper[A, B]] {
override def empty = JMapWrapper(new ju.HashMap[A, B])
}
- class ConcurrentMapDeprecatedWrapper[A, B](override val underlying: mutable.ConcurrentMap[A, B]) extends MutableMapWrapper[A, B](underlying) with juc.ConcurrentMap[A, B] {
-
- def putIfAbsent(k: A, v: B) = underlying.putIfAbsent(k, v) match {
- case Some(v) => v
- case None => null.asInstanceOf[B]
- }
-
- def remove(k: AnyRef, v: AnyRef) = try {
- underlying.remove(k.asInstanceOf[A], v.asInstanceOf[B])
- } catch {
- case ex: ClassCastException =>
- false
- }
-
- def replace(k: A, v: B): B = underlying.replace(k, v) match {
- case Some(v) => v
- case None => null.asInstanceOf[B]
- }
-
- def replace(k: A, oldval: B, newval: B) = underlying.replace(k, oldval, newval)
- }
-
class ConcurrentMapWrapper[A, B](override val underlying: concurrent.Map[A, B]) extends MutableMapWrapper[A, B](underlying) with juc.ConcurrentMap[A, B] {
def putIfAbsent(k: A, v: B) = underlying.putIfAbsent(k, v) match {
@@ -320,32 +299,7 @@ private[collection] trait Wrappers {
def replace(k: A, oldval: B, newval: B) = underlying.replace(k, oldval, newval)
}
- case class JConcurrentMapDeprecatedWrapper[A, B](val underlying: juc.ConcurrentMap[A, B]) extends mutable.AbstractMap[A, B] with JMapWrapperLike[A, B, JConcurrentMapDeprecatedWrapper[A, B]] with mutable.ConcurrentMap[A, B] {
- override def get(k: A) = {
- val v = underlying get k
- if (v != null) Some(v)
- else None
- }
-
- override def empty = new JConcurrentMapDeprecatedWrapper(new juc.ConcurrentHashMap[A, B])
-
- def putIfAbsent(k: A, v: B): Option[B] = {
- val r = underlying.putIfAbsent(k, v)
- if (r != null) Some(r) else None
- }
-
- def remove(k: A, v: B): Boolean = underlying.remove(k, v)
-
- def replace(k: A, v: B): Option[B] = {
- val prev = underlying.replace(k, v)
- if (prev != null) Some(prev) else None
- }
-
- def replace(k: A, oldvalue: B, newvalue: B): Boolean =
- underlying.replace(k, oldvalue, newvalue)
- }
-
- case class JConcurrentMapWrapper[A, B](val underlying: juc.ConcurrentMap[A, B]) extends mutable.AbstractMap[A, B] with JMapWrapperLike[A, B, JConcurrentMapWrapper[A, B]] with concurrent.Map[A, B] {
+ case class JConcurrentMapWrapper[A, B](underlying: juc.ConcurrentMap[A, B]) extends mutable.AbstractMap[A, B] with JMapWrapperLike[A, B, JConcurrentMapWrapper[A, B]] with concurrent.Map[A, B] {
override def get(k: A) = {
val v = underlying get k
if (v != null) Some(v)
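
The surviving ConcurrentMapWrapper / JConcurrentMapWrapper pair translate between the Scala signatures (Option results, no nulls) and the Java ones (null meaning "no previous mapping"). A stand-alone sketch of just that translation, with hypothetical helper names mirroring the putIfAbsent/replace bodies shown above:

    // Hypothetical helpers mirroring what the wrappers above do at the boundary:
    // the Scala side speaks Option, the Java side speaks null.
    def optionToNull[B](o: Option[B]): B = o match {
      case Some(v) => v
      case None    => null.asInstanceOf[B]   // what the Java ConcurrentMap API expects
    }

    def nullToOption[B](v: B): Option[B] =
      if (v != null) Some(v) else None       // what the Scala concurrent.Map API expects

    // e.g. a Java-facing putIfAbsent delegating to a Scala concurrent.Map `m` would be
    //   def putIfAbsent(k: A, v: B): B = optionToNull(m.putIfAbsent(k, v))
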
diff --git a/src/library/scala/collection/convert/package.scala b/src/library/scala/collection/convert/package.scala
index ea66101aca..13970f9a3e 100644
--- a/src/library/scala/collection/convert/package.scala
+++ b/src/library/scala/collection/convert/package.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
package object convert {
val decorateAsJava = new DecorateAsJava { }
diff --git a/src/library/scala/collection/generic/BitOperations.scala b/src/library/scala/collection/generic/BitOperations.scala
index c45ebcf982..d430ece2f5 100644
--- a/src/library/scala/collection/generic/BitOperations.scala
+++ b/src/library/scala/collection/generic/BitOperations.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
package generic
/** Some bit operations.
diff --git a/src/library/scala/collection/generic/BitSetFactory.scala b/src/library/scala/collection/generic/BitSetFactory.scala
index 46e2d29612..2e3aae31ac 100644
--- a/src/library/scala/collection/generic/BitSetFactory.scala
+++ b/src/library/scala/collection/generic/BitSetFactory.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
package generic
import scala.collection._
diff --git a/src/library/scala/collection/generic/CanBuildFrom.scala b/src/library/scala/collection/generic/CanBuildFrom.scala
index 73fd4fc026..24e5b2a1dd 100644
--- a/src/library/scala/collection/generic/CanBuildFrom.scala
+++ b/src/library/scala/collection/generic/CanBuildFrom.scala
@@ -7,7 +7,8 @@
\* */
-package scala.collection
+package scala
+package collection
package generic
import mutable.Builder
diff --git a/src/library/scala/collection/generic/CanCombineFrom.scala b/src/library/scala/collection/generic/CanCombineFrom.scala
index 9ca3332ba0..7f70b4580a 100644
--- a/src/library/scala/collection/generic/CanCombineFrom.scala
+++ b/src/library/scala/collection/generic/CanCombineFrom.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
package generic
import scala.collection.parallel._
diff --git a/src/library/scala/collection/generic/ClassTagTraversableFactory.scala b/src/library/scala/collection/generic/ClassTagTraversableFactory.scala
index 85cdbd7276..e3db40123d 100644
--- a/src/library/scala/collection/generic/ClassTagTraversableFactory.scala
+++ b/src/library/scala/collection/generic/ClassTagTraversableFactory.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
package generic
import scala.language.higherKinds
diff --git a/src/library/scala/collection/generic/Clearable.scala b/src/library/scala/collection/generic/Clearable.scala
index a04ecb2a68..3c496051c4 100644
--- a/src/library/scala/collection/generic/Clearable.scala
+++ b/src/library/scala/collection/generic/Clearable.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
package generic
/** This trait forms part of collections that can be cleared
diff --git a/src/library/scala/collection/generic/FilterMonadic.scala b/src/library/scala/collection/generic/FilterMonadic.scala
index e21f0be898..8aefbdb926 100755
--- a/src/library/scala/collection/generic/FilterMonadic.scala
+++ b/src/library/scala/collection/generic/FilterMonadic.scala
@@ -6,8 +6,9 @@
** |/ **
\* */
-package scala.collection.generic
-
+package scala
+package collection
+package generic
/** A template trait that contains just the `map`, `flatMap`, `foreach` and `withFilter` methods
* of trait `TraversableLike`.
diff --git a/src/library/scala/collection/generic/GenMapFactory.scala b/src/library/scala/collection/generic/GenMapFactory.scala
index e869bba51a..ae3150115f 100644
--- a/src/library/scala/collection/generic/GenMapFactory.scala
+++ b/src/library/scala/collection/generic/GenMapFactory.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
package generic
import mutable.{Builder, MapBuilder}
@@ -44,7 +45,7 @@ abstract class GenMapFactory[CC[A, B] <: GenMap[A, B] with GenMapLike[A, B, CC[A
* @tparam B the type of the associated values
* @return a new $coll consisting key/value pairs given by `elems`.
*/
- def apply[A, B](elems: (A, B)*): CC[A, B] = (newBuilder[A, B] ++= elems).result
+ def apply[A, B](elems: (A, B)*): CC[A, B] = (newBuilder[A, B] ++= elems).result()
/** The default builder for $Coll objects.
* @tparam A the type of the keys
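
The apply change here is only the post-2.10 convention of calling the side-effecting result() with parentheses, but it also spells out the factory pattern used throughout these files: obtain newBuilder, feed it the elements, call result() exactly once. The same pattern written out by hand with the standard immutable Map companion:

    import scala.collection.immutable

    // The factory's apply is equivalent to driving the builder by hand:
    val viaApply = immutable.Map("a" -> 1, "b" -> 2)

    val b = immutable.Map.newBuilder[String, Int]
    b += ("a" -> 1)
    b += ("b" -> 2)
    val viaBuilder = b.result()   // call result() once; reusing the builder afterwards is not guaranteed

    assert(viaApply == viaBuilder)
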
diff --git a/src/library/scala/collection/generic/GenSeqFactory.scala b/src/library/scala/collection/generic/GenSeqFactory.scala
index dd375c567c..6afbb2e2fb 100644
--- a/src/library/scala/collection/generic/GenSeqFactory.scala
+++ b/src/library/scala/collection/generic/GenSeqFactory.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
package generic
import scala.language.higherKinds
diff --git a/src/library/scala/collection/generic/GenSetFactory.scala b/src/library/scala/collection/generic/GenSetFactory.scala
index 9774805cf8..800f66eb53 100644
--- a/src/library/scala/collection/generic/GenSetFactory.scala
+++ b/src/library/scala/collection/generic/GenSetFactory.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
package generic
import mutable.Builder
diff --git a/src/library/scala/collection/generic/GenTraversableFactory.scala b/src/library/scala/collection/generic/GenTraversableFactory.scala
index 2d3f7e609b..2092c0c5f5 100644
--- a/src/library/scala/collection/generic/GenTraversableFactory.scala
+++ b/src/library/scala/collection/generic/GenTraversableFactory.scala
@@ -7,7 +7,8 @@
\* */
-package scala.collection
+package scala
+package collection
package generic
import scala.language.higherKinds
@@ -38,12 +39,10 @@ import scala.language.higherKinds
abstract class GenTraversableFactory[CC[X] <: GenTraversable[X] with GenericTraversableTemplate[X, CC]]
extends GenericCompanion[CC] {
- // A default implementation of GenericCanBuildFrom which can be cast
- // to whatever is desired.
- private class ReusableCBF extends GenericCanBuildFrom[Nothing] {
+ private[this] val ReusableCBFInstance: GenericCanBuildFrom[Nothing] = new GenericCanBuildFrom[Nothing] {
override def apply() = newBuilder[Nothing]
}
- lazy val ReusableCBF: GenericCanBuildFrom[Nothing] = new ReusableCBF
+ def ReusableCBF: GenericCanBuildFrom[Nothing] = ReusableCBFInstance
/** A generic implementation of the `CanBuildFrom` trait, which forwards
* all calls to `apply(from)` to the `genericBuilder` method of
@@ -75,7 +74,7 @@ extends GenericCompanion[CC] {
b.sizeHint(xss.map(_.size).sum)
for (xs <- xss.seq) b ++= xs
- b.result
+ b.result()
}
/** Produces a $coll containing the results of some element computation a number of times.
@@ -91,7 +90,7 @@ extends GenericCompanion[CC] {
b += elem
i += 1
}
- b.result
+ b.result()
}
/** Produces a two-dimensional $coll containing the results of some element computation a number of times.
@@ -149,7 +148,7 @@ extends GenericCompanion[CC] {
b += f(i)
i += 1
}
- b.result
+ b.result()
}
/** Produces a two-dimensional $coll containing values of a given function over ranges of integer values starting from 0.
@@ -218,13 +217,13 @@ extends GenericCompanion[CC] {
if (step == zero) throw new IllegalArgumentException("zero step")
val b = newBuilder[T]
- b sizeHint immutable.NumericRange.count(start, end, step, false)
+ b sizeHint immutable.NumericRange.count(start, end, step, isInclusive = false)
var i = start
while (if (step < zero) end < i else i < end) {
b += i
i += step
}
- b.result
+ b.result()
}
/** Produces a $coll containing repeated applications of a function to a start value.
@@ -248,7 +247,6 @@ extends GenericCompanion[CC] {
b += acc
}
}
- b.result
+ b.result()
}
}
-
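
The ReusableCBF rewrite swaps a lazy val holding a private subclass for an eagerly built instance behind an overridable def, so a subclass (see IndexedSeqFactory further down in this diff) can decide which shared CanBuildFrom instance gets handed out; the remaining hunks are the result() convention and named boolean arguments. From the caller's side, these are the builder loops behind the familiar factory helpers:

    // All of these go through a newBuilder / += / result() loop like the ones above.
    val squares = List.tabulate(5)(i => i * i)   // List(0, 1, 4, 9, 16)
    val ones    = Vector.fill(3)(1)              // Vector(1, 1, 1)
    val powers  = Seq.iterate(1, 5)(_ * 2)       // Seq(1, 2, 4, 8, 16)
    val stepped = List.range(0, 10, 3)           // List(0, 3, 6, 9): the end is exclusive
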
diff --git a/src/library/scala/collection/generic/GenericClassTagCompanion.scala b/src/library/scala/collection/generic/GenericClassTagCompanion.scala
index a587bbf544..a8ac2bf738 100644
--- a/src/library/scala/collection/generic/GenericClassTagCompanion.scala
+++ b/src/library/scala/collection/generic/GenericClassTagCompanion.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
package generic
import mutable.Builder
@@ -19,15 +20,15 @@ import scala.reflect.ClassTag
* @author Aleksandar Prokopec
*/
abstract class GenericClassTagCompanion[+CC[X] <: Traversable[X]] {
- type Coll = CC[_]
+ protected[this] type Coll = CC[_]
def newBuilder[A](implicit ord: ClassTag[A]): Builder[A, CC[A]]
- def empty[A: ClassTag]: CC[A] = newBuilder[A].result
+ def empty[A: ClassTag]: CC[A] = newBuilder[A].result()
def apply[A](elems: A*)(implicit ord: ClassTag[A]): CC[A] = {
val b = newBuilder[A]
b ++= elems
- b.result
+ b.result()
}
}
diff --git a/src/library/scala/collection/generic/GenericClassTagTraversableTemplate.scala b/src/library/scala/collection/generic/GenericClassTagTraversableTemplate.scala
index f327710848..090cd729a4 100644
--- a/src/library/scala/collection/generic/GenericClassTagTraversableTemplate.scala
+++ b/src/library/scala/collection/generic/GenericClassTagTraversableTemplate.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
package generic
import mutable.Builder
diff --git a/src/library/scala/collection/generic/GenericCompanion.scala b/src/library/scala/collection/generic/GenericCompanion.scala
index 5b03f8e5c6..67d0a9c7f7 100644
--- a/src/library/scala/collection/generic/GenericCompanion.scala
+++ b/src/library/scala/collection/generic/GenericCompanion.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
package generic
import mutable.Builder
@@ -24,7 +25,7 @@ import scala.language.higherKinds
*/
abstract class GenericCompanion[+CC[X] <: GenTraversable[X]] {
/** The underlying collection type with unknown element type */
- type Coll = CC[_]
+ protected[this] type Coll = CC[_]
/** The default builder for `$Coll` objects.
* @tparam A the type of the ${coll}'s elements
@@ -34,7 +35,7 @@ abstract class GenericCompanion[+CC[X] <: GenTraversable[X]] {
/** An empty collection of type `$Coll[A]`
* @tparam A the type of the ${coll}'s elements
*/
- def empty[A]: CC[A] = newBuilder[A].result
+ def empty[A]: CC[A] = newBuilder[A].result()
/** Creates a $coll with the specified elements.
* @tparam A the type of the ${coll}'s elements
@@ -46,7 +47,7 @@ abstract class GenericCompanion[+CC[X] <: GenTraversable[X]] {
else {
val b = newBuilder[A]
b ++= elems
- b.result
+ b.result()
}
}
}
diff --git a/src/library/scala/collection/generic/GenericOrderedCompanion.scala b/src/library/scala/collection/generic/GenericOrderedCompanion.scala
index a9a50a1c35..5b328bff6c 100644
--- a/src/library/scala/collection/generic/GenericOrderedCompanion.scala
+++ b/src/library/scala/collection/generic/GenericOrderedCompanion.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
package generic
import mutable.Builder
@@ -19,16 +20,16 @@ import scala.language.higherKinds
* @since 2.8
*/
abstract class GenericOrderedCompanion[+CC[X] <: Traversable[X]] {
- type Coll = CC[_]
+ protected[this] type Coll = CC[_]
def newBuilder[A](implicit ord: Ordering[A]): Builder[A, CC[A]]
- def empty[A: Ordering]: CC[A] = newBuilder[A].result
+ def empty[A: Ordering]: CC[A] = newBuilder[A].result()
def apply[A](elems: A*)(implicit ord: Ordering[A]): CC[A] = {
val b = newBuilder[A]
b ++= elems
- b.result
+ b.result()
}
}
diff --git a/src/library/scala/collection/generic/GenericOrderedTraversableTemplate.scala b/src/library/scala/collection/generic/GenericOrderedTraversableTemplate.scala
index a624e8ca93..c1a41ce7c4 100644
--- a/src/library/scala/collection/generic/GenericOrderedTraversableTemplate.scala
+++ b/src/library/scala/collection/generic/GenericOrderedTraversableTemplate.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
package generic
import mutable.Builder
diff --git a/src/library/scala/collection/generic/GenericParCompanion.scala b/src/library/scala/collection/generic/GenericParCompanion.scala
index bb39461e7e..432b9135f8 100644
--- a/src/library/scala/collection/generic/GenericParCompanion.scala
+++ b/src/library/scala/collection/generic/GenericParCompanion.scala
@@ -6,7 +6,9 @@
** |/ **
\* */
-package scala.collection.generic
+package scala
+package collection
+package generic
import scala.collection.parallel.Combiner
import scala.collection.parallel.ParIterable
diff --git a/src/library/scala/collection/generic/GenericParTemplate.scala b/src/library/scala/collection/generic/GenericParTemplate.scala
index 94c76630d6..b9b7043270 100644
--- a/src/library/scala/collection/generic/GenericParTemplate.scala
+++ b/src/library/scala/collection/generic/GenericParTemplate.scala
@@ -6,7 +6,9 @@
** |/ **
\* */
-package scala.collection.generic
+package scala
+package collection
+package generic
import scala.collection.parallel.Combiner
import scala.collection.parallel.ParIterable
diff --git a/src/library/scala/collection/generic/GenericSeqCompanion.scala b/src/library/scala/collection/generic/GenericSeqCompanion.scala
index 8b2f8a0fcb..34b20f23a2 100644
--- a/src/library/scala/collection/generic/GenericSeqCompanion.scala
+++ b/src/library/scala/collection/generic/GenericSeqCompanion.scala
@@ -7,7 +7,8 @@
\* */
-package scala.collection
+package scala
+package collection
package generic
import scala.language.higherKinds
diff --git a/src/library/scala/collection/generic/GenericSetTemplate.scala b/src/library/scala/collection/generic/GenericSetTemplate.scala
index ecfdcffbc5..2cadd14948 100644
--- a/src/library/scala/collection/generic/GenericSetTemplate.scala
+++ b/src/library/scala/collection/generic/GenericSetTemplate.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
package generic
import scala.language.higherKinds
/**
diff --git a/src/library/scala/collection/generic/GenericTraversableTemplate.scala b/src/library/scala/collection/generic/GenericTraversableTemplate.scala
index f7a8a9aa88..cd48cd23f4 100644
--- a/src/library/scala/collection/generic/GenericTraversableTemplate.scala
+++ b/src/library/scala/collection/generic/GenericTraversableTemplate.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
package generic
import mutable.Builder
@@ -73,11 +74,20 @@ trait GenericTraversableTemplate[+A, +CC[X] <: GenTraversable[X]] extends HasNew
/** Converts this $coll of pairs into two collections of the first and second
* half of each pair.
*
+ * {{{
+ * val xs = $Coll(
+ * (1, "one"),
+ * (2, "two"),
+ * (3, "three")).unzip
+ * // xs == ($Coll(1, 2, 3),
+ * // $Coll(one, two, three))
+ * }}}
+ *
* @tparam A1 the type of the first half of the element pairs
* @tparam A2 the type of the second half of the element pairs
* @param asPair an implicit conversion which asserts that the element type
* of this $coll is a pair.
- * @return a pair ${coll}s, containing the first, respectively second
+ * @return a pair of ${coll}s, containing the first, respectively second
* half of each element pair of this $coll.
*/
def unzip[A1, A2](implicit asPair: A => (A1, A2)): (CC[A1], CC[A2]) = {
@@ -88,18 +98,28 @@ trait GenericTraversableTemplate[+A, +CC[X] <: GenTraversable[X]] extends HasNew
b1 += x
b2 += y
}
- (b1.result, b2.result)
+ (b1.result(), b2.result())
}
/** Converts this $coll of triples into three collections of the first, second,
* and third element of each triple.
*
+ * {{{
+ * val xs = $Coll(
+ * (1, "one", '1'),
+ * (2, "two", '2'),
+ * (3, "three", '3')).unzip3
+ * // xs == ($Coll(1, 2, 3),
+ * // $Coll(one, two, three),
+ * // $Coll(1, 2, 3))
+ * }}}
+ *
* @tparam A1 the type of the first member of the element triples
* @tparam A2 the type of the second member of the element triples
* @tparam A3 the type of the third member of the element triples
* @param asTriple an implicit conversion which asserts that the element type
* of this $coll is a triple.
- * @return a triple ${coll}s, containing the first, second, respectively
+ * @return a triple of ${coll}s, containing the first, second, respectively
* third member of each element triple of this $coll.
*/
def unzip3[A1, A2, A3](implicit asTriple: A => (A1, A2, A3)): (CC[A1], CC[A2], CC[A3]) = {
@@ -113,7 +133,7 @@ trait GenericTraversableTemplate[+A, +CC[X] <: GenTraversable[X]] extends HasNew
b2 += y
b3 += z
}
- (b1.result, b2.result, b3.result)
+ (b1.result(), b2.result(), b3.result())
}
/** Converts this $coll of traversable collections into
@@ -133,10 +153,16 @@ trait GenericTraversableTemplate[+A, +CC[X] <: GenTraversable[X]] extends HasNew
* static type of $coll. For example:
*
* {{{
- * val xs = List(Set(1, 2, 3), Set(1, 2, 3))
+ * val xs = List(
+ * Set(1, 2, 3),
+ * Set(1, 2, 3)
+ * ).flatten
* // xs == List(1, 2, 3, 1, 2, 3)
*
- * val ys = Set(List(1, 2, 3), List(3, 2, 1))
+ * val ys = Set(
+ * List(1, 2, 3),
+ * List(3, 2, 1)
+ * ).flatten
* // ys == Set(1, 2, 3)
* }}}
*/
@@ -144,12 +170,33 @@ trait GenericTraversableTemplate[+A, +CC[X] <: GenTraversable[X]] extends HasNew
val b = genericBuilder[B]
for (xs <- sequential)
b ++= asTraversable(xs).seq
- b.result
+ b.result()
}
/** Transposes this $coll of traversable collections into
* a $coll of ${coll}s.
*
+ * The resulting collection's type will be guided by the
+ * static type of $coll. For example:
+ *
+ * {{{
+ * val xs = List(
+ * Set(1, 2, 3),
+ * Set(4, 5, 6)).transpose
+ * // xs == List(
+ * // List(1, 4),
+ * // List(2, 5),
+ * // List(3, 6))
+ *
+ * val ys = Vector(
+ * List(1, 2, 3),
+ * List(4, 5, 6)).transpose
+ * // ys == Vector(
+ * // Vector(1, 4),
+ * // Vector(2, 5),
+ * // Vector(3, 6))
+ * }}}
+ *
* @tparam B the type of the elements of each traversable collection.
* @param asTraversable an implicit conversion which asserts that the
* element type of this $coll is a `Traversable`.
@@ -161,7 +208,7 @@ trait GenericTraversableTemplate[+A, +CC[X] <: GenTraversable[X]] extends HasNew
@migration("`transpose` throws an `IllegalArgumentException` if collections are not uniformly sized.", "2.9.0")
def transpose[B](implicit asTraversable: A => /*<:<!!!*/ GenTraversableOnce[B]): CC[CC[B] @uncheckedVariance] = {
if (isEmpty)
- return genericBuilder[CC[B]].result
+ return genericBuilder[CC[B]].result()
def fail = throw new IllegalArgumentException("transpose requires all collections have the same size")
@@ -179,7 +226,7 @@ trait GenericTraversableTemplate[+A, +CC[X] <: GenTraversable[X]] extends HasNew
}
val bb = genericBuilder[CC[B]]
for (b <- bs) bb += b.result
- bb.result
+ bb.result()
}
}
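
The new scaladoc examples above are written against the abstract $Coll placeholder; instantiated with concrete collections they come out as follows (a REPL-pasteable sketch):

    val pairs = List((1, "one"), (2, "two"), (3, "three"))
    val (nums, words) = pairs.unzip     // (List(1, 2, 3), List(one, two, three))

    val triples = Vector((1, "one", '1'), (2, "two", '2'))
    val (as, bs, cs) = triples.unzip3   // (Vector(1, 2), Vector(one, two), Vector(1, 2))

    val grid = List(Set(1, 2, 3), Set(4, 5, 6))
    grid.flatten                        // List(1, 2, 3, 4, 5, 6)
    grid.transpose                      // List(List(1, 4), List(2, 5), List(3, 6))
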
diff --git a/src/library/scala/collection/generic/Growable.scala b/src/library/scala/collection/generic/Growable.scala
index cb75212e3d..254d4566be 100644
--- a/src/library/scala/collection/generic/Growable.scala
+++ b/src/library/scala/collection/generic/Growable.scala
@@ -6,10 +6,12 @@
** |/ **
\* */
-
-package scala.collection
+package scala
+package collection
package generic
+import scala.annotation.tailrec
+
/** This trait forms part of collections that can be augmented
* using a `+=` operator and that can be cleared of all elements using
* a `clear` method.
@@ -45,7 +47,19 @@ trait Growable[-A] extends Clearable {
* @param xs the TraversableOnce producing the elements to $add.
* @return the $coll itself.
*/
- def ++=(xs: TraversableOnce[A]): this.type = { xs.seq foreach += ; this }
+ def ++=(xs: TraversableOnce[A]): this.type = {
+ @tailrec def loop(xs: scala.collection.LinearSeq[A]) {
+ if (xs.nonEmpty) {
+ this += xs.head
+ loop(xs.tail)
+ }
+ }
+ xs.seq match {
+ case xs: scala.collection.LinearSeq[_] => loop(xs)
+ case xs => xs foreach +=
+ }
+ this
+ }
/** Clears the $coll's contents. After this operation, the
* $coll is empty.
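
The rewritten ++= no longer hands a closure to foreach unconditionally: a LinearSeq argument is now consumed by an explicit @tailrec head/tail loop, while every other TraversableOnce still takes the foreach path. The observable behaviour is unchanged, as this small sketch shows:

    import scala.collection.mutable.ArrayBuffer

    val buf = ArrayBuffer.empty[Int]
    buf ++= List(1, 2, 3)     // LinearSeq: consumed head/tail-wise by the tailrec loop
    buf ++= Vector(4, 5)      // not a LinearSeq: falls back to foreach(+=)
    buf ++= Iterator(6)       // any TraversableOnce works the same as before
    // buf == ArrayBuffer(1, 2, 3, 4, 5, 6)
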
diff --git a/src/library/scala/collection/generic/HasNewBuilder.scala b/src/library/scala/collection/generic/HasNewBuilder.scala
index 1a981b487f..aa0ce6698d 100755
--- a/src/library/scala/collection/generic/HasNewBuilder.scala
+++ b/src/library/scala/collection/generic/HasNewBuilder.scala
@@ -5,7 +5,8 @@
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-package scala.collection
+package scala
+package collection
package generic
import mutable.Builder
diff --git a/src/library/scala/collection/generic/HasNewCombiner.scala b/src/library/scala/collection/generic/HasNewCombiner.scala
index 1ecfba19af..99a0722c3d 100644
--- a/src/library/scala/collection/generic/HasNewCombiner.scala
+++ b/src/library/scala/collection/generic/HasNewCombiner.scala
@@ -6,7 +6,9 @@
** |/ **
\* */
-package scala.collection.generic
+package scala
+package collection
+package generic
import scala.collection.parallel.Combiner
diff --git a/src/library/scala/collection/generic/ImmutableMapFactory.scala b/src/library/scala/collection/generic/ImmutableMapFactory.scala
index 4ce50a31f9..7d857bf1b4 100644
--- a/src/library/scala/collection/generic/ImmutableMapFactory.scala
+++ b/src/library/scala/collection/generic/ImmutableMapFactory.scala
@@ -7,7 +7,8 @@
\* */
-package scala.collection
+package scala
+package collection
package generic
import scala.language.higherKinds
diff --git a/src/library/scala/collection/generic/ImmutableSetFactory.scala b/src/library/scala/collection/generic/ImmutableSetFactory.scala
index 2e960e670d..f4d4e061bb 100644
--- a/src/library/scala/collection/generic/ImmutableSetFactory.scala
+++ b/src/library/scala/collection/generic/ImmutableSetFactory.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
package generic
import mutable.{ Builder, SetBuilder }
diff --git a/src/library/scala/collection/generic/ImmutableSortedMapFactory.scala b/src/library/scala/collection/generic/ImmutableSortedMapFactory.scala
index 7743fc2281..730e58a527 100644
--- a/src/library/scala/collection/generic/ImmutableSortedMapFactory.scala
+++ b/src/library/scala/collection/generic/ImmutableSortedMapFactory.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
package generic
import scala.language.higherKinds
diff --git a/src/library/scala/collection/generic/ImmutableSortedSetFactory.scala b/src/library/scala/collection/generic/ImmutableSortedSetFactory.scala
index 9914557b51..1fd4a8c99d 100644
--- a/src/library/scala/collection/generic/ImmutableSortedSetFactory.scala
+++ b/src/library/scala/collection/generic/ImmutableSortedSetFactory.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
package generic
import scala.language.higherKinds
diff --git a/src/library/scala/ScalaObject.scala b/src/library/scala/collection/generic/IndexedSeqFactory.scala
index f67dc3a6c5..ddc0141aa9 100644
--- a/src/library/scala/ScalaObject.scala
+++ b/src/library/scala/collection/generic/IndexedSeqFactory.scala
@@ -7,10 +7,16 @@
\* */
package scala
+package collection
+package generic
-/** Until scala 2.10.0 this marker trait was added to
- * scala-compiled classes. Now it only exists for backward
- * compatibility.
+import language.higherKinds
+
+/** A template for companion objects of IndexedSeq and subclasses thereof.
+ *
+ * @since 2.11
*/
-@deprecated("ScalaObject will be removed", "2.10.0")
-trait ScalaObject
+abstract class IndexedSeqFactory[CC[X] <: IndexedSeq[X] with GenericTraversableTemplate[X, CC]] extends SeqFactory[CC] {
+ override def ReusableCBF: GenericCanBuildFrom[Nothing] =
+ scala.collection.IndexedSeq.ReusableCBF.asInstanceOf[GenericCanBuildFrom[Nothing]]
+}
diff --git a/src/library/scala/collection/generic/IsSeqLike.scala b/src/library/scala/collection/generic/IsSeqLike.scala
new file mode 100644
index 0000000000..189aea4632
--- /dev/null
+++ b/src/library/scala/collection/generic/IsSeqLike.scala
@@ -0,0 +1,58 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala
+package collection
+package generic
+
+/** Type class witnessing that a collection representation type `Repr` has
+ * elements of type `A` and has a conversion to `SeqLike[A, Repr]`.
+ *
+ * This type enables simple enrichment of `Seq`s with extension methods which
+ * can make full use of the mechanics of the Scala collections framework in
+ * their implementation.
+ *
+ * Example usage:
+ * {{{
+ * class FilterMapImpl[A, Repr](val r: SeqLike[A, Repr]) {
+ * final def filterMap[B, That](f: A => Option[B])(implicit cbf: CanBuildFrom[Repr, B, That]): That =
+ * r.flatMap(f(_))
+ * }
+ * implicit def filterMap[Repr, A](r: Repr)(implicit fr: IsSeqLike[Repr]): FilterMapImpl[fr.A,Repr] =
+ * new FilterMapImpl(fr.conversion(r))
+ *
+ * val l = List(1, 2, 3, 4, 5)
+ * List(1, 2, 3, 4, 5) filterMap (i => if(i % 2 == 0) Some(i) else None)
+ * // == List(2, 4)
+ * }}}
+ *
+ * @see [[scala.collection.Seq]]
+ * @see [[scala.collection.generic.IsTraversableLike]]
+ */
+trait IsSeqLike[Repr] {
+ /** The type of elements we can traverse over. */
+ type A
+ /** A conversion from the representation type `Repr` to a `SeqLike[A,Repr]`. */
+ val conversion: Repr => SeqLike[A, Repr]
+}
+
+object IsSeqLike {
+ import scala.language.higherKinds
+
+ implicit val stringRepr: IsSeqLike[String] { type A = Char } =
+ new IsSeqLike[String] {
+ type A = Char
+ val conversion = implicitly[String => SeqLike[Char, String]]
+ }
+
+ implicit def seqLikeRepr[C[_], A0](implicit conv: C[A0] => SeqLike[A0,C[A0]]): IsSeqLike[C[A0]] { type A = A0 } =
+ new IsSeqLike[C[A0]] {
+ type A = A0
+ val conversion = conv
+ }
+}
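
The new IsSeqLike type class mirrors IsTraversableLike but witnesses a conversion to SeqLike, so an enrichment can use index- and order-aware operations and still apply uniformly to Seqs and to String (via stringRepr). A sketch in the spirit of the scaladoc's filterMap example; the SeqOps class and toSeqOps conversion below are user code, not part of the library:

    import scala.collection.SeqLike
    import scala.collection.generic.IsSeqLike
    import scala.language.implicitConversions

    // User-defined enrichment: available on any Repr that has an IsSeqLike instance.
    class SeqOps[A, Repr](s: SeqLike[A, Repr]) {
      def secondOption: Option[A] = if (s.length >= 2) Some(s(1)) else None
    }
    implicit def toSeqOps[Repr](r: Repr)(implicit fr: IsSeqLike[Repr]): SeqOps[fr.A, Repr] =
      new SeqOps(fr.conversion(r))

    List(1, 2, 3).secondOption   // Some(2)
    "ab".secondOption            // Some('b'): String qualifies through stringRepr
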
diff --git a/src/library/scala/collection/generic/IsTraversableLike.scala b/src/library/scala/collection/generic/IsTraversableLike.scala
index c70772d8f9..22cef555cc 100644
--- a/src/library/scala/collection/generic/IsTraversableLike.scala
+++ b/src/library/scala/collection/generic/IsTraversableLike.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
package generic
/** A trait which can be used to avoid code duplication when defining extension
diff --git a/src/library/scala/collection/generic/IsTraversableOnce.scala b/src/library/scala/collection/generic/IsTraversableOnce.scala
index bb5404c92d..3ee586ae63 100644
--- a/src/library/scala/collection/generic/IsTraversableOnce.scala
+++ b/src/library/scala/collection/generic/IsTraversableOnce.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
package generic
/** Type class witnessing that a collection representation type `Repr` has
diff --git a/src/library/scala/collection/generic/IterableForwarder.scala b/src/library/scala/collection/generic/IterableForwarder.scala
index 90ebcace84..5a54ed7f78 100644
--- a/src/library/scala/collection/generic/IterableForwarder.scala
+++ b/src/library/scala/collection/generic/IterableForwarder.scala
@@ -6,13 +6,12 @@
** |/ **
\* */
+package scala
+package collection
+package generic
-
-package scala.collection.generic
import scala.collection._
-import scala.collection.mutable.Buffer
-
/** This trait implements a forwarder for iterable objects. It forwards
* all calls to a different iterable object, except for
*
diff --git a/src/library/scala/collection/generic/MapFactory.scala b/src/library/scala/collection/generic/MapFactory.scala
index 565850bee2..b9f3d4b010 100644
--- a/src/library/scala/collection/generic/MapFactory.scala
+++ b/src/library/scala/collection/generic/MapFactory.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
package generic
diff --git a/src/library/scala/collection/generic/MutableMapFactory.scala b/src/library/scala/collection/generic/MutableMapFactory.scala
index ac139cc80c..14c5b6bac3 100644
--- a/src/library/scala/collection/generic/MutableMapFactory.scala
+++ b/src/library/scala/collection/generic/MutableMapFactory.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
package generic
import mutable.Builder
diff --git a/src/library/scala/collection/generic/MutableSetFactory.scala b/src/library/scala/collection/generic/MutableSetFactory.scala
index 9c69d53608..63944657fc 100644
--- a/src/library/scala/collection/generic/MutableSetFactory.scala
+++ b/src/library/scala/collection/generic/MutableSetFactory.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
package generic
import mutable.{ Builder, GrowingBuilder }
diff --git a/src/library/scala/collection/generic/MutableSortedSetFactory.scala b/src/library/scala/collection/generic/MutableSortedSetFactory.scala
index b9be83c3c4..0339a523e9 100644
--- a/src/library/scala/collection/generic/MutableSortedSetFactory.scala
+++ b/src/library/scala/collection/generic/MutableSortedSetFactory.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
package generic
import scala.collection.mutable.{ Builder, GrowingBuilder }
diff --git a/src/library/scala/collection/generic/OrderedTraversableFactory.scala b/src/library/scala/collection/generic/OrderedTraversableFactory.scala
index a2de108721..7657aff2aa 100644
--- a/src/library/scala/collection/generic/OrderedTraversableFactory.scala
+++ b/src/library/scala/collection/generic/OrderedTraversableFactory.scala
@@ -7,7 +7,8 @@
\* */
-package scala.collection
+package scala
+package collection
package generic
import scala.language.higherKinds
diff --git a/src/library/scala/collection/generic/ParFactory.scala b/src/library/scala/collection/generic/ParFactory.scala
index bb88d26dec..486e2a115e 100644
--- a/src/library/scala/collection/generic/ParFactory.scala
+++ b/src/library/scala/collection/generic/ParFactory.scala
@@ -6,7 +6,9 @@
** |/ **
\* */
-package scala.collection.generic
+package scala
+package collection
+package generic
import scala.collection.parallel.ParIterable
import scala.collection.parallel.Combiner
diff --git a/src/library/scala/collection/generic/ParMapFactory.scala b/src/library/scala/collection/generic/ParMapFactory.scala
index 0a6b08ae34..70797c83e2 100644
--- a/src/library/scala/collection/generic/ParMapFactory.scala
+++ b/src/library/scala/collection/generic/ParMapFactory.scala
@@ -6,7 +6,9 @@
** |/ **
\* */
-package scala.collection.generic
+package scala
+package collection
+package generic
import scala.collection.parallel.ParMap
import scala.collection.parallel.ParMapLike
diff --git a/src/library/scala/collection/generic/ParSetFactory.scala b/src/library/scala/collection/generic/ParSetFactory.scala
index 3727ab89f7..4320635ae6 100644
--- a/src/library/scala/collection/generic/ParSetFactory.scala
+++ b/src/library/scala/collection/generic/ParSetFactory.scala
@@ -6,7 +6,9 @@
** |/ **
\* */
-package scala.collection.generic
+package scala
+package collection
+package generic
import scala.collection.mutable.Builder
import scala.collection.parallel.Combiner
diff --git a/src/library/scala/collection/generic/SeqFactory.scala b/src/library/scala/collection/generic/SeqFactory.scala
index a66074741a..35cce11a79 100644
--- a/src/library/scala/collection/generic/SeqFactory.scala
+++ b/src/library/scala/collection/generic/SeqFactory.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
package generic
import scala.language.higherKinds
diff --git a/src/library/scala/collection/generic/SeqForwarder.scala b/src/library/scala/collection/generic/SeqForwarder.scala
index e8b15ec450..c23b818c00 100644
--- a/src/library/scala/collection/generic/SeqForwarder.scala
+++ b/src/library/scala/collection/generic/SeqForwarder.scala
@@ -6,7 +6,9 @@
** |/ **
\* */
-package scala.collection.generic
+package scala
+package collection
+package generic
import scala.collection._
import scala.collection.immutable.Range
@@ -50,7 +52,7 @@ trait SeqForwarder[+A] extends Seq[A] with IterableForwarder[A] {
override def lastIndexOfSlice[B >: A](that: GenSeq[B]): Int = underlying lastIndexOfSlice that
override def lastIndexOfSlice[B >: A](that: GenSeq[B], end: Int): Int = underlying.lastIndexOfSlice(that, end)
override def containsSlice[B](that: GenSeq[B]): Boolean = underlying containsSlice that
- override def contains(elem: Any): Boolean = underlying contains elem
+ override def contains[A1 >: A](elem: A1): Boolean = underlying contains elem
override def corresponds[B](that: GenSeq[B])(p: (A,B) => Boolean): Boolean = underlying.corresponds(that)(p)
override def indices: Range = underlying.indices
}
diff --git a/src/library/scala/collection/generic/SetFactory.scala b/src/library/scala/collection/generic/SetFactory.scala
index e9bbde92f3..fcd8d00c18 100644
--- a/src/library/scala/collection/generic/SetFactory.scala
+++ b/src/library/scala/collection/generic/SetFactory.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
package generic
import mutable.Builder
diff --git a/src/library/scala/collection/generic/Shrinkable.scala b/src/library/scala/collection/generic/Shrinkable.scala
index b00048fd5d..b7412afde0 100644
--- a/src/library/scala/collection/generic/Shrinkable.scala
+++ b/src/library/scala/collection/generic/Shrinkable.scala
@@ -7,7 +7,8 @@
\* */
-package scala.collection
+package scala
+package collection
package generic
/** This trait forms part of collections that can be reduced
diff --git a/src/library/scala/collection/generic/Signalling.scala b/src/library/scala/collection/generic/Signalling.scala
index 498db7f8fa..e62eb6ff09 100644
--- a/src/library/scala/collection/generic/Signalling.scala
+++ b/src/library/scala/collection/generic/Signalling.scala
@@ -6,7 +6,9 @@
** |/ **
\* */
-package scala.collection.generic
+package scala
+package collection
+package generic
import java.util.concurrent.atomic.AtomicInteger
@@ -140,7 +142,7 @@ trait AtomicIndexFlag extends Signalling {
val old = intflag.get
if (f <= old) loop = false
else if (intflag.compareAndSet(old, f)) loop = false
- } while (loop);
+ } while (loop)
}
abstract override def setIndexFlagIfLesser(f: Int) = {
var loop = true
@@ -148,7 +150,7 @@ trait AtomicIndexFlag extends Signalling {
val old = intflag.get
if (f >= old) loop = false
else if (intflag.compareAndSet(old, f)) loop = false
- } while (loop);
+ } while (loop)
}
}
@@ -163,7 +165,7 @@ trait DelegatedSignalling extends Signalling {
var signalDelegate: Signalling
def isAborted = signalDelegate.isAborted
- def abort() = signalDelegate.abort
+ def abort() = signalDelegate.abort()
def indexFlag = signalDelegate.indexFlag
def setIndexFlag(f: Int) = signalDelegate.setIndexFlag(f)
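
The AtomicIndexFlag hunks only drop trailing semicolons, but the loops they touch are the classic monotonic CAS update: re-read, stop if the new value would not move the flag in the desired direction, otherwise compareAndSet and retry on contention. A stand-alone sketch of setIndexFlagIfGreater against an explicit AtomicInteger (names illustrative):

    import java.util.concurrent.atomic.AtomicInteger
    import scala.annotation.tailrec

    val flag = new AtomicInteger(Int.MinValue)

    @tailrec
    def setIfGreater(f: Int): Unit = {
      val old = flag.get
      if (f <= old) ()                        // the flag is already at least f: nothing to do
      else if (!flag.compareAndSet(old, f))   // another writer got in first: re-read and retry
        setIfGreater(f)
    }

    setIfGreater(3)
    setIfGreater(1)   // no effect: 1 <= 3
    flag.get          // 3
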
diff --git a/src/library/scala/collection/generic/Sizing.scala b/src/library/scala/collection/generic/Sizing.scala
index 1191259b3a..73584ce82e 100644
--- a/src/library/scala/collection/generic/Sizing.scala
+++ b/src/library/scala/collection/generic/Sizing.scala
@@ -6,7 +6,9 @@
** |/ **
\* */
-package scala.collection.generic
+package scala
+package collection
+package generic
/** A trait for objects which have a size.
*/
diff --git a/src/library/scala/collection/generic/SliceInterval.scala b/src/library/scala/collection/generic/SliceInterval.scala
index 244e960454..82acdd1371 100644
--- a/src/library/scala/collection/generic/SliceInterval.scala
+++ b/src/library/scala/collection/generic/SliceInterval.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
package generic
/** A container for the endpoints of a collection slice.
diff --git a/src/library/scala/collection/generic/Sorted.scala b/src/library/scala/collection/generic/Sorted.scala
index f962b26bd3..3876da3275 100644
--- a/src/library/scala/collection/generic/Sorted.scala
+++ b/src/library/scala/collection/generic/Sorted.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
package generic
/** Any collection (including maps) whose keys (or elements) are ordered.
@@ -71,28 +72,40 @@ trait Sorted[K, +This <: Sorted[K, This]] {
def to(to: K): This = {
val i = keySet.from(to).iterator
if (i.isEmpty) return repr
- val next = i.next
+ val next = i.next()
if (compare(next, to) == 0)
if (i.isEmpty) repr
- else until(i.next)
+ else until(i.next())
else
until(next)
}
+
+ /**
+   * Creates an iterator over all the keys (or elements) contained in this
+ * collection greater than or equal to `start`
+ * according to the ordering of this collection. x.keysIteratorFrom(y)
+ * is equivalent to but often more efficient than
+ * x.from(y).keysIterator.
+ *
+ * @param start The lower bound (inclusive)
+ * on the keys to be returned
+ */
+ def keysIteratorFrom(start: K): Iterator[K]
protected def hasAll(j: Iterator[K]): Boolean = {
val i = keySet.iterator
if (i.isEmpty) return j.isEmpty
- var in = i.next;
+ var in = i.next()
while (j.hasNext) {
- val jn = j.next;
+ val jn = j.next()
while ({
- val n = compare(jn, in);
- if (n == 0) false;
- else if (n < 0) return false;
- else if (!i.hasNext) return false;
- else true;
- }) in = i.next;
+ val n = compare(jn, in)
+ if (n == 0) false
+ else if (n < 0) return false
+ else if (!i.hasNext) return false
+ else true
+ }) in = i.next()
}
true
}
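
keysIteratorFrom enters Sorted as an abstract member here; per its scaladoc it must behave like from(start).keysIterator while typically avoiding the intermediate ranged collection. A sketch of that contract with immutable.TreeMap, assuming the standard sorted implementations provide the method:

    import scala.collection.immutable.TreeMap

    val m = TreeMap(1 -> "a", 2 -> "b", 3 -> "c", 4 -> "d")

    m.keysIteratorFrom(3).toList    // List(3, 4)
    m.from(3).keysIterator.toList   // the same keys, via the intermediate ranged map
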
diff --git a/src/library/scala/collection/generic/SortedMapFactory.scala b/src/library/scala/collection/generic/SortedMapFactory.scala
index 17201b0f7a..afa11e9ab1 100644
--- a/src/library/scala/collection/generic/SortedMapFactory.scala
+++ b/src/library/scala/collection/generic/SortedMapFactory.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
package generic
import mutable.{Builder, MapBuilder}
@@ -24,7 +25,7 @@ abstract class SortedMapFactory[CC[A, B] <: SortedMap[A, B] with SortedMapLike[A
def empty[A, B](implicit ord: Ordering[A]): CC[A, B]
- def apply[A, B](elems: (A, B)*)(implicit ord: Ordering[A]): CC[A, B] = (newBuilder[A, B](ord) ++= elems).result
+ def apply[A, B](elems: (A, B)*)(implicit ord: Ordering[A]): CC[A, B] = (newBuilder[A, B](ord) ++= elems).result()
def newBuilder[A, B](implicit ord: Ordering[A]): Builder[(A, B), CC[A, B]] =
new MapBuilder[A, B, CC[A, B]](empty(ord))
diff --git a/src/library/scala/collection/generic/SortedSetFactory.scala b/src/library/scala/collection/generic/SortedSetFactory.scala
index 08bca04e42..c734830e0b 100644
--- a/src/library/scala/collection/generic/SortedSetFactory.scala
+++ b/src/library/scala/collection/generic/SortedSetFactory.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
package generic
import mutable.{Builder, SetBuilder}
@@ -23,11 +24,11 @@ abstract class SortedSetFactory[CC[A] <: SortedSet[A] with SortedSetLike[A, CC[A
def empty[A](implicit ord: Ordering[A]): CC[A]
- def apply[A](elems: A*)(implicit ord: Ordering[A]): CC[A] = (newBuilder[A](ord) ++= elems).result
+ def apply[A](elems: A*)(implicit ord: Ordering[A]): CC[A] = (newBuilder[A](ord) ++= elems).result()
def newBuilder[A](implicit ord: Ordering[A]): Builder[A, CC[A]] = new SetBuilder[A, CC[A]](empty)
- implicit def newCanBuildFrom[A](implicit ord : Ordering[A]) : CanBuildFrom[Coll, A, CC[A]] = new SortedSetCanBuildFrom()(ord);
+ implicit def newCanBuildFrom[A](implicit ord : Ordering[A]) : CanBuildFrom[Coll, A, CC[A]] = new SortedSetCanBuildFrom()(ord)
class SortedSetCanBuildFrom[A](implicit ord: Ordering[A]) extends CanBuildFrom[Coll, A, CC[A]] {
def apply(from: Coll) = newBuilder[A](ord)
diff --git a/src/library/scala/collection/generic/Subtractable.scala b/src/library/scala/collection/generic/Subtractable.scala
index e0fe07a0a4..32a9000296 100644
--- a/src/library/scala/collection/generic/Subtractable.scala
+++ b/src/library/scala/collection/generic/Subtractable.scala
@@ -7,7 +7,8 @@
\* */
-package scala.collection
+package scala
+package collection
package generic
diff --git a/src/library/scala/collection/generic/TraversableFactory.scala b/src/library/scala/collection/generic/TraversableFactory.scala
index 5d1c9d17e2..ad6d8fd198 100644
--- a/src/library/scala/collection/generic/TraversableFactory.scala
+++ b/src/library/scala/collection/generic/TraversableFactory.scala
@@ -7,7 +7,8 @@
\* */
-package scala.collection
+package scala
+package collection
package generic
import scala.language.higherKinds
diff --git a/src/library/scala/collection/generic/TraversableForwarder.scala b/src/library/scala/collection/generic/TraversableForwarder.scala
index 2662018feb..c71368bba0 100644
--- a/src/library/scala/collection/generic/TraversableForwarder.scala
+++ b/src/library/scala/collection/generic/TraversableForwarder.scala
@@ -6,7 +6,9 @@
** |/ **
\* */
-package scala.collection.generic
+package scala
+package collection
+package generic
import scala.collection._
import mutable.{ Buffer, StringBuilder }
diff --git a/src/library/scala/collection/generic/package.scala b/src/library/scala/collection/generic/package.scala
index dd47b7ace6..1beb4a8599 100644
--- a/src/library/scala/collection/generic/package.scala
+++ b/src/library/scala/collection/generic/package.scala
@@ -1,4 +1,5 @@
-package scala.collection
+package scala
+package collection
import generic.CanBuildFrom
import scala.language.higherKinds
diff --git a/src/library/scala/collection/immutable/BitSet.scala b/src/library/scala/collection/immutable/BitSet.scala
index ed3630edc1..70543aa3a6 100644
--- a/src/library/scala/collection/immutable/BitSet.scala
+++ b/src/library/scala/collection/immutable/BitSet.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
package immutable
import generic._
@@ -31,9 +32,6 @@ abstract class BitSet extends scala.collection.AbstractSet[Int]
with Serializable {
override def empty = BitSet.empty
- @deprecated("Use BitSet.fromBitMask[NoCopy] instead of fromArray", "2.10.0")
- def fromArray(elems: Array[Long]): BitSet = fromBitMaskNoCopy(elems)
-
protected def fromBitMaskNoCopy(elems: Array[Long]): BitSet = BitSet.fromBitMaskNoCopy(elems)
/** Update word at index `idx`; enlarge set if `idx` outside range of set.
@@ -74,7 +72,7 @@ object BitSet extends BitSetFactory[BitSet] {
def newBuilder: Builder[Int, BitSet] = new Builder[Int, BitSet] {
private[this] val b = new mutable.BitSet
def += (x: Int) = { b += x; this }
- def clear() = b.clear
+ def clear() = b.clear()
def result() = b.toImmutable
}
@@ -82,10 +80,6 @@ object BitSet extends BitSetFactory[BitSet] {
implicit def canBuildFrom: CanBuildFrom[BitSet, Int, BitSet] = bitsetCanBuildFrom
/** A bitset containing all the bits in an array */
- @deprecated("Use fromBitMask[NoCopy] instead of fromArray", "2.10.0")
- def fromArray(elems: Array[Long]): BitSet = fromBitMaskNoCopy(elems)
-
- /** A bitset containing all the bits in an array */
def fromBitMask(elems: Array[Long]): BitSet = {
val len = elems.length
if (len == 0) empty
diff --git a/src/library/scala/collection/immutable/DefaultMap.scala b/src/library/scala/collection/immutable/DefaultMap.scala
index 4a0503adfd..42a03e90ee 100755
--- a/src/library/scala/collection/immutable/DefaultMap.scala
+++ b/src/library/scala/collection/immutable/DefaultMap.scala
@@ -6,13 +6,10 @@
** |/ **
\* */
-
-
-package scala.collection
+package scala
+package collection
package immutable
-import generic._
-
/** A default map which implements the `+` and `-`
* methods of maps. It does so using the default builder for
* maps defined in the `Map` object.
@@ -42,7 +39,7 @@ trait DefaultMap[A, +B] extends Map[A, B] { self =>
val b = Map.newBuilder[A, B1]
b ++= this
b += ((kv._1, kv._2))
- b.result
+ b.result()
}
/** A default implementation which creates a new immutable map.
@@ -50,7 +47,7 @@ trait DefaultMap[A, +B] extends Map[A, B] { self =>
override def - (key: A): Map[A, B] = {
val b = newBuilder
for (kv <- this.seq ; if kv._1 != key) b += kv
- b.result
+ b.result()
}
}
diff --git a/src/library/scala/collection/immutable/GenIterable.scala.disabled b/src/library/scala/collection/immutable/GenIterable.scala.disabled
deleted file mode 100644
index d34f7fd856..0000000000
--- a/src/library/scala/collection/immutable/GenIterable.scala.disabled
+++ /dev/null
@@ -1,37 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala.collection
-package immutable
-
-
-import generic._
-import mutable.Builder
-
-
-/** A base trait for iterable collections that can be mutated.
- *
- * $possiblyparinfo
- *
- * $iterableInfo
- */
-trait GenIterable[+A] extends GenTraversable[A]
- with scala.collection.GenIterable[A]
- with scala.collection.GenIterableLike[A, GenIterable[A]]
-// with GenericTraversableTemplate[A, GenIterable]
-{
- def seq: Iterable[A]
- //override def companion: GenericCompanion[GenIterable] = GenIterable
-}
-
-
-// object GenIterable extends TraversableFactory[GenIterable] {
-// implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, GenIterable[A]] = ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]]
-// def newBuilder[A]: Builder[A, GenIterable[A]] = Iterable.newBuilder
-// }
-
diff --git a/src/library/scala/collection/immutable/GenMap.scala.disabled b/src/library/scala/collection/immutable/GenMap.scala.disabled
deleted file mode 100644
index 73557a4a66..0000000000
--- a/src/library/scala/collection/immutable/GenMap.scala.disabled
+++ /dev/null
@@ -1,36 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala.collection
-package immutable
-
-import generic._
-
-
-/** A base trait for maps that can be mutated.
- * $possiblyparinfo
- * $mapNote
- * $mapTags
- * @since 1.0
- * @author Matthias Zenger
- */
-trait GenMap[A, +B]
-extends GenIterable[(A, B)]
- with scala.collection.GenMap[A, B]
- with scala.collection.GenMapLike[A, B, GenMap[A, B]]
-{
- def seq: Map[A, B]
-}
-
-
-// object GenMap extends MapFactory[GenMap] {
-// def empty[A, B]: Map[A, B] = Map.empty
-
-// /** $mapCanBuildFromInfo */
-// implicit def canBuildFrom[A, B]: CanBuildFrom[Coll, (A, B), GenMap[A, B]] = new MapCanBuildFrom[A, B]
-// }
diff --git a/src/library/scala/collection/immutable/GenSeq.scala.disabled b/src/library/scala/collection/immutable/GenSeq.scala.disabled
deleted file mode 100644
index 713529f3db..0000000000
--- a/src/library/scala/collection/immutable/GenSeq.scala.disabled
+++ /dev/null
@@ -1,49 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.collection
-package immutable
-
-
-import generic._
-import mutable.Builder
-
-
-/** A subtrait of `collection.GenSeq` which represents sequences
- * that can be mutated.
- *
- * $possiblyparinfo
- *
- * $seqInfo
- *
- * The class adds an `update` method to `collection.Seq`.
- *
- * @define Coll `mutable.Seq`
- * @define coll mutable sequence
- */
-trait GenSeq[+A] extends GenIterable[A]
- with scala.collection.GenSeq[A]
- with scala.collection.GenSeqLike[A, GenSeq[A]]
-// with GenericTraversableTemplate[A, GenSeq]
-{
- def seq: Seq[A]
- //override def companion: GenericCompanion[GenSeq] = GenSeq
-}
-
-
-// object GenSeq extends SeqFactory[GenSeq] {
-// implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, GenSeq[A]] = ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]]
-// def newBuilder[A]: Builder[A, GenSeq[A]] = Seq.newBuilder
-// }
-
-
-
-
-
diff --git a/src/library/scala/collection/immutable/GenSet.scala.disabled b/src/library/scala/collection/immutable/GenSet.scala.disabled
deleted file mode 100644
index 56bd2738fd..0000000000
--- a/src/library/scala/collection/immutable/GenSet.scala.disabled
+++ /dev/null
@@ -1,43 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.collection
-package immutable
-
-
-import generic._
-import mutable.Builder
-
-
-/** A generic trait for mutable sets.
- *
- * $possiblyparinfo
- * $setNote
- * $setTags
- *
- * @since 1.0
- * @author Matthias Zenger
- * @define Coll `mutable.Set`
- * @define coll mutable set
- */
-trait GenSet[A] extends GenIterable[A]
- with scala.collection.GenSet[A]
- with scala.collection.GenSetLike[A, GenSet[A]]
-// with GenericSetTemplate[A, GenSet]
-{
- //override def companion: GenericCompanion[GenSet] = GenSet
- def seq: Set[A]
-}
-
-
-// object GenSet extends TraversableFactory[GenSet] {
-// implicit def canBuildFrom[A] = ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]]
-// def newBuilder[A] = Set.newBuilder
-// }
diff --git a/src/library/scala/collection/immutable/GenTraversable.scala.disabled b/src/library/scala/collection/immutable/GenTraversable.scala.disabled
deleted file mode 100644
index e5b609f9ed..0000000000
--- a/src/library/scala/collection/immutable/GenTraversable.scala.disabled
+++ /dev/null
@@ -1,41 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.collection
-package immutable
-
-
-import generic._
-import mutable.Builder
-
-
-/** A trait for traversable collections that can be mutated.
- *
- * $possiblyparinfo
- *
- * $traversableInfo
- * @define mutability mutable
- */
-trait GenTraversable[+A] extends scala.collection.GenTraversable[A]
- with scala.collection.GenTraversableLike[A, GenTraversable[A]]
-// with GenericTraversableTemplate[A, GenTraversable]
- with Mutable
-{
- def seq: Traversable[A]
- //override def companion: GenericCompanion[GenTraversable] = GenTraversable
-}
-
-
-// object GenTraversable extends TraversableFactory[GenTraversable] {
-// implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, GenTraversable[A]] = ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]]
-// def newBuilder[A]: Builder[A, GenTraversable[A]] = Traversable.newBuilder
-// }
-
-
diff --git a/src/library/scala/collection/immutable/HashMap.scala b/src/library/scala/collection/immutable/HashMap.scala
index 84416a62d2..e74a19ef5b 100644
--- a/src/library/scala/collection/immutable/HashMap.scala
+++ b/src/library/scala/collection/immutable/HashMap.scala
@@ -87,9 +87,6 @@ class HashMap[A, +B] extends AbstractMap[A, B]
def split: Seq[HashMap[A, B]] = Seq(this)
- @deprecated("Use the `merged` method instead.", "2.10.0")
- def merge[B1 >: B](that: HashMap[A, B1], mergef: MergeFunction[A, B1] = null): HashMap[A, B1] = merge0(that, 0, liftMerger(mergef))
-
/** Creates a new map which is the merge of this and the argument hash map.
*
* Uses the specified collision resolution function if two keys are the same.
@@ -250,7 +247,9 @@ object HashMap extends ImmutableMapFactory[HashMap] with BitOperations.Int {
override def removed0(key: A, hash: Int, level: Int): HashMap[A, B] =
if (hash == this.hash) {
val kvs1 = kvs - key
- if (kvs1.isEmpty)
+ if (kvs1 eq kvs)
+ this
+ else if (kvs1.isEmpty)
HashMap.empty[A,B]
else if(kvs1.tail.isEmpty) {
val kv = kvs1.head
@@ -398,7 +397,7 @@ time { mNew.iterator.foreach( p => ()) }
*/
override def foreach[U](f: ((A, B)) => U): Unit = {
- var i = 0;
+ var i = 0
while (i < elems.length) {
elems(i).foreach(f)
i += 1
@@ -471,9 +470,6 @@ time { mNew.iterator.foreach( p => ()) }
// condition below is due to 2 things:
// 1) no unsigned int compare on JVM
// 2) 0 (no lsb) should always be greater in comparison
- val a = thislsb - 1
- val b = thatlsb - 1
-
if (unsignedCompare(thislsb - 1, thatlsb - 1)) {
val m = thiselems(thisi)
totalelems += m.size
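The `kvs1 eq kvs` check above makes removal of an absent key from a hash-collision bucket return the existing node instead of rebuilding it. A sketch of the user-visible effect (the object name and the `eq` outcome for this particular map shape are illustrative assumptions):

import scala.collection.immutable.HashMap

object HashMapRemovalDemo extends App {
  val m = HashMap("a" -> 1, "b" -> 2)

  // Removing a key that is not present leaves the map unchanged.
  val n = m - "z"
  assert(n == m)

  // Structural sharing: unchanged (sub)trees can be returned as-is, so the
  // result may even be the very same instance.
  println(n eq m)
}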
diff --git a/src/library/scala/collection/immutable/HashSet.scala b/src/library/scala/collection/immutable/HashSet.scala
index 87995f705f..e17f07c87b 100644
--- a/src/library/scala/collection/immutable/HashSet.scala
+++ b/src/library/scala/collection/immutable/HashSet.scala
@@ -301,8 +301,8 @@ time { mNew.iterator.foreach( p => ()) }
*/
override def foreach[U](f: A => U): Unit = {
- var i = 0;
- while (i < elems.length) {
+ var i = 0
+ while (i < elems.length) {
elems(i).foreach(f)
i += 1
}
diff --git a/src/library/scala/collection/immutable/IndexedSeq.scala b/src/library/scala/collection/immutable/IndexedSeq.scala
index 96414c07ef..33170fdd59 100644
--- a/src/library/scala/collection/immutable/IndexedSeq.scala
+++ b/src/library/scala/collection/immutable/IndexedSeq.scala
@@ -7,7 +7,8 @@
\* */
-package scala.collection
+package scala
+package collection
package immutable
import generic._
@@ -31,14 +32,13 @@ trait IndexedSeq[+A] extends Seq[A]
* @define coll indexed sequence
* @define Coll `IndexedSeq`
*/
-object IndexedSeq extends SeqFactory[IndexedSeq] {
- override lazy val ReusableCBF =
- scala.collection.IndexedSeq.ReusableCBF.asInstanceOf[GenericCanBuildFrom[Nothing]]
+object IndexedSeq extends IndexedSeqFactory[IndexedSeq] {
class Impl[A](buf: ArrayBuffer[A]) extends AbstractSeq[A] with IndexedSeq[A] with Serializable {
def length = buf.length
def apply(idx: Int) = buf.apply(idx)
}
def newBuilder[A]: Builder[A, IndexedSeq[A]] = Vector.newBuilder[A]
+
implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, IndexedSeq[A]] =
ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]]
}
diff --git a/src/library/scala/collection/immutable/IntMap.scala b/src/library/scala/collection/immutable/IntMap.scala
index ab1faf363e..07e2ddaae2 100644
--- a/src/library/scala/collection/immutable/IntMap.scala
+++ b/src/library/scala/collection/immutable/IntMap.scala
@@ -12,6 +12,7 @@ package immutable
import scala.collection.generic.{ CanBuildFrom, BitOperations }
import scala.collection.mutable.{ Builder, MapBuilder }
+import scala.annotation.tailrec
/** Utility class for integer maps.
* @author David MacIver
@@ -50,8 +51,10 @@ object IntMap {
def apply(): Builder[(Int, B), IntMap[B]] = new MapBuilder[Int, B, IntMap[B]](empty[B])
}
- def empty[T] : IntMap[T] = IntMap.Nil;
- def singleton[T](key: Int, value: T): IntMap[T] = IntMap.Tip(key, value);
+ def empty[T] : IntMap[T] = IntMap.Nil
+
+ def singleton[T](key: Int, value: T): IntMap[T] = IntMap.Tip(key, value)
+
def apply[T](elems: (Int, T)*): IntMap[T] =
elems.foldLeft(empty[T])((x, y) => x.updated(y._1, y._2))
@@ -427,6 +430,7 @@ sealed abstract class IntMap[+T] extends AbstractMap[Int, T]
/**
* The entry with the lowest key value considered in unsigned order.
*/
+ @tailrec
final def firstKey: Int = this match {
case Bin(_, _, l, r) => l.firstKey
case Tip(k, v) => k
@@ -436,6 +440,7 @@ sealed abstract class IntMap[+T] extends AbstractMap[Int, T]
/**
* The entry with the highest key value considered in unsigned order.
*/
+ @tailrec
final def lastKey: Int = this match {
case Bin(_, _, l, r) => r.lastKey
case Tip(k, v) => k
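The `@tailrec` annotations above guarantee that `firstKey`/`lastKey` compile to constant-stack descents of the Patricia trie. A small sketch of their documented contract (the demo object is illustrative):

import scala.collection.immutable.IntMap

object IntMapKeysDemo extends App {
  val m = IntMap(5 -> "e", 1 -> "a", 9 -> "i")

  // firstKey / lastKey descend the leftmost / rightmost spine of the trie.
  assert(m.firstKey == 1)
  assert(m.lastKey == 9)

  // Keys are ordered as unsigned 32-bit values, so negative keys sort last.
  println(IntMap(1 -> "a", -1 -> "b").lastKey) // expected: -1
}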
diff --git a/src/library/scala/collection/immutable/Iterable.scala b/src/library/scala/collection/immutable/Iterable.scala
index cc64d8ff3c..6e4eb1e45f 100644
--- a/src/library/scala/collection/immutable/Iterable.scala
+++ b/src/library/scala/collection/immutable/Iterable.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
package immutable
import generic._
diff --git a/src/library/scala/collection/immutable/LinearSeq.scala b/src/library/scala/collection/immutable/LinearSeq.scala
index 5ede6d90d0..2109bd5211 100644
--- a/src/library/scala/collection/immutable/LinearSeq.scala
+++ b/src/library/scala/collection/immutable/LinearSeq.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
package immutable
import generic._
diff --git a/src/library/scala/collection/immutable/List.scala b/src/library/scala/collection/immutable/List.scala
index 2d6952ff92..c01694960c 100644
--- a/src/library/scala/collection/immutable/List.scala
+++ b/src/library/scala/collection/immutable/List.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
package immutable
import generic._
@@ -85,7 +86,8 @@ sealed abstract class List[+A] extends AbstractSeq[A]
with LinearSeq[A]
with Product
with GenericTraversableTemplate[A, List]
- with LinearSeqOptimized[A, List[A]] {
+ with LinearSeqOptimized[A, List[A]]
+ with Serializable {
override def companion: GenericCompanion[List] = List
import scala.collection.{Iterable, Traversable, Seq, IndexedSeq}
@@ -158,7 +160,7 @@ sealed abstract class List[+A] extends AbstractSeq[A]
* @usecase def mapConserve(f: A => A): List[A]
* @inheritdoc
*/
- def mapConserve[B >: A <: AnyRef](f: A => B): List[B] = {
+ @inline final def mapConserve[B >: A <: AnyRef](f: A => B): List[B] = {
@tailrec
def loop(mapped: ListBuffer[B], unchanged: List[A], pending: List[A]): List[B] =
if (pending.isEmpty) {
@@ -263,7 +265,7 @@ sealed abstract class List[+A] extends AbstractSeq[A]
(b.toList, these)
}
- override def takeWhile(p: A => Boolean): List[A] = {
+ @inline final override def takeWhile(p: A => Boolean): List[A] = {
val b = new ListBuffer[A]
var these = this
while (!these.isEmpty && p(these.head)) {
@@ -273,7 +275,7 @@ sealed abstract class List[+A] extends AbstractSeq[A]
b.toList
}
- override def dropWhile(p: A => Boolean): List[A] = {
+ @inline final override def dropWhile(p: A => Boolean): List[A] = {
@tailrec
def loop(xs: List[A]): List[A] =
if (xs.isEmpty || !p(xs.head)) xs
@@ -282,7 +284,7 @@ sealed abstract class List[+A] extends AbstractSeq[A]
loop(this)
}
- override def span(p: A => Boolean): (List[A], List[A]) = {
+ @inline final override def span(p: A => Boolean): (List[A], List[A]) = {
val b = new ListBuffer[A]
var these = this
while (!these.isEmpty && p(these.head)) {
@@ -292,6 +294,16 @@ sealed abstract class List[+A] extends AbstractSeq[A]
(b.toList, these)
}
+ // Overridden with an implementation identical to the inherited one (at this time)
+ // solely so it can be finalized and thus inlinable.
+ @inline final override def foreach[U](f: A => U) {
+ var these = this
+ while (!these.isEmpty) {
+ f(these.head)
+ these = these.tail
+ }
+ }
+
override def reverse: List[A] = {
var result: List[A] = Nil
var these = this
@@ -301,7 +313,7 @@ sealed abstract class List[+A] extends AbstractSeq[A]
}
result
}
-
+
override def foldRight[B](z: B)(op: (A, B) => B): B =
reverse.foldLeft(z)((right, left) => op(left, right))
@@ -310,18 +322,6 @@ sealed abstract class List[+A] extends AbstractSeq[A]
override def toStream : Stream[A] =
if (isEmpty) Stream.Empty
else new Stream.Cons(head, tail.toStream)
-
- @inline override final
- def foreach[B](f: A => B) {
- var these = this
- while (!these.isEmpty) {
- f(these.head)
- these = these.tail
- }
- }
-
- @deprecated("use `distinct` instead", "2.8.0")
- def removeDuplicates: List[A] = distinct
}
/** The empty list.
@@ -386,9 +386,6 @@ final case class ::[B](private var hd: B, private[scala] var tl: List[B]) extend
* @define Coll `List`
*/
object List extends SeqFactory[List] {
-
- import scala.collection.{Iterable, Seq, IndexedSeq}
-
/** $genericCanBuildFromInfo */
implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, List[A]] =
ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]]
@@ -398,255 +395,9 @@ object List extends SeqFactory[List] {
override def empty[A]: List[A] = Nil
override def apply[A](xs: A*): List[A] = xs.toList
-
- /** Create a sorted list with element values `v,,>n+1,, = step(v,,n,,)`
- * where `v,,0,, = start` and elements are in the range between `start`
- * (inclusive) and `end` (exclusive).
- *
- * @param start the start value of the list
- * @param end the end value of the list
- * @param step the increment function of the list, which given `v,,n,,`,
- * computes `v,,n+1,,`. Must be monotonically increasing
- * or decreasing.
- * @return the sorted list of all integers in range `[start;end)`.
- */
- @deprecated("use `iterate` instead", "2.8.0")
- def range(start: Int, end: Int, step: Int => Int): List[Int] = {
- val up = step(start) > start
- val down = step(start) < start
- val b = new ListBuffer[Int]
- var i = start
- while ((!up || i < end) && (!down || i > end)) {
- b += i
- val next = step(i)
- if (i == next)
- throw new IllegalArgumentException("the step function did not make any progress on "+ i)
- i = next
- }
- b.toList
- }
-
- /** Create a list containing several copies of an element.
- *
- * @param n the length of the resulting list
- * @param elem the element composing the resulting list
- * @return a list composed of `n` elements all equal to `elem`
- */
- @deprecated("use `fill` instead", "2.8.0")
- def make[A](n: Int, elem: A): List[A] = {
- val b = new ListBuffer[A]
- var i = 0
- while (i < n) {
- b += elem
- i += 1
- }
- b.toList
- }
-
- /** Concatenate all the elements of a given list of lists.
- *
- * @param xss the list of lists that are to be concatenated
- * @return the concatenation of all the lists
- */
- @deprecated("use `xss.flatten` instead of `List.flatten(xss)`", "2.8.0")
- def flatten[A](xss: List[List[A]]): List[A] = {
- val b = new ListBuffer[A]
- for (xs <- xss) {
- var xc = xs
- while (!xc.isEmpty) {
- b += xc.head
- xc = xc.tail
- }
- }
- b.toList
- }
-
- /** Transforms a list of pairs into a pair of lists.
- *
- * @param xs the list of pairs to unzip
- * @return a pair of lists.
- */
- @deprecated("use `xs.unzip` instead of `List.unzip(xs)`", "2.8.0")
- def unzip[A,B](xs: List[(A,B)]): (List[A], List[B]) = {
- val b1 = new ListBuffer[A]
- val b2 = new ListBuffer[B]
- var xc = xs
- while (!xc.isEmpty) {
- b1 += xc.head._1
- b2 += xc.head._2
- xc = xc.tail
- }
- (b1.toList, b2.toList)
- }
-
- /** Transforms an iterable of pairs into a pair of lists.
- *
- * @param xs the iterable of pairs to unzip
- * @return a pair of lists.
- */
- @deprecated("use `xs.unzip` instead of `List.unzip(xs)`", "2.8.0")
- def unzip[A,B](xs: Iterable[(A,B)]): (List[A], List[B]) =
- xs.foldRight[(List[A], List[B])]((Nil, Nil)) {
- case ((x, y), (xs, ys)) => (x :: xs, y :: ys)
- }
-
- /**
- * Returns the `Left` values in the given `Iterable` of `Either`s.
- */
- @deprecated("use `xs collect { case Left(x: A) => x }` instead of `List.lefts(xs)`", "2.8.0")
- def lefts[A, B](es: Iterable[Either[A, B]]) =
- es.foldRight[List[A]](Nil)((e, as) => e match {
- case Left(a) => a :: as
- case Right(_) => as
- })
-
- /**
- * Returns the `Right` values in the given `Iterable` of `Either`s.
- */
- @deprecated("use `xs collect { case Right(x: B) => x }` instead of `List.rights(xs)`", "2.8.0")
- def rights[A, B](es: Iterable[Either[A, B]]) =
- es.foldRight[List[B]](Nil)((e, bs) => e match {
- case Left(_) => bs
- case Right(b) => b :: bs
- })
-
- /** Transforms an Iterable of Eithers into a pair of lists.
- *
- * @param es the iterable of Eithers to separate
- * @return a pair of lists.
- */
- @deprecated("use `(for (Left(x) <- es) yield x, for (Right(x) <- es) yield x)` instead", "2.8.0")
- def separate[A,B](es: Iterable[Either[A, B]]): (List[A], List[B]) =
- es.foldRight[(List[A], List[B])]((Nil, Nil)) {
- case (Left(a), (lefts, rights)) => (a :: lefts, rights)
- case (Right(b), (lefts, rights)) => (lefts, b :: rights)
- }
-
- /** Converts an iterator to a list.
- *
- * @param it the iterator to convert
- * @return a list that contains the elements returned by successive
- * calls to `it.next`
- */
- @deprecated("use `it.toList` instead of `List.toList(it)`", "2.8.0")
- def fromIterator[A](it: Iterator[A]): List[A] = it.toList
-
- /** Converts an array into a list.
- *
- * @param arr the array to convert
- * @return a list that contains the same elements than `arr`
- * in the same order
- */
- @deprecated("use `array.toList` instead of `List.fromArray(array)`", "2.8.0")
- def fromArray[A](arr: Array[A]): List[A] = fromArray(arr, 0, arr.length)
-
- /** Converts a range of an array into a list.
- *
- * @param arr the array to convert
- * @param start the first index to consider
- * @param len the length of the range to convert
- * @return a list that contains the same elements than `arr`
- * in the same order
- */
- @deprecated("use `array.view(start, end).toList` instead of `List.fromArray(array, start, end)`", "2.8.0")
- def fromArray[A](arr: Array[A], start: Int, len: Int): List[A] = {
- var res: List[A] = Nil
- var i = start + len
- while (i > start) {
- i -= 1
- res = arr(i) :: res
- }
- res
- }
-
- /** Returns the list resulting from applying the given function `f`
- * to corresponding elements of the argument lists.
- *
- * @param f function to apply to each pair of elements.
- * @return `[f(a,,0,,,b,,0,,), ..., f(a,,n,,,b,,n,,)]` if the lists are
- * `[a,,0,,, ..., a,,k,,]`, `[b,,0,,, ..., b,,l,,]` and
- * `n = min(k,l)`
- */
- @deprecated("use `(xs, ys).zipped.map(f)` instead of `List.map2(xs, ys)(f)`", "2.8.0")
- def map2[A,B,C](xs: List[A], ys: List[B])(f: (A, B) => C): List[C] = {
- val b = new ListBuffer[C]
- var xc = xs
- var yc = ys
- while (!xc.isEmpty && !yc.isEmpty) {
- b += f(xc.head, yc.head)
- xc = xc.tail
- yc = yc.tail
- }
- b.toList
- }
-
- /** Tests whether the given predicate `p` holds
- * for all corresponding elements of the argument lists.
- *
- * @param f function to apply to each pair of elements.
- * @return `(p(a<sub>0</sub>,b<sub>0</sub>) &amp;&amp;
- * ... &amp;&amp; p(a<sub>n</sub>,b<sub>n</sub>))]`
- * if the lists are `[a<sub>0</sub>, ..., a<sub>k</sub>]`;
- * `[b<sub>0</sub>, ..., b<sub>l</sub>]`
- * and `n = min(k,l)`
- */
- @deprecated("use `(xs, ys).zipped.forall(f)` instead of `List.forall2(xs, ys)(f)`", "2.8.0")
- def forall2[A,B](xs: List[A], ys: List[B])(f: (A, B) => Boolean): Boolean = {
- var xc = xs
- var yc = ys
- while (!xc.isEmpty && !yc.isEmpty) {
- if (!f(xc.head, yc.head)) return false
- xc = xc.tail
- yc = yc.tail
- }
- true
- }
-
- /** Tests whether the given predicate `p` holds
- * for some corresponding elements of the argument lists.
- *
- * @param f function to apply to each pair of elements.
- * @return `n != 0 &amp;&amp; (p(a<sub>0</sub>,b<sub>0</sub>) ||
- * ... || p(a<sub>n</sub>,b<sub>n</sub>))]` if the lists are
- * `[a<sub>0</sub>, ..., a<sub>k</sub>]`,
- * `[b<sub>0</sub>, ..., b<sub>l</sub>]` and
- * `n = min(k,l)`
- */
- @deprecated("use `(xs, ys).zipped.exists(f)` instead of `List.exists2(xs, ys)(f)`", "2.8.0")
- def exists2[A,B](xs: List[A], ys: List[B])(f: (A, B) => Boolean): Boolean = {
- var xc = xs
- var yc = ys
- while (!xc.isEmpty && !yc.isEmpty) {
- if (f(xc.head, yc.head)) return true
- xc = xc.tail
- yc = yc.tail
- }
- false
- }
-
- /** Transposes a list of lists.
- * pre: All element lists have the same length.
- *
- * @param xss the list of lists
- * @return the transposed list of lists
- */
- @deprecated("use `xss.transpose` instead of `List.transpose(xss)`", "2.8.0")
- def transpose[A](xss: List[List[A]]): List[List[A]] = {
- val buf = new ListBuffer[List[A]]
- var yss = xss
- while (!yss.head.isEmpty) {
- buf += (yss map (_.head))
- yss = (yss map (_.tail))
- }
- buf.toList
- }
}
/** Only used for list serialization */
-@SerialVersionUID(0L - 8287891243975527522L)
-private[scala] case object ListSerializeStart
-
-/** Only used for list serialization */
@SerialVersionUID(0L - 8476791151975527571L)
private[scala] case object ListSerializeEnd
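Several `List` methods above (`mapConserve`, `takeWhile`, `dropWhile`, `span`, `foreach`) are now `@inline final` so the optimizer may inline them; `mapConserve` in particular preserves the original list when no element changes. A sketch of that behaviour (the demo object is illustrative):

object MapConserveDemo extends App {
  val xs: List[String] = List("a", "b", "c")

  // If f returns every element unchanged (by eq), the original list
  // instance is returned -- no copy is allocated.
  assert(xs.mapConserve(identity[String]) eq xs)

  // If any element changes, a new list is built.
  val ys = xs.mapConserve(s => if (s == "b") "B" else s)
  assert(ys == List("a", "B", "c"))
}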
diff --git a/src/library/scala/collection/immutable/ListMap.scala b/src/library/scala/collection/immutable/ListMap.scala
index 75817350e5..49295d92dd 100644
--- a/src/library/scala/collection/immutable/ListMap.scala
+++ b/src/library/scala/collection/immutable/ListMap.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
package immutable
import generic._
@@ -180,7 +181,7 @@ extends AbstractMap[A, B]
* it will be overridden by this function.
*/
override def updated [B2 >: B1](k: A, v: B2): ListMap[A, B2] = {
- val m = if (contains(k)) this - k else this
+ val m = this - k
new m.Node[B2](k, v)
}
@@ -188,32 +189,17 @@ extends AbstractMap[A, B]
* If the map does not contain a mapping for the given key, the
* method returns the same map.
*/
- override def - (k: A): ListMap[A, B1] = {
- // This definition used to result in stack overflows
- // if (k == key)
- // next
- // else {
- // val tail = next - k
- // if (tail eq next) this
- // else new tail.Node(key, value)
- // }
- // we use an imperative one instead (and use an auxiliary list to preserve order!):
- var cur: ListMap[A, B1] = this
- var lst: List[(A, B1)] = Nil
- while (cur.nonEmpty) {
- if (k != cur.key) lst ::= ((cur.key, cur.value))
- cur = cur.tail
- }
- var acc = ListMap[A, B1]()
- while (lst != Nil) {
- val elem = lst.head
- val stbl = acc
- acc = new stbl.Node(elem._1, elem._2)
- lst = lst.tail
- }
- acc
- }
-
+ override def - (k: A): ListMap[A, B1] = remove0(k, this, Nil)
+
+ @tailrec private def remove0(k: A, cur: ListMap[A, B1], acc: List[ListMap[A, B1]]): ListMap[A, B1] =
+ if (cur.isEmpty)
+ acc.last
+ else if (k == cur.key)
+ (cur.tail /: acc) {
+ case (t, h) => val tt = t; new tt.Node(h.key, h.value) // SI-7459
+ }
+ else
+ remove0(k, cur.tail, cur::acc)
override def tail: ListMap[A, B1] = ListMap.this
}
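The tail-recursive `remove0` above walks to the binding being removed and rebuilds only the nodes in front of it; if the key is absent it bottoms out at `acc.last`, i.e. the original map. A sketch of the resulting `-` semantics (the demo object is illustrative):

import scala.collection.immutable.ListMap

object ListMapRemoveDemo extends App {
  val m = ListMap("a" -> 1, "b" -> 2, "c" -> 3)

  // Removing an existing key drops exactly that binding.
  assert((m - "b") == ListMap("a" -> 1, "c" -> 3))

  // Removing an absent key returns an equal (in fact, the same) map.
  assert((m - "z") == m)
}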
diff --git a/src/library/scala/collection/immutable/ListSet.scala b/src/library/scala/collection/immutable/ListSet.scala
index 6cf6c4259e..5836321010 100644
--- a/src/library/scala/collection/immutable/ListSet.scala
+++ b/src/library/scala/collection/immutable/ListSet.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
package immutable
import generic._
@@ -75,7 +76,7 @@ class ListSet[A] extends AbstractSet[A]
* @return number of set elements.
*/
override def size: Int = 0
- override def isEmpty: Boolean = true;
+ override def isEmpty: Boolean = true
/** Checks if this set contains element `elem`.
*
@@ -100,7 +101,7 @@ class ListSet[A] extends AbstractSet[A]
*/
override def ++(xs: GenTraversableOnce[A]): ListSet[A] =
if (xs.isEmpty) this
- else (new ListSet.ListSetBuilder(this) ++= xs.seq).result
+ else (new ListSet.ListSetBuilder(this) ++= xs.seq).result()
private[ListSet] def unchecked_+(e: A): ListSet[A] = new Node(e)
private[ListSet] def unchecked_outer: ListSet[A] =
@@ -120,18 +121,18 @@ class ListSet[A] extends AbstractSet[A]
that = that.tail
res
}
- else Iterator.empty.next
+ else Iterator.empty.next()
}
/**
* @throws Predef.NoSuchElementException
*/
- override def head: A = throw new NoSuchElementException("Set has no elements");
+ override def head: A = throw new NoSuchElementException("Set has no elements")
/**
* @throws Predef.NoSuchElementException
*/
- override def tail: ListSet[A] = throw new NoSuchElementException("Next of an empty set");
+ override def tail: ListSet[A] = throw new NoSuchElementException("Next of an empty set")
override def stringPrefix = "ListSet"
diff --git a/src/library/scala/collection/immutable/LongMap.scala b/src/library/scala/collection/immutable/LongMap.scala
index 2a2910439a..506546c5ba 100644
--- a/src/library/scala/collection/immutable/LongMap.scala
+++ b/src/library/scala/collection/immutable/LongMap.scala
@@ -12,6 +12,7 @@ package immutable
import scala.collection.generic.{ CanBuildFrom, BitOperations }
import scala.collection.mutable.{ Builder, MapBuilder }
+import scala.annotation.tailrec
/** Utility class for long maps.
* @author David MacIver
@@ -77,8 +78,6 @@ object LongMap {
}
}
-import LongMap._
-
// Iterator over a non-empty LongMap.
private[immutable] abstract class LongMapIterator[V, T](it: LongMap[V]) extends AbstractIterator[T] {
@@ -98,7 +97,7 @@ private[immutable] abstract class LongMapIterator[V, T](it: LongMap[V]) extends
buffer(index) = x.asInstanceOf[AnyRef]
index += 1
}
- push(it);
+ push(it)
/**
* What value do we assign to a tip?
@@ -179,7 +178,7 @@ extends AbstractMap[Long, T]
*/
override final def foreach[U](f: ((Long, T)) => U): Unit = this match {
case LongMap.Bin(_, _, left, right) => { left.foreach(f); right.foreach(f) }
- case LongMap.Tip(key, value) => f((key, value));
+ case LongMap.Tip(key, value) => f((key, value))
case LongMap.Nil =>
}
@@ -418,5 +417,20 @@ extends AbstractMap[Long, T]
def ++[S >: T](that: LongMap[S]) =
this.unionWith[S](that, (key, x, y) => y)
+
+ @tailrec
+ final def firstKey: Long = this match {
+ case LongMap.Bin(_, _, l, r) => l.firstKey
+ case LongMap.Tip(k, v) => k
+ case LongMap.Nil => sys.error("Empty set")
+ }
+
+ @tailrec
+ final def lastKey: Long = this match {
+ case LongMap.Bin(_, _, l, r) => r.lastKey
+ case LongMap.Tip(k , v) => k
+ case LongMap.Nil => sys.error("Empty set")
+ }
+
}
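The new `firstKey`/`lastKey` on `LongMap` mirror their `IntMap` counterparts: tail-recursive descents of the trie, with keys compared as unsigned values and `sys.error("Empty set")` on the empty map. A sketch, assuming this patch is applied (the demo object is illustrative):

import scala.collection.immutable.LongMap

object LongMapKeysDemo extends App {
  val m = LongMap(8L -> "h", 2L -> "b", 32L -> "z")

  // Smallest and largest keys, found without a full traversal.
  assert(m.firstKey == 2L)
  assert(m.lastKey == 32L)
}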
diff --git a/src/library/scala/collection/immutable/Map.scala b/src/library/scala/collection/immutable/Map.scala
index 2ebf5035e1..a3f5c85961 100644
--- a/src/library/scala/collection/immutable/Map.scala
+++ b/src/library/scala/collection/immutable/Map.scala
@@ -7,7 +7,8 @@
\* */
-package scala.collection
+package scala
+package collection
package immutable
import generic._
diff --git a/src/library/scala/collection/immutable/MapLike.scala b/src/library/scala/collection/immutable/MapLike.scala
index 7e60f07847..f6041464e7 100644
--- a/src/library/scala/collection/immutable/MapLike.scala
+++ b/src/library/scala/collection/immutable/MapLike.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
package immutable
import generic._
@@ -123,7 +124,7 @@ self =>
def transform[C, That](f: (A, B) => C)(implicit bf: CanBuildFrom[This, (A, C), That]): That = {
val b = bf(repr)
for ((key, value) <- this) b += ((key, f(key, value)))
- b.result
+ b.result()
}
}
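`transform` above keeps every key and recomputes its value from `(key, value)`, assembling the result with the builder obtained from `CanBuildFrom` (hence the explicit `result()` call). A short usage sketch (the demo object is illustrative):

object MapTransformDemo extends App {
  val scores = Map("alice" -> 10, "bob" -> 7)

  // Same keys, new values computed from (key, value).
  val doubled = scores.transform((_, v) => v * 2)
  assert(doubled == Map("alice" -> 20, "bob" -> 14))
}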
diff --git a/src/library/scala/collection/immutable/MapProxy.scala b/src/library/scala/collection/immutable/MapProxy.scala
index f3f04ec346..f9f19c021d 100644
--- a/src/library/scala/collection/immutable/MapProxy.scala
+++ b/src/library/scala/collection/immutable/MapProxy.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
package immutable
/**
diff --git a/src/library/scala/collection/immutable/NumericRange.scala b/src/library/scala/collection/immutable/NumericRange.scala
index d3be299f89..486c2b6c8f 100644
--- a/src/library/scala/collection/immutable/NumericRange.scala
+++ b/src/library/scala/collection/immutable/NumericRange.scala
@@ -6,12 +6,11 @@
** |/ **
\* */
-
-package scala.collection
+package scala
+package collection
package immutable
import mutable.{ Builder, ListBuffer }
-import generic._
/** `NumericRange` is a more generic version of the
* `Range` class which works with arbitrary types.
@@ -81,17 +80,6 @@ extends AbstractSeq[T] with IndexedSeq[T] with Serializable {
// to guard against any (most likely illusory) performance drop. They should
// be eliminated one way or another.
- // Counts how many elements from the start meet the given test.
- private def skipCount(p: T => Boolean): Int = {
- var current = start
- var counted = 0
-
- while (counted < length && p(current)) {
- counted += 1
- current += step
- }
- counted
- }
// Tests whether a number is within the endpoints, without testing
// whether it is a member of the sequence (i.e. when step > 1.)
private def isWithinBoundaries(elem: T) = !isEmpty && (
@@ -124,21 +112,21 @@ extends AbstractSeq[T] with IndexedSeq[T] with Serializable {
if (idx < 0 || idx >= length) throw new IndexOutOfBoundsException(idx.toString)
else locationAfterN(idx)
}
-
+
import NumericRange.defaultOrdering
-
+
override def min[T1 >: T](implicit ord: Ordering[T1]): T =
if (ord eq defaultOrdering(num)) {
if (num.signum(step) > 0) start
else last
} else super.min(ord)
-
- override def max[T1 >: T](implicit ord: Ordering[T1]): T =
+
+ override def max[T1 >: T](implicit ord: Ordering[T1]): T =
if (ord eq defaultOrdering(num)) {
if (num.signum(step) > 0) last
else start
} else super.max(ord)
-
+
// Motivated by the desire for Double ranges with BigDecimal precision,
// we need some way to map a Range and get another Range. This can't be
// done in any fully general way because Ranges are not arbitrary
@@ -182,12 +170,11 @@ extends AbstractSeq[T] with IndexedSeq[T] with Serializable {
def containsTyped(x: T): Boolean =
isWithinBoundaries(x) && (((x - start) % step) == zero)
- override def contains(x: Any): Boolean =
+ override def contains[A1 >: T](x: A1): Boolean =
try containsTyped(x.asInstanceOf[T])
catch { case _: ClassCastException => false }
final override def sum[B >: T](implicit num: Numeric[B]): B = {
- import num.Ops
if (isEmpty) this.num fromInt 0
else if (numRangeElements == 1) head
else ((this.num fromInt numRangeElements) * (head + last) / (this.num fromInt 2))
@@ -213,7 +200,7 @@ extends AbstractSeq[T] with IndexedSeq[T] with Serializable {
/** A companion object for numeric ranges.
*/
object NumericRange {
-
+
/** Calculates the number of elements in a range given start, end, step, and
* whether or not it is inclusive. Throws an exception if step == 0 or
* the number of elements exceeds the maximum Int.
@@ -234,10 +221,10 @@ object NumericRange {
if (!isInclusive && zero == remainder) 0 else 1
)
- /** The edge cases keep coming. Since e.g.
- * Long.MaxValue + 1 == Long.MinValue
- * we do some more improbable seeming checks lest
- * overflow turn up as an empty range.
+ /* The edge cases keep coming. Since e.g.
+ * Long.MaxValue + 1 == Long.MinValue
+ * we do some more improbable seeming checks lest
+ * overflow turn up as an empty range.
*/
// The second condition contradicts an empty result.
val isOverflow = longCount == 0 && num.lt(num.plus(start, step), end) == upward
@@ -272,7 +259,7 @@ object NumericRange {
new Exclusive(start, end, step)
def inclusive[T](start: T, end: T, step: T)(implicit num: Integral[T]): Inclusive[T] =
new Inclusive(start, end, step)
-
+
private[collection] val defaultOrdering = Map[Numeric[_], Ordering[_]](
Numeric.BigIntIsIntegral -> Ordering.BigInt,
Numeric.IntIsIntegral -> Ordering.Int,
@@ -284,6 +271,6 @@ object NumericRange {
Numeric.DoubleAsIfIntegral -> Ordering.Double,
Numeric.BigDecimalAsIfIntegral -> Ordering.BigDecimal
)
-
+
}
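Two details of `NumericRange` touched above are easy to check from user code: `sum` uses the arithmetic-series closed form `n * (head + last) / 2` shown in the context lines, and the overflow guard in `count` keeps ranges that end at `Long.MaxValue` from being misreported as empty. A sketch (the demo object is illustrative):

object NumericRangeDemo extends App {
  // Closed-form sum: no traversal of the 100 elements is needed.
  val r = 1L to 100L                       // NumericRange.Inclusive[Long]
  assert(r.sum == 5050L)                   // 100 * (1 + 100) / 2

  // Near-overflow endpoints are still counted correctly.
  val edge = (Long.MaxValue - 1) to Long.MaxValue
  assert(edge.length == 2)
}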
diff --git a/src/library/scala/collection/immutable/PagedSeq.scala b/src/library/scala/collection/immutable/PagedSeq.scala
index 952107bf78..e190d022d7 100644
--- a/src/library/scala/collection/immutable/PagedSeq.scala
+++ b/src/library/scala/collection/immutable/PagedSeq.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
package immutable
import java.io._
@@ -30,7 +31,7 @@ object PagedSeq {
new PagedSeq[T]((data: Array[T], start: Int, len: Int) => {
var i = 0
while (i < len && source.hasNext) {
- data(start + i) = source.next
+ data(start + i) = source.next()
i += 1
}
if (i == 0) -1 else i
@@ -51,7 +52,7 @@ object PagedSeq {
if (cnt == len) cnt
else (more(data, start + cnt, len - cnt) max 0) + cnt
} else if (source.hasNext) {
- current = source.next
+ current = source.next()
more(data, start, len)
} else -1
new PagedSeq(more(_: Array[Char], _: Int, _: Int))
diff --git a/src/library/scala/collection/immutable/Queue.scala b/src/library/scala/collection/immutable/Queue.scala
index 7d2ff95792..c55b46bf05 100644
--- a/src/library/scala/collection/immutable/Queue.scala
+++ b/src/library/scala/collection/immutable/Queue.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
package immutable
import generic._
diff --git a/src/library/scala/collection/immutable/Range.scala b/src/library/scala/collection/immutable/Range.scala
index 802e16605d..09f8143b55 100644
--- a/src/library/scala/collection/immutable/Range.scala
+++ b/src/library/scala/collection/immutable/Range.scala
@@ -7,7 +7,8 @@
\* */
-package scala.collection.immutable
+package scala
+package collection.immutable
import scala.collection.parallel.immutable.ParRange
@@ -77,17 +78,18 @@ extends scala.collection.AbstractSeq[Int]
final val terminalElement = start + numRangeElements * step
override def last = if (isEmpty) Nil.last else lastElement
+ override def head = if (isEmpty) Nil.head else start
override def min[A1 >: Int](implicit ord: Ordering[A1]): Int =
if (ord eq Ordering.Int) {
- if (step > 0) start
+ if (step > 0) head
else last
} else super.min(ord)
override def max[A1 >: Int](implicit ord: Ordering[A1]): Int =
if (ord eq Ordering.Int) {
if (step > 0) last
- else start
+ else head
} else super.max(ord)
protected def copy(start: Int, end: Int, step: Int): Range = new Range(start, end, step)
@@ -111,6 +113,7 @@ extends scala.collection.AbstractSeq[Int]
fail()
}
+ @deprecated("Range.foreach() is now self-contained, making this auxiliary method redundant.", "2.10.1")
def validateRangeBoundaries(f: Int => Any): Boolean = {
validateMaxLength()
@@ -133,14 +136,19 @@ extends scala.collection.AbstractSeq[Int]
}
@inline final override def foreach[@specialized(Unit) U](f: Int => U) {
- if (validateRangeBoundaries(f)) {
- var i = start
- val terminal = terminalElement
- val step = this.step
- while (i != terminal) {
- f(i)
- i += step
- }
+ validateMaxLength()
+ val isCommonCase = (start != Int.MinValue || end != Int.MinValue)
+ var i = start
+ var count = 0
+ val terminal = terminalElement
+ val step = this.step
+ while(
+ if(isCommonCase) { i != terminal }
+ else { count < numRangeElements }
+ ) {
+ f(i)
+ count += 1
+ i += step
}
}
@@ -325,7 +333,7 @@ object Range {
}
}
def count(start: Int, end: Int, step: Int): Int =
- count(start, end, step, false)
+ count(start, end, step, isInclusive = false)
class Inclusive(start: Int, end: Int, step: Int) extends Range(start, end, step) {
// override def par = new ParRange(this)
diff --git a/src/library/scala/collection/immutable/RedBlack.scala b/src/library/scala/collection/immutable/RedBlack.scala
deleted file mode 100644
index 9739e8f3f3..0000000000
--- a/src/library/scala/collection/immutable/RedBlack.scala
+++ /dev/null
@@ -1,293 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2005-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala
-package collection
-package immutable
-
-/** Old base class that was used by previous implementations of `TreeMaps` and `TreeSets`.
- *
- * Deprecated due to various performance bugs (see [[https://issues.scala-lang.org/browse/SI-5331 SI-5331]] for more information).
- *
- * @since 2.3
- */
-@deprecated("use `TreeMap` or `TreeSet` instead", "2.10.0")
-@SerialVersionUID(8691885935445612921L)
-abstract class RedBlack[A] extends Serializable {
-
- def isSmaller(x: A, y: A): Boolean
-
- private def blacken[B](t: Tree[B]): Tree[B] = t match {
- case RedTree(k, v, l, r) => BlackTree(k, v, l, r)
- case t => t
- }
- private def mkTree[B](isBlack: Boolean, k: A, v: B, l: Tree[B], r: Tree[B]) =
- if (isBlack) BlackTree(k, v, l, r) else RedTree(k, v, l, r)
-
- abstract class Tree[+B] extends Serializable {
- def isEmpty: Boolean
- def isBlack: Boolean
- def lookup(x: A): Tree[B]
- def update[B1 >: B](k: A, v: B1): Tree[B1] = blacken(upd(k, v))
- def delete(k: A): Tree[B] = blacken(del(k))
- def range(from: Option[A], until: Option[A]): Tree[B] = blacken(rng(from, until))
- def foreach[U](f: (A, B) => U)
- def toStream: Stream[(A,B)]
- def iterator: Iterator[(A, B)]
- def upd[B1 >: B](k: A, v: B1): Tree[B1]
- def del(k: A): Tree[B]
- def smallest: NonEmpty[B]
- def rng(from: Option[A], until: Option[A]): Tree[B]
- def first : A
- def last : A
- def count : Int
- }
- abstract class NonEmpty[+B] extends Tree[B] with Serializable {
- def isEmpty = false
- def key: A
- def value: B
- def left: Tree[B]
- def right: Tree[B]
- def lookup(k: A): Tree[B] =
- if (isSmaller(k, key)) left.lookup(k)
- else if (isSmaller(key, k)) right.lookup(k)
- else this
- private[this] def balanceLeft[B1 >: B](isBlack: Boolean, z: A, zv: B, l: Tree[B1], d: Tree[B1])/*: NonEmpty[B1]*/ = l match {
- case RedTree(y, yv, RedTree(x, xv, a, b), c) =>
- RedTree(y, yv, BlackTree(x, xv, a, b), BlackTree(z, zv, c, d))
- case RedTree(x, xv, a, RedTree(y, yv, b, c)) =>
- RedTree(y, yv, BlackTree(x, xv, a, b), BlackTree(z, zv, c, d))
- case _ =>
- mkTree(isBlack, z, zv, l, d)
- }
- private[this] def balanceRight[B1 >: B](isBlack: Boolean, x: A, xv: B, a: Tree[B1], r: Tree[B1])/*: NonEmpty[B1]*/ = r match {
- case RedTree(z, zv, RedTree(y, yv, b, c), d) =>
- RedTree(y, yv, BlackTree(x, xv, a, b), BlackTree(z, zv, c, d))
- case RedTree(y, yv, b, RedTree(z, zv, c, d)) =>
- RedTree(y, yv, BlackTree(x, xv, a, b), BlackTree(z, zv, c, d))
- case _ =>
- mkTree(isBlack, x, xv, a, r)
- }
- def upd[B1 >: B](k: A, v: B1): Tree[B1] = {
- if (isSmaller(k, key)) balanceLeft(isBlack, key, value, left.upd(k, v), right)
- else if (isSmaller(key, k)) balanceRight(isBlack, key, value, left, right.upd(k, v))
- else mkTree(isBlack, k, v, left, right)
- }
- // Based on Stefan Kahrs' Haskell version of Okasaki's Red&Black Trees
- // http://www.cse.unsw.edu.au/~dons/data/RedBlackTree.html
- def del(k: A): Tree[B] = {
- def balance(x: A, xv: B, tl: Tree[B], tr: Tree[B]) = (tl, tr) match {
- case (RedTree(y, yv, a, b), RedTree(z, zv, c, d)) =>
- RedTree(x, xv, BlackTree(y, yv, a, b), BlackTree(z, zv, c, d))
- case (RedTree(y, yv, RedTree(z, zv, a, b), c), d) =>
- RedTree(y, yv, BlackTree(z, zv, a, b), BlackTree(x, xv, c, d))
- case (RedTree(y, yv, a, RedTree(z, zv, b, c)), d) =>
- RedTree(z, zv, BlackTree(y, yv, a, b), BlackTree(x, xv, c, d))
- case (a, RedTree(y, yv, b, RedTree(z, zv, c, d))) =>
- RedTree(y, yv, BlackTree(x, xv, a, b), BlackTree(z, zv, c, d))
- case (a, RedTree(y, yv, RedTree(z, zv, b, c), d)) =>
- RedTree(z, zv, BlackTree(x, xv, a, b), BlackTree(y, yv, c, d))
- case (a, b) =>
- BlackTree(x, xv, a, b)
- }
- def subl(t: Tree[B]) = t match {
- case BlackTree(x, xv, a, b) => RedTree(x, xv, a, b)
- case _ => sys.error("Defect: invariance violation; expected black, got "+t)
- }
- def balLeft(x: A, xv: B, tl: Tree[B], tr: Tree[B]) = (tl, tr) match {
- case (RedTree(y, yv, a, b), c) =>
- RedTree(x, xv, BlackTree(y, yv, a, b), c)
- case (bl, BlackTree(y, yv, a, b)) =>
- balance(x, xv, bl, RedTree(y, yv, a, b))
- case (bl, RedTree(y, yv, BlackTree(z, zv, a, b), c)) =>
- RedTree(z, zv, BlackTree(x, xv, bl, a), balance(y, yv, b, subl(c)))
- case _ => sys.error("Defect: invariance violation at "+right)
- }
- def balRight(x: A, xv: B, tl: Tree[B], tr: Tree[B]) = (tl, tr) match {
- case (a, RedTree(y, yv, b, c)) =>
- RedTree(x, xv, a, BlackTree(y, yv, b, c))
- case (BlackTree(y, yv, a, b), bl) =>
- balance(x, xv, RedTree(y, yv, a, b), bl)
- case (RedTree(y, yv, a, BlackTree(z, zv, b, c)), bl) =>
- RedTree(z, zv, balance(y, yv, subl(a), b), BlackTree(x, xv, c, bl))
- case _ => sys.error("Defect: invariance violation at "+left)
- }
- def delLeft = left match {
- case _: BlackTree[_] => balLeft(key, value, left.del(k), right)
- case _ => RedTree(key, value, left.del(k), right)
- }
- def delRight = right match {
- case _: BlackTree[_] => balRight(key, value, left, right.del(k))
- case _ => RedTree(key, value, left, right.del(k))
- }
- def append(tl: Tree[B], tr: Tree[B]): Tree[B] = (tl, tr) match {
- case (Empty, t) => t
- case (t, Empty) => t
- case (RedTree(x, xv, a, b), RedTree(y, yv, c, d)) =>
- append(b, c) match {
- case RedTree(z, zv, bb, cc) => RedTree(z, zv, RedTree(x, xv, a, bb), RedTree(y, yv, cc, d))
- case bc => RedTree(x, xv, a, RedTree(y, yv, bc, d))
- }
- case (BlackTree(x, xv, a, b), BlackTree(y, yv, c, d)) =>
- append(b, c) match {
- case RedTree(z, zv, bb, cc) => RedTree(z, zv, BlackTree(x, xv, a, bb), BlackTree(y, yv, cc, d))
- case bc => balLeft(x, xv, a, BlackTree(y, yv, bc, d))
- }
- case (a, RedTree(x, xv, b, c)) => RedTree(x, xv, append(a, b), c)
- case (RedTree(x, xv, a, b), c) => RedTree(x, xv, a, append(b, c))
- }
- // RedBlack is neither A : Ordering[A], nor A <% Ordered[A]
- k match {
- case _ if isSmaller(k, key) => delLeft
- case _ if isSmaller(key, k) => delRight
- case _ => append(left, right)
- }
- }
-
- def smallest: NonEmpty[B] = if (left.isEmpty) this else left.smallest
-
- def toStream: Stream[(A,B)] =
- left.toStream ++ Stream((key,value)) ++ right.toStream
-
- def iterator: Iterator[(A, B)] =
- left.iterator ++ Iterator.single(Pair(key, value)) ++ right.iterator
-
- def foreach[U](f: (A, B) => U) {
- left foreach f
- f(key, value)
- right foreach f
- }
-
- override def rng(from: Option[A], until: Option[A]): Tree[B] = {
- if (from == None && until == None) return this
- if (from != None && isSmaller(key, from.get)) return right.rng(from, until);
- if (until != None && (isSmaller(until.get,key) || !isSmaller(key,until.get)))
- return left.rng(from, until);
- val newLeft = left.rng(from, None)
- val newRight = right.rng(None, until)
- if ((newLeft eq left) && (newRight eq right)) this
- else if (newLeft eq Empty) newRight.upd(key, value);
- else if (newRight eq Empty) newLeft.upd(key, value);
- else rebalance(newLeft, newRight)
- }
-
- // The zipper returned might have been traversed left-most (always the left child)
- // or right-most (always the right child). Left trees are traversed right-most,
- // and right trees are traversed leftmost.
-
- // Returns the zipper for the side with deepest black nodes depth, a flag
- // indicating whether the trees were unbalanced at all, and a flag indicating
- // whether the zipper was traversed left-most or right-most.
-
- // If the trees were balanced, returns an empty zipper
- private[this] def compareDepth(left: Tree[B], right: Tree[B]): (List[NonEmpty[B]], Boolean, Boolean, Int) = {
- // Once a side is found to be deeper, unzip it to the bottom
- def unzip(zipper: List[NonEmpty[B]], leftMost: Boolean): List[NonEmpty[B]] = {
- val next = if (leftMost) zipper.head.left else zipper.head.right
- next match {
- case node: NonEmpty[_] => unzip(node :: zipper, leftMost)
- case Empty => zipper
- }
- }
-
- // Unzip left tree on the rightmost side and right tree on the leftmost side until one is
- // found to be deeper, or the bottom is reached
- def unzipBoth(left: Tree[B],
- right: Tree[B],
- leftZipper: List[NonEmpty[B]],
- rightZipper: List[NonEmpty[B]],
- smallerDepth: Int): (List[NonEmpty[B]], Boolean, Boolean, Int) = (left, right) match {
- case (l @ BlackTree(_, _, _, _), r @ BlackTree(_, _, _, _)) =>
- unzipBoth(l.right, r.left, l :: leftZipper, r :: rightZipper, smallerDepth + 1)
- case (l @ RedTree(_, _, _, _), r @ RedTree(_, _, _, _)) =>
- unzipBoth(l.right, r.left, l :: leftZipper, r :: rightZipper, smallerDepth)
- case (_, r @ RedTree(_, _, _, _)) =>
- unzipBoth(left, r.left, leftZipper, r :: rightZipper, smallerDepth)
- case (l @ RedTree(_, _, _, _), _) =>
- unzipBoth(l.right, right, l :: leftZipper, rightZipper, smallerDepth)
- case (Empty, Empty) =>
- (Nil, true, false, smallerDepth)
- case (Empty, r @ BlackTree(_, _, _, _)) =>
- val leftMost = true
- (unzip(r :: rightZipper, leftMost), false, leftMost, smallerDepth)
- case (l @ BlackTree(_, _, _, _), Empty) =>
- val leftMost = false
- (unzip(l :: leftZipper, leftMost), false, leftMost, smallerDepth)
- }
- unzipBoth(left, right, Nil, Nil, 0)
- }
-
- private[this] def rebalance(newLeft: Tree[B], newRight: Tree[B]) = {
- // This is like drop(n-1), but only counting black nodes
- def findDepth(zipper: List[NonEmpty[B]], depth: Int): List[NonEmpty[B]] = zipper match {
- case BlackTree(_, _, _, _) :: tail =>
- if (depth == 1) zipper else findDepth(tail, depth - 1)
- case _ :: tail => findDepth(tail, depth)
- case Nil => sys.error("Defect: unexpected empty zipper while computing range")
- }
-
- // Blackening the smaller tree avoids balancing problems on union;
- // this can't be done later, though, or it would change the result of compareDepth
- val blkNewLeft = blacken(newLeft)
- val blkNewRight = blacken(newRight)
- val (zipper, levelled, leftMost, smallerDepth) = compareDepth(blkNewLeft, blkNewRight)
-
- if (levelled) {
- BlackTree(key, value, blkNewLeft, blkNewRight)
- } else {
- val zipFrom = findDepth(zipper, smallerDepth)
- val union = if (leftMost) {
- RedTree(key, value, blkNewLeft, zipFrom.head)
- } else {
- RedTree(key, value, zipFrom.head, blkNewRight)
- }
- val zippedTree = zipFrom.tail.foldLeft(union: Tree[B]) { (tree, node) =>
- if (leftMost)
- balanceLeft(node.isBlack, node.key, node.value, tree, node.right)
- else
- balanceRight(node.isBlack, node.key, node.value, node.left, tree)
- }
- zippedTree
- }
- }
- def first = if (left .isEmpty) key else left.first
- def last = if (right.isEmpty) key else right.last
- def count = 1 + left.count + right.count
- }
- case object Empty extends Tree[Nothing] {
- def isEmpty = true
- def isBlack = true
- def lookup(k: A): Tree[Nothing] = this
- def upd[B](k: A, v: B): Tree[B] = RedTree(k, v, Empty, Empty)
- def del(k: A): Tree[Nothing] = this
- def smallest: NonEmpty[Nothing] = throw new NoSuchElementException("empty map")
- def iterator: Iterator[(A, Nothing)] = Iterator.empty
- def toStream: Stream[(A,Nothing)] = Stream.empty
-
- def foreach[U](f: (A, Nothing) => U) {}
-
- def rng(from: Option[A], until: Option[A]) = this
- def first = throw new NoSuchElementException("empty map")
- def last = throw new NoSuchElementException("empty map")
- def count = 0
- }
- case class RedTree[+B](override val key: A,
- override val value: B,
- override val left: Tree[B],
- override val right: Tree[B]) extends NonEmpty[B] {
- def isBlack = false
- }
- case class BlackTree[+B](override val key: A,
- override val value: B,
- override val left: Tree[B],
- override val right: Tree[B]) extends NonEmpty[B] {
- def isBlack = true
- }
-}
diff --git a/src/library/scala/collection/immutable/RedBlackTree.scala b/src/library/scala/collection/immutable/RedBlackTree.scala
index 0254e9ca3a..48bccde0e8 100644
--- a/src/library/scala/collection/immutable/RedBlackTree.scala
+++ b/src/library/scala/collection/immutable/RedBlackTree.scala
@@ -18,19 +18,19 @@ import scala.annotation.meta.getter
/** An object containing the RedBlack tree implementation used by for `TreeMaps` and `TreeSets`.
*
* Implementation note: since efficiency is important for data structures this implementation
- * uses <code>null</code> to represent empty trees. This also means pattern matching cannot
+ * uses `null` to represent empty trees. This also means pattern matching cannot
* easily be used. The API represented by the RedBlackTree object tries to hide these
* optimizations behind a reasonably clean API.
*
* @since 2.10
*/
-private[immutable]
+private[collection]
object RedBlackTree {
def isEmpty(tree: Tree[_, _]): Boolean = tree eq null
- def contains[A](tree: Tree[A, _], x: A)(implicit ordering: Ordering[A]): Boolean = lookup(tree, x) ne null
- def get[A, B](tree: Tree[A, B], x: A)(implicit ordering: Ordering[A]): Option[B] = lookup(tree, x) match {
+ def contains[A: Ordering](tree: Tree[A, _], x: A): Boolean = lookup(tree, x) ne null
+ def get[A: Ordering, B](tree: Tree[A, B], x: A): Option[B] = lookup(tree, x) match {
case null => None
case tree => Some(tree.value)
}
@@ -44,8 +44,27 @@ object RedBlackTree {
}
def count(tree: Tree[_, _]) = if (tree eq null) 0 else tree.count
- def update[A, B, B1 >: B](tree: Tree[A, B], k: A, v: B1, overwrite: Boolean)(implicit ordering: Ordering[A]): Tree[A, B1] = blacken(upd(tree, k, v, overwrite))
- def delete[A, B](tree: Tree[A, B], k: A)(implicit ordering: Ordering[A]): Tree[A, B] = blacken(del(tree, k))
+ /**
+ * Count all the nodes with keys greater than or equal to the lower bound and less than the upper bound.
+ * The two bounds are optional.
+ */
+ def countInRange[A](tree: Tree[A, _], from: Option[A], to: Option[A])(implicit ordering: Ordering[A]): Int =
+ if (tree eq null) 0 else
+ (from, to) match {
+ // with no bounds use this node's count
+ case (None, None) => tree.count
+ // if node is less than the lower bound, try the tree on the right, it might be in range
+ case (Some(lb), _) if ordering.lt(tree.key, lb) => countInRange(tree.right, from, to)
+ // if node is greater than or equal to the upper bound, try the tree on the left, it might be in range
+ case (_, Some(ub)) if ordering.gteq(tree.key, ub) => countInRange(tree.left, from, to)
+ // node is in range so the tree on the left will all be less than the upper bound and the tree on the
+ // right will all be greater than or equal to the lower bound. So 1 for this node plus
+ // count the subtrees by stripping off the bounds that we don't need any more
+ case _ => 1 + countInRange(tree.left, from, None) + countInRange(tree.right, None, to)
+
+ }
+ def update[A: Ordering, B, B1 >: B](tree: Tree[A, B], k: A, v: B1, overwrite: Boolean): Tree[A, B1] = blacken(upd(tree, k, v, overwrite))
+ def delete[A: Ordering, B](tree: Tree[A, B], k: A): Tree[A, B] = blacken(del(tree, k))
def rangeImpl[A: Ordering, B](tree: Tree[A, B], from: Option[A], until: Option[A]): Tree[A, B] = (from, until) match {
case (Some(from), Some(until)) => this.range(tree, from, until)
case (Some(from), None) => this.from(tree, from)
@@ -74,20 +93,26 @@ object RedBlackTree {
result
}
- def foreach[A, B, U](tree: Tree[A, B], f: ((A, B)) => U): Unit = if (tree ne null) {
- if (tree.left ne null) foreach(tree.left, f)
+
+ def foreach[A, B, U](tree: Tree[A, B], f: ((A, B)) => U): Unit = if (tree ne null) _foreach(tree, f)
+
+ private[this] def _foreach[A, B, U](tree: Tree[A, B], f: ((A, B)) => U) {
+ if (tree.left ne null) _foreach(tree.left, f)
f((tree.key, tree.value))
- if (tree.right ne null) foreach(tree.right, f)
+ if (tree.right ne null) _foreach(tree.right, f)
}
- def foreachKey[A, U](tree: Tree[A, _], f: A => U): Unit = if (tree ne null) {
- if (tree.left ne null) foreachKey(tree.left, f)
- f(tree.key)
- if (tree.right ne null) foreachKey(tree.right, f)
+
+ def foreachKey[A, U](tree: Tree[A, _], f: A => U): Unit = if (tree ne null) _foreachKey(tree, f)
+
+ private[this] def _foreachKey[A, U](tree: Tree[A, _], f: A => U) {
+ if (tree.left ne null) _foreachKey(tree.left, f)
+ f(tree.key)
+ if (tree.right ne null) _foreachKey(tree.right, f)
}
- def iterator[A, B](tree: Tree[A, B]): Iterator[(A, B)] = new EntriesIterator(tree)
- def keysIterator[A, _](tree: Tree[A, _]): Iterator[A] = new KeysIterator(tree)
- def valuesIterator[_, B](tree: Tree[_, B]): Iterator[B] = new ValuesIterator(tree)
+ def iterator[A: Ordering, B](tree: Tree[A, B], start: Option[A] = None): Iterator[(A, B)] = new EntriesIterator(tree, start)
+ def keysIterator[A: Ordering](tree: Tree[A, _], start: Option[A] = None): Iterator[A] = new KeysIterator(tree, start)
+ def valuesIterator[A: Ordering, B](tree: Tree[A, B], start: Option[A] = None): Iterator[B] = new ValuesIterator(tree, start)
@tailrec
def nth[A, B](tree: Tree[A, B], n: Int): Tree[A, B] = {
@@ -227,7 +252,7 @@ object RedBlackTree {
if (ordering.lt(tree.key, from)) return doFrom(tree.right, from)
val newLeft = doFrom(tree.left, from)
if (newLeft eq tree.left) tree
- else if (newLeft eq null) upd(tree.right, tree.key, tree.value, false)
+ else if (newLeft eq null) upd(tree.right, tree.key, tree.value, overwrite = false)
else rebalance(tree, newLeft, tree.right)
}
private[this] def doTo[A, B](tree: Tree[A, B], to: A)(implicit ordering: Ordering[A]): Tree[A, B] = {
@@ -235,7 +260,7 @@ object RedBlackTree {
if (ordering.lt(to, tree.key)) return doTo(tree.left, to)
val newRight = doTo(tree.right, to)
if (newRight eq tree.right) tree
- else if (newRight eq null) upd(tree.left, tree.key, tree.value, false)
+ else if (newRight eq null) upd(tree.left, tree.key, tree.value, overwrite = false)
else rebalance(tree, tree.left, newRight)
}
private[this] def doUntil[A, B](tree: Tree[A, B], until: A)(implicit ordering: Ordering[A]): Tree[A, B] = {
@@ -243,18 +268,18 @@ object RedBlackTree {
if (ordering.lteq(until, tree.key)) return doUntil(tree.left, until)
val newRight = doUntil(tree.right, until)
if (newRight eq tree.right) tree
- else if (newRight eq null) upd(tree.left, tree.key, tree.value, false)
+ else if (newRight eq null) upd(tree.left, tree.key, tree.value, overwrite = false)
else rebalance(tree, tree.left, newRight)
}
private[this] def doRange[A, B](tree: Tree[A, B], from: A, until: A)(implicit ordering: Ordering[A]): Tree[A, B] = {
if (tree eq null) return null
- if (ordering.lt(tree.key, from)) return doRange(tree.right, from, until);
- if (ordering.lteq(until, tree.key)) return doRange(tree.left, from, until);
+ if (ordering.lt(tree.key, from)) return doRange(tree.right, from, until)
+ if (ordering.lteq(until, tree.key)) return doRange(tree.left, from, until)
val newLeft = doFrom(tree.left, from)
val newRight = doUntil(tree.right, until)
if ((newLeft eq tree.left) && (newRight eq tree.right)) tree
- else if (newLeft eq null) upd(newRight, tree.key, tree.value, false);
- else if (newRight eq null) upd(newLeft, tree.key, tree.value, false);
+ else if (newLeft eq null) upd(newRight, tree.key, tree.value, overwrite = false)
+ else if (newRight eq null) upd(newLeft, tree.key, tree.value, overwrite = false)
else rebalance(tree, newLeft, newRight)
}
@@ -265,7 +290,7 @@ object RedBlackTree {
if (n > count) return doDrop(tree.right, n - count - 1)
val newLeft = doDrop(tree.left, n)
if (newLeft eq tree.left) tree
- else if (newLeft eq null) updNth(tree.right, n - count - 1, tree.key, tree.value, false)
+ else if (newLeft eq null) updNth(tree.right, n - count - 1, tree.key, tree.value, overwrite = false)
else rebalance(tree, newLeft, tree.right)
}
private[this] def doTake[A, B](tree: Tree[A, B], n: Int): Tree[A, B] = {
@@ -275,7 +300,7 @@ object RedBlackTree {
if (n <= count) return doTake(tree.left, n)
val newRight = doTake(tree.right, n - count - 1)
if (newRight eq tree.right) tree
- else if (newRight eq null) updNth(tree.left, n, tree.key, tree.value, false)
+ else if (newRight eq null) updNth(tree.left, n, tree.key, tree.value, overwrite = false)
else rebalance(tree, tree.left, newRight)
}
private[this] def doSlice[A, B](tree: Tree[A, B], from: Int, until: Int): Tree[A, B] = {
@@ -286,8 +311,8 @@ object RedBlackTree {
val newLeft = doDrop(tree.left, from)
val newRight = doTake(tree.right, until - count - 1)
if ((newLeft eq tree.left) && (newRight eq tree.right)) tree
- else if (newLeft eq null) updNth(newRight, from - count - 1, tree.key, tree.value, false)
- else if (newRight eq null) updNth(newLeft, until, tree.key, tree.value, false)
+ else if (newLeft eq null) updNth(newRight, from - count - 1, tree.key, tree.value, overwrite = false)
+ else if (newRight eq null) updNth(newLeft, until, tree.key, tree.value, overwrite = false)
else rebalance(tree, newLeft, newRight)
}
@@ -300,54 +325,56 @@ object RedBlackTree {
// whether the zipper was traversed left-most or right-most.
// If the trees were balanced, returns an empty zipper
- private[this] def compareDepth[A, B](left: Tree[A, B], right: Tree[A, B]): (List[Tree[A, B]], Boolean, Boolean, Int) = {
+ private[this] def compareDepth[A, B](left: Tree[A, B], right: Tree[A, B]): (NList[Tree[A, B]], Boolean, Boolean, Int) = {
+ import NList.cons
// Once a side is found to be deeper, unzip it to the bottom
- def unzip(zipper: List[Tree[A, B]], leftMost: Boolean): List[Tree[A, B]] = {
+ def unzip(zipper: NList[Tree[A, B]], leftMost: Boolean): NList[Tree[A, B]] = {
val next = if (leftMost) zipper.head.left else zipper.head.right
- next match {
- case null => zipper
- case node => unzip(node :: zipper, leftMost)
- }
+ if (next eq null) zipper
+ else unzip(cons(next, zipper), leftMost)
}
// Unzip left tree on the rightmost side and right tree on the leftmost side until one is
// found to be deeper, or the bottom is reached
def unzipBoth(left: Tree[A, B],
right: Tree[A, B],
- leftZipper: List[Tree[A, B]],
- rightZipper: List[Tree[A, B]],
- smallerDepth: Int): (List[Tree[A, B]], Boolean, Boolean, Int) = {
+ leftZipper: NList[Tree[A, B]],
+ rightZipper: NList[Tree[A, B]],
+ smallerDepth: Int): (NList[Tree[A, B]], Boolean, Boolean, Int) = {
if (isBlackTree(left) && isBlackTree(right)) {
- unzipBoth(left.right, right.left, left :: leftZipper, right :: rightZipper, smallerDepth + 1)
+ unzipBoth(left.right, right.left, cons(left, leftZipper), cons(right, rightZipper), smallerDepth + 1)
} else if (isRedTree(left) && isRedTree(right)) {
- unzipBoth(left.right, right.left, left :: leftZipper, right :: rightZipper, smallerDepth)
+ unzipBoth(left.right, right.left, cons(left, leftZipper), cons(right, rightZipper), smallerDepth)
} else if (isRedTree(right)) {
- unzipBoth(left, right.left, leftZipper, right :: rightZipper, smallerDepth)
+ unzipBoth(left, right.left, leftZipper, cons(right, rightZipper), smallerDepth)
} else if (isRedTree(left)) {
- unzipBoth(left.right, right, left :: leftZipper, rightZipper, smallerDepth)
+ unzipBoth(left.right, right, cons(left, leftZipper), rightZipper, smallerDepth)
} else if ((left eq null) && (right eq null)) {
- (Nil, true, false, smallerDepth)
+ (null, true, false, smallerDepth)
} else if ((left eq null) && isBlackTree(right)) {
val leftMost = true
- (unzip(right :: rightZipper, leftMost), false, leftMost, smallerDepth)
+ (unzip(cons(right, rightZipper), leftMost), false, leftMost, smallerDepth)
} else if (isBlackTree(left) && (right eq null)) {
val leftMost = false
- (unzip(left :: leftZipper, leftMost), false, leftMost, smallerDepth)
+ (unzip(cons(left, leftZipper), leftMost), false, leftMost, smallerDepth)
} else {
sys.error("unmatched trees in unzip: " + left + ", " + right)
}
}
- unzipBoth(left, right, Nil, Nil, 0)
+ unzipBoth(left, right, null, null, 0)
}
private[this] def rebalance[A, B](tree: Tree[A, B], newLeft: Tree[A, B], newRight: Tree[A, B]) = {
// This is like drop(n-1), but only counting black nodes
- def findDepth(zipper: List[Tree[A, B]], depth: Int): List[Tree[A, B]] = zipper match {
- case head :: tail if isBlackTree(head) =>
- if (depth == 1) zipper else findDepth(tail, depth - 1)
- case _ :: tail => findDepth(tail, depth)
- case Nil => sys.error("Defect: unexpected empty zipper while computing range")
- }
+ @tailrec
+ def findDepth(zipper: NList[Tree[A, B]], depth: Int): NList[Tree[A, B]] =
+ if (zipper eq null) {
+ sys.error("Defect: unexpected empty zipper while computing range")
+ } else if (isBlackTree(zipper.head)) {
+ if (depth == 1) zipper else findDepth(zipper.tail, depth - 1)
+ } else {
+ findDepth(zipper.tail, depth)
+ }
// Blackening the smaller tree avoids balancing problems on union;
// this can't be done later, though, or it would change the result of compareDepth
@@ -364,7 +391,7 @@ object RedBlackTree {
} else {
RedTree(tree.key, tree.value, zipFrom.head, blkNewRight)
}
- val zippedTree = zipFrom.tail.foldLeft(union: Tree[A, B]) { (tree, node) =>
+ val zippedTree = NList.foldLeft(zipFrom.tail, union: Tree[A, B]) { (tree, node) =>
if (leftMost)
balanceLeft(isBlackTree(node), node.key, node.value, tree, node.right)
else
@@ -373,6 +400,25 @@ object RedBlackTree {
zippedTree
}
}
+
+ // Null-optimized list implementation for tree rebalancing. null represents Nil.
+ private[this] final class NList[A](val head: A, val tail: NList[A])
+
+ private[this] final object NList {
+
+ def cons[B](x: B, xs: NList[B]): NList[B] = new NList(x, xs)
+
+ def foldLeft[A, B](xs: NList[A], z: B)(f: (B, A) => B): B = {
+ var acc = z
+ var these = xs
+ while (these ne null) {
+ acc = f(acc, these.head)
+ these = these.tail
+ }
+ acc
+ }
+
+ }
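
For orientation, a minimal sketch of how the null-terminated NList above is meant to be used (only valid inside the enclosing RedBlackTree object, since NList is private[this]; null stands for the empty list; this snippet is illustrative and not part of the patch):

    import NList.cons
    val xs: NList[Int] = cons(1, cons(2, cons(3, null)))                      // the list 1, 2, 3
    val sum = NList.foldLeft(xs, 0)(_ + _)                                    // 6
    val rev = NList.foldLeft(xs, null: NList[Int])((acc, x) => cons(x, acc))  // the list 3, 2, 1
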
/*
* Forcing direct fields access using the @inline annotation helps speed up
@@ -419,32 +465,28 @@ object RedBlackTree {
def unapply[A, B](t: BlackTree[A, B]) = Some((t.key, t.value, t.left, t.right))
}
- private[this] abstract class TreeIterator[A, B, R](tree: Tree[A, B]) extends Iterator[R] {
+ private[this] abstract class TreeIterator[A, B, R](root: Tree[A, B], start: Option[A])(implicit ordering: Ordering[A]) extends Iterator[R] {
protected[this] def nextResult(tree: Tree[A, B]): R
- override def hasNext: Boolean = next ne null
+ override def hasNext: Boolean = lookahead ne null
- override def next: R = next match {
+ override def next: R = lookahead match {
case null =>
throw new NoSuchElementException("next on empty iterator")
case tree =>
- next = findNext(tree.right)
+ lookahead = findLeftMostOrPopOnEmpty(goRight(tree))
nextResult(tree)
}
@tailrec
- private[this] def findNext(tree: Tree[A, B]): Tree[A, B] = {
- if (tree eq null) popPath()
+ private[this] def findLeftMostOrPopOnEmpty(tree: Tree[A, B]): Tree[A, B] =
+ if (tree eq null) popNext()
else if (tree.left eq null) tree
- else {
- pushPath(tree)
- findNext(tree.left)
- }
- }
+ else findLeftMostOrPopOnEmpty(goLeft(tree))
- private[this] def pushPath(tree: Tree[A, B]) {
+ private[this] def pushNext(tree: Tree[A, B]) {
try {
- path(index) = tree
+ stackOfNexts(index) = tree
index += 1
} catch {
case _: ArrayIndexOutOfBoundsException =>
@@ -456,17 +498,17 @@ object RedBlackTree {
* An exception handler is used instead of an if-condition to optimize the normal path.
* This makes a large difference in iteration speed!
*/
- assert(index >= path.length)
- path :+= null
- pushPath(tree)
+ assert(index >= stackOfNexts.length)
+ stackOfNexts :+= null
+ pushNext(tree)
}
}
- private[this] def popPath(): Tree[A, B] = if (index == 0) null else {
+ private[this] def popNext(): Tree[A, B] = if (index == 0) null else {
index -= 1
- path(index)
+ stackOfNexts(index)
}
- private[this] var path = if (tree eq null) null else {
+ private[this] var stackOfNexts = if (root eq null) null else {
/*
* According to "Ralf Hinze. Constructing red-black trees" [http://www.cs.ox.ac.uk/ralf.hinze/publications/#P5]
* the maximum height of a red-black tree is 2*log_2(n + 2) - 2.
@@ -475,22 +517,45 @@ object RedBlackTree {
*
* We also don't store the deepest nodes in the path so the maximum path length is further reduced by one.
*/
- val maximumHeight = 2 * (32 - Integer.numberOfLeadingZeros(tree.count + 2 - 1)) - 2 - 1
+ val maximumHeight = 2 * (32 - Integer.numberOfLeadingZeros(root.count + 2 - 1)) - 2 - 1
new Array[Tree[A, B]](maximumHeight)
}
private[this] var index = 0
- private[this] var next: Tree[A, B] = findNext(tree)
+ private[this] var lookahead: Tree[A, B] = start map startFrom getOrElse findLeftMostOrPopOnEmpty(root)
+
+ /**
+ * Find the leftmost subtree whose key is equal to the given key or, if there is no such
+ * subtree, the leftmost subtree whose key comes next after it according to the ordering.
+ * Along the way, build up the iterator's path stack so that "next" functionality works.
+ */
+ private[this] def startFrom(key: A) : Tree[A,B] = if (root eq null) null else {
+ @tailrec def find(tree: Tree[A, B]): Tree[A, B] =
+ if (tree eq null) popNext()
+ else find(
+ if (ordering.lteq(key, tree.key)) goLeft(tree)
+ else goRight(tree)
+ )
+ find(root)
+ }
+
+ private[this] def goLeft(tree: Tree[A, B]) = {
+ pushNext(tree)
+ tree.left
+ }
+
+ private[this] def goRight(tree: Tree[A, B]) = tree.right
}
- private[this] class EntriesIterator[A, B](tree: Tree[A, B]) extends TreeIterator[A, B, (A, B)](tree) {
+ private[this] class EntriesIterator[A: Ordering, B](tree: Tree[A, B], focus: Option[A]) extends TreeIterator[A, B, (A, B)](tree, focus) {
override def nextResult(tree: Tree[A, B]) = (tree.key, tree.value)
}
- private[this] class KeysIterator[A, B](tree: Tree[A, B]) extends TreeIterator[A, B, A](tree) {
+ private[this] class KeysIterator[A: Ordering, B](tree: Tree[A, B], focus: Option[A]) extends TreeIterator[A, B, A](tree, focus) {
override def nextResult(tree: Tree[A, B]) = tree.key
}
- private[this] class ValuesIterator[A, B](tree: Tree[A, B]) extends TreeIterator[A, B, B](tree) {
+ private[this] class ValuesIterator[A: Ordering, B](tree: Tree[A, B], focus: Option[A]) extends TreeIterator[A, B, B](tree, focus) {
override def nextResult(tree: Tree[A, B]) = tree.value
}
}
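
The bound-stripping recursion behind the new countInRange above works for any binary search tree that caches subtree sizes; a self-contained sketch of the same idea on a toy node type (names and types here are illustrative, not the library's):

    // Size-augmented BST: `count` is the number of nodes in the subtree rooted here.
    final case class Node(key: Int, left: Option[Node], right: Option[Node], count: Int)

    // Count keys k with from <= k < to; either bound may be absent.
    def countInRange(t: Option[Node], from: Option[Int], to: Option[Int]): Int = t match {
      case None => 0
      case Some(n) => (from, to) match {
        case (None, None)                 => n.count                          // whole subtree is in range
        case (Some(lb), _) if n.key < lb  => countInRange(n.right, from, to)  // node is below the range
        case (_, Some(ub)) if n.key >= ub => countInRange(n.left, from, to)   // node is at or above the range
        case _ => 1 + countInRange(n.left, from, None) + countInRange(n.right, None, to)
      }
    }
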
diff --git a/src/library/scala/collection/immutable/Seq.scala b/src/library/scala/collection/immutable/Seq.scala
index 14610aea33..38855ca6b0 100644
--- a/src/library/scala/collection/immutable/Seq.scala
+++ b/src/library/scala/collection/immutable/Seq.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
package immutable
import generic._
diff --git a/src/library/scala/collection/immutable/Set.scala b/src/library/scala/collection/immutable/Set.scala
index 8433c2b002..2c96f4c194 100644
--- a/src/library/scala/collection/immutable/Set.scala
+++ b/src/library/scala/collection/immutable/Set.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
package immutable
import generic._
diff --git a/src/library/scala/collection/immutable/SetProxy.scala b/src/library/scala/collection/immutable/SetProxy.scala
index 06c6843181..9e25678435 100644
--- a/src/library/scala/collection/immutable/SetProxy.scala
+++ b/src/library/scala/collection/immutable/SetProxy.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
package immutable
/** This is a simple wrapper class for <a href="Set.html"
diff --git a/src/library/scala/collection/immutable/SortedMap.scala b/src/library/scala/collection/immutable/SortedMap.scala
index eb04231c55..4b9fa81a8c 100644
--- a/src/library/scala/collection/immutable/SortedMap.scala
+++ b/src/library/scala/collection/immutable/SortedMap.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
package immutable
import generic._
@@ -82,11 +83,17 @@ self =>
override def filterKeys(p: A => Boolean): SortedMap[A, B] = new FilteredKeys(p) with SortedMap.Default[A, B] {
implicit def ordering: Ordering[A] = self.ordering
override def rangeImpl(from : Option[A], until : Option[A]): SortedMap[A, B] = self.rangeImpl(from, until).filterKeys(p)
+ override def iteratorFrom(start: A) = self iteratorFrom start filter {case (k, _) => p(k)}
+ override def keysIteratorFrom(start : A) = self keysIteratorFrom start filter p
+ override def valuesIteratorFrom(start : A) = self iteratorFrom start collect {case (k,v) if p(k) => v}
}
override def mapValues[C](f: B => C): SortedMap[A, C] = new MappedValues(f) with SortedMap.Default[A, C] {
implicit def ordering: Ordering[A] = self.ordering
override def rangeImpl(from : Option[A], until : Option[A]): SortedMap[A, C] = self.rangeImpl(from, until).mapValues(f)
+ override def iteratorFrom(start: A) = self iteratorFrom start map {case (k, v) => (k, f(v))}
+ override def keysIteratorFrom(start : A) = self keysIteratorFrom start
+ override def valuesIteratorFrom(start : A) = self valuesIteratorFrom start map f
}
}
@@ -106,13 +113,13 @@ object SortedMap extends ImmutableSortedMapFactory[SortedMap] {
val b = SortedMap.newBuilder[A, B1]
b ++= this
b += ((kv._1, kv._2))
- b.result
+ b.result()
}
override def - (key: A): SortedMap[A, B] = {
val b = newBuilder
for (kv <- this; if kv._1 != key) b += kv
- b.result
+ b.result()
}
}
}
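
A rough usage sketch of the iteratorFrom/keysIteratorFrom/valuesIteratorFrom overrides added to the filterKeys and mapValues wrappers above (assumes the 2.11-style SortedMapLike that exposes these methods; values are illustrative):

    import scala.collection.immutable.SortedMap
    val m = SortedMap(1 -> "a", 2 -> "b", 3 -> "c", 4 -> "d")
    m.filterKeys(_ % 2 == 1).iteratorFrom(2).toList         // List((3, "c")): seek first, then filter
    m.mapValues(_.toUpperCase).keysIteratorFrom(3).toList   // List(3, 4)
    m.mapValues(_.toUpperCase).valuesIteratorFrom(3).toList // List("C", "D")
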
diff --git a/src/library/scala/collection/immutable/SortedSet.scala b/src/library/scala/collection/immutable/SortedSet.scala
index 3f75d50555..4a8859a7ab 100644
--- a/src/library/scala/collection/immutable/SortedSet.scala
+++ b/src/library/scala/collection/immutable/SortedSet.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
package immutable
import generic._
diff --git a/src/library/scala/collection/immutable/Stack.scala b/src/library/scala/collection/immutable/Stack.scala
index 357e9a123c..65a34e2310 100644
--- a/src/library/scala/collection/immutable/Stack.scala
+++ b/src/library/scala/collection/immutable/Stack.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
package immutable
import generic._
diff --git a/src/library/scala/collection/immutable/Stream.scala b/src/library/scala/collection/immutable/Stream.scala
index 5bb4ef5f21..5e1de44749 100644
--- a/src/library/scala/collection/immutable/Stream.scala
+++ b/src/library/scala/collection/immutable/Stream.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
package immutable
import generic._
@@ -181,11 +182,14 @@ import scala.language.implicitConversions
* @define coll stream
* @define orderDependent
* @define orderDependentFold
+ * @define willTerminateInf Note: lazily evaluated; will terminate for infinite-sized collections.
*/
+@deprecatedInheritance("This class will be sealed.", "2.11.0")
abstract class Stream[+A] extends AbstractSeq[A]
with LinearSeq[A]
with GenericTraversableTemplate[A, Stream]
- with LinearSeqOptimized[A, Stream[A]] {
+ with LinearSeqOptimized[A, Stream[A]]
+ with Serializable {
self =>
override def companion: GenericCompanion[Stream] = Stream
@@ -286,9 +290,8 @@ self =>
len
}
- /** It's an imperfect world, but at least we can bottle up the
- * imperfection in a capsule.
- */
+ // It's an imperfect world, but at least we can bottle up the
+ // imperfection in a capsule.
@inline private def asThat[That](x: AnyRef): That = x.asInstanceOf[That]
@inline private def asStream[B](x: AnyRef): Stream[B] = x.asInstanceOf[Stream[B]]
@inline private def isStreamBuilder[B, That](bf: CanBuildFrom[Stream[A], B, That]) =
@@ -385,12 +388,17 @@ self =>
// 1) stackoverflows (could be achieved with tailrec, too)
// 2) out of memory errors for big streams (`this` reference can be eliminated from the stack)
var rest: Stream[A] = this
- while (rest.nonEmpty && !pf.isDefinedAt(rest.head)) rest = rest.tail
+
+ // Avoids calling both `pf.isDefined` and `pf.apply`.
+ var newHead: B = null.asInstanceOf[B]
+ val runWith = pf.runWith((b: B) => newHead = b)
+
+ while (rest.nonEmpty && !runWith(rest.head)) rest = rest.tail
// without the call to the companion object, a thunk is created for the tail of the new stream,
// and the closure of the thunk will reference `this`
if (rest.isEmpty) Stream.Empty.asInstanceOf[That]
- else Stream.collectedTail(rest, pf, bf).asInstanceOf[That]
+ else Stream.collectedTail(newHead, rest, pf, bf).asInstanceOf[That]
}
}
@@ -725,10 +733,15 @@ self =>
* // produces: "5, 6, 7, 8, 9"
* }}}
*/
- override def take(n: Int): Stream[A] =
+ override def take(n: Int): Stream[A] = (
+ // Note that the n == 1 condition appears redundant but is not.
+ // It prevents "tail" from being referenced (and its head being evaluated)
+ // when obtaining the last element of the result. Such are the challenges
+ // of working with a lazy-but-not-really sequence.
if (n <= 0 || isEmpty) Stream.empty
else if (n == 1) cons(head, Stream.empty)
else cons(head, tail take n-1)
+ )
@tailrec final override def drop(n: Int): Stream[A] =
if (n <= 0 || isEmpty) this
@@ -784,8 +797,23 @@ self =>
these
}
- // there's nothing we can do about dropRight, so we just keep the definition
- // in LinearSeq
+ /**
+ * @inheritdoc
+ * $willTerminateInf
+ */
+ override def dropRight(n: Int): Stream[A] = {
+ // We make dropRight work for possibly infinite streams by carrying
+ // a buffer of the dropped size. As long as the buffer is full and the
+ // rest is non-empty, we can feed elements off the buffer head. When
+ // the rest becomes empty, the full buffer is the dropped elements.
+ def advance(stub0: List[A], stub1: List[A], rest: Stream[A]): Stream[A] = {
+ if (rest.isEmpty) Stream.empty
+ else if (stub0.isEmpty) advance(stub1.reverse, Nil, rest)
+ else cons(stub0.head, advance(stub0.tail, rest.head :: stub1, rest.tail))
+ }
+ if (n <= 0) this
+ else advance((this take n).toList, Nil, this drop n)
+ }
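
A quick usage sketch of the dropRight just added (illustrative, not part of the patch): because only an n-element buffer is carried instead of forcing the whole stream, the result stays lazy even when the input is infinite.

    Stream(1, 2, 3, 4).dropRight(2).toList   // List(1, 2)
    val s = Stream.from(1).dropRight(2)      // still an infinite, lazy stream
    s.take(3).toList                         // List(1, 2, 3), computed by reading two elements ahead
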
/** Returns the longest prefix of this `Stream` whose elements satisfy the
* predicate `p`.
@@ -973,7 +1001,7 @@ final class StreamIterator[+A] private() extends AbstractIterator[A] with Iterat
def hasNext: Boolean = these.v.nonEmpty
def next(): A =
- if (isEmpty) Iterator.empty.next
+ if (isEmpty) Iterator.empty.next()
else {
val cur = these.v
val result = cur.head
@@ -1023,7 +1051,7 @@ object Stream extends SeqFactory[Stream] {
def result: Stream[A] = parts.toStream flatMap (_.toStream)
}
- object Empty extends Stream[Nothing] with Serializable {
+ object Empty extends Stream[Nothing] {
override def isEmpty = true
override def head = throw new NoSuchElementException("head of empty stream")
override def tail = throw new UnsupportedOperationException("tail of empty stream")
@@ -1074,7 +1102,7 @@ object Stream extends SeqFactory[Stream] {
/** A lazy cons cell, from which streams are built. */
@SerialVersionUID(-602202424901551803L)
- final class Cons[+A](hd: A, tl: => Stream[A]) extends Stream[A] with Serializable {
+ final class Cons[+A](hd: A, tl: => Stream[A]) extends Stream[A] {
override def isEmpty = false
override def head = hd
@volatile private[this] var tlVal: Stream[A] = _
@@ -1149,8 +1177,8 @@ object Stream extends SeqFactory[Stream] {
cons(stream.head, stream.tail filter p)
}
- private[immutable] def collectedTail[A, B, That](stream: Stream[A], pf: PartialFunction[A, B], bf: CanBuildFrom[Stream[A], B, That]) = {
- cons(pf(stream.head), stream.tail.collect(pf)(bf).asInstanceOf[Stream[B]])
+ private[immutable] def collectedTail[A, B, That](head: B, stream: Stream[A], pf: PartialFunction[A, B], bf: CanBuildFrom[Stream[A], B, That]) = {
+ cons(head, stream.tail.collect(pf)(bf).asInstanceOf[Stream[B]])
}
}
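
The collect change above relies on PartialFunction.runWith (available since 2.10) to test and apply the partial function in a single call; a small standalone sketch of that pattern, with illustrative names:

    val pf: PartialFunction[Int, String] = { case i if i % 2 == 0 => "even " + i }
    var hit: String = null
    val step = pf.runWith((s: String) => hit = s)  // Int => Boolean: true iff pf matched; stores the result as a side effect
    step(1)   // false, hit is still null
    step(2)   // true,  hit == "even 2"
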
diff --git a/src/library/scala/collection/immutable/StreamView.scala b/src/library/scala/collection/immutable/StreamView.scala
index 5a24b77eb3..127ed76eb5 100644
--- a/src/library/scala/collection/immutable/StreamView.scala
+++ b/src/library/scala/collection/immutable/StreamView.scala
@@ -1,4 +1,5 @@
-package scala.collection
+package scala
+package collection
package immutable
trait StreamView[+A, +Coll] extends StreamViewLike[A, Coll, StreamView[A, Coll]] { }
diff --git a/src/library/scala/collection/immutable/StreamViewLike.scala b/src/library/scala/collection/immutable/StreamViewLike.scala
index 236308da2e..ccab032cfd 100644
--- a/src/library/scala/collection/immutable/StreamViewLike.scala
+++ b/src/library/scala/collection/immutable/StreamViewLike.scala
@@ -1,4 +1,5 @@
-package scala.collection
+package scala
+package collection
package immutable
import generic._
diff --git a/src/library/scala/collection/immutable/StringLike.scala b/src/library/scala/collection/immutable/StringLike.scala
index edea89b555..4768413e75 100644
--- a/src/library/scala/collection/immutable/StringLike.scala
+++ b/src/library/scala/collection/immutable/StringLike.scala
@@ -6,10 +6,10 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
package immutable
-import generic._
import mutable.Builder
import scala.util.matching.Regex
import scala.math.ScalaNumber
@@ -19,12 +19,11 @@ import scala.reflect.ClassTag
* @since 2.8
*/
object StringLike {
-
// just statics for companion class.
- private final val LF: Char = 0x0A
- private final val FF: Char = 0x0C
- private final val CR: Char = 0x0D
- private final val SU: Char = 0x1A
+ private final val LF = 0x0A
+ private final val FF = 0x0C
+ private final val CR = 0x0D
+ private final val SU = 0x1A
}
import StringLike._
@@ -60,8 +59,8 @@ self =>
val start = from max 0
val end = until min length
- if (start >= end) newBuilder.result
- else (newBuilder ++= toString.substring(start, end)).result
+ if (start >= end) newBuilder.result()
+ else (newBuilder ++= toString.substring(start, end)).result()
}
/** Return the current string concatenated `n` times.
diff --git a/src/library/scala/collection/immutable/StringOps.scala b/src/library/scala/collection/immutable/StringOps.scala
index a650d98697..6737692fb1 100644
--- a/src/library/scala/collection/immutable/StringOps.scala
+++ b/src/library/scala/collection/immutable/StringOps.scala
@@ -6,9 +6,8 @@
** |/ **
\* */
-
-
-package scala.collection
+package scala
+package collection
package immutable
import mutable.StringBuilder
diff --git a/src/library/scala/collection/immutable/Traversable.scala b/src/library/scala/collection/immutable/Traversable.scala
index 5188343011..775d635fae 100644
--- a/src/library/scala/collection/immutable/Traversable.scala
+++ b/src/library/scala/collection/immutable/Traversable.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
package immutable
import generic._
diff --git a/src/library/scala/collection/immutable/TreeMap.scala b/src/library/scala/collection/immutable/TreeMap.scala
index 5b4db2686a..4d2ec579db 100644
--- a/src/library/scala/collection/immutable/TreeMap.scala
+++ b/src/library/scala/collection/immutable/TreeMap.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
package immutable
import generic._
@@ -51,9 +52,6 @@ class TreeMap[A, +B] private (tree: RB.Tree[A, B])(implicit val ordering: Orderi
with MapLike[A, B, TreeMap[A, B]]
with Serializable {
- @deprecated("use `ordering.lt` instead", "2.10.0")
- def isSmaller(x: A, y: A) = ordering.lt(x, y)
-
override protected[this] def newBuilder : Builder[(A, B), TreeMap[A, B]] =
TreeMap.newBuilder[A, B]
@@ -111,7 +109,7 @@ class TreeMap[A, +B] private (tree: RB.Tree[A, B])(implicit val ordering: Orderi
private[this] def countWhile(p: ((A, B)) => Boolean): Int = {
var result = 0
val it = iterator
- while (it.hasNext && p(it.next)) result += 1
+ while (it.hasNext && p(it.next())) result += 1
result
}
override def dropWhile(p: ((A, B)) => Boolean) = drop(countWhile(p))
@@ -131,7 +129,7 @@ class TreeMap[A, +B] private (tree: RB.Tree[A, B])(implicit val ordering: Orderi
* @param value the value to be associated with `key`
* @return a new $coll with the updated binding
*/
- override def updated [B1 >: B](key: A, value: B1): TreeMap[A, B1] = new TreeMap(RB.update(tree, key, value, true))
+ override def updated [B1 >: B](key: A, value: B1): TreeMap[A, B1] = new TreeMap(RB.update(tree, key, value, overwrite = true))
/** Add a key/value pair to this map.
* @tparam B1 type of the value of the new binding, a supertype of `B`
@@ -171,7 +169,7 @@ class TreeMap[A, +B] private (tree: RB.Tree[A, B])(implicit val ordering: Orderi
*/
def insert [B1 >: B](key: A, value: B1): TreeMap[A, B1] = {
assert(!RB.contains(tree, key))
- new TreeMap(RB.update(tree, key, value, true))
+ new TreeMap(RB.update(tree, key, value, overwrite = true))
}
def - (key:A): TreeMap[A, B] =
@@ -192,9 +190,13 @@ class TreeMap[A, +B] private (tree: RB.Tree[A, B])(implicit val ordering: Orderi
* @return the new iterator
*/
override def iterator: Iterator[(A, B)] = RB.iterator(tree)
+ override def iteratorFrom(start: A): Iterator[(A, B)] = RB.iterator(tree, Some(start))
override def keysIterator: Iterator[A] = RB.keysIterator(tree)
+ override def keysIteratorFrom(start: A): Iterator[A] = RB.keysIterator(tree, Some(start))
+
override def valuesIterator: Iterator[B] = RB.valuesIterator(tree)
+ override def valuesIteratorFrom(start: A): Iterator[B] = RB.valuesIterator(tree, Some(start))
override def contains(key: A): Boolean = RB.contains(tree, key)
override def isDefinedAt(key: A): Boolean = RB.contains(tree, key)
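
The new iteratorFrom/keysIteratorFrom/valuesIteratorFrom overrides above can be exercised roughly like this (illustrative values):

    import scala.collection.immutable.TreeMap
    val tm = TreeMap(1 -> "a", 2 -> "b", 3 -> "c")
    tm.iteratorFrom(2).toList        // List((2, "b"), (3, "c"))
    tm.keysIteratorFrom(2).toList    // List(2, 3)
    tm.valuesIteratorFrom(2).toList  // List("b", "c")
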
diff --git a/src/library/scala/collection/immutable/TreeSet.scala b/src/library/scala/collection/immutable/TreeSet.scala
index 494776587d..dfe1a833ef 100644
--- a/src/library/scala/collection/immutable/TreeSet.scala
+++ b/src/library/scala/collection/immutable/TreeSet.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
package immutable
import generic._
@@ -89,16 +90,13 @@ class TreeSet[A] private (tree: RB.Tree[A, Unit])(implicit val ordering: Orderin
private[this] def countWhile(p: A => Boolean): Int = {
var result = 0
val it = iterator
- while (it.hasNext && p(it.next)) result += 1
+ while (it.hasNext && p(it.next())) result += 1
result
}
override def dropWhile(p: A => Boolean) = drop(countWhile(p))
override def takeWhile(p: A => Boolean) = take(countWhile(p))
override def span(p: A => Boolean) = splitAt(countWhile(p))
- @deprecated("use `ordering.lt` instead", "2.10.0")
- def isSmaller(x: A, y: A) = compare(x,y) < 0
-
def this()(implicit ordering: Ordering[A]) = this(null)(ordering)
private def newSet(t: RB.Tree[A, Unit]) = new TreeSet[A](t)
@@ -112,7 +110,7 @@ class TreeSet[A] private (tree: RB.Tree[A, Unit])(implicit val ordering: Orderin
* @param elem a new element to add.
* @return a new $coll containing `elem` and all the elements of this $coll.
*/
- def + (elem: A): TreeSet[A] = newSet(RB.update(tree, elem, (), false))
+ def + (elem: A): TreeSet[A] = newSet(RB.update(tree, elem, (), overwrite = false))
/** A new `TreeSet` with the entry added is returned,
* assuming that elem is <em>not</em> in the TreeSet.
@@ -122,7 +120,7 @@ class TreeSet[A] private (tree: RB.Tree[A, Unit])(implicit val ordering: Orderin
*/
def insert(elem: A): TreeSet[A] = {
assert(!RB.contains(tree, elem))
- newSet(RB.update(tree, elem, (), false))
+ newSet(RB.update(tree, elem, (), overwrite = false))
}
/** Creates a new `TreeSet` with the entry removed.
@@ -147,6 +145,7 @@ class TreeSet[A] private (tree: RB.Tree[A, Unit])(implicit val ordering: Orderin
* @return the new iterator
*/
def iterator: Iterator[A] = RB.keysIterator(tree)
+ override def keysIteratorFrom(start: A): Iterator[A] = RB.keysIterator(tree, Some(start))
override def foreach[U](f: A => U) = RB.foreachKey(tree, f)
diff --git a/src/library/scala/collection/immutable/TrieIterator.scala b/src/library/scala/collection/immutable/TrieIterator.scala
index ae427852d4..d7335e80f1 100644
--- a/src/library/scala/collection/immutable/TrieIterator.scala
+++ b/src/library/scala/collection/immutable/TrieIterator.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
package immutable
import HashMap.{ HashTrieMap, HashMapCollision1, HashMap1 }
@@ -46,7 +47,7 @@ private[collection] abstract class TrieIterator[+T](elems: Array[Iterable[T]]) e
case x: HashSetCollision1[_] => x.ks.map(x => HashSet(x)).toArray
}).asInstanceOf[Array[Iterable[T]]]
- private type SplitIterators = ((Iterator[T], Int), Iterator[T])
+ private[this] type SplitIterators = ((Iterator[T], Int), Iterator[T])
private def isTrie(x: AnyRef) = x match {
case _: HashTrieMap[_,_] | _: HashTrieSet[_] => true
@@ -94,7 +95,7 @@ private[collection] abstract class TrieIterator[+T](elems: Array[Iterable[T]]) e
def hasNext = (subIter ne null) || depth >= 0
def next(): T = {
if (subIter ne null) {
- val el = subIter.next
+ val el = subIter.next()
if (!subIter.hasNext)
subIter = null
el
@@ -135,7 +136,7 @@ private[collection] abstract class TrieIterator[+T](elems: Array[Iterable[T]]) e
}
else {
subIter = m.iterator
- next
+ next()
}
// The much slower version:
//
@@ -177,7 +178,6 @@ private[collection] abstract class TrieIterator[+T](elems: Array[Iterable[T]]) e
if (depth > 0) {
// 2) topmost comes before (is not) arrayD
// steal a portion of top to create a new iterator
- val topmost = arrayStack(0)
if (posStack(0) == arrayStack(0).length - 1) {
// 2a) only a single entry left on top
// this means we have to modify this iterator - pop topmost
diff --git a/src/library/scala/collection/immutable/Vector.scala b/src/library/scala/collection/immutable/Vector.scala
index bcce4a99bd..571e6775c8 100644
--- a/src/library/scala/collection/immutable/Vector.scala
+++ b/src/library/scala/collection/immutable/Vector.scala
@@ -18,16 +18,7 @@ import scala.collection.parallel.immutable.ParVector
/** Companion object to the Vector class
*/
-object Vector extends SeqFactory[Vector] {
- // left lying around for binary compatibility check
- private[collection] class VectorReusableCBF extends GenericCanBuildFrom[Nothing] {
- override def apply() = newBuilder[Nothing]
- }
- // left lying around for binary compatibility check
- private val VectorReusableCBF: GenericCanBuildFrom[Nothing] = new VectorReusableCBF
-
- override lazy val ReusableCBF =
- scala.collection.IndexedSeq.ReusableCBF.asInstanceOf[GenericCanBuildFrom[Nothing]]
+object Vector extends IndexedSeqFactory[Vector] {
def newBuilder[A]: Builder[A, Vector[A]] = new VectorBuilder[A]
implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, Vector[A]] =
ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]]
@@ -113,7 +104,7 @@ override def companion: GenericCompanion[Vector] = Vector
if (0 < i) {
i -= 1
self(i)
- } else Iterator.empty.next
+ } else Iterator.empty.next()
}
// TODO: reverse
@@ -148,7 +139,7 @@ override def companion: GenericCompanion[Vector] = Vector
if (bf eq IndexedSeq.ReusableCBF) appendFront(elem).asInstanceOf[That] // just ignore bf
else super.+:(elem)(bf)
- override def :+[B >: A, That](elem: B)(implicit bf: CanBuildFrom[Vector[A], B, That]): That =
+ override def :+[B >: A, That](elem: B)(implicit bf: CanBuildFrom[Vector[A], B, That]): That =
if (bf eq IndexedSeq.ReusableCBF) appendBack(elem).asInstanceOf[That] // just ignore bf
else super.:+(elem)(bf)
@@ -251,8 +242,8 @@ override def companion: GenericCompanion[Vector] = Vector
private[immutable] def appendFront[B>:A](value: B): Vector[B] = {
if (endIndex != startIndex) {
- var blockIndex = (startIndex - 1) & ~31
- var lo = (startIndex - 1) & 31
+ val blockIndex = (startIndex - 1) & ~31
+ val lo = (startIndex - 1) & 31
if (startIndex != blockIndex + 32) {
val s = new Vector(startIndex - 1, endIndex, blockIndex)
@@ -270,7 +261,7 @@ override def companion: GenericCompanion[Vector] = Vector
//println("----- appendFront " + value + " at " + (startIndex - 1) + " reached block start")
if (shift != 0) {
// case A: we can shift right on the top level
- debug
+ debug()
//println("shifting right by " + shiftBlocks + " at level " + (depth-1) + " (had "+freeSpace+" free space)")
if (depth > 1) {
@@ -280,7 +271,7 @@ override def companion: GenericCompanion[Vector] = Vector
s.initFrom(this)
s.dirty = dirty
s.shiftTopLevel(0, shiftBlocks) // shift right by n blocks
- s.debug
+ s.debug()
s.gotoFreshPosWritable(newFocus, newBlockIndex, newFocus ^ newBlockIndex) // maybe create pos; prepare for writing
s.display0(lo) = value.asInstanceOf[AnyRef]
//assert(depth == s.depth)
@@ -298,7 +289,7 @@ override def companion: GenericCompanion[Vector] = Vector
s.shiftTopLevel(0, shiftBlocks) // shift right by n elements
s.gotoPosWritable(newFocus, newBlockIndex, newFocus ^ newBlockIndex) // prepare for writing
s.display0(shift-1) = value.asInstanceOf[AnyRef]
- s.debug
+ s.debug()
s
}
} else if (blockIndex < 0) {
@@ -313,10 +304,10 @@ override def companion: GenericCompanion[Vector] = Vector
val s = new Vector(startIndex - 1 + move, endIndex + move, newBlockIndex)
s.initFrom(this)
s.dirty = dirty
- s.debug
+ s.debug()
s.gotoFreshPosWritable(newFocus, newBlockIndex, newFocus ^ newBlockIndex) // could optimize: we know it will create a whole branch
s.display0(lo) = value.asInstanceOf[AnyRef]
- s.debug
+ s.debug()
//assert(s.depth == depth+1)
s
} else {
@@ -348,8 +339,8 @@ override def companion: GenericCompanion[Vector] = Vector
// //println("------- append " + value)
// debug()
if (endIndex != startIndex) {
- var blockIndex = endIndex & ~31
- var lo = endIndex & 31
+ val blockIndex = endIndex & ~31
+ val lo = endIndex & 31
if (endIndex != blockIndex) {
//println("will make writable block (from "+focus+") at: " + blockIndex)
@@ -366,7 +357,7 @@ override def companion: GenericCompanion[Vector] = Vector
//println("----- appendBack " + value + " at " + endIndex + " reached block end")
if (shift != 0) {
- debug
+ debug()
//println("shifting left by " + shiftBlocks + " at level " + (depth-1) + " (had "+startIndex+" free space)")
if (depth > 1) {
val newBlockIndex = blockIndex - shift
@@ -375,10 +366,10 @@ override def companion: GenericCompanion[Vector] = Vector
s.initFrom(this)
s.dirty = dirty
s.shiftTopLevel(shiftBlocks, 0) // shift left by n blocks
- s.debug
+ s.debug()
s.gotoFreshPosWritable(newFocus, newBlockIndex, newFocus ^ newBlockIndex)
s.display0(lo) = value.asInstanceOf[AnyRef]
- s.debug
+ s.debug()
//assert(depth == s.depth)
s
} else {
@@ -394,7 +385,7 @@ override def companion: GenericCompanion[Vector] = Vector
s.shiftTopLevel(shiftBlocks, 0) // shift right by n elements
s.gotoPosWritable(newFocus, newBlockIndex, newFocus ^ newBlockIndex)
s.display0(32 - shift) = value.asInstanceOf[AnyRef]
- s.debug
+ s.debug()
s
}
} else {
@@ -409,7 +400,7 @@ override def companion: GenericCompanion[Vector] = Vector
//assert(s.depth == depth+1) might or might not create new level!
if (s.depth == depth+1) {
//println("creating new level " + s.depth + " (had "+0+" free space)")
- s.debug
+ s.debug()
}
s
}
@@ -583,9 +574,7 @@ override def companion: GenericCompanion[Vector] = Vector
}
private def dropFront0(cutIndex: Int): Vector[A] = {
- var blockIndex = cutIndex & ~31
- var lo = cutIndex & 31
-
+ val blockIndex = cutIndex & ~31
val xor = cutIndex ^ (endIndex - 1)
val d = requiredDepth(xor)
val shift = (cutIndex & ~((1 << (5*d))-1))
@@ -615,9 +604,7 @@ override def companion: GenericCompanion[Vector] = Vector
}
private def dropBack0(cutIndex: Int): Vector[A] = {
- var blockIndex = (cutIndex - 1) & ~31
- var lo = ((cutIndex - 1) & 31) + 1
-
+ val blockIndex = (cutIndex - 1) & ~31
val xor = startIndex ^ (cutIndex - 1)
val d = requiredDepth(xor)
val shift = (startIndex & ~((1 << (5*d))-1))
@@ -639,14 +626,13 @@ override def companion: GenericCompanion[Vector] = Vector
}
-class VectorIterator[+A](_startIndex: Int, _endIndex: Int)
+class VectorIterator[+A](_startIndex: Int, endIndex: Int)
extends AbstractIterator[A]
with Iterator[A]
with VectorPointer[A @uncheckedVariance] {
private var blockIndex: Int = _startIndex & ~31
private var lo: Int = _startIndex & 31
- private var endIndex: Int = _endIndex
private var endLo = math.min(endIndex - blockIndex, 32)
@@ -676,13 +662,13 @@ extends AbstractIterator[A]
res
}
- private[collection] def remainingElementCount: Int = (_endIndex - (blockIndex + lo)) max 0
+ private[collection] def remainingElementCount: Int = (endIndex - (blockIndex + lo)) max 0
/** Creates a new vector which consists of elements remaining in this iterator.
* Such a vector can then be split into several vectors using methods like `take` and `drop`.
*/
private[collection] def remainingVector: Vector[A] = {
- val v = new Vector(blockIndex + lo, _endIndex, blockIndex + lo)
+ val v = new Vector(blockIndex + lo, endIndex, blockIndex + lo)
v.initFrom(this)
v
}
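
The appendFront/appendBack hunks above repeatedly split an index into a 32-element block and an offset inside that block; a small sketch of the bit arithmetic involved (values are illustrative):

    val index      = 1000
    val blockIndex = index & ~31   // 992: first index of the 32-wide block containing `index`
    val lo         = index & 31    // 8:   offset of `index` within that block
    assert(blockIndex + lo == index)
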
diff --git a/src/library/scala/collection/immutable/WrappedString.scala b/src/library/scala/collection/immutable/WrappedString.scala
index edcab31f33..d6bebf9ef5 100644
--- a/src/library/scala/collection/immutable/WrappedString.scala
+++ b/src/library/scala/collection/immutable/WrappedString.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
package immutable
import generic._
diff --git a/src/library/scala/collection/immutable/package.scala b/src/library/scala/collection/immutable/package.scala
deleted file mode 100644
index ed0c1b3736..0000000000
--- a/src/library/scala/collection/immutable/package.scala
+++ /dev/null
@@ -1,93 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala.collection
-
-package immutable {
- /** It looks like once upon a time this was used by ParRange, but
- * since December 2010 in r23721 it is not used by anything. We
- * should not have public API traits with seductive names like
- * "RangeUtils" which are neither documented nor used.
- */
- @deprecated("this class will be removed", "2.10.0")
- trait RangeUtils[+Repr <: RangeUtils[Repr]] {
- def start: Int
- def end: Int
- def step: Int
- def inclusive: Boolean
- def create(_start: Int, _end: Int, _step: Int, _inclusive: Boolean): Repr
-
- private final def inclusiveLast: Int = {
- val size = end.toLong - start.toLong
- (size / step.toLong * step.toLong + start.toLong).toInt
- }
-
- final def _last: Int = (
- if (!inclusive) {
- if (step == 1 || step == -1) end - step
- else {
- val inclast = inclusiveLast
- if ((end.toLong - start.toLong) % step == 0) inclast - step else inclast
- }
- }
- else if (step == 1 || step == -1) end
- else inclusiveLast
- )
-
- final def _foreach[U](f: Int => U) = if (_length > 0) {
- var i = start
- val last = _last
- while (i != last) {
- f(i)
- i += step
- }
- }
-
- final def _length: Int = (
- if (!inclusive) {
- if (end > start == step > 0 && start != end) {
- (_last.toLong - start.toLong) / step.toLong + 1
- } else 0
- }.toInt
- else {
- if (end > start == step > 0 || start == end) {
- (_last.toLong - start.toLong) / step.toLong + 1
- } else 0
- }.toInt
- )
-
- final def _apply(idx: Int): Int = {
- if (idx < 0 || idx >= _length) throw new IndexOutOfBoundsException(idx.toString)
- start + idx * step
- }
-
- private def locationAfterN(n: Int) = (
- if (n > 0) {
- if (step > 0)
- scala.math.min(start.toLong + step.toLong * n.toLong, _last.toLong).toInt
- else
- scala.math.max(start.toLong + step.toLong * n.toLong, _last.toLong).toInt
- }
- else start
- )
-
- final def _take(n: Int) = (
- if (n > 0 && _length > 0)
- create(start, locationAfterN(n), step, true)
- else
- create(start, start, step, false)
- )
-
- final def _drop(n: Int) = create(locationAfterN(n), end, step, inclusive)
- final def _slice(from: Int, until: Int) = _drop(from)._take(until - from)
- }
-}
-
-package object immutable {
- /** Nothing left after I promoted RangeUtils to the package. */
-}
diff --git a/src/library/scala/collection/mutable/AVLTree.scala b/src/library/scala/collection/mutable/AVLTree.scala
index 157e5dae62..d2205f9994 100644
--- a/src/library/scala/collection/mutable/AVLTree.scala
+++ b/src/library/scala/collection/mutable/AVLTree.scala
@@ -10,12 +10,11 @@ package scala
package collection
package mutable
-
/**
* An immutable AVL Tree implementation used by mutable.TreeSet
*
* @author Lucien Pereira
- *
+ * @deprecated("AVLTree and its related classes are being removed from the standard library since they're not different enough from RedBlackTree to justify keeping them.", "2.11")
*/
private[mutable] sealed trait AVLTree[+A] extends Serializable {
def balance: Int
@@ -65,13 +64,19 @@ private[mutable] sealed trait AVLTree[+A] extends Serializable {
def doubleRightRotation[B >: A]: Node[B] = sys.error("Should not happen.")
}
+/**
+ * @deprecated("AVLTree and its related classes are being removed from the standard library since they're not different enough from RedBlackTree to justify keeping them.", "2.11")
+ */
private case object Leaf extends AVLTree[Nothing] {
override val balance: Int = 0
override val depth: Int = -1
}
-private case class Node[A](val data: A, val left: AVLTree[A], val right: AVLTree[A]) extends AVLTree[A] {
+/**
+ * @deprecated("AVLTree and its related classes are being removed from the standard library since they're not different enough from RedBlackTree to justify keeping them.", "2.11")
+ */
+private case class Node[A](data: A, left: AVLTree[A], right: AVLTree[A]) extends AVLTree[A] {
override val balance: Int = right.depth - left.depth
override val depth: Int = math.max(left.depth, right.depth) + 1
@@ -205,6 +210,9 @@ private case class Node[A](val data: A, val left: AVLTree[A], val right: AVLTree
}
}
+/**
+ * @deprecated("AVLTree and its related classes are being removed from the standard library since they're not different enough from RedBlackTree to justify keeping them.", "2.11")
+ */
private class AVLIterator[A](root: Node[A]) extends Iterator[A] {
val stack = mutable.ArrayStack[Node[A]](root)
diveLeft()
@@ -220,11 +228,11 @@ private class AVLIterator[A](root: Node[A]) extends Iterator[A] {
private def engageRight(): Unit = {
if (Leaf != stack.head.right) {
val right: Node[A] = stack.head.right.asInstanceOf[Node[A]]
- stack.pop
+ stack.pop()
stack.push(right)
diveLeft()
} else
- stack.pop
+ stack.pop()
}
override def hasNext: Boolean = !stack.isEmpty
diff --git a/src/library/scala/collection/mutable/ArrayBuffer.scala b/src/library/scala/collection/mutable/ArrayBuffer.scala
index f1cfd2d69a..2d43b352c5 100644
--- a/src/library/scala/collection/mutable/ArrayBuffer.scala
+++ b/src/library/scala/collection/mutable/ArrayBuffer.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
package mutable
import generic._
diff --git a/src/library/scala/collection/mutable/ArrayBuilder.scala b/src/library/scala/collection/mutable/ArrayBuilder.scala
index 0ce2cda32c..65b4d52a60 100644
--- a/src/library/scala/collection/mutable/ArrayBuilder.scala
+++ b/src/library/scala/collection/mutable/ArrayBuilder.scala
@@ -6,12 +6,10 @@
** |/ **
\* */
-
-
-package scala.collection
+package scala
+package collection
package mutable
-import generic._
import scala.reflect.ClassTag
import scala.runtime.ScalaRunTime
diff --git a/src/library/scala/collection/mutable/ArrayLike.scala b/src/library/scala/collection/mutable/ArrayLike.scala
index 31f3d2a497..4a6820856d 100644
--- a/src/library/scala/collection/mutable/ArrayLike.scala
+++ b/src/library/scala/collection/mutable/ArrayLike.scala
@@ -6,11 +6,9 @@
** |/ **
\* */
-
-
-package scala.collection
+package scala
+package collection
package mutable
-import generic._
/** A common supertrait of `ArrayOps` and `WrappedArray` that factors out most
* operations on arrays and wrapped arrays.
diff --git a/src/library/scala/collection/mutable/ArrayOps.scala b/src/library/scala/collection/mutable/ArrayOps.scala
index 25ba7e4ce6..4c996bfb88 100644
--- a/src/library/scala/collection/mutable/ArrayOps.scala
+++ b/src/library/scala/collection/mutable/ArrayOps.scala
@@ -52,6 +52,20 @@ trait ArrayOps[T] extends Any with ArrayLike[T, Array[T]] with CustomParalleliza
super.toArray[U]
}
+ def :+[B >: T: scala.reflect.ClassTag](elem: B): Array[B] = {
+ val result = Array.ofDim[B](repr.length + 1)
+ Array.copy(repr, 0, result, 0, repr.length)
+ result(repr.length) = elem
+ result
+ }
+
+ def +:[B >: T: scala.reflect.ClassTag](elem: B): Array[B] = {
+ val result = Array.ofDim[B](repr.length + 1)
+ result(0) = elem
+ Array.copy(repr, 0, result, 1, repr.length)
+ result
+ }
+
override def par = ParArray.handoff(repr)
/** Flattens a two-dimensional array by concatenating all its rows
@@ -66,7 +80,7 @@ trait ArrayOps[T] extends Any with ArrayLike[T, Array[T]] with CustomParalleliza
b.sizeHint(map{case is: scala.collection.IndexedSeq[_] => is.size case _ => 0}.sum)
for (xs <- this)
b ++= asTrav(xs)
- b.result
+ b.result()
}
/** Transposes a two dimensional array.
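
Usage sketch for the :+ and +: methods added to ArrayOps above (each call allocates a fresh array; values are illustrative):

    val xs = Array(1, 2, 3)
    (xs :+ 4).toList   // List(1, 2, 3, 4): element appended
    (0 +: xs).toList   // List(0, 1, 2, 3): element prepended
    xs.toList          // List(1, 2, 3): the original array is unchanged
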
diff --git a/src/library/scala/collection/mutable/ArraySeq.scala b/src/library/scala/collection/mutable/ArraySeq.scala
index 33f6949662..577a838315 100644
--- a/src/library/scala/collection/mutable/ArraySeq.scala
+++ b/src/library/scala/collection/mutable/ArraySeq.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
package mutable
import generic._
@@ -90,7 +91,7 @@ extends AbstractSeq[A]
}
override def clone(): ArraySeq[A] = {
- val cloned = array.clone.asInstanceOf[Array[AnyRef]]
+ val cloned = array.clone().asInstanceOf[Array[AnyRef]]
new ArraySeq[A](length) {
override val array = cloned
}
diff --git a/src/library/scala/collection/mutable/ArrayStack.scala b/src/library/scala/collection/mutable/ArrayStack.scala
index 670558ab06..e05d668519 100644
--- a/src/library/scala/collection/mutable/ArrayStack.scala
+++ b/src/library/scala/collection/mutable/ArrayStack.scala
@@ -150,7 +150,7 @@ extends AbstractSeq[T]
*
* @param f The function to drain to.
*/
- def drain(f: T => Unit) = while (!isEmpty) f(pop)
+ def drain(f: T => Unit) = while (!isEmpty) f(pop())
/** Pushes all the provided elements in the traversable object onto the stack.
*
@@ -190,7 +190,7 @@ extends AbstractSeq[T]
*
* @param f The function to apply to the top two elements.
*/
- def combine(f: (T, T) => T): Unit = push(f(pop, pop))
+ def combine(f: (T, T) => T): Unit = push(f(pop(), pop()))
/** Repeatedly combine the top elements of the stack until the stack contains only
* one element.
diff --git a/src/library/scala/collection/mutable/BitSet.scala b/src/library/scala/collection/mutable/BitSet.scala
index 2a535a799c..f252a0bfeb 100644
--- a/src/library/scala/collection/mutable/BitSet.scala
+++ b/src/library/scala/collection/mutable/BitSet.scala
@@ -8,11 +8,12 @@
-package scala.collection
+package scala
+package collection
package mutable
import generic._
-import BitSetLike.{LogWL, updateArray}
+import BitSetLike.{LogWL, MaxSize, updateArray}
/** A class for mutable bitsets.
*
@@ -58,14 +59,19 @@ class BitSet(protected var elems: Array[Long]) extends AbstractSet[Int]
if (idx < nwords) elems(idx) else 0L
private def updateWord(idx: Int, w: Long) {
+ ensureCapacity(idx)
+ elems(idx) = w
+ }
+
+ private def ensureCapacity(idx: Int) {
+ require(idx < MaxSize)
if (idx >= nwords) {
var newlen = nwords
- while (idx >= newlen) newlen = newlen * 2
+ while (idx >= newlen) newlen = (newlen * 2) min MaxSize
val elems1 = new Array[Long](newlen)
Array.copy(elems, 0, elems1, 0, nwords)
elems = elems1
}
- elems(idx) = w
}
protected def fromBitMaskNoCopy(words: Array[Long]): BitSet = new BitSet(words)
@@ -92,6 +98,51 @@ class BitSet(protected var elems: Array[Long]) extends AbstractSet[Int]
def += (elem: Int): this.type = { add(elem); this }
def -= (elem: Int): this.type = { remove(elem); this }
+ /** Updates this bitset to the union with another bitset by performing a bitwise "or".
+ *
+ * @param other the bitset to form the union with.
+ * @return the bitset itself.
+ */
+ def |= (other: BitSet): this.type = {
+ ensureCapacity(other.nwords)
+ for (i <- 0 until other.nwords)
+ elems(i) = elems(i) | other.word(i)
+ this
+ }
+ /** Updates this bitset to the intersection with another bitset by performing a bitwise "and".
+ *
+ * @param other the bitset to form the intersection with.
+ * @return the bitset itself.
+ */
+ def &= (other: BitSet): this.type = {
+ ensureCapacity(other.nwords)
+ for (i <- 0 until other.nwords)
+ elems(i) = elems(i) & other.word(i)
+ this
+ }
+ /** Updates this bitset to the symmetric difference with another bitset by performing a bitwise "xor".
+ *
+ * @param other the bitset to form the symmetric difference with.
+ * @return the bitset itself.
+ */
+ def ^= (other: BitSet): this.type = {
+ ensureCapacity(other.nwords)
+ for (i <- 0 until other.nwords)
+ elems(i) = elems(i) ^ other.word(i)
+ this
+ }
+ /** Updates this bitset to the difference with another bitset by performing a bitwise "and-not".
+ *
+ * @param other the bitset to form the difference with.
+ * @return the bitset itself.
+ */
+ def &~= (other: BitSet): this.type = {
+ ensureCapacity(other.nwords)
+ for (i <- 0 until other.nwords)
+ elems(i) = elems(i) & ~other.word(i)
+ this
+ }
+
override def clear() {
elems = new Array[Long](elems.length)
}
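
The in-place bitwise operators added above mutate the receiver and can be used like any other mutable-set update (a sketch with illustrative values):

    import scala.collection.mutable.BitSet
    val a = BitSet(1, 2, 3)
    val b = BitSet(3, 4)
    a |= b                // a is now BitSet(1, 2, 3, 4)
    a &~= BitSet(2)       // a is now BitSet(1, 3, 4)
    a &= BitSet(1, 3, 5)  // a is now BitSet(1, 3)
    a ^= BitSet(3, 8)     // a is now BitSet(1, 8)
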
diff --git a/src/library/scala/collection/mutable/Buffer.scala b/src/library/scala/collection/mutable/Buffer.scala
index 230799c6f3..d2e33badbe 100644
--- a/src/library/scala/collection/mutable/Buffer.scala
+++ b/src/library/scala/collection/mutable/Buffer.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
package mutable
import generic._
diff --git a/src/library/scala/collection/mutable/BufferLike.scala b/src/library/scala/collection/mutable/BufferLike.scala
index 5935a2858a..4b3d3bc1cf 100644
--- a/src/library/scala/collection/mutable/BufferLike.scala
+++ b/src/library/scala/collection/mutable/BufferLike.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
package mutable
import generic._
@@ -198,7 +199,7 @@ trait BufferLike[A, +This <: BufferLike[A, This] with Buffer[A]]
case Remove(Index(n), x) => if (this(n) == x) remove(n)
case Remove(NoLo, x) => this -= x
- case Reset() => clear
+ case Reset() => clear()
case s: Script[_] => s.iterator foreach <<
case _ => throw new UnsupportedOperationException("message " + cmd + " not understood")
}
@@ -260,6 +261,6 @@ trait BufferLike[A, +This <: BufferLike[A, This] with Buffer[A]]
override def clone(): This = {
val bf = newBuilder
bf ++= this
- bf.result.asInstanceOf[This]
+ bf.result().asInstanceOf[This]
}
}
diff --git a/src/library/scala/collection/mutable/BufferProxy.scala b/src/library/scala/collection/mutable/BufferProxy.scala
index 37aa1862fa..14946cdfd8 100644
--- a/src/library/scala/collection/mutable/BufferProxy.scala
+++ b/src/library/scala/collection/mutable/BufferProxy.scala
@@ -6,12 +6,10 @@
** |/ **
\* */
-
-
-package scala.collection
+package scala
+package collection
package mutable
-import generic._
import script._
/** This is a simple proxy class for <a href="Buffer.html"
@@ -127,7 +125,7 @@ trait BufferProxy[A] extends Buffer[A] with Proxy {
/** Clears the buffer contents.
*/
- def clear() { self.clear }
+ def clear() { self.clear() }
/** Send a message to this scriptable object.
*
diff --git a/src/library/scala/collection/mutable/Builder.scala b/src/library/scala/collection/mutable/Builder.scala
index 5c0681df1d..75560580cc 100644
--- a/src/library/scala/collection/mutable/Builder.scala
+++ b/src/library/scala/collection/mutable/Builder.scala
@@ -121,7 +121,7 @@ trait Builder[-Elem, +To] extends Growable[Elem] {
override def ++=(xs: TraversableOnce[Elem]): this.type = { self ++= xs; this }
override def sizeHint(size: Int) = self.sizeHint(size)
override def sizeHintBounded(size: Int, boundColl: TraversableLike[_, _]) = self.sizeHintBounded(size, boundColl)
- def result: NewTo = f(self.result)
+ def result: NewTo = f(self.result())
}
}
diff --git a/src/library/scala/collection/mutable/Cloneable.scala b/src/library/scala/collection/mutable/Cloneable.scala
index dadcd36257..8b2f3f70de 100644
--- a/src/library/scala/collection/mutable/Cloneable.scala
+++ b/src/library/scala/collection/mutable/Cloneable.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
package mutable
/** A trait for cloneable collections.
diff --git a/src/library/scala/collection/mutable/ConcurrentMap.scala b/src/library/scala/collection/mutable/ConcurrentMap.scala
deleted file mode 100644
index 5b5d738d03..0000000000
--- a/src/library/scala/collection/mutable/ConcurrentMap.scala
+++ /dev/null
@@ -1,90 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2010-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala.collection
-package mutable
-
-/** A template trait for mutable maps that allow concurrent access.
- *
- * $concurrentmapinfo
- *
- * @since 2.8
- * @see [[http://docs.scala-lang.org/overviews/collections/concrete-mutable-collection-classes.html#concurrent_maps "Scala's Collection Library overview"]]
- * section on `Concurrent Maps` for more information.
- *
- * @tparam A the key type of the map
- * @tparam B the value type of the map
- *
- * @define Coll `ConcurrentMap`
- * @define coll concurrent map
- * @define concurrentmapinfo
- * This is a base trait for all Scala concurrent map implementations. It
- * provides all of the methods a `Map` does, with the difference that all the
- * changes are atomic. It also describes methods specific to concurrent maps.
- *
- * '''Note''': The concurrent maps do not accept `'''null'''` for keys or values.
- *
- * @define atomicop
- * This is an atomic operation.
- */
-@deprecated("Use `scala.collection.concurrent.Map` instead.", "2.10.0")
-trait ConcurrentMap[A, B] extends Map[A, B] {
-
- /**
- * Associates the given key with a given value, unless the key was already
- * associated with some other value.
- *
- * $atomicop
- *
- * @param k key with which the specified value is to be associated with
- * @param v value to be associated with the specified key
- * @return `Some(oldvalue)` if there was a value `oldvalue` previously
- * associated with the specified key, or `None` if there was no
- * mapping for the specified key
- */
- def putIfAbsent(k: A, v: B): Option[B]
-
- /**
- * Removes the entry for the specified key if its currently mapped to the
- * specified value.
- *
- * $atomicop
- *
- * @param k key for which the entry should be removed
- * @param v value expected to be associated with the specified key if
- * the removal is to take place
- * @return `true` if the removal took place, `false` otherwise
- */
- def remove(k: A, v: B): Boolean
-
- /**
- * Replaces the entry for the given key only if it was previously mapped to
- * a given value.
- *
- * $atomicop
- *
- * @param k key for which the entry should be replaced
- * @param oldvalue value expected to be associated with the specified key
- * if replacing is to happen
- * @param newvalue value to be associated with the specified key
- * @return `true` if the entry was replaced, `false` otherwise
- */
- def replace(k: A, oldvalue: B, newvalue: B): Boolean
-
- /**
- * Replaces the entry for the given key only if it was previously mapped
- * to some value.
- *
- * $atomicop
- *
- * @param k key for which the entry should be replaced
- * @param v value to be associated with the specified key
- * @return `Some(v)` if the given key was previously mapped to some value `v`, or `None` otherwise
- */
- def replace(k: A, v: B): Option[B]
-}
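The removed trait's atomic operations survive on its replacement, scala.collection.concurrent.Map, whose standard implementation is TrieMap. A hedged sketch of the equivalent calls on the replacement type (assumes a TrieMap, which ships with the library since 2.10):

    import scala.collection.concurrent.TrieMap

    val m = TrieMap[String, Int]()
    m.putIfAbsent("a", 1)    // None here: "a" was unmapped, so the binding is installed
    m.replace("a", 1, 2)     // true only if "a" was currently mapped to 1
    m.remove("a", 2)         // true only if "a" was currently mapped to 2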
diff --git a/src/library/scala/collection/mutable/DefaultEntry.scala b/src/library/scala/collection/mutable/DefaultEntry.scala
index f14cb4ac2b..66db45866c 100644
--- a/src/library/scala/collection/mutable/DefaultEntry.scala
+++ b/src/library/scala/collection/mutable/DefaultEntry.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
package mutable
/** Class used internally for default map model.
diff --git a/src/library/scala/collection/mutable/DefaultMapModel.scala b/src/library/scala/collection/mutable/DefaultMapModel.scala
index 903f117466..4dfae047c3 100644
--- a/src/library/scala/collection/mutable/DefaultMapModel.scala
+++ b/src/library/scala/collection/mutable/DefaultMapModel.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
package mutable
/** This class is used internally. It implements the mutable `Map`
diff --git a/src/library/scala/collection/mutable/DoubleLinkedList.scala b/src/library/scala/collection/mutable/DoubleLinkedList.scala
index 18a1e234f6..8654a91cf1 100644
--- a/src/library/scala/collection/mutable/DoubleLinkedList.scala
+++ b/src/library/scala/collection/mutable/DoubleLinkedList.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
package mutable
import generic._
@@ -68,7 +69,7 @@ class DoubleLinkedList[A]() extends AbstractSeq[A]
override def clone(): DoubleLinkedList[A] = {
val builder = newBuilder
builder ++= this
- builder.result
+ builder.result()
}
}
diff --git a/src/library/scala/collection/mutable/DoubleLinkedListLike.scala b/src/library/scala/collection/mutable/DoubleLinkedListLike.scala
index 3f223f30ec..776ac76c45 100644
--- a/src/library/scala/collection/mutable/DoubleLinkedListLike.scala
+++ b/src/library/scala/collection/mutable/DoubleLinkedListLike.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
package mutable
import scala.annotation.migration
diff --git a/src/library/scala/collection/mutable/FlatHashTable.scala b/src/library/scala/collection/mutable/FlatHashTable.scala
index 91e95e039b..1cdf150cb8 100644
--- a/src/library/scala/collection/mutable/FlatHashTable.scala
+++ b/src/library/scala/collection/mutable/FlatHashTable.scala
@@ -6,18 +6,16 @@
** |/ **
\* */
-
-package scala.collection
+package scala
+package collection
package mutable
-
/** An implementation class backing a `HashSet`.
*
* This trait is used internally. It can be mixed in with various collections relying on
* hash table as an implementation.
*
* @define coll flat hash table
- * @define cannotStoreNull '''Note''': A $coll cannot store `null` elements.
* @since 2.3
* @tparam A the type of the elements contained in the $coll.
*/
@@ -78,7 +76,7 @@ trait FlatHashTable[A] extends FlatHashTable.HashUtils[A] {
assert(size >= 0)
table = new Array(capacity(sizeForThreshold(size, _loadFactor)))
- threshold = newThreshold(_loadFactor, table.size)
+ threshold = newThreshold(_loadFactor, table.length)
seedvalue = in.readInt()
@@ -87,9 +85,9 @@ trait FlatHashTable[A] extends FlatHashTable.HashUtils[A] {
var index = 0
while (index < size) {
- val elem = in.readObject().asInstanceOf[A]
+ val elem = entryToElem(in.readObject())
f(elem)
- addEntry(elem)
+ addElem(elem)
index += 1
}
}
@@ -109,61 +107,78 @@ trait FlatHashTable[A] extends FlatHashTable.HashUtils[A] {
}
/** Finds an entry in the hash table if such an element exists. */
- protected def findEntry(elem: A): Option[A] = {
- val entry = findEntryImpl(elem)
- if (null == entry) None else Some(entry.asInstanceOf[A])
- }
+ protected def findEntry(elem: A): Option[A] =
+ findElemImpl(elem) match {
+ case null => None
+ case entry => Some(entryToElem(entry))
+ }
+
/** Checks whether an element is contained in the hash table. */
- protected def containsEntry(elem: A): Boolean = {
- null != findEntryImpl(elem)
+ protected def containsElem(elem: A): Boolean = {
+ null != findElemImpl(elem)
}
- private def findEntryImpl(elem: A): AnyRef = {
- var h = index(elemHashCode(elem))
- var entry = table(h)
- while (null != entry && entry != elem) {
+ private def findElemImpl(elem: A): AnyRef = {
+ val searchEntry = elemToEntry(elem)
+ var h = index(searchEntry.hashCode)
+ var curEntry = table(h)
+ while (null != curEntry && curEntry != searchEntry) {
h = (h + 1) % table.length
- entry = table(h)
+ curEntry = table(h)
}
- entry
+ curEntry
}
- /** Add entry if not yet in table.
- * @return Returns `true` if a new entry was added, `false` otherwise.
+ /** Add elem if not yet in table.
+ * @return Returns `true` if a new elem was added, `false` otherwise.
*/
- protected def addEntry(elem: A) : Boolean = {
- var h = index(elemHashCode(elem))
- var entry = table(h)
- while (null != entry) {
- if (entry == elem) return false
+ protected def addElem(elem: A) : Boolean = {
+ addEntry(elemToEntry(elem))
+ }
+
+ /**
+ * Add an entry (an elem converted to an entry via elemToEntry) if not yet in
+ * table.
+ * @return Returns `true` if a new elem was added, `false` otherwise.
+ */
+ protected def addEntry(newEntry : AnyRef) : Boolean = {
+ var h = index(newEntry.hashCode)
+ var curEntry = table(h)
+ while (null != curEntry) {
+ if (curEntry == newEntry) return false
h = (h + 1) % table.length
- entry = table(h)
+ curEntry = table(h)
//Statistics.collisions += 1
}
- table(h) = elem.asInstanceOf[AnyRef]
+ table(h) = newEntry
tableSize = tableSize + 1
nnSizeMapAdd(h)
if (tableSize >= threshold) growTable()
true
+
}
- /** Removes an entry from the hash table, returning an option value with the element, or `None` if it didn't exist. */
- protected def removeEntry(elem: A) : Option[A] = {
+ /**
+ * Removes an elem from the hash table returning true if the element was found (and thus removed)
+ * or false if it didn't exist.
+ */
+ protected def removeElem(elem: A) : Boolean = {
if (tableDebug) checkConsistent()
def precedes(i: Int, j: Int) = {
val d = table.length >> 1
if (i <= j) j - i < d
else i - j > d
}
- var h = index(elemHashCode(elem))
- var entry = table(h)
- while (null != entry) {
- if (entry == elem) {
+ val removalEntry = elemToEntry(elem)
+ var h = index(removalEntry.hashCode)
+ var curEntry = table(h)
+ while (null != curEntry) {
+ if (curEntry == removalEntry) {
var h0 = h
var h1 = (h0 + 1) % table.length
while (null != table(h1)) {
- val h2 = index(elemHashCode(table(h1).asInstanceOf[A]))
+ val h2 = index(table(h1).hashCode)
//Console.println("shift at "+h1+":"+table(h1)+" with h2 = "+h2+"? "+(h2 != h1)+precedes(h2, h0)+table.length)
if (h2 != h1 && precedes(h2, h0)) {
//Console.println("shift "+h1+" to "+h0+"!")
@@ -176,12 +191,12 @@ trait FlatHashTable[A] extends FlatHashTable.HashUtils[A] {
tableSize -= 1
nnSizeMapRemove(h0)
if (tableDebug) checkConsistent()
- return Some(entry.asInstanceOf[A])
+ return true
}
h = (h + 1) % table.length
- entry = table(h)
+ curEntry = table(h)
}
- None
+ false
}
protected def iterator: Iterator[A] = new AbstractIterator[A] {
@@ -191,8 +206,8 @@ trait FlatHashTable[A] extends FlatHashTable.HashUtils[A] {
i < table.length
}
def next(): A =
- if (hasNext) { i += 1; table(i - 1).asInstanceOf[A] }
- else Iterator.empty.next
+ if (hasNext) { i += 1; entryToElem(table(i - 1)) }
+ else Iterator.empty.next()
}
private def growTable() {
@@ -205,7 +220,7 @@ trait FlatHashTable[A] extends FlatHashTable.HashUtils[A] {
var i = 0
while (i < oldtable.length) {
val entry = oldtable(i)
- if (null != entry) addEntry(entry.asInstanceOf[A])
+ if (null != entry) addEntry(entry)
i += 1
}
if (tableDebug) checkConsistent()
@@ -213,9 +228,10 @@ trait FlatHashTable[A] extends FlatHashTable.HashUtils[A] {
private def checkConsistent() {
for (i <- 0 until table.length)
- if (table(i) != null && !containsEntry(table(i).asInstanceOf[A]))
- assert(false, i+" "+table(i)+" "+table.mkString)
+ if (table(i) != null && !containsElem(entryToElem(table(i))))
+ assert(assertion = false, i+" "+table(i)+" "+table.mkString)
}
+
/* Size map handling code */
@@ -265,7 +281,7 @@ trait FlatHashTable[A] extends FlatHashTable.HashUtils[A] {
val totalbuckets = totalSizeMapBuckets
var bucketidx = 0
var tableidx = 0
- var tbl = table
+ val tbl = table
var tableuntil = sizeMapBucketSize min tbl.length
while (bucketidx < totalbuckets) {
var currbucketsz = 0
@@ -341,7 +357,7 @@ trait FlatHashTable[A] extends FlatHashTable.HashUtils[A] {
seedvalue = c.seedvalue
sizemap = c.sizemap
}
- if (alwaysInitSizeMap && sizemap == null) sizeMapInitAndRebuild
+ if (alwaysInitSizeMap && sizemap == null) sizeMapInitAndRebuild()
}
}
@@ -358,6 +374,11 @@ private[collection] object FlatHashTable {
final def seedGenerator = new ThreadLocal[scala.util.Random] {
override def initialValue = new scala.util.Random
}
+
+ private object NullSentinel {
+ override def hashCode = 0
+ override def toString = "NullSentinel"
+ }
/** The load factor for the hash table; must be < 500 (0.5)
*/
@@ -386,10 +407,6 @@ private[collection] object FlatHashTable {
// so that:
protected final def sizeMapBucketSize = 1 << sizeMapBucketBitSize
- protected def elemHashCode(elem: A) =
- if (elem == null) throw new IllegalArgumentException("Flat hash tables cannot contain null elements.")
- else elem.hashCode()
-
protected final def improve(hcode: Int, seed: Int) = {
//var h: Int = hcode + ~(hcode << 9)
//h = h ^ (h >>> 14)
@@ -404,6 +421,19 @@ private[collection] object FlatHashTable {
val rotated = (improved >>> rotation) | (improved << (32 - rotation))
rotated
}
+
+ /**
+ * Elems have type A, but we store AnyRef in the table. Plus we need to deal with
+ * null elems, which need to be stored as NullSentinel
+ */
+ protected final def elemToEntry(elem : A) : AnyRef =
+ if (null == elem) NullSentinel else elem.asInstanceOf[AnyRef]
+
+ /**
+ * Does the inverse translation of elemToEntry
+ */
+ protected final def entryToElem(entry : AnyRef) : A =
+ (if (entry.isInstanceOf[NullSentinel.type]) null else entry).asInstanceOf[A]
}
}
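The elemToEntry/entryToElem pair is what lets the flat table accept null: a null element is boxed as the NullSentinel object before it reaches the table and unboxed again on the way out, so every occupied slot is a non-null AnyRef. A minimal standalone sketch of that encoding (the method names mirror the patch, the wrapper object is hypothetical):

    object NullSentinelSketch {
      // A private singleton stands in for null inside the AnyRef-typed table slots.
      private object Sentinel { override def hashCode = 0 }

      def elemToEntry[A](elem: A): AnyRef =
        if (null == elem) Sentinel else elem.asInstanceOf[AnyRef]

      def entryToElem[A](entry: AnyRef): A =
        (if (entry eq Sentinel) null else entry).asInstanceOf[A]

      def main(args: Array[String]): Unit = {
        // null round-trips through the table representation unchanged.
        assert(entryToElem[String](elemToEntry[String](null)) == null)
        assert(entryToElem[String](elemToEntry[String]("x")) == "x")
      }
    }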
diff --git a/src/library/scala/collection/mutable/GenIterable.scala.disabled b/src/library/scala/collection/mutable/GenIterable.scala.disabled
deleted file mode 100644
index 9acfccdae8..0000000000
--- a/src/library/scala/collection/mutable/GenIterable.scala.disabled
+++ /dev/null
@@ -1,37 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala.collection
-package mutable
-
-
-import generic._
-
-
-/** A base trait for iterable collections that can be mutated.
- *
- * $possiblyparinfo
- *
- * $iterableInfo
- */
-trait GenIterable[A] extends GenTraversable[A]
- with scala.collection.GenIterable[A]
- with scala.collection.GenIterableLike[A, GenIterable[A]]
-// with GenericTraversableTemplate[A, GenIterable]
-{
- def seq: Iterable[A]
- //override def companion: GenericCompanion[GenIterable] = GenIterable
-}
-
-
-// object GenIterable extends TraversableFactory[GenIterable] {
-// implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, GenIterable[A]] = ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]]
-// def newBuilder[A]: Builder[A, GenIterable[A]] = Iterable.newBuilder
-// }
-
-
diff --git a/src/library/scala/collection/mutable/GenMap.scala.disabled b/src/library/scala/collection/mutable/GenMap.scala.disabled
deleted file mode 100644
index e4fd1dad64..0000000000
--- a/src/library/scala/collection/mutable/GenMap.scala.disabled
+++ /dev/null
@@ -1,40 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.collection
-package mutable
-
-
-import generic._
-
-
-/** A base trait for maps that can be mutated.
- * $possiblyparinfo
- * $mapNote
- * $mapTags
- * @since 1.0
- * @author Matthias Zenger
- */
-trait GenMap[A, B]
-extends GenIterable[(A, B)]
- with scala.collection.GenMap[A, B]
- with scala.collection.GenMapLike[A, B, GenMap[A, B]]
-{
- def seq: Map[A, B]
-}
-
-
-// object GenMap extends MapFactory[GenMap] {
-// def empty[A, B]: Map[A, B] = Map.empty
-
-// /** $mapCanBuildFromInfo */
-// implicit def canBuildFrom[A, B]: CanBuildFrom[Coll, (A, B), GenMap[A, B]] = new MapCanBuildFrom[A, B]
-// }
-
diff --git a/src/library/scala/collection/mutable/GenSeq.scala.disabled b/src/library/scala/collection/mutable/GenSeq.scala.disabled
deleted file mode 100644
index ec904723a5..0000000000
--- a/src/library/scala/collection/mutable/GenSeq.scala.disabled
+++ /dev/null
@@ -1,44 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.collection
-package mutable
-
-
-import generic._
-
-
-/** A subtrait of `collection.GenSeq` which represents sequences
- * that can be mutated.
- *
- * $possiblyparinfo
- *
- * $seqInfo
- *
- * The class adds an `update` method to `collection.Seq`.
- *
- * @define Coll `mutable.Seq`
- * @define coll mutable sequence
- */
-trait GenSeq[A] extends GenIterable[A]
- with scala.collection.GenSeq[A]
- with scala.collection.GenSeqLike[A, GenSeq[A]]
-// with GenericTraversableTemplate[A, GenSeq]
-{
- //override def companion: GenericCompanion[GenSeq] = GenSeq
- def seq: Seq[A]
-}
-
-
-// object GenSeq extends SeqFactory[GenSeq] {
-// implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, GenSeq[A]] = ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]]
-// def newBuilder[A]: Builder[A, GenSeq[A]] = Seq.newBuilder
-// }
-
diff --git a/src/library/scala/collection/mutable/GenSet.scala.disabled b/src/library/scala/collection/mutable/GenSet.scala.disabled
deleted file mode 100644
index dec20e2a46..0000000000
--- a/src/library/scala/collection/mutable/GenSet.scala.disabled
+++ /dev/null
@@ -1,46 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.collection
-package mutable
-
-
-
-import generic._
-
-
-/** A generic trait for mutable sets.
- *
- * $possiblyparinfo
- * $setNote
- * $setTags
- *
- * @since 1.0
- * @author Matthias Zenger
- * @define Coll `mutable.Set`
- * @define coll mutable set
- */
-trait GenSet[A] extends GenIterable[A]
- with Growable[A]
- with scala.collection.GenSet[A]
- with scala.collection.GenSetLike[A, GenSet[A]]
-// with GenericSetTemplate[A, GenSet]
-{
- //override def companion: GenericCompanion[GenSet] = GenSet
- def seq: Set[A]
-}
-
-
-// object GenSet extends TraversableFactory[GenSet] {
-// implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, GenSet[A]] = ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]]
-// def newBuilder[A]: Builder[A, GenSet[A]] = Set.newBuilder
-// }
-
-
diff --git a/src/library/scala/collection/mutable/GenTraversable.scala.disabled b/src/library/scala/collection/mutable/GenTraversable.scala.disabled
deleted file mode 100644
index 2453e2ce87..0000000000
--- a/src/library/scala/collection/mutable/GenTraversable.scala.disabled
+++ /dev/null
@@ -1,38 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.collection
-package mutable
-
-
-import generic._
-
-
-/** A trait for traversable collections that can be mutated.
- *
- * $possiblyparinfo
- *
- * $traversableInfo
- * @define mutability mutable
- */
-trait GenTraversable[A] extends scala.collection.GenTraversable[A]
- with scala.collection.GenTraversableLike[A, GenTraversable[A]]
-// with GenericTraversableTemplate[A, GenTraversable]
- with Mutable
-{
- def seq: Traversable[A]
- //override def companion: GenericCompanion[GenTraversable] = GenTraversable
-}
-
-// object GenTraversable extends TraversableFactory[GenTraversable] {
-// implicit def canBuildFrom[A] = ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]]
-// def newBuilder[A] = Traversable.newBuilder
-// }
-
diff --git a/src/library/scala/collection/mutable/GrowingBuilder.scala b/src/library/scala/collection/mutable/GrowingBuilder.scala
index ba7ea60df1..c4b5e546aa 100644
--- a/src/library/scala/collection/mutable/GrowingBuilder.scala
+++ b/src/library/scala/collection/mutable/GrowingBuilder.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
package mutable
import generic._
diff --git a/src/library/scala/collection/mutable/HashEntry.scala b/src/library/scala/collection/mutable/HashEntry.scala
index 5cd976eb47..4c0f6a93e8 100644
--- a/src/library/scala/collection/mutable/HashEntry.scala
+++ b/src/library/scala/collection/mutable/HashEntry.scala
@@ -5,7 +5,8 @@
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-package scala.collection
+package scala
+package collection
package mutable
/** Class used internally.
diff --git a/src/library/scala/collection/mutable/HashMap.scala b/src/library/scala/collection/mutable/HashMap.scala
index 3cd7f07d83..6fca75ffea 100644
--- a/src/library/scala/collection/mutable/HashMap.scala
+++ b/src/library/scala/collection/mutable/HashMap.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
package mutable
import generic._
@@ -93,9 +94,9 @@ extends AbstractMap[A, B]
def -=(key: A): this.type = { removeEntry(key); this }
- def iterator = entriesIterator map {e => (e.key, e.value)}
+ def iterator = entriesIterator map (e => ((e.key, e.value)))
- override def foreach[C](f: ((A, B)) => C): Unit = foreachEntry(e => f(e.key, e.value))
+ override def foreach[C](f: ((A, B)) => C): Unit = foreachEntry(e => f((e.key, e.value)))
/* Override to avoid tuple allocation in foreach */
override def keySet: scala.collection.Set[A] = new DefaultKeySet {
@@ -111,21 +112,21 @@ extends AbstractMap[A, B]
override def keysIterator: Iterator[A] = new AbstractIterator[A] {
val iter = entriesIterator
def hasNext = iter.hasNext
- def next() = iter.next.key
+ def next() = iter.next().key
}
/* Override to avoid tuple allocation */
override def valuesIterator: Iterator[B] = new AbstractIterator[B] {
val iter = entriesIterator
def hasNext = iter.hasNext
- def next() = iter.next.value
+ def next() = iter.next().value
}
/** Toggles whether a size map is used to track hash map statistics.
*/
def useSizeMap(t: Boolean) = if (t) {
- if (!isSizeMapDefined) sizeMapInitAndRebuild
- } else sizeMapDisable
+ if (!isSizeMapDefined) sizeMapInitAndRebuild()
+ } else sizeMapDisable()
protected def createNewEntry[B1](key: A, value: B1): Entry = {
new Entry(key, value.asInstanceOf[B])
diff --git a/src/library/scala/collection/mutable/HashSet.scala b/src/library/scala/collection/mutable/HashSet.scala
index c60e363f8f..886fee5a59 100644
--- a/src/library/scala/collection/mutable/HashSet.scala
+++ b/src/library/scala/collection/mutable/HashSet.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
package mutable
import generic._
@@ -16,8 +17,6 @@ import scala.collection.parallel.mutable.ParHashSet
/** This class implements mutable sets using a hashtable.
*
- * $cannotStoreNull
- *
* @author Matthias Zenger
* @author Martin Odersky
* @version 2.0, 31/12/2006
@@ -55,17 +54,17 @@ extends AbstractSet[A]
override def size: Int = tableSize
- def contains(elem: A): Boolean = containsEntry(elem)
+ def contains(elem: A): Boolean = containsElem(elem)
- def += (elem: A): this.type = { addEntry(elem); this }
+ def += (elem: A): this.type = { addElem(elem); this }
- def -= (elem: A): this.type = { removeEntry(elem); this }
+ def -= (elem: A): this.type = { removeElem(elem); this }
override def par = new ParHashSet(hashTableContents)
- override def add(elem: A): Boolean = addEntry(elem)
+ override def add(elem: A): Boolean = addElem(elem)
- override def remove(elem: A): Boolean = removeEntry(elem).isDefined
+ override def remove(elem: A): Boolean = removeElem(elem)
override def clear() { clearTable() }
@@ -75,8 +74,8 @@ extends AbstractSet[A]
var i = 0
val len = table.length
while (i < len) {
- val elem = table(i)
- if (elem ne null) f(elem.asInstanceOf[A])
+ val curEntry = table(i)
+ if (curEntry ne null) f(entryToElem(curEntry))
i += 1
}
}
@@ -94,8 +93,8 @@ extends AbstractSet[A]
/** Toggles whether a size map is used to track hash map statistics.
*/
def useSizeMap(t: Boolean) = if (t) {
- if (!isSizeMapDefined) sizeMapInitAndRebuild
- } else sizeMapDisable
+ if (!isSizeMapDefined) sizeMapInitAndRebuild()
+ } else sizeMapDisable()
}
diff --git a/src/library/scala/collection/mutable/HashTable.scala b/src/library/scala/collection/mutable/HashTable.scala
index 8fef1be66b..0479b51830 100644
--- a/src/library/scala/collection/mutable/HashTable.scala
+++ b/src/library/scala/collection/mutable/HashTable.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
package mutable
/** This class can be used to construct data structures that are based
@@ -96,7 +97,7 @@ trait HashTable[A, Entry >: Null <: HashEntry[A, Entry]] extends HashTable.HashU
val smDefined = in.readBoolean()
table = new Array(capacity(sizeForThreshold(_loadFactor, size)))
- threshold = newThreshold(_loadFactor, table.size)
+ threshold = newThreshold(_loadFactor, table.length)
if (smDefined) sizeMapInit(table.length) else sizemap = null
@@ -365,7 +366,7 @@ trait HashTable[A, Entry >: Null <: HashEntry[A, Entry]] extends HashTable.HashU
seedvalue = c.seedvalue
sizemap = c.sizemap
}
- if (alwaysInitSizeMap && sizemap == null) sizeMapInitAndRebuild
+ if (alwaysInitSizeMap && sizemap == null) sizeMapInitAndRebuild()
}
private[collection] def hashTableContents = new HashTable.Contents(
@@ -382,7 +383,7 @@ private[collection] object HashTable {
/** The load factor for the hash table (in 0.001 step).
*/
private[collection] final def defaultLoadFactor: Int = 750 // corresponds to 75%
- private[collection] final def loadFactorDenum = 1000;
+ private[collection] final def loadFactorDenum = 1000
private[collection] final def newThreshold(_loadFactor: Int, size: Int) = ((size.toLong * _loadFactor) / loadFactorDenum).toInt
@@ -457,13 +458,13 @@ private[collection] object HashTable {
*/
private[collection] def powerOfTwo(target: Int): Int = {
/* See http://bits.stephan-brumme.com/roundUpToNextPowerOfTwo.html */
- var c = target - 1;
- c |= c >>> 1;
- c |= c >>> 2;
- c |= c >>> 4;
- c |= c >>> 8;
- c |= c >>> 16;
- c + 1;
+ var c = target - 1
+ c |= c >>> 1
+ c |= c >>> 2
+ c |= c >>> 4
+ c |= c >>> 8
+ c |= c >>> 16
+ c + 1
}
class Contents[A, Entry >: Null <: HashEntry[A, Entry]](
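The cleaned-up powerOfTwo is the classic round-up trick: smear the highest set bit of target - 1 into every lower position, then add 1. A REPL-pasteable restatement with a couple of spot checks (illustrative only):

    def powerOfTwo(target: Int): Int = {
      var c = target - 1
      c |= c >>> 1
      c |= c >>> 2
      c |= c >>> 4
      c |= c >>> 8
      c |= c >>> 16
      c + 1                 // the next power of two >= target (for 1 <= target <= 2^30)
    }

    assert(powerOfTwo(1) == 1)
    assert(powerOfTwo(17) == 32)
    assert(powerOfTwo(1024) == 1024)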
diff --git a/src/library/scala/collection/mutable/History.scala b/src/library/scala/collection/mutable/History.scala
index c1d94a904c..19148c0ac2 100644
--- a/src/library/scala/collection/mutable/History.scala
+++ b/src/library/scala/collection/mutable/History.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
package mutable
@@ -41,7 +42,7 @@ extends AbstractIterable[(Pub, Evt)]
*/
def notify(pub: Pub, event: Evt) {
if (log.length >= maxHistory)
- log.dequeue
+ log.dequeue()
log.enqueue((pub, event))
}
@@ -50,7 +51,7 @@ extends AbstractIterable[(Pub, Evt)]
def iterator: Iterator[(Pub, Evt)] = log.iterator
def events: Iterator[Evt] = log.iterator map (_._2)
- def clear() { log.clear }
+ def clear() { log.clear() }
/** Checks if two history objects are structurally identical.
*
@@ -60,5 +61,5 @@ extends AbstractIterable[(Pub, Evt)]
case that: History[_, _] => this.log equals that.log
case _ => false
}
- override def hashCode = log.hashCode
+ override def hashCode = log.hashCode()
}
diff --git a/src/library/scala/collection/mutable/ImmutableMapAdaptor.scala b/src/library/scala/collection/mutable/ImmutableMapAdaptor.scala
index 755eea831f..88e9e9db8f 100644
--- a/src/library/scala/collection/mutable/ImmutableMapAdaptor.scala
+++ b/src/library/scala/collection/mutable/ImmutableMapAdaptor.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
package mutable
import scala.annotation.migration
diff --git a/src/library/scala/collection/mutable/ImmutableSetAdaptor.scala b/src/library/scala/collection/mutable/ImmutableSetAdaptor.scala
index 42c757d3bf..3e64747832 100644
--- a/src/library/scala/collection/mutable/ImmutableSetAdaptor.scala
+++ b/src/library/scala/collection/mutable/ImmutableSetAdaptor.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
package mutable
diff --git a/src/library/scala/collection/mutable/IndexedSeq.scala b/src/library/scala/collection/mutable/IndexedSeq.scala
index 4d094e697b..3d9630eea7 100644
--- a/src/library/scala/collection/mutable/IndexedSeq.scala
+++ b/src/library/scala/collection/mutable/IndexedSeq.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
package mutable
import generic._
diff --git a/src/library/scala/collection/mutable/IndexedSeqLike.scala b/src/library/scala/collection/mutable/IndexedSeqLike.scala
index f0c31ec7fb..4cf794c32f 100644
--- a/src/library/scala/collection/mutable/IndexedSeqLike.scala
+++ b/src/library/scala/collection/mutable/IndexedSeqLike.scala
@@ -6,11 +6,9 @@
** |/ **
\* */
-
-
-package scala.collection
+package scala
+package collection
package mutable
-import generic._
/** A subtrait of scala.collection.IndexedSeq which represents sequences
* that can be mutated.
diff --git a/src/library/scala/collection/mutable/IndexedSeqOptimized.scala b/src/library/scala/collection/mutable/IndexedSeqOptimized.scala
index cb7e8efdc7..09f0712862 100755
--- a/src/library/scala/collection/mutable/IndexedSeqOptimized.scala
+++ b/src/library/scala/collection/mutable/IndexedSeqOptimized.scala
@@ -6,11 +6,9 @@
** |/ **
\* */
-
-
-package scala.collection
+package scala
+package collection
package mutable
-import generic._
/** A subtrait of scala.collection.IndexedSeq which represents sequences
* that can be mutated.
diff --git a/src/library/scala/collection/mutable/IndexedSeqView.scala b/src/library/scala/collection/mutable/IndexedSeqView.scala
index cf5166eea8..36c1076b87 100644
--- a/src/library/scala/collection/mutable/IndexedSeqView.scala
+++ b/src/library/scala/collection/mutable/IndexedSeqView.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
package mutable
import generic._
@@ -82,8 +83,6 @@ self =>
protected override def newTakenWhile(p: A => Boolean): Transformed[A] = new { val pred = p } with AbstractTransformed[A] with TakenWhile
protected override def newReversed: Transformed[A] = new AbstractTransformed[A] with Reversed
- private implicit def asThis(xs: Transformed[A]): This = xs.asInstanceOf[This]
-
override def filter(p: A => Boolean): This = newFiltered(p)
override def init: This = newSliced(SliceInterval(0, self.length - 1))
override def drop(n: Int): This = newSliced(SliceInterval(n, self.length))
diff --git a/src/library/scala/collection/mutable/Iterable.scala b/src/library/scala/collection/mutable/Iterable.scala
index b79453e4e9..f7a794e357 100644
--- a/src/library/scala/collection/mutable/Iterable.scala
+++ b/src/library/scala/collection/mutable/Iterable.scala
@@ -5,7 +5,8 @@
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-package scala.collection
+package scala
+package collection
package mutable
import generic._
diff --git a/src/library/scala/collection/mutable/LazyBuilder.scala b/src/library/scala/collection/mutable/LazyBuilder.scala
index 0b56c86ac4..ebee38b77f 100644
--- a/src/library/scala/collection/mutable/LazyBuilder.scala
+++ b/src/library/scala/collection/mutable/LazyBuilder.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
package mutable
/** A builder that constructs its result lazily. Iterators or iterables to
diff --git a/src/library/scala/collection/mutable/LinearSeq.scala b/src/library/scala/collection/mutable/LinearSeq.scala
index f241a2f6d4..3fa10042ef 100644
--- a/src/library/scala/collection/mutable/LinearSeq.scala
+++ b/src/library/scala/collection/mutable/LinearSeq.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
package mutable
import generic._
diff --git a/src/library/scala/collection/mutable/LinkedEntry.scala b/src/library/scala/collection/mutable/LinkedEntry.scala
index e4e29122d8..296e7fde18 100644
--- a/src/library/scala/collection/mutable/LinkedEntry.scala
+++ b/src/library/scala/collection/mutable/LinkedEntry.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
package mutable
/** Class for the linked hash map entry, used internally.
diff --git a/src/library/scala/collection/mutable/LinkedHashMap.scala b/src/library/scala/collection/mutable/LinkedHashMap.scala
index da2c36ac2d..536f320402 100644
--- a/src/library/scala/collection/mutable/LinkedHashMap.scala
+++ b/src/library/scala/collection/mutable/LinkedHashMap.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
package mutable
import generic._
@@ -92,7 +93,7 @@ class LinkedHashMap[A, B] extends AbstractMap[A, B]
def hasNext = cur ne null
def next =
if (hasNext) { val res = (cur.key, cur.value); cur = cur.later; res }
- else Iterator.empty.next
+ else Iterator.empty.next()
}
protected class FilteredKeys(p: A => Boolean) extends super.FilteredKeys(p) {
@@ -118,7 +119,7 @@ class LinkedHashMap[A, B] extends AbstractMap[A, B]
def hasNext = cur ne null
def next =
if (hasNext) { val res = cur.key; cur = cur.later; res }
- else Iterator.empty.next
+ else Iterator.empty.next()
}
override def valuesIterator: Iterator[B] = new AbstractIterator[B] {
@@ -126,7 +127,7 @@ class LinkedHashMap[A, B] extends AbstractMap[A, B]
def hasNext = cur ne null
def next =
if (hasNext) { val res = cur.value; cur = cur.later; res }
- else Iterator.empty.next
+ else Iterator.empty.next()
}
override def foreach[U](f: ((A, B)) => U) {
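All three iterators above walk the entry chain through `later`, which is what gives LinkedHashMap its insertion-order iteration. A quick REPL check (illustrative usage):

    import scala.collection.mutable.LinkedHashMap

    val m = LinkedHashMap("b" -> 2, "a" -> 1, "c" -> 3)
    assert(m.keysIterator.toList == List("b", "a", "c"))   // insertion order, not key order
    assert(m.valuesIterator.toList == List(2, 1, 3))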
diff --git a/src/library/scala/collection/mutable/LinkedHashSet.scala b/src/library/scala/collection/mutable/LinkedHashSet.scala
index 1723258433..d89566793f 100644
--- a/src/library/scala/collection/mutable/LinkedHashSet.scala
+++ b/src/library/scala/collection/mutable/LinkedHashSet.scala
@@ -7,7 +7,8 @@
\* */
-package scala.collection
+package scala
+package collection
package mutable
import generic._
@@ -78,7 +79,7 @@ class LinkedHashSet[A] extends AbstractSet[A]
def hasNext = cur ne null
def next =
if (hasNext) { val res = cur.key; cur = cur.later; res }
- else Iterator.empty.next
+ else Iterator.empty.next()
}
override def foreach[U](f: A => U) {
diff --git a/src/library/scala/collection/mutable/LinkedList.scala b/src/library/scala/collection/mutable/LinkedList.scala
index 29e6fdd375..a772f86d78 100644
--- a/src/library/scala/collection/mutable/LinkedList.scala
+++ b/src/library/scala/collection/mutable/LinkedList.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
package mutable
import generic._
diff --git a/src/library/scala/collection/mutable/LinkedListLike.scala b/src/library/scala/collection/mutable/LinkedListLike.scala
index 4f63ede7ca..6d4565026f 100644
--- a/src/library/scala/collection/mutable/LinkedListLike.scala
+++ b/src/library/scala/collection/mutable/LinkedListLike.scala
@@ -6,12 +6,10 @@
** |/ **
\* */
-
-
-package scala.collection
+package scala
+package collection
package mutable
-import generic._
import scala.annotation.tailrec
/** This extensible class may be used as a basis for implementing linked
@@ -188,6 +186,6 @@ trait LinkedListLike[A, This <: Seq[A] with LinkedListLike[A, This]] extends Seq
override def clone(): This = {
val bf = newBuilder
bf ++= this
- bf.result
+ bf.result()
}
}
diff --git a/src/library/scala/collection/mutable/ListBuffer.scala b/src/library/scala/collection/mutable/ListBuffer.scala
index 67af4a6bd6..7f54692c8b 100644
--- a/src/library/scala/collection/mutable/ListBuffer.scala
+++ b/src/library/scala/collection/mutable/ListBuffer.scala
@@ -6,9 +6,8 @@
** |/ **
\* */
-
-
-package scala.collection
+package scala
+package collection
package mutable
import generic._
@@ -56,12 +55,18 @@ final class ListBuffer[A]
import scala.collection.Traversable
import scala.collection.immutable.ListSerializeEnd
+ /** Expected invariants:
+ * If start.isEmpty, last0 == null
+ * If start.nonEmpty, last0 != null
+ * If len == 0, start.isEmpty
+ * If len > 0, start.nonEmpty
+ */
private var start: List[A] = Nil
private var last0: ::[A] = _
private var exported: Boolean = false
private var len = 0
- protected def underlying: immutable.Seq[A] = start
+ protected def underlying: List[A] = start
private def writeObject(out: ObjectOutputStream) {
// write start
@@ -133,7 +138,7 @@ final class ListBuffer[A]
if (n < 0 || n >= len) throw new IndexOutOfBoundsException(n.toString)
if (exported) copy()
if (n == 0) {
- val newElem = new :: (x, start.tail);
+ val newElem = new :: (x, start.tail)
if (last0 eq start) {
last0 = newElem
}
@@ -160,7 +165,7 @@ final class ListBuffer[A]
*/
def += (x: A): this.type = {
if (exported) copy()
- if (start.isEmpty) {
+ if (isEmpty) {
last0 = new :: (x, Nil)
start = last0
} else {
@@ -172,8 +177,11 @@ final class ListBuffer[A]
this
}
- override def ++=(xs: TraversableOnce[A]): this.type =
- if (xs.asInstanceOf[AnyRef] eq this) ++= (this take size) else super.++=(xs)
+ override def ++=(xs: TraversableOnce[A]): this.type = xs match {
+ case x: AnyRef if x eq this => this ++= (this take size)
+ case _ => super.++=(xs)
+
+ }
override def ++=:(xs: TraversableOnce[A]): this.type =
if (xs.asInstanceOf[AnyRef] eq this) ++=: (this take size) else super.++=:(xs)
@@ -182,6 +190,7 @@ final class ListBuffer[A]
*/
def clear() {
start = Nil
+ last0 = null
exported = false
len = 0
}
@@ -195,7 +204,7 @@ final class ListBuffer[A]
def +=: (x: A): this.type = {
if (exported) copy()
val newElem = new :: (x, start)
- if (start.isEmpty) last0 = newElem
+ if (isEmpty) last0 = newElem
start = newElem
len += 1
this
@@ -238,13 +247,22 @@ final class ListBuffer[A]
}
}
+ /** Reduce the length of the buffer, and null out last0
+ * if this reduces the length to 0.
+ */
+ private def reduceLengthBy(num: Int) {
+ len -= num
+ if (len <= 0) // obviously shouldn't be < 0, but still better not to leak
+ last0 = null
+ }
+
/** Removes a given number of elements on a given index position. May take
* time linear in the buffer size.
*
* @param n the index which refers to the first element to remove.
* @param count the number of elements to remove.
*/
- @annotation.migration("Invalid input values will be rejected in future releases.", "2.11")
+ @scala.annotation.migration("Invalid input values will be rejected in future releases.", "2.11")
override def remove(n: Int, count: Int) {
if (n >= len)
return
@@ -253,7 +271,6 @@ final class ListBuffer[A]
if (exported) copy()
val n1 = n max 0
val count1 = count min (len - n1)
- var old = start.head
if (n1 == 0) {
var c = count1
while (c > 0) {
@@ -274,7 +291,7 @@ final class ListBuffer[A]
c -= 1
}
}
- len -= count1
+ reduceLengthBy(count1)
}
// Implementation of abstract method in Builder
@@ -285,7 +302,7 @@ final class ListBuffer[A]
* copied lazily, the first time it is mutated.
*/
override def toList: List[A] = {
- exported = !start.isEmpty
+ exported = !isEmpty
start
}
@@ -296,7 +313,7 @@ final class ListBuffer[A]
* @param xs the list to which elements are prepended
*/
def prependToList(xs: List[A]): List[A] = {
- if (start.isEmpty) xs
+ if (isEmpty) xs
else {
if (exported) copy()
last0.tl = xs
@@ -331,7 +348,7 @@ final class ListBuffer[A]
if (last0 eq cursor.tail) last0 = cursor.asInstanceOf[::[A]]
cursor.asInstanceOf[::[A]].tl = cursor.tail.tail
}
- len -= 1
+ reduceLengthBy(1)
old
}
@@ -343,11 +360,12 @@ final class ListBuffer[A]
*/
override def -= (elem: A): this.type = {
if (exported) copy()
- if (start.isEmpty) {}
+ if (isEmpty) {}
else if (start.head == elem) {
start = start.tail
- len -= 1
- } else {
+ reduceLengthBy(1)
+ }
+ else {
var cursor = start
while (!cursor.tail.isEmpty && cursor.tail.head != elem) {
cursor = cursor.tail
@@ -357,7 +375,7 @@ final class ListBuffer[A]
if (z.tl == last0)
last0 = z
z.tl = cursor.tail.tail
- len -= 1
+ reduceLengthBy(1)
}
}
this
@@ -397,6 +415,7 @@ final class ListBuffer[A]
/** Copy contents of this buffer */
private def copy() {
+ if (isEmpty) return
var cursor = start
val limit = last0.tail
clear()
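The new invariants are easiest to see from the outside: once the buffer becomes empty again, whether through clear() or through removals, appending must rebuild the list rather than append through a stale last0. A REPL-level check (illustrative usage):

    import scala.collection.mutable.ListBuffer

    val buf = ListBuffer(1, 2, 3)
    buf.remove(0, 3)                  // len drops to 0, so last0 is dropped as well
    buf += 4
    assert(buf.toList == List(4))

    buf.clear()                       // clear() now also nulls out last0
    buf += 5
    assert(buf.toList == List(5))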
diff --git a/src/library/scala/collection/mutable/ListMap.scala b/src/library/scala/collection/mutable/ListMap.scala
index 7f05deffc8..0b5d34cd81 100644
--- a/src/library/scala/collection/mutable/ListMap.scala
+++ b/src/library/scala/collection/mutable/ListMap.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
package mutable
import generic._
diff --git a/src/library/scala/collection/mutable/Map.scala b/src/library/scala/collection/mutable/Map.scala
index f72e1fc4e7..7280aaec25 100644
--- a/src/library/scala/collection/mutable/Map.scala
+++ b/src/library/scala/collection/mutable/Map.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
package mutable
import generic._
@@ -47,7 +48,7 @@ trait Map[A, B]
*/
def withDefaultValue(d: B): mutable.Map[A, B] = new Map.WithDefault[A, B](this, x => d)
- /** Return a read-only projection of this map. !!! or just use an (immutable) MapProxy?
+ /* Return a read-only projection of this map. !!! or just use an (immutable) MapProxy?
def readOnly : scala.collection.Map[A, B] = new scala.collection.Map[A, B] {
override def size = self.size
override def update(key: A, value: B) = self.update(key, value)
diff --git a/src/library/scala/collection/mutable/MapBuilder.scala b/src/library/scala/collection/mutable/MapBuilder.scala
index 8468e09e4d..a5a6b12ea9 100644
--- a/src/library/scala/collection/mutable/MapBuilder.scala
+++ b/src/library/scala/collection/mutable/MapBuilder.scala
@@ -7,7 +7,8 @@
\* */
-package scala.collection
+package scala
+package collection
package mutable
/** The canonical builder for immutable maps, working with the map's `+` method
diff --git a/src/library/scala/collection/mutable/MapLike.scala b/src/library/scala/collection/mutable/MapLike.scala
index a53aa3b76a..dbe32fdb79 100644
--- a/src/library/scala/collection/mutable/MapLike.scala
+++ b/src/library/scala/collection/mutable/MapLike.scala
@@ -7,12 +7,13 @@
\* */
-package scala.collection
+package scala
+package collection
package mutable
import generic._
-import scala.annotation.{migration, bridge}
-import parallel.mutable.ParMap
+import scala.annotation.migration
+import scala.collection.parallel.mutable.ParMap
/** A template trait for mutable maps.
* $mapNote
@@ -50,8 +51,6 @@ trait MapLike[A, B, +This <: MapLike[A, B, This] with Map[A, B]]
with Parallelizable[(A, B), ParMap[A, B]]
{ self =>
- import scala.collection.Traversable
-
/** A common implementation of `newBuilder` for all mutable maps
* in terms of `empty`.
*
diff --git a/src/library/scala/collection/mutable/MapProxy.scala b/src/library/scala/collection/mutable/MapProxy.scala
index c730e2b7c8..e4f106731c 100644
--- a/src/library/scala/collection/mutable/MapProxy.scala
+++ b/src/library/scala/collection/mutable/MapProxy.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
package mutable
/**
diff --git a/src/library/scala/collection/mutable/MultiMap.scala b/src/library/scala/collection/mutable/MultiMap.scala
index 4635bfbe64..78dfc35268 100644
--- a/src/library/scala/collection/mutable/MultiMap.scala
+++ b/src/library/scala/collection/mutable/MultiMap.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
package mutable
diff --git a/src/library/scala/collection/mutable/MutableList.scala b/src/library/scala/collection/mutable/MutableList.scala
index bc6272bfdb..5727b12975 100644
--- a/src/library/scala/collection/mutable/MutableList.scala
+++ b/src/library/scala/collection/mutable/MutableList.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
package mutable
import generic._
@@ -61,8 +62,7 @@ extends AbstractSeq[A]
tl
}
- // this method must be private for binary compatibility
- private final def tailImpl(tl: MutableList[A]) {
+ protected final def tailImpl(tl: MutableList[A]) {
require(nonEmpty, "tail of empty list")
tl.first0 = first0.tail
tl.len = len - 1
@@ -149,7 +149,7 @@ extends AbstractSeq[A]
override def clone(): MutableList[A] = {
val bf = newBuilder
bf ++= seq
- bf.result
+ bf.result()
}
}
diff --git a/src/library/scala/collection/mutable/ObservableBuffer.scala b/src/library/scala/collection/mutable/ObservableBuffer.scala
index bcaf977727..7fe794caaf 100644
--- a/src/library/scala/collection/mutable/ObservableBuffer.scala
+++ b/src/library/scala/collection/mutable/ObservableBuffer.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
package mutable
import script._
@@ -65,7 +66,7 @@ trait ObservableBuffer[A] extends Buffer[A] with Publisher[Message[A] with Undoa
}
abstract override def clear(): Unit = {
- super.clear
+ super.clear()
publish(new Reset with Undoable {
def undo() { throw new UnsupportedOperationException("cannot undo") }
})
diff --git a/src/library/scala/collection/mutable/ObservableMap.scala b/src/library/scala/collection/mutable/ObservableMap.scala
index d81c90bf4c..0dec6fa516 100644
--- a/src/library/scala/collection/mutable/ObservableMap.scala
+++ b/src/library/scala/collection/mutable/ObservableMap.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
package mutable
import script._
@@ -60,7 +61,7 @@ trait ObservableMap[A, B] extends Map[A, B] with Publisher[Message[(A, B)] with
}
abstract override def clear(): Unit = {
- super.clear
+ super.clear()
publish(new Reset with Undoable {
def undo(): Unit = throw new UnsupportedOperationException("cannot undo")
})
diff --git a/src/library/scala/collection/mutable/ObservableSet.scala b/src/library/scala/collection/mutable/ObservableSet.scala
index 3e79506413..acb6d92c8c 100644
--- a/src/library/scala/collection/mutable/ObservableSet.scala
+++ b/src/library/scala/collection/mutable/ObservableSet.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
package mutable
import script._
@@ -44,7 +45,7 @@ trait ObservableSet[A] extends Set[A] with Publisher[Message[A] with Undoable]
}
abstract override def clear(): Unit = {
- super.clear
+ super.clear()
publish(new Reset with Undoable {
def undo(): Unit = throw new UnsupportedOperationException("cannot undo")
})
diff --git a/src/library/scala/collection/mutable/OpenHashMap.scala b/src/library/scala/collection/mutable/OpenHashMap.scala
index 8b3e52470a..a0aea43121 100644
--- a/src/library/scala/collection/mutable/OpenHashMap.scala
+++ b/src/library/scala/collection/mutable/OpenHashMap.scala
@@ -27,7 +27,7 @@ object OpenHashMap {
var value: Option[Value])
extends HashEntry[Key, OpenEntry[Key, Value]]
- private[mutable] def nextPowerOfTwo(i : Int) = highestOneBit(i) << 1;
+ private[mutable] def nextPowerOfTwo(i : Int) = highestOneBit(i) << 1
}
/** A mutable hash map based on an open hashing scheme. The precise scheme is
@@ -78,8 +78,8 @@ extends AbstractMap[Key, Value]
/** Returns a mangled hash code of the provided key. */
protected def hashOf(key: Key) = {
var h = key.##
- h ^= ((h >>> 20) ^ (h >>> 12));
- h ^ (h >>> 7) ^ (h >>> 4);
+ h ^= ((h >>> 20) ^ (h >>> 12))
+ h ^ (h >>> 7) ^ (h >>> 4)
}
private[this] def growTable() = {
@@ -89,7 +89,7 @@ extends AbstractMap[Key, Value]
table = new Array[Entry](newSize)
mask = newSize - 1
oldTable.foreach( entry =>
- if (entry != null && entry.value != None) addEntry(entry));
+ if (entry != null && entry.value != None) addEntry(entry))
deleted = 0
}
@@ -124,18 +124,18 @@ extends AbstractMap[Key, Value]
put(key, hashOf(key), value)
private def put(key: Key, hash: Int, value: Value): Option[Value] = {
- if (2 * (size + deleted) > mask) growTable
+ if (2 * (size + deleted) > mask) growTable()
val index = findIndex(key, hash)
val entry = table(index)
if (entry == null) {
- table(index) = new OpenEntry(key, hash, Some(value));
+ table(index) = new OpenEntry(key, hash, Some(value))
modCount += 1
size += 1
None
} else {
val res = entry.value
if (entry.value == None) { size += 1; modCount += 1 }
- entry.value = Some(value);
+ entry.value = Some(value)
res
}
}
@@ -161,13 +161,13 @@ extends AbstractMap[Key, Value]
while(entry != null){
if (entry.hash == hash &&
entry.key == key){
- return entry.value;
+ return entry.value
}
- j = 5 * j + 1 + perturb;
- perturb >>= 5;
- index = j & mask;
- entry = table(index);
+ j = 5 * j + 1 + perturb
+ perturb >>= 5
+ index = j & mask
+ entry = table(index)
}
None
}
@@ -182,8 +182,8 @@ extends AbstractMap[Key, Value]
val initialModCount = modCount
private[this] def advance() {
- if (initialModCount != modCount) sys.error("Concurrent modification");
- while((index <= mask) && (table(index) == null || table(index).value == None)) index+=1;
+ if (initialModCount != modCount) sys.error("Concurrent modification")
+ while((index <= mask) && (table(index) == null || table(index).value == None)) index+=1
}
def hasNext = {advance(); index <= mask }
@@ -198,7 +198,7 @@ extends AbstractMap[Key, Value]
override def clone() = {
val it = new OpenHashMap[Key, Value]
- foreachUndeletedEntry(entry => it.put(entry.key, entry.hash, entry.value.get));
+ foreachUndeletedEntry(entry => it.put(entry.key, entry.hash, entry.value.get))
it
}
@@ -213,24 +213,24 @@ extends AbstractMap[Key, Value]
* @param f The function to apply to each key, value mapping.
*/
override def foreach[U](f : ((Key, Value)) => U) {
- val startModCount = modCount;
+ val startModCount = modCount
foreachUndeletedEntry(entry => {
if (modCount != startModCount) sys.error("Concurrent Modification")
f((entry.key, entry.value.get))}
- );
+ )
}
private[this] def foreachUndeletedEntry(f : Entry => Unit){
- table.foreach(entry => if (entry != null && entry.value != None) f(entry));
+ table.foreach(entry => if (entry != null && entry.value != None) f(entry))
}
override def transform(f : (Key, Value) => Value) = {
- foreachUndeletedEntry(entry => entry.value = Some(f(entry.key, entry.value.get)));
+ foreachUndeletedEntry(entry => entry.value = Some(f(entry.key, entry.value.get)))
this
}
override def retain(f : (Key, Value) => Boolean) = {
- foreachUndeletedEntry(entry => if (!f(entry.key, entry.value.get)) {entry.value = None; size -= 1; deleted += 1} );
+ foreachUndeletedEntry(entry => if (!f(entry.key, entry.value.get)) {entry.value = None; size -= 1; deleted += 1} )
this
}
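The hashOf cleanup above keeps the same bit-spreading step: high bits of key.## are folded down into the low bits that survive the final index & mask. A REPL-pasteable restatement of that step (illustrative; the probing loop itself is not reproduced here):

    // Same spreading as OpenHashMap.hashOf, written against a raw hash code.
    def mangle(raw: Int): Int = {
      var h = raw
      h ^= (h >>> 20) ^ (h >>> 12)
      h ^ (h >>> 7) ^ (h >>> 4)
    }

    // Two hashes that differ only in high bits now land in different low-bit buckets.
    val mask = 63
    assert((0x10000000 & mask) == (0x20000000 & mask))                // same bucket before spreading
    assert((mangle(0x10000000) & mask) != (mangle(0x20000000) & mask))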
diff --git a/src/library/scala/collection/mutable/PriorityQueue.scala b/src/library/scala/collection/mutable/PriorityQueue.scala
index 84257c6e97..b5b1c1d006 100644
--- a/src/library/scala/collection/mutable/PriorityQueue.scala
+++ b/src/library/scala/collection/mutable/PriorityQueue.scala
@@ -6,9 +6,8 @@
** |/ **
\* */
-
-
-package scala.collection
+package scala
+package collection
package mutable
import generic._
@@ -134,11 +133,11 @@ class PriorityQueue[A](implicit val ord: Ordering[A])
throw new NoSuchElementException("no element to remove from heap")
def dequeueAll[A1 >: A, That](implicit bf: CanBuildFrom[_, A1, That]): That = {
- val b = bf.apply
+ val b = bf.apply()
while (nonEmpty) {
b += dequeue()
}
- b.result
+ b.result()
}
/** Returns the element with the highest priority in the queue,
@@ -146,14 +145,6 @@ class PriorityQueue[A](implicit val ord: Ordering[A])
*
* @return the element with the highest priority.
*/
- @deprecated("Use `head` instead.", "2.9.0")
- def max: A = if (resarr.p_size0 > 1) toA(resarr.p_array(1)) else throw new NoSuchElementException("queue is empty")
-
- /** Returns the element with the highest priority in the queue,
- * or throws an error if there is no element contained in the queue.
- *
- * @return the element with the highest priority.
- */
override def head: A = if (resarr.p_size0 > 1) toA(resarr.p_array(1)) else throw new NoSuchElementException("queue is empty")
/** Removes all elements from the queue. After this operation is completed,
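With the deprecated max gone, head is the one way to peek at the highest-priority element, and dequeue() removes it. A quick REPL check (illustrative usage, default Int ordering):

    import scala.collection.mutable.PriorityQueue

    val pq = PriorityQueue(3, 1, 4, 1, 5)
    assert(pq.head == 5)         // peek at the highest-priority element
    assert(pq.dequeue() == 5)    // remove it
    assert(pq.head == 4)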
diff --git a/src/library/scala/collection/mutable/PriorityQueueProxy.scala b/src/library/scala/collection/mutable/PriorityQueueProxy.scala
index 3bb5d32cf8..4cb49be9b8 100644
--- a/src/library/scala/collection/mutable/PriorityQueueProxy.scala
+++ b/src/library/scala/collection/mutable/PriorityQueueProxy.scala
@@ -7,7 +7,8 @@
\* */
-package scala.collection
+package scala
+package collection
package mutable
/** This class serves as a proxy for priority queues. The
@@ -66,7 +67,7 @@ abstract class PriorityQueueProxy[A](implicit ord: Ordering[A]) extends Priority
*
* @return the element with the highest priority.
*/
- override def dequeue(): A = self.dequeue
+ override def dequeue(): A = self.dequeue()
/** Returns the element with the highest priority in the queue,
* or throws an error if there is no element contained in the queue.
@@ -75,18 +76,10 @@ abstract class PriorityQueueProxy[A](implicit ord: Ordering[A]) extends Priority
*/
override def head: A = self.head
- /** Returns the element with the highest priority in the queue,
- * or throws an error if there is no element contained in the queue.
- *
- * @return the element with the highest priority.
- */
- @deprecated("Use `head` instead.", "2.9.0")
- override def max: A = self.max
-
/** Removes all elements from the queue. After this operation is completed,
* the queue will be empty.
*/
- override def clear(): Unit = self.clear
+ override def clear(): Unit = self.clear()
/** Returns a regular queue containing the same elements.
*/
diff --git a/src/library/scala/collection/mutable/Publisher.scala b/src/library/scala/collection/mutable/Publisher.scala
index e31205b477..22bbea16ef 100644
--- a/src/library/scala/collection/mutable/Publisher.scala
+++ b/src/library/scala/collection/mutable/Publisher.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
package mutable
@@ -45,7 +46,7 @@ trait Publisher[Evt] {
def suspendSubscription(sub: Sub) { suspended += sub }
def activateSubscription(sub: Sub) { suspended -= sub }
def removeSubscription(sub: Sub) { filters -= sub }
- def removeSubscriptions() { filters.clear }
+ def removeSubscriptions() { filters.clear() }
protected def publish(event: Evt) {
filters.keys.foreach(sub =>
diff --git a/src/library/scala/collection/mutable/Queue.scala b/src/library/scala/collection/mutable/Queue.scala
index 8ef5f6aeb7..e80aae2171 100644
--- a/src/library/scala/collection/mutable/Queue.scala
+++ b/src/library/scala/collection/mutable/Queue.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
package mutable
import generic._
@@ -167,13 +168,6 @@ extends MutableList[A]
*/
def front: A = head
- // this method (duplicated from MutableList) must be private for binary compatibility
- private final def tailImpl(tl: Queue[A]) {
- require(nonEmpty, "tail of empty list")
- tl.first0 = first0.tail
- tl.len = len - 1
- tl.last0 = if (tl.len == 0) tl.first0 else last0
- }
// TODO - Don't override this just for new to create appropriate type....
override def tail: Queue[A] = {
@@ -185,7 +179,7 @@ extends MutableList[A]
override def clone(): Queue[A] = {
val bf = newBuilder
bf ++= seq
- bf.result
+ bf.result()
}
private[this] def decrementLength() {
diff --git a/src/library/scala/collection/mutable/QueueProxy.scala b/src/library/scala/collection/mutable/QueueProxy.scala
index c286a340e3..bfc5edbeff 100644
--- a/src/library/scala/collection/mutable/QueueProxy.scala
+++ b/src/library/scala/collection/mutable/QueueProxy.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
package mutable
/** `Queue` objects implement data structures that allow one to
@@ -67,7 +68,7 @@ trait QueueProxy[A] extends Queue[A] with Proxy {
*
* @return the first element of the queue.
*/
- override def dequeue(): A = self.dequeue
+ override def dequeue(): A = self.dequeue()
/** Returns the first element in the queue, or throws an error if there
* is no element contained in the queue.
@@ -79,7 +80,7 @@ trait QueueProxy[A] extends Queue[A] with Proxy {
/** Removes all elements from the queue. After this operation is completed,
* the queue will be empty.
*/
- override def clear(): Unit = self.clear
+ override def clear(): Unit = self.clear()
/** Returns an iterator over all elements on the queue.
*
diff --git a/src/library/scala/collection/mutable/RevertibleHistory.scala b/src/library/scala/collection/mutable/RevertibleHistory.scala
index 5544a21a55..725a8113ec 100644
--- a/src/library/scala/collection/mutable/RevertibleHistory.scala
+++ b/src/library/scala/collection/mutable/RevertibleHistory.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
package mutable
@@ -30,7 +31,7 @@ class RevertibleHistory[Evt <: Undoable, Pub] extends History[Evt, Pub] with Und
*/
def undo(): Unit = {
val old = log.toList.reverse
- clear
- old.foreach { case (sub, event) => event.undo }
+ clear()
+ old.foreach { case (sub, event) => event.undo() }
}
}
diff --git a/src/library/scala/collection/mutable/Seq.scala b/src/library/scala/collection/mutable/Seq.scala
index 9d9399ebb4..11fbdd13f3 100644
--- a/src/library/scala/collection/mutable/Seq.scala
+++ b/src/library/scala/collection/mutable/Seq.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
package mutable
import generic._
diff --git a/src/library/scala/collection/mutable/SeqLike.scala b/src/library/scala/collection/mutable/SeqLike.scala
index 447100cf4c..6987066f2b 100644
--- a/src/library/scala/collection/mutable/SeqLike.scala
+++ b/src/library/scala/collection/mutable/SeqLike.scala
@@ -6,10 +6,10 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
package mutable
-import generic._
import parallel.mutable.ParSeq
/** A template trait for mutable sequences of type `mutable.Seq[A]`.
diff --git a/src/library/scala/collection/mutable/Set.scala b/src/library/scala/collection/mutable/Set.scala
index 023ff63056..4439880976 100644
--- a/src/library/scala/collection/mutable/Set.scala
+++ b/src/library/scala/collection/mutable/Set.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
package mutable
import generic._
diff --git a/src/library/scala/collection/mutable/SetBuilder.scala b/src/library/scala/collection/mutable/SetBuilder.scala
index 42fd651d41..01bfdc96ed 100644
--- a/src/library/scala/collection/mutable/SetBuilder.scala
+++ b/src/library/scala/collection/mutable/SetBuilder.scala
@@ -6,12 +6,10 @@
** |/ **
\* */
-
-package scala.collection
+package scala
+package collection
package mutable
-import generic._
-
/** The canonical builder for mutable Sets.
*
* @tparam A The type of the elements that will be contained in this set.
diff --git a/src/library/scala/collection/mutable/SetLike.scala b/src/library/scala/collection/mutable/SetLike.scala
index 01f87447ae..073bc59f61 100644
--- a/src/library/scala/collection/mutable/SetLike.scala
+++ b/src/library/scala/collection/mutable/SetLike.scala
@@ -6,12 +6,13 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
package mutable
import generic._
import script._
-import scala.annotation.{ migration, bridge }
+import scala.annotation.migration
import parallel.mutable.ParSet
/** A template trait for mutable sets of type `mutable.Set[A]`.
@@ -210,7 +211,7 @@ trait SetLike[A, +This <: SetLike[A, This] with Set[A]]
def <<(cmd: Message[A]): Unit = cmd match {
case Include(_, x) => this += x
case Remove(_, x) => this -= x
- case Reset() => clear
+ case Reset() => clear()
case s: Script[_] => s.iterator foreach <<
case _ => throw new UnsupportedOperationException("message " + cmd + " not understood")
}
diff --git a/src/library/scala/collection/mutable/SetProxy.scala b/src/library/scala/collection/mutable/SetProxy.scala
index c9f297509f..9568a54276 100644
--- a/src/library/scala/collection/mutable/SetProxy.scala
+++ b/src/library/scala/collection/mutable/SetProxy.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
package mutable
/** This is a simple wrapper class for [[scala.collection.mutable.Set]].
diff --git a/src/library/scala/collection/mutable/SortedSet.scala b/src/library/scala/collection/mutable/SortedSet.scala
index 41f2c6e39f..0f2fa75abd 100644
--- a/src/library/scala/collection/mutable/SortedSet.scala
+++ b/src/library/scala/collection/mutable/SortedSet.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
package mutable
import generic._
diff --git a/src/library/scala/collection/mutable/Stack.scala b/src/library/scala/collection/mutable/Stack.scala
index 6eef250f9d..34c6d1fbb9 100644
--- a/src/library/scala/collection/mutable/Stack.scala
+++ b/src/library/scala/collection/mutable/Stack.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
package mutable
import generic._
diff --git a/src/library/scala/collection/mutable/StackProxy.scala b/src/library/scala/collection/mutable/StackProxy.scala
index 16f13ff42c..7d776b99c3 100644
--- a/src/library/scala/collection/mutable/StackProxy.scala
+++ b/src/library/scala/collection/mutable/StackProxy.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
package mutable
/** A stack implements a data structure which allows one to store and retrieve
@@ -69,13 +70,13 @@ trait StackProxy[A] extends Stack[A] with Proxy {
/** Removes the top element from the stack.
*/
- override def pop(): A = self.pop
+ override def pop(): A = self.pop()
/**
* Removes all elements from the stack. After this operation is completed,
* the stack will be empty.
*/
- override def clear(): Unit = self.clear
+ override def clear(): Unit = self.clear()
/** Returns an iterator over all elements on the stack. This iterator
* is stable with respect to state changes in the stack object; i.e.
diff --git a/src/library/scala/collection/mutable/StringBuilder.scala b/src/library/scala/collection/mutable/StringBuilder.scala
index 4d269a95b1..498e9e461e 100644
--- a/src/library/scala/collection/mutable/StringBuilder.scala
+++ b/src/library/scala/collection/mutable/StringBuilder.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
package mutable
import java.lang.{ StringBuilder => JavaStringBuilder }
@@ -255,8 +256,8 @@ final class StringBuilder(private val underlying: JavaStringBuilder)
* @return This StringBuilder.
*/
def append(x: Boolean): StringBuilder = { underlying append x ; this }
- def append(x: Byte): StringBuilder = { underlying append x ; this }
- def append(x: Short): StringBuilder = { underlying append x ; this }
+ def append(x: Byte): StringBuilder = append(x.toInt)
+ def append(x: Short): StringBuilder = append(x.toInt)
def append(x: Int): StringBuilder = { underlying append x ; this }
def append(x: Long): StringBuilder = { underlying append x ; this }
def append(x: Float): StringBuilder = { underlying append x ; this }
@@ -359,8 +360,8 @@ final class StringBuilder(private val underlying: JavaStringBuilder)
* @return this StringBuilder.
*/
def insert(index: Int, x: Boolean): StringBuilder = insert(index, String.valueOf(x))
- def insert(index: Int, x: Byte): StringBuilder = insert(index, String.valueOf(x))
- def insert(index: Int, x: Short): StringBuilder = insert(index, String.valueOf(x))
+ def insert(index: Int, x: Byte): StringBuilder = insert(index, x.toInt)
+ def insert(index: Int, x: Short): StringBuilder = insert(index, x.toInt)
def insert(index: Int, x: Int): StringBuilder = insert(index, String.valueOf(x))
def insert(index: Int, x: Long): StringBuilder = insert(index, String.valueOf(x))
def insert(index: Int, x: Float): StringBuilder = insert(index, String.valueOf(x))
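Note: `java.lang.StringBuilder` has no `append`/`insert` overloads for `byte` or `short`, so these values were already being widened to `Int` before reaching the underlying builder; the change above only makes that widening explicit by delegating to the `Int` overloads. The observable output is unchanged, as this small sketch (illustrative only) shows:

import scala.collection.mutable.StringBuilder

object AppendWidening {
  def main(args: Array[String]): Unit = {
    val b: Byte = 65
    val sb = new StringBuilder
    sb.append(b)           // routed through the Int overload either way
    sb.insert(0, b)        // likewise for insert
    println(sb.toString)   // "6565": the numeric value, not the character 'A'
  }
}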
diff --git a/src/library/scala/collection/mutable/Subscriber.scala b/src/library/scala/collection/mutable/Subscriber.scala
index 35d31d7316..c2aa9be72d 100644
--- a/src/library/scala/collection/mutable/Subscriber.scala
+++ b/src/library/scala/collection/mutable/Subscriber.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
package mutable
/** `Subscriber[A, B]` objects may subscribe to events of type `A`
diff --git a/src/library/scala/collection/mutable/SynchronizedBuffer.scala b/src/library/scala/collection/mutable/SynchronizedBuffer.scala
index bf9a70c5b7..b97191137f 100644
--- a/src/library/scala/collection/mutable/SynchronizedBuffer.scala
+++ b/src/library/scala/collection/mutable/SynchronizedBuffer.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
package mutable
import script._
@@ -157,7 +158,7 @@ trait SynchronizedBuffer[A] extends Buffer[A] {
/** Clears the buffer contents.
*/
abstract override def clear(): Unit = synchronized {
- super.clear
+ super.clear()
}
override def <<(cmd: Message[A]): Unit = synchronized {
diff --git a/src/library/scala/collection/mutable/SynchronizedMap.scala b/src/library/scala/collection/mutable/SynchronizedMap.scala
index 5a3562cb22..643701eda9 100644
--- a/src/library/scala/collection/mutable/SynchronizedMap.scala
+++ b/src/library/scala/collection/mutable/SynchronizedMap.scala
@@ -6,9 +6,8 @@
** |/ **
\* */
-
-
-package scala.collection
+package scala
+package collection
package mutable
import scala.annotation.migration
diff --git a/src/library/scala/collection/mutable/SynchronizedPriorityQueue.scala b/src/library/scala/collection/mutable/SynchronizedPriorityQueue.scala
index 8dfc40b9c8..05676597db 100644
--- a/src/library/scala/collection/mutable/SynchronizedPriorityQueue.scala
+++ b/src/library/scala/collection/mutable/SynchronizedPriorityQueue.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
package mutable
/** This class implements synchronized priority queues using a binary heap.
@@ -64,7 +65,7 @@ class SynchronizedPriorityQueue[A](implicit ord: Ordering[A]) extends PriorityQu
*
* @return the element with the highest priority.
*/
- override def dequeue(): A = synchronized { super.dequeue }
+ override def dequeue(): A = synchronized { super.dequeue() }
/** Returns the element with the highest priority in the queue,
* or throws an error if there is no element contained in the queue.
@@ -73,18 +74,10 @@ class SynchronizedPriorityQueue[A](implicit ord: Ordering[A]) extends PriorityQu
*/
override def head: A = synchronized { super.head }
- /** Returns the element with the highest priority in the queue,
- * or throws an error if there is no element contained in the queue.
- *
- * @return the element with the highest priority.
- */
- @deprecated("Use `head` instead.", "2.9.0")
- override def max: A = synchronized { super.max }
-
/** Removes all elements from the queue. After this operation is completed,
* the queue will be empty.
*/
- override def clear(): Unit = synchronized { super.clear }
+ override def clear(): Unit = synchronized { super.clear() }
/** Returns an iterator which yield all the elements of the priority
* queue in descending priority order.
diff --git a/src/library/scala/collection/mutable/SynchronizedQueue.scala b/src/library/scala/collection/mutable/SynchronizedQueue.scala
index 9559d5eaa5..263c06b439 100644
--- a/src/library/scala/collection/mutable/SynchronizedQueue.scala
+++ b/src/library/scala/collection/mutable/SynchronizedQueue.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
package mutable
@@ -25,8 +26,6 @@ package mutable
* @define coll synchronized queue
*/
class SynchronizedQueue[A] extends Queue[A] {
- import scala.collection.Traversable
-
/** Checks if the queue is empty.
*
* @return true, iff there is no element in the queue.
@@ -58,7 +57,7 @@ class SynchronizedQueue[A] extends Queue[A] {
*
* @return the first element of the queue.
*/
- override def dequeue(): A = synchronized { super.dequeue }
+ override def dequeue(): A = synchronized { super.dequeue() }
/** Returns the first element in the queue which satisfies the
* given predicate, and removes this element from the queue.
@@ -87,7 +86,7 @@ class SynchronizedQueue[A] extends Queue[A] {
/** Removes all elements from the queue. After this operation is completed,
* the queue will be empty.
*/
- override def clear(): Unit = synchronized { super.clear }
+ override def clear(): Unit = synchronized { super.clear() }
/** Checks if two queues are structurally identical.
*
diff --git a/src/library/scala/collection/mutable/SynchronizedSet.scala b/src/library/scala/collection/mutable/SynchronizedSet.scala
index e4a44993ff..441efea5fb 100644
--- a/src/library/scala/collection/mutable/SynchronizedSet.scala
+++ b/src/library/scala/collection/mutable/SynchronizedSet.scala
@@ -7,7 +7,8 @@
\* */
-package scala.collection
+package scala
+package collection
package mutable
import script._
@@ -24,8 +25,6 @@ import script._
* @define coll synchronized set
*/
trait SynchronizedSet[A] extends Set[A] {
- import scala.collection.Traversable
-
abstract override def size: Int = synchronized {
super.size
}
@@ -71,7 +70,7 @@ trait SynchronizedSet[A] extends Set[A] {
}
abstract override def clear(): Unit = synchronized {
- super.clear
+ super.clear()
}
override def subsetOf(that: scala.collection.GenSet[A]) = synchronized {
diff --git a/src/library/scala/collection/mutable/SynchronizedStack.scala b/src/library/scala/collection/mutable/SynchronizedStack.scala
index 5d7c9f6073..c92a68ffde 100644
--- a/src/library/scala/collection/mutable/SynchronizedStack.scala
+++ b/src/library/scala/collection/mutable/SynchronizedStack.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
package mutable
@@ -67,13 +68,13 @@ class SynchronizedStack[A] extends Stack[A] {
/** Removes the top element from the stack.
*/
- override def pop(): A = synchronized { super.pop }
+ override def pop(): A = synchronized { super.pop() }
/**
* Removes all elements from the stack. After this operation is completed,
* the stack will be empty.
*/
- override def clear(): Unit = synchronized { super.clear }
+ override def clear(): Unit = synchronized { super.clear() }
/** Returns an iterator over all elements on the stack. This iterator
* is stable with respect to state changes in the stack object; i.e.
diff --git a/src/library/scala/collection/mutable/Traversable.scala b/src/library/scala/collection/mutable/Traversable.scala
index e36ffc847f..d7ea376d28 100644
--- a/src/library/scala/collection/mutable/Traversable.scala
+++ b/src/library/scala/collection/mutable/Traversable.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
package mutable
import generic._
diff --git a/src/library/scala/collection/mutable/TreeSet.scala b/src/library/scala/collection/mutable/TreeSet.scala
index 5197af1b04..ea5b859367 100644
--- a/src/library/scala/collection/mutable/TreeSet.scala
+++ b/src/library/scala/collection/mutable/TreeSet.scala
@@ -6,10 +6,13 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
package mutable
import generic._
+import scala.collection.immutable.{RedBlackTree => RB}
+import scala.runtime.ObjectRef
/**
* @define Coll `mutable.TreeSet`
@@ -29,95 +32,81 @@ object TreeSet extends MutableSortedSetFactory[TreeSet] {
}
/**
- * A mutable SortedSet using an immutable AVL Tree as underlying data structure.
+ * A mutable SortedSet using an immutable RedBlack Tree as underlying data structure.
*
* @author Lucien Pereira
*
*/
-class TreeSet[A](implicit val ordering: Ordering[A]) extends SortedSet[A] with SetLike[A, TreeSet[A]]
+class TreeSet[A] private (treeRef: ObjectRef[RB.Tree[A, Null]], from: Option[A], until: Option[A])(implicit val ordering: Ordering[A])
+ extends SortedSet[A] with SetLike[A, TreeSet[A]]
with SortedSetLike[A, TreeSet[A]] with Set[A] with Serializable {
- // Projection constructor
- private def this(base: Option[TreeSet[A]], from: Option[A], until: Option[A])(implicit ordering: Ordering[A]) {
- this();
- this.base = base
- this.from = from
- this.until = until
- }
-
- private var base: Option[TreeSet[A]] = None
-
- private var from: Option[A] = None
-
- private var until: Option[A] = None
-
- private var avl: AVLTree[A] = Leaf
-
- private var cardinality: Int = 0
+ def this()(implicit ordering: Ordering[A]) = this(new ObjectRef(null), None, None)
- def resolve: TreeSet[A] = base.getOrElse(this)
-
- private def isLeftAcceptable(from: Option[A], ordering: Ordering[A])(a: A): Boolean =
- from.map(x => ordering.gteq(a, x)).getOrElse(true)
-
- private def isRightAcceptable(until: Option[A], ordering: Ordering[A])(a: A): Boolean =
- until.map(x => ordering.lt(a, x)).getOrElse(true)
-
- /**
- * Cardinality store the set size, unfortunately a
- * set view (given by rangeImpl)
- * cannot take advantage of this optimisation
- *
- */
- override def size: Int = base.map(_ => super.size).getOrElse(cardinality)
+ override def size: Int = RB.countInRange(treeRef.elem, from, until)
override def stringPrefix = "TreeSet"
override def empty: TreeSet[A] = TreeSet.empty
- override def rangeImpl(from: Option[A], until: Option[A]): TreeSet[A] = new TreeSet(Some(this), from, until)
+ private def pickBound(comparison: (A, A) => A, oldBound: Option[A], newBound: Option[A]) = (newBound, oldBound) match {
+ case (Some(newB), Some(oldB)) => Some(comparison(newB, oldB))
+ case (None, _) => oldBound
+ case _ => newBound
+ }
+
+ override def rangeImpl(fromArg: Option[A], untilArg: Option[A]): TreeSet[A] = {
+ val newFrom = pickBound(ordering.max, fromArg, from)
+ val newUntil = pickBound(ordering.min, untilArg, until)
+
+ new TreeSet(treeRef, newFrom, newUntil)
+ }
override def -=(elem: A): this.type = {
- try {
- resolve.avl = resolve.avl.remove(elem, ordering)
- resolve.cardinality = resolve.cardinality - 1
- } catch {
- case e: NoSuchElementException => ()
- }
+ treeRef.elem = RB.delete(treeRef.elem, elem)
this
}
override def +=(elem: A): this.type = {
- try {
- resolve.avl = resolve.avl.insert(elem, ordering)
- resolve.cardinality = resolve.cardinality + 1
- } catch {
- case e: IllegalArgumentException => ()
- }
+ treeRef.elem = RB.update(treeRef.elem, elem, null, overwrite = false)
this
}
/**
* Thanks to the immutable nature of the
- * underlying AVL Tree, we can share it with
+ * underlying Tree, we can share it with
* the clone. So clone complexity in time is O(1).
*
*/
- override def clone(): TreeSet[A] = {
- val clone = new TreeSet[A](base, from, until)
- clone.avl = resolve.avl
- clone.cardinality = resolve.cardinality
- clone
- }
+ override def clone(): TreeSet[A] =
+ new TreeSet[A](new ObjectRef(treeRef.elem), from, until)
+
+ private val notProjection = !(from.isDefined || until.isDefined)
override def contains(elem: A): Boolean = {
- isLeftAcceptable(from, ordering)(elem) &&
- isRightAcceptable(until, ordering)(elem) &&
- resolve.avl.contains(elem, ordering)
- }
+ def leftAcceptable: Boolean = from match {
+ case Some(lb) => ordering.gteq(elem, lb)
+ case _ => true
+ }
- override def iterator: Iterator[A] = resolve.avl.iterator
- .dropWhile(e => !isLeftAcceptable(from, ordering)(e))
- .takeWhile(e => isRightAcceptable(until, ordering)(e))
+ def rightAcceptable: Boolean = until match {
+ case Some(ub) => ordering.lt(elem, ub)
+ case _ => true
+ }
+
+ (notProjection || (leftAcceptable && rightAcceptable)) &&
+ RB.contains(treeRef.elem, elem)
+ }
+ override def iterator: Iterator[A] = iteratorFrom(None)
+
+ override def keysIteratorFrom(start: A) = iteratorFrom(Some(start))
+
+ private def iteratorFrom(start: Option[A]) = {
+ val it = RB.keysIterator(treeRef.elem, pickBound(ordering.max, from, start))
+ until match {
+ case None => it
+ case Some(ub) => it takeWhile (k => ordering.lt(k, ub))
+ }
+ }
}
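Note: with this rewrite the mutable TreeSet is a thin mutable cell around the same immutable red-black tree that backs `immutable.TreeSet`. Every instance produced by `rangeImpl` shares the original `ObjectRef`, so range views are live views of one tree, while `clone()` only copies the reference into a fresh cell and therefore runs in O(1); `pickBound` intersects old and new bounds so that nested range calls keep narrowing. A short usage sketch of the semantics the code above gives, shown with Int elements:

import scala.collection.mutable.TreeSet

object TreeSetSharingDemo {
  def main(args: Array[String]): Unit = {
    val s        = TreeSet(1, 3, 5, 7)
    val view     = s.rangeImpl(Some(3), Some(7))  // projection over the same treeRef
    val snapshot = s.clone()                      // new ObjectRef, shared tree nodes

    s += 4                       // mutation is visible through the live view...
    println(view.toList)         // List(3, 4, 5)
    println(snapshot.toList)     // ...but not through the clone: List(1, 3, 5, 7)
  }
}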
diff --git a/src/library/scala/collection/mutable/Undoable.scala b/src/library/scala/collection/mutable/Undoable.scala
index 0c0e8fed3e..482d618165 100644
--- a/src/library/scala/collection/mutable/Undoable.scala
+++ b/src/library/scala/collection/mutable/Undoable.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
package mutable
diff --git a/src/library/scala/collection/mutable/UnrolledBuffer.scala b/src/library/scala/collection/mutable/UnrolledBuffer.scala
index 9b48c8f24f..292705c02a 100644
--- a/src/library/scala/collection/mutable/UnrolledBuffer.scala
+++ b/src/library/scala/collection/mutable/UnrolledBuffer.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection.mutable
+package scala
+package collection.mutable
import scala.collection.AbstractIterator
import scala.collection.Iterator
@@ -87,7 +88,7 @@ extends scala.collection.mutable.AbstractBuffer[T]
// `that` is no longer usable, so clear it
// here we rely on the fact that `clear` allocates
// new nodes instead of modifying the previous ones
- that.clear
+ that.clear()
// return a reference to this
this
@@ -123,7 +124,7 @@ extends scala.collection.mutable.AbstractBuffer[T]
val r = node.array(pos)
scan()
r
- } else Iterator.empty.next
+ } else Iterator.empty.next()
}
// this should be faster than the iterator
diff --git a/src/library/scala/collection/mutable/WeakHashMap.scala b/src/library/scala/collection/mutable/WeakHashMap.scala
index 70e428c9b6..433d054bfc 100644
--- a/src/library/scala/collection/mutable/WeakHashMap.scala
+++ b/src/library/scala/collection/mutable/WeakHashMap.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
package mutable
import generic._
diff --git a/src/library/scala/collection/mutable/WrappedArray.scala b/src/library/scala/collection/mutable/WrappedArray.scala
index b83724090c..53fca9f779 100644
--- a/src/library/scala/collection/mutable/WrappedArray.scala
+++ b/src/library/scala/collection/mutable/WrappedArray.scala
@@ -8,7 +8,8 @@
-package scala.collection
+package scala
+package collection
package mutable
import scala.reflect.ClassTag
diff --git a/src/library/scala/collection/mutable/WrappedArrayBuilder.scala b/src/library/scala/collection/mutable/WrappedArrayBuilder.scala
index 7e0210311c..bfe95a11ab 100644
--- a/src/library/scala/collection/mutable/WrappedArrayBuilder.scala
+++ b/src/library/scala/collection/mutable/WrappedArrayBuilder.scala
@@ -8,10 +8,10 @@
-package scala.collection
+package scala
+package collection
package mutable
-import generic._
import scala.reflect.ClassTag
import scala.runtime.ScalaRunTime._
diff --git a/src/library/scala/collection/parallel/Combiner.scala b/src/library/scala/collection/parallel/Combiner.scala
index 00993c09ff..68df572517 100644
--- a/src/library/scala/collection/parallel/Combiner.scala
+++ b/src/library/scala/collection/parallel/Combiner.scala
@@ -7,7 +7,8 @@
\* */
-package scala.collection.parallel
+package scala
+package collection.parallel
import scala.collection.Parallel
@@ -33,11 +34,11 @@ import scala.collection.generic.Sizing
* @since 2.9
*/
trait Combiner[-Elem, +To] extends Builder[Elem, To] with Sizing with Parallel {
-
+
@transient
@volatile
var _combinerTaskSupport = defaultTaskSupport
-
+
def combinerTaskSupport = {
val cts = _combinerTaskSupport
if (cts eq null) {
@@ -45,9 +46,9 @@ trait Combiner[-Elem, +To] extends Builder[Elem, To] with Sizing with Parallel {
defaultTaskSupport
} else cts
}
-
+
def combinerTaskSupport_=(cts: TaskSupport) = _combinerTaskSupport = cts
-
+
/** Combines the contents of the receiver builder and the `other` builder,
* producing a new builder containing both their elements.
*
@@ -81,15 +82,15 @@ trait Combiner[-Elem, +To] extends Builder[Elem, To] with Sizing with Parallel {
* By default, this method returns `false`.
*/
def canBeShared: Boolean = false
-
+
/** Constructs the result and sets the appropriate tasksupport object to the resulting collection
* if this is applicable.
*/
def resultWithTaskSupport: To = {
- val res = result
+ val res = result()
setTaskSupport(res, combinerTaskSupport)
}
-
+
}
diff --git a/src/library/scala/collection/parallel/ParIterable.scala b/src/library/scala/collection/parallel/ParIterable.scala
index 2b24c88139..2ceeb18eef 100644
--- a/src/library/scala/collection/parallel/ParIterable.scala
+++ b/src/library/scala/collection/parallel/ParIterable.scala
@@ -6,12 +6,12 @@
** |/ **
\* */
-package scala.collection.parallel
+package scala
+package collection.parallel
import scala.collection.GenIterable
import scala.collection.generic._
import scala.collection.parallel.mutable.ParArrayCombiner
-import scala.collection.parallel.mutable.ParArray
/** A template trait for parallel iterable collections.
*
diff --git a/src/library/scala/collection/parallel/ParIterableLike.scala b/src/library/scala/collection/parallel/ParIterableLike.scala
index 0f06ff37af..a0b42f248e 100644
--- a/src/library/scala/collection/parallel/ParIterableLike.scala
+++ b/src/library/scala/collection/parallel/ParIterableLike.scala
@@ -6,11 +6,8 @@
** |/ **
\* */
-
-package scala.collection.parallel
-
-
-
+package scala
+package collection.parallel
import scala.collection.mutable.Builder
import scala.collection.mutable.ArrayBuffer
@@ -171,9 +168,9 @@ self: ParIterableLike[T, Repr, Sequential] =>
/** The task support object which is responsible for scheduling and
* load-balancing tasks to processors.
- *
+ *
* @see [[scala.collection.parallel.TaskSupport]]
- */
+ */
def tasksupport = {
val ts = _tasksupport
if (ts eq null) {
@@ -188,18 +185,18 @@ self: ParIterableLike[T, Repr, Sequential] =>
* A task support object can be changed in a parallel collection after it
* has been created, but only during a quiescent period, i.e. while there
* are no concurrent invocations to parallel collection methods.
- *
- * Here is a way to change the task support of a parallel collection:
- *
- * {{{
- * import scala.collection.parallel._
- * val pc = mutable.ParArray(1, 2, 3)
- * pc.tasksupport = new ForkJoinTaskSupport(
- * new scala.concurrent.forkjoin.ForkJoinPool(2))
- * }}}
+ *
+ * Here is a way to change the task support of a parallel collection:
+ *
+ * {{{
+ * import scala.collection.parallel._
+ * val pc = mutable.ParArray(1, 2, 3)
+ * pc.tasksupport = new ForkJoinTaskSupport(
+ * new scala.concurrent.forkjoin.ForkJoinPool(2))
+ * }}}
*
* @see [[scala.collection.parallel.TaskSupport]]
- */
+ */
def tasksupport_=(ts: TaskSupport) = _tasksupport = ts
def seq: Sequential
@@ -214,7 +211,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
def nonEmpty = size != 0
- def head = iterator.next
+ def head = iterator.next()
def headOption = if (nonEmpty) Some(head) else None
@@ -433,12 +430,13 @@ self: ParIterableLike[T, Repr, Sequential] =>
* @tparam S the type of accumulated results
* @param z the initial value for the accumulated result of the partition - this
* will typically be the neutral element for the `seqop` operator (e.g.
- * `Nil` for list concatenation or `0` for summation)
+ * `Nil` for list concatenation or `0` for summation) and may be evaluated
+ * more than once
* @param seqop an operator used to accumulate results within a partition
* @param combop an associative operator used to combine results from different partitions
*/
- def aggregate[S](z: S)(seqop: (S, T) => S, combop: (S, S) => S): S = {
- tasksupport.executeAndWaitResult(new Aggregate(z, seqop, combop, splitter))
+ def aggregate[S](z: =>S)(seqop: (S, T) => S, combop: (S, S) => S): S = {
+ tasksupport.executeAndWaitResult(new Aggregate(() => z, seqop, combop, splitter))
}
def foldLeft[S](z: S)(op: (S, T) => S): S = seq.foldLeft(z)(op)
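Note: `aggregate` now takes its zero value by name and hands the task a `() => S` thunk, so every partition evaluates `z` independently (hence the new wording "may be evaluated more than once"). With the old strict parameter, a single `z` instance was shared as the starting point of every partition's fold, which made mutable accumulators unsafe. A sketch of the now-safe pattern (illustrative; any parallel collection will do):

import scala.collection.mutable.ArrayBuffer

object AggregateByName {
  def main(args: Array[String]): Unit = {
    val xs = (0 until 1000).par
    val collected = xs.aggregate(ArrayBuffer.empty[Int])(
      (buf, x) => { buf += x; buf },   // seqop: each partition fills its own buffer
      (a, b)   => { a ++= b; a }       // combop: merge per-partition buffers
    )
    println(collected.size)            // 1000
  }
}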
@@ -453,7 +451,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
def reduceRightOption[U >: T](op: (T, U) => U): Option[U] = seq.reduceRightOption(op)
- /** Applies a function `f` to all the elements of $coll in a undefined order.
+ /** Applies a function `f` to all the elements of $coll in an undefined order.
*
* @tparam U the result type of the function applied to each element, which is always discarded
* @param f function applied to each element
@@ -626,7 +624,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
val b = bf(repr)
this.splitter.copy2builder[U, That, Builder[U, That]](b)
for (elem <- that.seq) b += elem
- setTaskSupport(b.result, tasksupport)
+ setTaskSupport(b.result(), tasksupport)
}
}
@@ -727,7 +725,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
tree => tasksupport.executeAndWaitResult(new FromScanTree(tree, z, op, combinerFactory(() => bf(repr).asCombiner)) mapResult {
cb => cb.resultWithTaskSupport
})
- }) else setTaskSupport((bf(repr) += z).result, tasksupport)
+ }) else setTaskSupport((bf(repr) += z).result(), tasksupport)
} else setTaskSupport(seq.scan(z)(op)(bf2seq(bf)), tasksupport)
} else setTaskSupport(seq.scan(z)(op)(bf2seq(bf)), tasksupport)
@@ -819,10 +817,10 @@ self: ParIterableLike[T, Repr, Sequential] =>
def zip[U >: T, S, That](that: GenIterable[S])(implicit bf: CanBuildFrom[Repr, (U, S), That]): That = if (bf(repr).isCombiner && that.isParSeq) {
val thatseq = that.asParSeq
- tasksupport.executeAndWaitResult(new Zip(combinerFactory(() => bf(repr).asCombiner), splitter, thatseq.splitter) mapResult { _.resultWithTaskSupport });
+ tasksupport.executeAndWaitResult(new Zip(combinerFactory(() => bf(repr).asCombiner), splitter, thatseq.splitter) mapResult { _.resultWithTaskSupport })
} else setTaskSupport(seq.zip(that)(bf2seq(bf)), tasksupport)
- def zipWithIndex[U >: T, That](implicit bf: CanBuildFrom[Repr, (U, Int), That]): That = this zip immutable.ParRange(0, size, 1, false)
+ def zipWithIndex[U >: T, That](implicit bf: CanBuildFrom[Repr, (U, Int), That]): That = this zip immutable.ParRange(0, size, 1, inclusive = false)
def zipAll[S, U >: T, That](that: GenIterable[S], thisElem: U, thatElem: S)(implicit bf: CanBuildFrom[Repr, (U, S), That]): That = if (bf(repr).isCombiner && that.isParSeq) {
val thatseq = that.asParSeq
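Note: the bare `false` arguments to `ParRange` are replaced with `inclusive = false` here and in the view classes further down; the call is unchanged, the named argument only documents at the call site which boolean is being passed. A tiny illustration with a hypothetical helper:

object NamedArgDemo {
  def step(start: Int, end: Int, by: Int, inclusive: Boolean): Range =
    if (inclusive) start.to(end, by) else start.until(end, by)

  def main(args: Array[String]): Unit = {
    println(step(0, 10, 2, false))              // which flag is false?
    println(step(0, 10, 2, inclusive = false))  // same call, intent is explicit
  }
}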
@@ -830,11 +828,11 @@ self: ParIterableLike[T, Repr, Sequential] =>
new ZipAll(size max thatseq.length, thisElem, thatElem, combinerFactory(() => bf(repr).asCombiner), splitter, thatseq.splitter) mapResult {
_.resultWithTaskSupport
}
- );
+ )
} else setTaskSupport(seq.zipAll(that, thisElem, thatElem)(bf2seq(bf)), tasksupport)
protected def toParCollection[U >: T, That](cbf: () => Combiner[U, That]): That = {
- tasksupport.executeAndWaitResult(new ToParCollection(combinerFactory(cbf), splitter) mapResult { _.resultWithTaskSupport });
+ tasksupport.executeAndWaitResult(new ToParCollection(combinerFactory(cbf), splitter) mapResult { _.resultWithTaskSupport })
}
protected def toParMap[K, V, That](cbf: () => Combiner[(K, V), That])(implicit ev: T <:< (K, V)): That = {
@@ -877,13 +875,13 @@ self: ParIterableLike[T, Repr, Sequential] =>
override def toSet[U >: T]: immutable.ParSet[U] = toParCollection[U, immutable.ParSet[U]](() => immutable.ParSet.newCombiner[U])
override def toMap[K, V](implicit ev: T <:< (K, V)): immutable.ParMap[K, V] = toParMap[K, V, immutable.ParMap[K, V]](() => immutable.ParMap.newCombiner[K, V])
-
+
override def toVector: Vector[T] = to[Vector]
override def to[Col[_]](implicit cbf: CanBuildFrom[Nothing, T, Col[T @uncheckedVariance]]): Col[T @uncheckedVariance] = if (cbf().isCombiner) {
toParCollection[T, Col[T]](() => cbf().asCombiner)
} else seq.to(cbf)
-
+
/* tasks */
protected trait StrictSplitterCheckTask[R, Tp] extends Task[R, Tp] {
@@ -903,7 +901,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
protected[this] def newSubtask(p: IterableSplitter[T]): Accessor[R, Tp]
def shouldSplitFurther = pit.shouldSplitFurther(self.repr, tasksupport.parallelismLevel)
def split = pit.splitWithSignalling.map(newSubtask(_)) // default split procedure
- private[parallel] override def signalAbort = pit.abort
+ private[parallel] override def signalAbort = pit.abort()
override def toString = this.getClass.getSimpleName + "(" + pit.toString + ")(" + result + ")(supername: " + super.toString + ")"
}
@@ -920,8 +918,8 @@ self: ParIterableLike[T, Repr, Sequential] =>
def combineResults(fr: FR, sr: SR): R
@volatile var result: R = null.asInstanceOf[R]
private[parallel] override def signalAbort() {
- ft.signalAbort
- st.signalAbort
+ ft.signalAbort()
+ st.signalAbort()
}
protected def mergeSubtasks() {
ft mergeThrowables st
@@ -935,9 +933,9 @@ self: ParIterableLike[T, Repr, Sequential] =>
(f: First, s: Second)
extends Composite[FR, SR, R, First, Second](f, s) {
def leaf(prevr: Option[R]) = {
- tasksupport.executeAndWaitResult(ft)
- tasksupport.executeAndWaitResult(st)
- mergeSubtasks
+ tasksupport.executeAndWaitResult(ft) : Any
+ tasksupport.executeAndWaitResult(st) : Any
+ mergeSubtasks()
}
}
@@ -946,10 +944,10 @@ self: ParIterableLike[T, Repr, Sequential] =>
(f: First, s: Second)
extends Composite[FR, SR, R, First, Second](f, s) {
def leaf(prevr: Option[R]) = {
- val ftfuture = tasksupport.execute(ft)
- tasksupport.executeAndWaitResult(st)
+ val ftfuture: () => Any = tasksupport.execute(ft)
+ tasksupport.executeAndWaitResult(st) : Any
ftfuture()
- mergeSubtasks
+ mergeSubtasks()
}
}
@@ -962,7 +960,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
result = map(initialResult)
}
private[parallel] override def signalAbort() {
- inner.signalAbort
+ inner.signalAbort()
}
override def requiresStrictSplitters = inner.requiresStrictSplitters
}
@@ -1005,10 +1003,10 @@ self: ParIterableLike[T, Repr, Sequential] =>
override def merge(that: Fold[U]) = result = op(result, that.result)
}
- protected[this] class Aggregate[S](z: S, seqop: (S, T) => S, combop: (S, S) => S, protected[this] val pit: IterableSplitter[T])
+ protected[this] class Aggregate[S](z: () => S, seqop: (S, T) => S, combop: (S, S) => S, protected[this] val pit: IterableSplitter[T])
extends Accessor[S, Aggregate[S]] {
@volatile var result: S = null.asInstanceOf[S]
- def leaf(prevr: Option[S]) = result = pit.foldLeft(z)(seqop)
+ def leaf(prevr: Option[S]) = result = pit.foldLeft(z())(seqop)
protected[this] def newSubtask(p: IterableSplitter[T]) = new Aggregate(z, seqop, combop, p)
override def merge(that: Aggregate[S]) = result = combop(result, that.result)
}
@@ -1084,7 +1082,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
protected[this] class Forall(pred: T => Boolean, protected[this] val pit: IterableSplitter[T])
extends Accessor[Boolean, Forall] {
@volatile var result: Boolean = true
- def leaf(prev: Option[Boolean]) = { if (!pit.isAborted) result = pit.forall(pred); if (result == false) pit.abort }
+ def leaf(prev: Option[Boolean]) = { if (!pit.isAborted) result = pit.forall(pred); if (result == false) pit.abort() }
protected[this] def newSubtask(p: IterableSplitter[T]) = new Forall(pred, p)
override def merge(that: Forall) = result = result && that.result
}
@@ -1092,7 +1090,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
protected[this] class Exists(pred: T => Boolean, protected[this] val pit: IterableSplitter[T])
extends Accessor[Boolean, Exists] {
@volatile var result: Boolean = false
- def leaf(prev: Option[Boolean]) = { if (!pit.isAborted) result = pit.exists(pred); if (result == true) pit.abort }
+ def leaf(prev: Option[Boolean]) = { if (!pit.isAborted) result = pit.exists(pred); if (result == true) pit.abort() }
protected[this] def newSubtask(p: IterableSplitter[T]) = new Exists(pred, p)
override def merge(that: Exists) = result = result || that.result
}
@@ -1100,7 +1098,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
protected[this] class Find[U >: T](pred: T => Boolean, protected[this] val pit: IterableSplitter[T])
extends Accessor[Option[U], Find[U]] {
@volatile var result: Option[U] = None
- def leaf(prev: Option[Option[U]]) = { if (!pit.isAborted) result = pit.find(pred); if (result != None) pit.abort }
+ def leaf(prev: Option[Option[U]]) = { if (!pit.isAborted) result = pit.find(pred); if (result != None) pit.abort() }
protected[this] def newSubtask(p: IterableSplitter[T]) = new Find(pred, p)
override def merge(that: Find[U]) = if (this.result == None) result = that.result
}
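Note: `Forall`, `Exists` and `Find` short-circuit across partitions: each leaf checks `pit.isAborted` before doing any work and calls `pit.abort()` on its splitter as soon as the overall answer is decided, and the shared signalling delegate makes that flag visible to sibling tasks. The same idea in miniature, with a plain AtomicBoolean standing in for the library's Signalling delegate (illustration only, not the actual task machinery):

import java.util.concurrent.atomic.AtomicBoolean

object EarlyExit {
  // Simplified parallel exists over pre-split chunks of work.
  def parExists[A](chunks: Seq[Seq[A]])(p: A => Boolean): Boolean = {
    val aborted = new AtomicBoolean(false)
    chunks.par.map { chunk =>
      if (aborted.get) false               // another chunk already found a witness
      else {
        val found = chunk.exists(p)
        if (found) aborted.set(true)       // tell the remaining chunks to stop early
        found
      }
    }.exists(identity)
  }

  def main(args: Array[String]): Unit =
    println(parExists(Seq(1 to 1000, 1001 to 2000).map(_.toSeq))(_ == 1500))  // true
}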
@@ -1152,7 +1150,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
// note: HashMapCombiner doesn't merge same keys until evaluation
val cb = mcf()
while (pit.hasNext) {
- val elem = pit.next
+ val elem = pit.next()
cb += f(elem) -> elem
}
result = cb
@@ -1473,9 +1471,9 @@ self: ParIterableLike[T, Repr, Sequential] =>
/* alias methods */
- def /:[S](z: S)(op: (S, T) => S): S = foldLeft(z)(op);
+ def /:[S](z: S)(op: (S, T) => S): S = foldLeft(z)(op)
- def :\[S](z: S)(op: (T, S) => S): S = foldRight(z)(op);
+ def :\[S](z: S)(op: (T, S) => S): S = foldRight(z)(op)
/* debug information */
@@ -1488,7 +1486,7 @@ self: ParIterableLike[T, Repr, Sequential] =>
def debugBuffer: ArrayBuffer[String] = null
private[parallel] def debugclear() = synchronized {
- debugBuffer.clear
+ debugBuffer.clear()
}
private[parallel] def debuglog(s: String) = synchronized {
@@ -1504,31 +1502,3 @@ self: ParIterableLike[T, Repr, Sequential] =>
})
}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/src/library/scala/collection/parallel/ParIterableView.scala b/src/library/scala/collection/parallel/ParIterableView.scala
index 7644e1bd12..6dce19db19 100644
--- a/src/library/scala/collection/parallel/ParIterableView.scala
+++ b/src/library/scala/collection/parallel/ParIterableView.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection.parallel
+package scala
+package collection.parallel
import scala.collection.{ Parallel, IterableView, GenIterableView, Iterator }
import scala.collection.generic.CanCombineFrom
diff --git a/src/library/scala/collection/parallel/ParIterableViewLike.scala b/src/library/scala/collection/parallel/ParIterableViewLike.scala
index 0ecd6bd9ec..1ec0ff9c32 100644
--- a/src/library/scala/collection/parallel/ParIterableViewLike.scala
+++ b/src/library/scala/collection/parallel/ParIterableViewLike.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection.parallel
+package scala
+package collection.parallel
import scala.collection.Parallel
import scala.collection.{ IterableView, IterableViewLike }
@@ -50,7 +51,8 @@ extends GenIterableView[T, Coll]
self =>
override def foreach[U](f: T => U): Unit = super[ParIterableLike].foreach(f)
- override protected[this] def newCombiner: Combiner[T, This] = throw new UnsupportedOperationException(this + ".newCombiner");
+ override protected[this] def newCombiner: Combiner[T, This] = throw new UnsupportedOperationException(this + ".newCombiner")
+
protected[this] def viewIdentifier: String
protected[this] def viewIdString: String
@@ -130,7 +132,7 @@ self =>
override def zip[U >: T, S, That](that: GenIterable[S])(implicit bf: CanBuildFrom[This, (U, S), That]): That = newZippedTryParSeq(that).asInstanceOf[That]
override def zipWithIndex[U >: T, That](implicit bf: CanBuildFrom[This, (U, Int), That]): That =
- newZipped(ParRange(0, splitter.remaining, 1, false)).asInstanceOf[That]
+ newZipped(ParRange(0, splitter.remaining, 1, inclusive = false)).asInstanceOf[That]
override def zipAll[S, U >: T, That](that: GenIterable[S], thisElem: U, thatElem: S)(implicit bf: CanBuildFrom[This, (U, S), That]): That =
newZippedAllTryParSeq(that, thisElem, thatElem).asInstanceOf[That]
@@ -139,7 +141,7 @@ self =>
} otherwise {
val b = bf(underlying)
b ++= this.iterator
- b.result
+ b.result()
}
/* wrapper virtual ctors */
diff --git a/src/library/scala/collection/parallel/ParMap.scala b/src/library/scala/collection/parallel/ParMap.scala
index 1f27ae830a..9f92e6c1e8 100644
--- a/src/library/scala/collection/parallel/ParMap.scala
+++ b/src/library/scala/collection/parallel/ParMap.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection.parallel
+package scala
+package collection.parallel
import scala.collection.Map
import scala.collection.GenMap
diff --git a/src/library/scala/collection/parallel/ParMapLike.scala b/src/library/scala/collection/parallel/ParMapLike.scala
index 56594bec96..ee14324c41 100644
--- a/src/library/scala/collection/parallel/ParMapLike.scala
+++ b/src/library/scala/collection/parallel/ParMapLike.scala
@@ -7,7 +7,8 @@
\* */
-package scala.collection.parallel
+package scala
+package collection.parallel
@@ -67,7 +68,7 @@ self =>
i =>
val iter = s
def hasNext = iter.hasNext
- def next() = iter.next._1
+ def next() = iter.next()._1
def split = {
val ss = iter.split.map(keysIterator(_))
ss.foreach { _.signalDelegate = i.signalDelegate }
@@ -84,7 +85,7 @@ self =>
i =>
val iter = s
def hasNext = iter.hasNext
- def next() = iter.next._2
+ def next() = iter.next()._2
def split = {
val ss = iter.split.map(valuesIterator(_))
ss.foreach { _.signalDelegate = i.signalDelegate }
diff --git a/src/library/scala/collection/parallel/ParSeq.scala b/src/library/scala/collection/parallel/ParSeq.scala
index b905d1d41f..b4a30e5dc2 100644
--- a/src/library/scala/collection/parallel/ParSeq.scala
+++ b/src/library/scala/collection/parallel/ParSeq.scala
@@ -6,10 +6,8 @@
** |/ **
\* */
-
-package scala.collection.parallel
-
-
+package scala
+package collection.parallel
import scala.collection.generic.GenericCompanion
import scala.collection.generic.GenericParCompanion
@@ -18,9 +16,6 @@ import scala.collection.generic.ParFactory
import scala.collection.generic.CanCombineFrom
import scala.collection.GenSeq
import scala.collection.parallel.mutable.ParArrayCombiner
-import scala.collection.parallel.mutable.ParArray
-
-
/** A template trait for parallel sequences.
*
@@ -56,26 +51,3 @@ object ParSeq extends ParFactory[ParSeq] {
def newCombiner[T]: Combiner[T, ParSeq[T]] = ParArrayCombiner[T]
}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/src/library/scala/collection/parallel/ParSeqLike.scala b/src/library/scala/collection/parallel/ParSeqLike.scala
index 201b624c72..6693e30fcd 100644
--- a/src/library/scala/collection/parallel/ParSeqLike.scala
+++ b/src/library/scala/collection/parallel/ParSeqLike.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection.parallel
+package scala
+package collection.parallel
import scala.collection.{ Parallel, SeqLike, GenSeqLike, GenSeq, GenIterable, Iterator }
import scala.collection.generic.DefaultSignalling
@@ -44,8 +45,8 @@ trait ParSeqLike[+T, +Repr <: ParSeq[T], +Sequential <: Seq[T] with SeqLike[T, S
extends scala.collection.GenSeqLike[T, Repr]
with ParIterableLike[T, Repr, Sequential] {
self =>
-
- type SuperParIterator = IterableSplitter[T]
+
+ protected[this] type SuperParIterator = IterableSplitter[T]
/** A more refined version of the iterator found in the `ParallelIterable` trait,
* this iterator can be split into arbitrary subsets of iterators.
@@ -68,7 +69,7 @@ self =>
val x = self(i)
i += 1
x
- } else Iterator.empty.next
+ } else Iterator.empty.next()
def head = self(i)
@@ -228,7 +229,7 @@ self =>
b ++= pits(0)
b ++= patch
b ++= pits(2)
- setTaskSupport(b.result, tasksupport)
+ setTaskSupport(b.result(), tasksupport)
}
def updated[U >: T, That](index: Int, elem: U)(implicit bf: CanBuildFrom[Repr, U, That]): That = if (bf(repr).isCombiner) {
@@ -252,7 +253,7 @@ self =>
def padTo[U >: T, That](len: Int, elem: U)(implicit bf: CanBuildFrom[Repr, U, That]): That = if (length < len) {
patch(length, new immutable.Repetition(elem, len - length), 0)
- } else patch(length, Nil, 0);
+ } else patch(length, Nil, 0)
override def zip[U >: T, S, That](that: GenIterable[S])(implicit bf: CanBuildFrom[Repr, (U, S), That]): That = if (bf(repr).isCombiner && that.isParSeq) {
val thatseq = that.asParSeq
@@ -260,7 +261,7 @@ self =>
new Zip(length min thatseq.length, combinerFactory(() => bf(repr).asCombiner), splitter, thatseq.splitter) mapResult {
_.resultWithTaskSupport
}
- );
+ )
} else super.zip(that)(bf)
/** Tests whether every element of this $coll relates to the
@@ -423,7 +424,7 @@ self =>
@volatile var result: Boolean = true
def leaf(prev: Option[Boolean]) = if (!pit.isAborted) {
result = pit.sameElements(otherpit)
- if (!result) pit.abort
+ if (!result) pit.abort()
}
protected[this] def newSubtask(p: SuperParIterator) = unsupported
override def split = {
@@ -471,7 +472,7 @@ self =>
@volatile var result: Boolean = true
def leaf(prev: Option[Boolean]) = if (!pit.isAborted) {
result = pit.corresponds(corr)(otherpit)
- if (!result) pit.abort
+ if (!result) pit.abort()
}
protected[this] def newSubtask(p: SuperParIterator) = unsupported
override def split = {
diff --git a/src/library/scala/collection/parallel/ParSeqView.scala b/src/library/scala/collection/parallel/ParSeqView.scala
index 3e3c497352..b80994514b 100644
--- a/src/library/scala/collection/parallel/ParSeqView.scala
+++ b/src/library/scala/collection/parallel/ParSeqView.scala
@@ -6,10 +6,10 @@
** |/ **
\* */
+package scala
+package collection.parallel
-package scala.collection.parallel
-
-import scala.collection.{ TraversableView, SeqView, Parallel, Iterator }
+import scala.collection.{ SeqView, Parallel, Iterator }
import scala.collection.generic.CanCombineFrom
/** A template view of a non-strict view of a parallel sequence.
diff --git a/src/library/scala/collection/parallel/ParSeqViewLike.scala b/src/library/scala/collection/parallel/ParSeqViewLike.scala
index 04369d8fde..9d30a052de 100644
--- a/src/library/scala/collection/parallel/ParSeqViewLike.scala
+++ b/src/library/scala/collection/parallel/ParSeqViewLike.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection.parallel
+package scala
+package collection.parallel
import scala.collection.{ Parallel, SeqView, SeqViewLike, GenSeqView, GenSeqViewLike, GenSeq }
import scala.collection.{ GenIterable, GenTraversable, GenTraversableOnce, Iterator }
@@ -125,8 +126,8 @@ self =>
}
protected def newReversed: Transformed[T] = new Reversed { }
protected def newPatched[U >: T](_from: Int, _patch: GenSeq[U], _replaced: Int): Transformed[U] = new {
- val from = _from;
- val patch = _patch;
+ val from = _from
+ val patch = _patch
val replaced = _replaced
} with Patched[U]
@@ -147,7 +148,7 @@ self =>
override def map[S, That](f: T => S)(implicit bf: CanBuildFrom[This, S, That]): That = newMapped(f).asInstanceOf[That]
override def zip[U >: T, S, That](that: GenIterable[S])(implicit bf: CanBuildFrom[This, (U, S), That]): That = newZippedTryParSeq(that).asInstanceOf[That]
override def zipWithIndex[U >: T, That](implicit bf: CanBuildFrom[This, (U, Int), That]): That =
- newZipped(ParRange(0, splitter.remaining, 1, false)).asInstanceOf[That]
+ newZipped(ParRange(0, splitter.remaining, 1, inclusive = false)).asInstanceOf[That]
override def reverse: This = newReversed.asInstanceOf[This]
override def reverseMap[S, That](f: T => S)(implicit bf: CanBuildFrom[This, S, That]): That = reverse.map(f)
@@ -173,7 +174,7 @@ self =>
} otherwise {
val b = bf(underlying)
b ++= this.iterator
- b.result
+ b.result()
}
/* tasks */
diff --git a/src/library/scala/collection/parallel/ParSet.scala b/src/library/scala/collection/parallel/ParSet.scala
index 6e5e9b4387..ba3d23f0e4 100644
--- a/src/library/scala/collection/parallel/ParSet.scala
+++ b/src/library/scala/collection/parallel/ParSet.scala
@@ -6,25 +6,12 @@
** |/ **
\* */
+package scala
+package collection
+package parallel
-package scala.collection.parallel
-
-
-
-
-
-
-
-import scala.collection.Set
-import scala.collection.GenSet
-import scala.collection.mutable.Builder
import scala.collection.generic._
-
-
-
-
-
/** A template trait for parallel sets.
*
* $sideeffects
@@ -35,12 +22,12 @@ import scala.collection.generic._
* @since 2.9
*/
trait ParSet[T]
-extends GenSet[T]
+ extends GenSet[T]
with GenericParTemplate[T, ParSet]
with ParIterable[T]
with ParSetLike[T, ParSet[T], Set[T]]
-{
-self =>
+{ self =>
+
override def empty: ParSet[T] = mutable.ParHashSet[T]()
//protected[this] override def newCombiner: Combiner[T, ParSet[T]] = ParSet.newCombiner[T]
@@ -50,39 +37,8 @@ self =>
override def stringPrefix = "ParSet"
}
-
-
object ParSet extends ParSetFactory[ParSet] {
def newCombiner[T]: Combiner[T, ParSet[T]] = mutable.ParHashSetCombiner[T]
implicit def canBuildFrom[T]: CanCombineFrom[Coll, T, ParSet[T]] = new GenericCanCombineFrom[T]
}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/src/library/scala/collection/parallel/ParSetLike.scala b/src/library/scala/collection/parallel/ParSetLike.scala
index c80b5ded26..a50d2ae430 100644
--- a/src/library/scala/collection/parallel/ParSetLike.scala
+++ b/src/library/scala/collection/parallel/ParSetLike.scala
@@ -7,7 +7,8 @@
\* */
-package scala.collection.parallel
+package scala
+package collection.parallel
@@ -15,14 +16,6 @@ import scala.collection.SetLike
import scala.collection.GenSetLike
import scala.collection.GenSet
import scala.collection.Set
-import scala.collection.mutable.Builder
-
-
-
-
-
-
-
/** A template trait for parallel sets. This trait is mixed in with concrete
* parallel sets to override the representation type.
diff --git a/src/library/scala/collection/parallel/PreciseSplitter.scala b/src/library/scala/collection/parallel/PreciseSplitter.scala
index 42563f4dc9..2eb202ce05 100644
--- a/src/library/scala/collection/parallel/PreciseSplitter.scala
+++ b/src/library/scala/collection/parallel/PreciseSplitter.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection.parallel
+package scala
+package collection.parallel
import scala.collection.Seq
diff --git a/src/library/scala/collection/parallel/RemainsIterator.scala b/src/library/scala/collection/parallel/RemainsIterator.scala
index 3150b0d763..5f2ceac0e0 100644
--- a/src/library/scala/collection/parallel/RemainsIterator.scala
+++ b/src/library/scala/collection/parallel/RemainsIterator.scala
@@ -6,10 +6,8 @@
** |/ **
\* */
-
-package scala.collection.parallel
-
-
+package scala
+package collection.parallel
import scala.collection.Parallel
import scala.collection.generic.Signalling
@@ -21,8 +19,6 @@ import scala.collection.Iterator.empty
import scala.collection.GenTraversableOnce
import scala.collection.parallel.immutable.repetition
-
-
private[collection] trait RemainsIterator[+T] extends Iterator[T] {
/** The number of elements this iterator has yet to iterate.
* This method doesn't change the state of the iterator.
@@ -35,7 +31,6 @@ private[collection] trait RemainsIterator[+T] extends Iterator[T] {
def isRemainingCheap = true
}
-
/** Augments iterators with additional methods, mostly transformers,
* assuming they iterate an iterable collection.
*
@@ -47,47 +42,47 @@ private[collection] trait AugmentedIterableIterator[+T] extends RemainsIterator[
override def count(p: T => Boolean): Int = {
var i = 0
- while (hasNext) if (p(next)) i += 1
+ while (hasNext) if (p(next())) i += 1
i
}
override def reduce[U >: T](op: (U, U) => U): U = {
- var r: U = next
- while (hasNext) r = op(r, next)
+ var r: U = next()
+ while (hasNext) r = op(r, next())
r
}
override def fold[U >: T](z: U)(op: (U, U) => U): U = {
var r = z
- while (hasNext) r = op(r, next)
+ while (hasNext) r = op(r, next())
r
}
override def sum[U >: T](implicit num: Numeric[U]): U = {
var r: U = num.zero
- while (hasNext) r = num.plus(r, next)
+ while (hasNext) r = num.plus(r, next())
r
}
override def product[U >: T](implicit num: Numeric[U]): U = {
var r: U = num.one
- while (hasNext) r = num.times(r, next)
+ while (hasNext) r = num.times(r, next())
r
}
override def min[U >: T](implicit ord: Ordering[U]): T = {
- var r = next
+ var r = next()
while (hasNext) {
- val curr = next
+ val curr = next()
if (ord.lteq(curr, r)) r = curr
}
r
}
override def max[U >: T](implicit ord: Ordering[U]): T = {
- var r = next
+ var r = next()
while (hasNext) {
- val curr = next
+ val curr = next()
if (ord.gteq(curr, r)) r = curr
}
r
@@ -97,16 +92,16 @@ private[collection] trait AugmentedIterableIterator[+T] extends RemainsIterator[
var i = from
val until = from + len
while (i < until && hasNext) {
- array(i) = next
+ array(i) = next()
i += 1
}
}
def reduceLeft[U >: T](howmany: Int, op: (U, U) => U): U = {
var i = howmany - 1
- var u: U = next
+ var u: U = next()
while (i > 0 && hasNext) {
- u = op(u, next)
+ u = op(u, next())
i -= 1
}
u
@@ -117,15 +112,16 @@ private[collection] trait AugmentedIterableIterator[+T] extends RemainsIterator[
def map2combiner[S, That](f: T => S, cb: Combiner[S, That]): Combiner[S, That] = {
//val cb = pbf(repr)
if (isRemainingCheap) cb.sizeHint(remaining)
- while (hasNext) cb += f(next)
+ while (hasNext) cb += f(next())
cb
}
def collect2combiner[S, That](pf: PartialFunction[T, S], cb: Combiner[S, That]): Combiner[S, That] = {
//val cb = pbf(repr)
+ val runWith = pf.runWith(cb += _)
while (hasNext) {
- val curr = next
- if (pf.isDefinedAt(curr)) cb += pf(curr)
+ val curr = next()
+ runWith(curr)
}
cb
}
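Note: `collect2combiner` now builds the element action once with `pf.runWith(cb += _)` instead of testing `isDefinedAt` and then calling `apply`, so for partial function literals (which compile an efficient `applyOrElse`) each element goes through the pattern match and its guards a single time. A small sketch that makes the difference observable with a counting guard (names are illustrative):

object RunWithDemo {
  def main(args: Array[String]): Unit = {
    var guardEvaluations = 0
    val pf: PartialFunction[Int, Int] = {
      case n if { guardEvaluations += 1; n % 2 == 0 } => n * 10
    }

    val out = List.newBuilder[Int]
    val runWith = pf.runWith(out += _)   // Int => Boolean; true when pf was defined
    (1 to 4).foreach(runWith)

    println(out.result())       // List(20, 40)
    println(guardEvaluations)   // 4: one match per element (isDefinedAt + apply would give 8)
  }
}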
@@ -133,7 +129,7 @@ private[collection] trait AugmentedIterableIterator[+T] extends RemainsIterator[
def flatmap2combiner[S, That](f: T => GenTraversableOnce[S], cb: Combiner[S, That]): Combiner[S, That] = {
//val cb = pbf(repr)
while (hasNext) {
- val traversable = f(next).seq
+ val traversable = f(next()).seq
if (traversable.isInstanceOf[Iterable[_]]) cb ++= traversable.asInstanceOf[Iterable[S]].iterator
else cb ++= traversable
}
@@ -148,7 +144,7 @@ private[collection] trait AugmentedIterableIterator[+T] extends RemainsIterator[
def filter2combiner[U >: T, This](pred: T => Boolean, cb: Combiner[U, This]): Combiner[U, This] = {
while (hasNext) {
- val curr = next
+ val curr = next()
if (pred(curr)) cb += curr
}
cb
@@ -156,7 +152,7 @@ private[collection] trait AugmentedIterableIterator[+T] extends RemainsIterator[
def filterNot2combiner[U >: T, This](pred: T => Boolean, cb: Combiner[U, This]): Combiner[U, This] = {
while (hasNext) {
- val curr = next
+ val curr = next()
if (!pred(curr)) cb += curr
}
cb
@@ -164,7 +160,7 @@ private[collection] trait AugmentedIterableIterator[+T] extends RemainsIterator[
def partition2combiners[U >: T, This](pred: T => Boolean, btrue: Combiner[U, This], bfalse: Combiner[U, This]) = {
while (hasNext) {
- val curr = next
+ val curr = next()
if (pred(curr)) btrue += curr
else bfalse += curr
}
@@ -214,7 +210,7 @@ private[collection] trait AugmentedIterableIterator[+T] extends RemainsIterator[
def takeWhile2combiner[U >: T, This](p: T => Boolean, cb: Combiner[U, This]) = {
var loop = true
while (hasNext && loop) {
- val curr = next
+ val curr = next()
if (p(curr)) cb += curr
else loop = false
}
@@ -224,7 +220,7 @@ private[collection] trait AugmentedIterableIterator[+T] extends RemainsIterator[
def span2combiners[U >: T, This](p: T => Boolean, before: Combiner[U, This], after: Combiner[U, This]) = {
var isBefore = true
while (hasNext && isBefore) {
- val curr = next
+ val curr = next()
if (p(curr)) before += curr
else {
if (isRemainingCheap) after.sizeHint(remaining + 1)
@@ -240,7 +236,7 @@ private[collection] trait AugmentedIterableIterator[+T] extends RemainsIterator[
var last = z
var i = from
while (hasNext) {
- last = op(last, next)
+ last = op(last, next())
array(i) = last
i += 1
}
@@ -249,7 +245,7 @@ private[collection] trait AugmentedIterableIterator[+T] extends RemainsIterator[
def scanToCombiner[U >: T, That](startValue: U, op: (U, U) => U, cb: Combiner[U, That]) = {
var curr = startValue
while (hasNext) {
- curr = op(curr, next)
+ curr = op(curr, next())
cb += curr
}
cb
@@ -259,7 +255,7 @@ private[collection] trait AugmentedIterableIterator[+T] extends RemainsIterator[
var curr = startValue
var left = howmany
while (left > 0) {
- curr = op(curr, next)
+ curr = op(curr, next())
cb += curr
left -= 1
}
@@ -269,16 +265,16 @@ private[collection] trait AugmentedIterableIterator[+T] extends RemainsIterator[
def zip2combiner[U >: T, S, That](otherpit: RemainsIterator[S], cb: Combiner[(U, S), That]): Combiner[(U, S), That] = {
if (isRemainingCheap && otherpit.isRemainingCheap) cb.sizeHint(remaining min otherpit.remaining)
while (hasNext && otherpit.hasNext) {
- cb += ((next, otherpit.next))
+ cb += ((next(), otherpit.next()))
}
cb
}
def zipAll2combiner[U >: T, S, That](that: RemainsIterator[S], thiselem: U, thatelem: S, cb: Combiner[(U, S), That]): Combiner[(U, S), That] = {
if (isRemainingCheap && that.isRemainingCheap) cb.sizeHint(remaining max that.remaining)
- while (this.hasNext && that.hasNext) cb += ((this.next, that.next))
- while (this.hasNext) cb += ((this.next, thatelem))
- while (that.hasNext) cb += ((thiselem, that.next))
+ while (this.hasNext && that.hasNext) cb += ((this.next(), that.next()))
+ while (this.hasNext) cb += ((this.next(), thatelem))
+ while (that.hasNext) cb += ((thiselem, that.next()))
cb
}
@@ -298,7 +294,7 @@ private[collection] trait AugmentedSeqIterator[+T] extends AugmentedIterableIter
var total = 0
var loop = true
while (hasNext && loop) {
- if (pred(next)) total += 1
+ if (pred(next())) total += 1
else loop = false
}
total
@@ -308,7 +304,7 @@ private[collection] trait AugmentedSeqIterator[+T] extends AugmentedIterableIter
var i = 0
var loop = true
while (hasNext && loop) {
- if (pred(next)) loop = false
+ if (pred(next())) loop = false
else i += 1
}
if (loop) -1 else i
@@ -318,7 +314,7 @@ private[collection] trait AugmentedSeqIterator[+T] extends AugmentedIterableIter
var pos = -1
var i = 0
while (hasNext) {
- if (pred(next)) pos = i
+ if (pred(next())) pos = i
i += 1
}
pos
@@ -326,7 +322,7 @@ private[collection] trait AugmentedSeqIterator[+T] extends AugmentedIterableIter
def corresponds[S](corr: (T, S) => Boolean)(that: Iterator[S]): Boolean = {
while (hasNext && that.hasNext) {
- if (!corr(next, that.next)) return false
+ if (!corr(next(), that.next())) return false
}
hasNext == that.hasNext
}
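
The `corresponds` helper above (curried differently, over a splitter) matches the standard sequential method; a usage sketch of that standard method (illustrative only, not part of this patch):

    // true when the predicate holds pairwise and both sequences have the same length
    Seq(1, 2, 3).corresponds(Seq("a", "bb", "ccc"))(_ == _.length)   // true
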
@@ -348,7 +344,7 @@ private[collection] trait AugmentedSeqIterator[+T] extends AugmentedIterableIter
//val cb = cbf(repr)
if (isRemainingCheap) cb.sizeHint(remaining)
var lst = List[S]()
- while (hasNext) lst ::= f(next)
+ while (hasNext) lst ::= f(next())
while (lst != Nil) {
cb += lst.head
lst = lst.tail
@@ -363,7 +359,7 @@ private[collection] trait AugmentedSeqIterator[+T] extends AugmentedIterableIter
while (hasNext) {
if (j == index) {
cb += elem
- next
+ next()
} else cb += next
j += 1
}
@@ -438,7 +434,7 @@ self =>
class Taken(taken: Int) extends IterableSplitter[T] {
var remaining = taken min self.remaining
def hasNext = remaining > 0
- def next = { remaining -= 1; self.next }
+ def next = { remaining -= 1; self.next() }
def dup: IterableSplitter[T] = self.dup.take(taken)
def split: Seq[IterableSplitter[T]] = takeSeq(self.split) { (p, n) => p.take(n) }
protected[this] def takeSeq[PI <: IterableSplitter[T]](sq: Seq[PI])(taker: (PI, Int) => PI) = {
@@ -466,7 +462,7 @@ self =>
class Mapped[S](f: T => S) extends IterableSplitter[S] {
signalDelegate = self.signalDelegate
def hasNext = self.hasNext
- def next = f(self.next)
+ def next = f(self.next())
def remaining = self.remaining
def dup: IterableSplitter[S] = self.dup map f
def split: Seq[IterableSplitter[S]] = self.split.map { _ map f }
@@ -483,8 +479,8 @@ self =>
} else false
def next = if (curr eq self) {
hasNext
- curr.next
- } else curr.next
+ curr.next()
+ } else curr.next()
def remaining = if (curr eq self) curr.remaining + that.remaining else curr.remaining
protected def firstNonEmpty = (curr eq self) && curr.hasNext
def dup: IterableSplitter[U] = self.dup.appendParIterable[U, PI](that)
@@ -496,7 +492,7 @@ self =>
class Zipped[S](protected val that: SeqSplitter[S]) extends IterableSplitter[(T, S)] {
signalDelegate = self.signalDelegate
def hasNext = self.hasNext && that.hasNext
- def next = (self.next, that.next)
+ def next = (self.next(), that.next())
def remaining = self.remaining min that.remaining
def dup: IterableSplitter[(T, S)] = self.dup.zipParSeq(that)
def split: Seq[IterableSplitter[(T, S)]] = {
@@ -514,9 +510,10 @@ self =>
signalDelegate = self.signalDelegate
def hasNext = self.hasNext || that.hasNext
def next = if (self.hasNext) {
- if (that.hasNext) (self.next, that.next)
- else (self.next, thatelem)
- } else (thiselem, that.next);
+ if (that.hasNext) (self.next(), that.next())
+ else (self.next(), thatelem)
+ } else (thiselem, that.next())
+
def remaining = self.remaining max that.remaining
def dup: IterableSplitter[(U, S)] = self.dup.zipAllParSeq(that, thiselem, thatelem)
def split: Seq[IterableSplitter[(U, S)]] = {
@@ -530,10 +527,8 @@ self =>
}
def zipAllParSeq[S, U >: T, R >: S](that: SeqSplitter[S], thisElem: U, thatElem: R) = new ZippedAll[U, R](that, thisElem, thatElem)
-
}
-
/** Parallel sequence iterators allow splitting into arbitrary subsets.
*
* @tparam T type of the elements iterated.
@@ -605,7 +600,7 @@ self =>
} else Seq(sz)
}
val (selfszfrom, thatszfrom) = splitsizes.zip(szcum.init).span(_._2 < selfrem)
- val (selfsizes, thatsizes) = (selfszfrom map { _._1 }, thatszfrom map { _._1 });
+ val (selfsizes, thatsizes) = (selfszfrom map { _._1 }, thatszfrom map { _._1 })
// split iterators
val selfs = self.psplit(selfsizes: _*)
@@ -674,37 +669,3 @@ self =>
def patchParSeq[U >: T](from: Int, patchElems: SeqSplitter[U], replaced: Int) = new Patched(from, patchElems, replaced)
}
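
patchParSeq builds a splitter over the patched view; the sequential `patch` it mirrors behaves as follows (usage sketch, illustrative only, not part of this patch):

    // replace `replaced` elements starting at `from` with the patch sequence
    Seq(1, 2, 3, 4).patch(1, Seq(9, 9), 2)   // List(1, 9, 9, 4)
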
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/src/library/scala/collection/parallel/Splitter.scala b/src/library/scala/collection/parallel/Splitter.scala
index dc49bcf9d7..8329f15d88 100644
--- a/src/library/scala/collection/parallel/Splitter.scala
+++ b/src/library/scala/collection/parallel/Splitter.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection.parallel
+package scala
+package collection.parallel
import scala.collection.{ Seq, Iterator }
@@ -52,7 +53,7 @@ trait Splitter[+T] extends Iterator[T] {
object Splitter {
def empty[T]: Splitter[T] = new Splitter[T] {
def hasNext = false
- def next = Iterator.empty.next
+ def next = Iterator.empty.next()
def split = Seq(this)
}
}
diff --git a/src/library/scala/collection/parallel/TaskSupport.scala b/src/library/scala/collection/parallel/TaskSupport.scala
index 9bed5be51b..84bb5e425b 100644
--- a/src/library/scala/collection/parallel/TaskSupport.scala
+++ b/src/library/scala/collection/parallel/TaskSupport.scala
@@ -7,7 +7,8 @@
\* */
-package scala.collection.parallel
+package scala
+package collection.parallel
@@ -17,28 +18,25 @@ import scala.concurrent.ExecutionContext
-/** A trait implementing the scheduling of
- * a parallel collection operation.
+/** A trait implementing the scheduling of a parallel collection operation.
*
* Parallel collections are modular in the way operations are scheduled. Each
* parallel collection is parametrized with a task support object which is
* responsible for scheduling and load-balancing tasks to processors.
- *
+ *
* A task support object can be changed in a parallel collection after it has
* been created, but only during a quiescent period, i.e. while there are no
* concurrent invocations to parallel collection methods.
*
* There are currently a few task support implementations available for
* parallel collections. The [[scala.collection.parallel.ForkJoinTaskSupport]]
- * uses a fork-join pool
- * internally and is used by default on JVM 1.6 or greater. The less efficient
- * [[scala.collection.parallel.ThreadPoolTaskSupport]] is a fallback for JVM
- * 1.5 and JVMs that do not support the fork join pools. The
- * [[scala.collection.parallel.ExecutionContextTaskSupport]] uses the
+ * uses a fork-join pool internally.
+ *
+ * The [[scala.collection.parallel.ExecutionContextTaskSupport]] uses the
* default execution context implementation found in scala.concurrent, and it
- * reuses the thread pool used in scala.concurrent (this is either a fork join
- * pool or a thread pool executor, depending on the JVM version). The
- * execution context task support is set to each parallel collection by
+ * reuses the thread pool used in scala.concurrent.
+ *
+ * The execution context task support is set to each parallel collection by
* default, so parallel collections reuse the same fork-join pool as the
* future API.
*
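
As the reworked scaladoc above notes, the task support of a parallel collection can be swapped during a quiescent period; a usage sketch with the public API, assuming the 2.10/2.11-era `scala.concurrent.forkjoin.ForkJoinPool` (illustrative only, not part of this patch):

    // run a parallel collection on a dedicated 4-thread fork-join pool
    import scala.collection.parallel.ForkJoinTaskSupport
    import scala.concurrent.forkjoin.ForkJoinPool
    val pc = (1 to 1000).toVector.par
    pc.tasksupport = new ForkJoinTaskSupport(new ForkJoinPool(4))
    pc.map(_ * 2)
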
@@ -68,17 +66,18 @@ extends TaskSupport with AdaptiveWorkStealingForkJoinTasks
*
* @see [[scala.collection.parallel.TaskSupport]] for more information.
*/
+@deprecated("Use `ForkJoinTaskSupport` instead.", "2.11.0")
class ThreadPoolTaskSupport(val environment: ThreadPoolExecutor = ThreadPoolTasks.defaultThreadPool)
extends TaskSupport with AdaptiveWorkStealingThreadPoolTasks
/** A task support that uses an execution context to schedule tasks.
- *
+ *
* It can be used with the default execution context implementation in the
* `scala.concurrent` package. It internally forwards the call to either a
* forkjoin based task support or a thread pool executor one, depending on
* what the execution context uses.
- *
+ *
* By default, parallel collections are parametrized with this task support
* object, so parallel collections share the same execution context backend
* as the rest of the `scala.concurrent` package.
diff --git a/src/library/scala/collection/parallel/Tasks.scala b/src/library/scala/collection/parallel/Tasks.scala
index cec9e294c1..4aa11b25da 100644
--- a/src/library/scala/collection/parallel/Tasks.scala
+++ b/src/library/scala/collection/parallel/Tasks.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection.parallel
+package scala
+package collection.parallel
@@ -54,38 +55,30 @@ trait Task[R, +Tp] {
leaf(lastres)
result = result // ensure that effects of `leaf` are visible to readers of `result`
} catchBreak {
- signalAbort
+ signalAbort()
}
} catch {
case thr: Exception =>
result = result // ensure that effects of `leaf` are visible
throwable = thr
- signalAbort
+ signalAbort()
}
}
private[parallel] def tryMerge(t: Tp @uncheckedVariance) {
val that = t.asInstanceOf[Task[R, Tp]]
- val local = result // ensure that any effects of modifying `result` are detected
- // checkMerge(that)
if (this.throwable == null && that.throwable == null) merge(t)
mergeThrowables(that)
}
- private def checkMerge(that: Task[R, Tp] @uncheckedVariance) {
- if (this.throwable == null && that.throwable == null && (this.result == null || that.result == null)) {
- println("This: " + this + ", thr=" + this.throwable + "; merged with " + that + ", thr=" + that.throwable)
- } else if (this.throwable != null || that.throwable != null) {
- println("merging this: " + this + " with thr: " + this.throwable + " with " + that + ", thr=" + that.throwable)
- }
- }
-
private[parallel] def mergeThrowables(that: Task[_, _]) {
- if (this.throwable != null && that.throwable != null) {
- // merge exceptions, since there were multiple exceptions
- this.throwable = this.throwable alongWith that.throwable
- } else if (that.throwable != null) this.throwable = that.throwable
- else this.throwable = this.throwable
+ // TODO: As soon as we target Java >= 7, use Throwable#addSuppressed
+ // to pass additional Throwables to the caller, e. g.
+ // if (this.throwable != null && that.throwable != null)
+ // this.throwable.addSuppressed(that.throwable)
+ // For now, we just use whatever Throwable comes across “first”.
+ if (this.throwable == null && that.throwable != null)
+ this.throwable = that.throwable
}
// override in concrete task implementations to signal abort to other tasks
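
A minimal sketch of how the TODO above could be resolved once Java 7 is the minimum target, assuming only that `Throwable#addSuppressed` is available (not part of this patch):

    // hypothetical replacement for mergeThrowables: keep the first failure,
    // attach any further failure as a suppressed exception
    private[parallel] def mergeThrowables(that: Task[_, _]) {
      if (this.throwable == null) this.throwable = that.throwable
      else if (that.throwable != null) this.throwable.addSuppressed(that.throwable)
    }
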
@@ -176,7 +169,6 @@ trait AdaptiveWorkStealingTasks extends Tasks {
while (last.next != null) {
// val lastresult = Option(last.body.result)
- val beforelast = last
last = last.next
if (last.tryCancel()) {
// println("Done with " + beforelast.body + ", next direct is " + last.body)
@@ -202,7 +194,7 @@ trait AdaptiveWorkStealingTasks extends Tasks {
last = t
t.start()
}
- } while (head.body.shouldSplitFurther);
+ } while (head.body.shouldSplitFurther)
head.next = last
head
}
@@ -225,6 +217,7 @@ trait AdaptiveWorkStealingTasks extends Tasks {
/** An implementation of tasks objects based on the Java thread pooling API. */
+@deprecated("Use `ForkJoinTasks` instead.", "2.11.0")
trait ThreadPoolTasks extends Tasks {
import java.util.concurrent._
@@ -313,7 +306,7 @@ trait ThreadPoolTasks extends Tasks {
() => {
t.sync()
- t.body.forwardThrowable
+ t.body.forwardThrowable()
t.body.result
}
}
@@ -325,7 +318,7 @@ trait ThreadPoolTasks extends Tasks {
t.start()
t.sync()
- t.body.forwardThrowable
+ t.body.forwardThrowable()
t.body.result
}
@@ -333,6 +326,7 @@ trait ThreadPoolTasks extends Tasks {
}
+@deprecated("Use `ForkJoinTasks` instead.", "2.11.0")
object ThreadPoolTasks {
import java.util.concurrent._
@@ -357,60 +351,6 @@ object ThreadPoolTasks {
)
}
-
-/** An implementation of tasks objects based on the Java thread pooling API and synchronization using futures. */
-@deprecated("This implementation is not used.", "2.10.0")
-trait FutureThreadPoolTasks extends Tasks {
- import java.util.concurrent._
-
- trait WrappedTask[R, +Tp] extends Runnable with super.WrappedTask[R, Tp] {
- @volatile var future: Future[_] = null
-
- def start() = {
- executor.synchronized {
- future = executor.submit(this)
- }
- }
- def sync() = future.get
- def tryCancel = false
- def run = {
- compute()
- }
- }
-
- protected def newWrappedTask[R, Tp](b: Task[R, Tp]): WrappedTask[R, Tp]
-
- val environment: AnyRef = FutureThreadPoolTasks.defaultThreadPool
- def executor = environment.asInstanceOf[ThreadPoolExecutor]
-
- def execute[R, Tp](task: Task[R, Tp]): () => R = {
- val t = newWrappedTask(task)
-
- // debuglog("-----------> Executing without wait: " + task)
- t.start
-
- () => {
- t.sync
- t.body.forwardThrowable
- t.body.result
- }
- }
-
- def executeAndWaitResult[R, Tp](task: Task[R, Tp]): R = {
- val t = newWrappedTask(task)
-
- // debuglog("-----------> Executing with wait: " + task)
- t.start
-
- t.sync
- t.body.forwardThrowable
- t.body.result
- }
-
- def parallelismLevel = FutureThreadPoolTasks.numCores
-
-}
-
object FutureThreadPoolTasks {
import java.util.concurrent._
@@ -467,8 +407,8 @@ trait ForkJoinTasks extends Tasks with HavingForkJoinPool {
}
() => {
- fjtask.sync
- fjtask.body.forwardThrowable
+ fjtask.sync()
+ fjtask.body.forwardThrowable()
fjtask.body.result
}
}
@@ -489,9 +429,9 @@ trait ForkJoinTasks extends Tasks with HavingForkJoinPool {
forkJoinPool.execute(fjtask)
}
- fjtask.sync
+ fjtask.sync()
// if (fjtask.body.throwable != null) println("throwing: " + fjtask.body.throwable + " at " + fjtask.body)
- fjtask.body.forwardThrowable
+ fjtask.body.forwardThrowable()
fjtask.body.result
}
@@ -520,7 +460,7 @@ trait AdaptiveWorkStealingForkJoinTasks extends ForkJoinTasks with AdaptiveWorkS
}
-
+@deprecated("Use `AdaptiveWorkStealingForkJoinTasks` instead.", "2.11.0")
trait AdaptiveWorkStealingThreadPoolTasks extends ThreadPoolTasks with AdaptiveWorkStealingTasks {
class WrappedTask[R, Tp](val body: Task[R, Tp])
diff --git a/src/library/scala/collection/parallel/immutable/ParHashMap.scala b/src/library/scala/collection/parallel/immutable/ParHashMap.scala
index b25230bbeb..854d0ba918 100644
--- a/src/library/scala/collection/parallel/immutable/ParHashMap.scala
+++ b/src/library/scala/collection/parallel/immutable/ParHashMap.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection.parallel.immutable
+package scala
+package collection.parallel.immutable
@@ -109,7 +110,7 @@ self =>
}
def next(): (K, V) = {
i += 1
- val r = triter.next
+ val r = triter.next()
r
}
def hasNext: Boolean = {
diff --git a/src/library/scala/collection/parallel/immutable/ParHashSet.scala b/src/library/scala/collection/parallel/immutable/ParHashSet.scala
index e7e64eb2ad..65a632470e 100644
--- a/src/library/scala/collection/parallel/immutable/ParHashSet.scala
+++ b/src/library/scala/collection/parallel/immutable/ParHashSet.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection.parallel.immutable
+package scala
+package collection.parallel.immutable
@@ -106,7 +107,7 @@ self =>
}
def next(): T = {
i += 1
- triter.next
+ triter.next()
}
def hasNext: Boolean = {
i < sz
diff --git a/src/library/scala/collection/parallel/immutable/ParIterable.scala b/src/library/scala/collection/parallel/immutable/ParIterable.scala
index 142f07ff26..417622facc 100644
--- a/src/library/scala/collection/parallel/immutable/ParIterable.scala
+++ b/src/library/scala/collection/parallel/immutable/ParIterable.scala
@@ -6,17 +6,13 @@
** |/ **
\* */
-
-package scala.collection
+package scala
+package collection
package parallel.immutable
-
import scala.collection.generic._
-
import scala.collection.parallel.ParIterableLike
import scala.collection.parallel.Combiner
-import scala.collection.GenIterable
-
/** A template trait for immutable parallel iterable collections.
*
@@ -30,22 +26,18 @@ import scala.collection.GenIterable
* @since 2.9
*/
trait ParIterable[+T]
-extends scala.collection/*.immutable*/.GenIterable[T]
+extends scala.collection.GenIterable[T]
with scala.collection.parallel.ParIterable[T]
with GenericParTemplate[T, ParIterable]
with ParIterableLike[T, ParIterable[T], scala.collection.immutable.Iterable[T]]
with Immutable
{
override def companion: GenericCompanion[ParIterable] with GenericParCompanion[ParIterable] = ParIterable
-
// if `immutable.ParIterableLike` is introduced, please move these 4 methods there
override def toIterable: ParIterable[T] = this
-
override def toSeq: ParSeq[T] = toParCollection[T, ParSeq[T]](() => ParSeq.newCombiner[T])
-
}
-
/** $factoryInfo
*/
object ParIterable extends ParFactory[ParIterable] {
@@ -53,21 +45,5 @@ object ParIterable extends ParFactory[ParIterable] {
new GenericCanCombineFrom[T]
def newBuilder[T]: Combiner[T, ParIterable[T]] = ParVector.newBuilder[T]
-
def newCombiner[T]: Combiner[T, ParIterable[T]] = ParVector.newCombiner[T]
-
}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/src/library/scala/collection/parallel/immutable/ParMap.scala b/src/library/scala/collection/parallel/immutable/ParMap.scala
index e904a7616b..4d99006d0f 100644
--- a/src/library/scala/collection/parallel/immutable/ParMap.scala
+++ b/src/library/scala/collection/parallel/immutable/ParMap.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
package parallel.immutable
import scala.collection.generic.ParMapFactory
diff --git a/src/library/scala/collection/parallel/immutable/ParNumericRange.scala.disabled b/src/library/scala/collection/parallel/immutable/ParNumericRange.scala.disabled
deleted file mode 100644
index 5f9c9c3d3d..0000000000
--- a/src/library/scala/collection/parallel/immutable/ParNumericRange.scala.disabled
+++ /dev/null
@@ -1,128 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-package scala.collection.parallel.immutable
-
-
-
-import scala.collection.immutable.NumericRange
-import scala.collection.parallel.Combiner
-import scala.collection.generic.CanCombineFrom
-import scala.collection.parallel.ParIterableIterator
-
-
-
-/** Parallel ranges for numeric types.
- *
- * $paralleliterableinfo
- *
- * $sideeffects
- *
- * @param range the sequential range this parallel range was obtained from
- *
- * @author Aleksandar Prokopec
- * @since 2.9
- *
- * @define Coll `immutable.ParRange`
- * @define coll immutable parallel range
- */
-@SerialVersionUID(1L)
-class ParNumericRange[T](val range: NumericRange[T])(implicit num: Integral[T])
-extends ParSeq[T]
- with Serializable
-{
-self =>
-
- def seq = range
-
- @inline final def length = range.length
-
- @inline final def apply(idx: Int) = range.apply(idx);
-
- def parallelIterator = new ParNumericRangeIterator with SCPI
-
- type SCPI = SignalContextPassingIterator[ParNumericRangeIterator]
-
- class ParNumericRangeIterator(range: NumericRange[T] = self.range, num: Integral[T] = self.num)
- extends ParIterator {
- me: SignalContextPassingIterator[ParNumericRangeIterator] =>
- override def toString = "ParNumericRangeIterator(over: " + range + ")"
- private var ind = 0
- private val len = range.length
-
- final def remaining = len - ind
-
- final def hasNext = ind < len
-
- final def next = if (hasNext) {
- val r = range.apply(ind)
- ind += 1
- r
- } else Iterator.empty.next
-
- private def rangeleft: NumericRange[T] = range.drop(ind)
-
- def dup = new ParNumericRangeIterator(rangeleft) with SCPI
-
- def split = {
- val rleft = rangeleft
- val elemleft = rleft.length
- if (elemleft < 2) Seq(new ParNumericRangeIterator(rleft) with SCPI)
- else Seq(
- new ParNumericRangeIterator(rleft.take(elemleft / 2)) with SCPI,
- new ParNumericRangeIterator(rleft.drop(elemleft / 2)) with SCPI
- )
- }
-
- def psplit(sizes: Int*) = {
- var rleft = rangeleft
- for (sz <- sizes) yield {
- val fronttaken = rleft.take(sz)
- rleft = rleft.drop(sz)
- new ParNumericRangeIterator(fronttaken) with SCPI
- }
- }
-
- /* accessors */
-
- override def foreach[U](f: T => U): Unit = {
- rangeleft.foreach(f)
- ind = len
- }
-
- override def reduce[U >: T](op: (U, U) => U): U = {
- val r = rangeleft.reduceLeft(op)
- ind = len
- r
- }
-
- /* transformers */
-
- override def map2combiner[S, That](f: T => S, cb: Combiner[S, That]): Combiner[S, That] = {
- while (hasNext) {
- cb += f(next)
- }
- cb
- }
- }
-
-}
-
-
-object ParNumericRange {
- def apply[T](start: T, end: T, step: T, inclusive: Boolean)(implicit num: Integral[T]) = new ParNumericRange[T](
- if (inclusive) NumericRange.inclusive(start, end, step)(num)
- else NumericRange.apply(start, end, step)(num)
- )
-}
-
-
-
-
-
diff --git a/src/library/scala/collection/parallel/immutable/ParRange.scala b/src/library/scala/collection/parallel/immutable/ParRange.scala
index 0c9f82ba2a..ec90de3a7d 100644
--- a/src/library/scala/collection/parallel/immutable/ParRange.scala
+++ b/src/library/scala/collection/parallel/immutable/ParRange.scala
@@ -6,13 +6,13 @@
** |/ **
\* */
-package scala.collection.parallel.immutable
+package scala
+package collection.parallel.immutable
import scala.collection.immutable.Range
import scala.collection.parallel.Combiner
import scala.collection.parallel.SeqSplitter
import scala.collection.generic.CanCombineFrom
-import scala.collection.parallel.IterableSplitter
import scala.collection.Iterator
/** Parallel ranges.
@@ -42,7 +42,7 @@ self =>
@inline final def length = range.length
- @inline final def apply(idx: Int) = range.apply(idx);
+ @inline final def apply(idx: Int) = range.apply(idx)
def splitter = new ParRangeIterator
@@ -60,7 +60,7 @@ self =>
val r = range.apply(ind)
ind += 1
r
- } else Iterator.empty.next
+ } else Iterator.empty.next()
private def rangeleft = range.drop(ind)
@@ -107,7 +107,7 @@ self =>
cb
}
}
-
+
}
object ParRange {
diff --git a/src/library/scala/collection/parallel/immutable/ParSeq.scala b/src/library/scala/collection/parallel/immutable/ParSeq.scala
index aa19307387..6e98b3102d 100644
--- a/src/library/scala/collection/parallel/immutable/ParSeq.scala
+++ b/src/library/scala/collection/parallel/immutable/ParSeq.scala
@@ -7,7 +7,8 @@
\* */
-package scala.collection
+package scala
+package collection
package parallel.immutable
@@ -18,9 +19,6 @@ import scala.collection.generic.CanCombineFrom
import scala.collection.generic.ParFactory
import scala.collection.parallel.ParSeqLike
import scala.collection.parallel.Combiner
-import scala.collection.GenSeq
-
-
/** An immutable variant of `ParSeq`.
*
diff --git a/src/library/scala/collection/parallel/immutable/ParSet.scala b/src/library/scala/collection/parallel/immutable/ParSet.scala
index 3622377a55..7837d6f264 100644
--- a/src/library/scala/collection/parallel/immutable/ParSet.scala
+++ b/src/library/scala/collection/parallel/immutable/ParSet.scala
@@ -6,10 +6,10 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
package parallel.immutable
-import scala.collection.GenSet
import scala.collection.generic._
import scala.collection.parallel.ParSetLike
import scala.collection.parallel.Combiner
diff --git a/src/library/scala/collection/parallel/immutable/ParVector.scala b/src/library/scala/collection/parallel/immutable/ParVector.scala
index 1ee7f4ae69..548e7112c7 100644
--- a/src/library/scala/collection/parallel/immutable/ParVector.scala
+++ b/src/library/scala/collection/parallel/immutable/ParVector.scala
@@ -7,7 +7,8 @@
\* */
-package scala.collection
+package scala
+package collection
package parallel.immutable
diff --git a/src/library/scala/collection/parallel/immutable/package.scala b/src/library/scala/collection/parallel/immutable/package.scala
index 5ca0724ffc..8fd84eaf4d 100644
--- a/src/library/scala/collection/parallel/immutable/package.scala
+++ b/src/library/scala/collection/parallel/immutable/package.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection.parallel
+package scala
+package collection.parallel
package immutable {
/** A (parallel) sequence consisting of `length` elements `elem`. Used in the `padTo` method.
diff --git a/src/library/scala/collection/parallel/mutable/LazyCombiner.scala b/src/library/scala/collection/parallel/mutable/LazyCombiner.scala
index 12b2bc5008..cc25b5b4b2 100644
--- a/src/library/scala/collection/parallel/mutable/LazyCombiner.scala
+++ b/src/library/scala/collection/parallel/mutable/LazyCombiner.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection.parallel.mutable
+package scala
+package collection.parallel.mutable
import scala.collection.generic.Growable
import scala.collection.generic.Sizing
diff --git a/src/library/scala/collection/parallel/mutable/ParArray.scala b/src/library/scala/collection/parallel/mutable/ParArray.scala
index 0a4f30131f..d0d022db4b 100644
--- a/src/library/scala/collection/parallel/mutable/ParArray.scala
+++ b/src/library/scala/collection/parallel/mutable/ParArray.scala
@@ -29,8 +29,6 @@ import scala.collection.mutable.Builder
import scala.collection.GenTraversableOnce
import scala.reflect.ClassTag
-
-
/** Parallel sequence holding elements in a linear array.
*
* `ParArray` is a parallel sequence with a predefined size. The size of the array
@@ -181,10 +179,10 @@ self =>
override def fold[U >: T](z: U)(op: (U, U) => U): U = foldLeft[U](z)(op)
- override def aggregate[S](z: S)(seqop: (S, T) => S, combop: (S, S) => S): S = foldLeft[S](z)(seqop)
+ override def aggregate[S](z: =>S)(seqop: (S, T) => S, combop: (S, S) => S): S = foldLeft[S](z)(seqop)
override def sum[U >: T](implicit num: Numeric[U]): U = {
- var s = sum_quick(num, arr, until, i, num.zero)
+ val s = sum_quick(num, arr, until, i, num.zero)
i = until
s
}
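
The `z: =>S` change above makes the zero of `aggregate` by-name, presumably so that a mutable zero is evaluated per partition rather than shared; call sites are unchanged (usage sketch, illustrative only, not part of this patch):

    // per-partition seqop, then combop across partition results
    Vector("a", "bb", "ccc").par.aggregate(0)(_ + _.length, _ + _)   // 6
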
@@ -200,7 +198,7 @@ self =>
}
override def product[U >: T](implicit num: Numeric[U]): U = {
- var p = product_quick(num, arr, until, i, num.one)
+ val p = product_quick(num, arr, until, i, num.one)
i = until
p
}
@@ -226,7 +224,7 @@ self =>
if (all) i = nextuntil
else {
i = until
- abort
+ abort()
}
if (isAborted) return false
@@ -241,7 +239,7 @@ self =>
if (p(a(j).asInstanceOf[T])) j += 1
else return false
}
- return true
+ true
}
override def exists(p: T => Boolean): Boolean = {
@@ -254,7 +252,7 @@ self =>
some = exists_quick(p, array, nextuntil, i)
if (some) {
i = until
- abort
+ abort()
} else i = nextuntil
if (isAborted) return true
@@ -269,7 +267,7 @@ self =>
if (p(a(j).asInstanceOf[T])) return true
else j += 1
}
- return false
+ false
}
override def find(p: T => Boolean): Option[T] = {
@@ -283,7 +281,7 @@ self =>
if (r != None) {
i = until
- abort
+ abort()
} else i = nextuntil
if (isAborted) return r
@@ -298,7 +296,7 @@ self =>
if (p(elem)) return Some(elem)
else j += 1
}
- return None
+ None
}
override def drop(n: Int): ParArrayIterator = {
@@ -405,9 +403,10 @@ self =>
private def collect2combiner_quick[S, That](pf: PartialFunction[T, S], a: Array[Any], cb: Builder[S, That], ntil: Int, from: Int) {
var j = from
+ val runWith = pf.runWith(b => cb += b)
while (j < ntil) {
val curr = a(j).asInstanceOf[T]
- if (pf.isDefinedAt(curr)) cb += pf(curr)
+ runWith(curr)
j += 1
}
}
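
The rewrite above uses `PartialFunction#runWith` so the pattern match runs once per element instead of the isDefinedAt/apply pair; a small sketch of the same idiom (illustrative only, not part of this patch):

    // runWith fuses the definedness check with the side effect
    val pf: PartialFunction[Int, Int] = { case n if n % 2 == 0 => n * n }
    val buf = scala.collection.mutable.ArrayBuffer[Int]()
    val addIfDefined = pf.runWith(buf += _)   // Int => Boolean: true if pf applied
    Seq(1, 2, 3, 4).foreach(addIfDefined)     // buf == ArrayBuffer(4, 16)
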
@@ -432,7 +431,7 @@ self =>
private def filter2combiner_quick[U >: T, This](pred: T => Boolean, cb: Builder[U, This], a: Array[Any], ntil: Int, from: Int) {
var j = i
while(j < ntil) {
- var curr = a(j).asInstanceOf[T]
+ val curr = a(j).asInstanceOf[T]
if (pred(curr)) cb += curr
j += 1
}
@@ -447,7 +446,7 @@ self =>
private def filterNot2combiner_quick[U >: T, This](pred: T => Boolean, cb: Builder[U, This], a: Array[Any], ntil: Int, from: Int) {
var j = i
while(j < ntil) {
- var curr = a(j).asInstanceOf[T]
+ val curr = a(j).asInstanceOf[T]
if (!pred(curr)) cb += curr
j += 1
}
@@ -576,8 +575,6 @@ self =>
/* operations */
- private def asTask[R, Tp](t: Any) = t.asInstanceOf[Task[R, Tp]]
-
private def buildsArray[S, That](c: Builder[S, That]) = c.isInstanceOf[ParArrayCombiner[_]]
override def map[S, That](f: T => S)(implicit bf: CanBuildFrom[ParArray[T], S, That]) = if (buildsArray(bf(repr))) {
@@ -612,7 +609,8 @@ self =>
class ScanToArray[U >: T](tree: ScanTree[U], z: U, op: (U, U) => U, targetarr: Array[Any])
extends Task[Unit, ScanToArray[U]] {
- var result = ();
+ var result = ()
+
def leaf(prev: Option[Unit]) = iterate(tree)
private def iterate(tree: ScanTree[U]): Unit = tree match {
case ScanNode(left, right) =>
@@ -648,7 +646,8 @@ self =>
}
class Map[S](f: T => S, targetarr: Array[Any], offset: Int, howmany: Int) extends Task[Unit, Map[S]] {
- var result = ();
+ var result = ()
+
def leaf(prev: Option[Unit]) = {
val tarr = targetarr
val sarr = array
@@ -701,7 +700,7 @@ object ParArray extends ParFactory[ParArray] {
private def wrapOrRebuild[T](arr: AnyRef, sz: Int) = arr match {
case arr: Array[AnyRef] => new ParArray[T](new ExposedArraySeq[T](arr, sz))
- case _ => new ParArray[T](new ExposedArraySeq[T](runtime.ScalaRunTime.toObjectArray(arr), sz))
+ case _ => new ParArray[T](new ExposedArraySeq[T](scala.runtime.ScalaRunTime.toObjectArray(arr), sz))
}
def createFromCopy[T <: AnyRef : ClassTag](arr: Array[T]): ParArray[T] = {
@@ -719,27 +718,3 @@ object ParArray extends ParFactory[ParArray] {
}
}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/src/library/scala/collection/parallel/mutable/ParFlatHashTable.scala b/src/library/scala/collection/parallel/mutable/ParFlatHashTable.scala
index 8bc108a738..afc2d6e987 100644
--- a/src/library/scala/collection/parallel/mutable/ParFlatHashTable.scala
+++ b/src/library/scala/collection/parallel/mutable/ParFlatHashTable.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
package parallel.mutable
import scala.collection.parallel.IterableSplitter
@@ -38,10 +39,6 @@ trait ParFlatHashTable[T] extends scala.collection.mutable.FlatHashTable[T] {
}
}
- private def checkbounds() = if (idx >= itertable.length) {
- throw new IndexOutOfBoundsException(idx.toString)
- }
-
def newIterator(index: Int, until: Int, totalsize: Int): IterableSplitter[T]
def remaining = totalsize - traversed
@@ -52,7 +49,7 @@ trait ParFlatHashTable[T] extends scala.collection.mutable.FlatHashTable[T] {
idx += 1
if (hasNext) scan()
r
- } else Iterator.empty.next
+ } else Iterator.empty.next()
def dup = newIterator(idx, until, totalsize)
def split = if (remaining > 1) {
val divpt = (until + idx) / 2
@@ -102,11 +99,5 @@ trait ParFlatHashTable[T] extends scala.collection.mutable.FlatHashTable[T] {
}
count
}
-
- private def check() = if (table.slice(idx, until).count(_ != null) != remaining) {
- println("Invariant broken: " + debugInformation)
- assert(false)
- }
}
-
}
diff --git a/src/library/scala/collection/parallel/mutable/ParHashMap.scala b/src/library/scala/collection/parallel/mutable/ParHashMap.scala
index 11588e555b..42a3302c91 100644
--- a/src/library/scala/collection/parallel/mutable/ParHashMap.scala
+++ b/src/library/scala/collection/parallel/mutable/ParHashMap.scala
@@ -7,7 +7,8 @@
\* */
-package scala.collection.parallel
+package scala
+package collection.parallel
package mutable
@@ -97,7 +98,8 @@ self =>
class ParHashMapIterator(start: Int, untilIdx: Int, totalSize: Int, e: DefaultEntry[K, V])
extends EntryIterator[(K, V), ParHashMapIterator](start, untilIdx, totalSize, e) {
- def entry2item(entry: DefaultEntry[K, V]) = (entry.key, entry.value);
+ def entry2item(entry: DefaultEntry[K, V]) = (entry.key, entry.value)
+
def newIterator(idxFrom: Int, idxUntil: Int, totalSz: Int, es: DefaultEntry[K, V]) =
new ParHashMapIterator(idxFrom, idxUntil, totalSz, es)
}
@@ -166,9 +168,8 @@ private[mutable] abstract class ParHashMapCombiner[K, V](private val tableLoadFa
extends scala.collection.parallel.BucketCombiner[(K, V), ParHashMap[K, V], DefaultEntry[K, V], ParHashMapCombiner[K, V]](ParHashMapCombiner.numblocks)
with scala.collection.mutable.HashTable.HashUtils[K]
{
- private var mask = ParHashMapCombiner.discriminantmask
- private var nonmasklen = ParHashMapCombiner.nonmasklength
- private var seedvalue = 27
+ private val nonmasklen = ParHashMapCombiner.nonmasklength
+ private val seedvalue = 27
def +=(elem: (K, V)) = {
sz += 1
@@ -232,8 +233,7 @@ extends scala.collection.parallel.BucketCombiner[(K, V), ParHashMap[K, V], Defau
def setSize(sz: Int) = tableSize = sz
def insertEntry(/*block: Int, */e: DefaultEntry[K, V]) = {
var h = index(elemHashCode(e.key))
- // assertCorrectBlock(h, block)
- var olde = table(h).asInstanceOf[DefaultEntry[K, V]]
+ val olde = table(h).asInstanceOf[DefaultEntry[K, V]]
// check if key already exists
var ce = olde
@@ -252,13 +252,6 @@ extends scala.collection.parallel.BucketCombiner[(K, V), ParHashMap[K, V], Defau
true
} else false
}
- private def assertCorrectBlock(h: Int, block: Int) {
- val blocksize = table.length / (1 << ParHashMapCombiner.discriminantbits)
- if (!(h >= block * blocksize && h < (block + 1) * blocksize)) {
- println("trying to put " + h + " into block no.: " + block + ", range: [" + block * blocksize + ", " + (block + 1) * blocksize + ">")
- assert(h >= block * blocksize && h < (block + 1) * blocksize)
- }
- }
protected def createNewEntry[X](key: K, x: X) = ???
}
@@ -288,7 +281,6 @@ extends scala.collection.parallel.BucketCombiner[(K, V), ParHashMap[K, V], Defau
val chunksz = unrolled.size
while (i < chunksz) {
val elem = chunkarr(i)
- // assertCorrectBlock(block, elem.key)
if (t.insertEntry(elem)) insertcount += 1
i += 1
}
@@ -297,13 +289,6 @@ extends scala.collection.parallel.BucketCombiner[(K, V), ParHashMap[K, V], Defau
}
insertcount
}
- private def assertCorrectBlock(block: Int, k: K) {
- val hc = improve(elemHashCode(k), seedvalue)
- if ((hc >>> nonmasklen) != block) {
- println(hc + " goes to " + (hc >>> nonmasklen) + ", while expected block is " + block)
- assert((hc >>> nonmasklen) == block)
- }
- }
def split = {
val fp = howmany / 2
List(new FillBlocks(buckets, table, offset, fp), new FillBlocks(buckets, table, offset + fp, howmany - fp))
@@ -320,7 +305,7 @@ extends scala.collection.parallel.BucketCombiner[(K, V), ParHashMap[K, V], Defau
private[parallel] object ParHashMapCombiner {
private[mutable] val discriminantbits = 5
private[mutable] val numblocks = 1 << discriminantbits
- private[mutable] val discriminantmask = ((1 << discriminantbits) - 1);
+ private[mutable] val discriminantmask = ((1 << discriminantbits) - 1)
private[mutable] val nonmasklength = 32 - discriminantbits
def apply[K, V] = new ParHashMapCombiner[K, V](HashTable.defaultLoadFactor) {} // was: with EnvironmentPassingCombiner[(K, V), ParHashMap[K, V]]
diff --git a/src/library/scala/collection/parallel/mutable/ParHashSet.scala b/src/library/scala/collection/parallel/mutable/ParHashSet.scala
index 57fab57348..1e3d57e0e5 100644
--- a/src/library/scala/collection/parallel/mutable/ParHashSet.scala
+++ b/src/library/scala/collection/parallel/mutable/ParHashSet.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection.parallel.mutable
+package scala
+package collection.parallel.mutable
@@ -60,18 +61,18 @@ extends ParSet[T]
override def seq = new scala.collection.mutable.HashSet(hashTableContents)
def +=(elem: T) = {
- addEntry(elem)
+ addElem(elem)
this
}
def -=(elem: T) = {
- removeEntry(elem)
+ removeElem(elem)
this
}
override def stringPrefix = "ParHashSet"
- def contains(elem: T) = containsEntry(elem)
+ def contains(elem: T) = containsElem(elem)
def splitter = new ParHashSetIterator(0, table.length, size)
@@ -117,23 +118,23 @@ object ParHashSet extends ParSetFactory[ParHashSet] {
private[mutable] abstract class ParHashSetCombiner[T](private val tableLoadFactor: Int)
-extends scala.collection.parallel.BucketCombiner[T, ParHashSet[T], Any, ParHashSetCombiner[T]](ParHashSetCombiner.numblocks)
+extends scala.collection.parallel.BucketCombiner[T, ParHashSet[T], AnyRef, ParHashSetCombiner[T]](ParHashSetCombiner.numblocks)
with scala.collection.mutable.FlatHashTable.HashUtils[T] {
//self: EnvironmentPassingCombiner[T, ParHashSet[T]] =>
- private var mask = ParHashSetCombiner.discriminantmask
- private var nonmasklen = ParHashSetCombiner.nonmasklength
- private var seedvalue = 27
+ private val nonmasklen = ParHashSetCombiner.nonmasklength
+ private val seedvalue = 27
def +=(elem: T) = {
+ val entry = elemToEntry(elem)
sz += 1
- val hc = improve(elemHashCode(elem), seedvalue)
+ val hc = improve(entry.hashCode, seedvalue)
val pos = hc >>> nonmasklen
if (buckets(pos) eq null) {
// initialize bucket
- buckets(pos) = new UnrolledBuffer[Any]
+ buckets(pos) = new UnrolledBuffer[AnyRef]
}
// add to bucket
- buckets(pos) += elem
+ buckets(pos) += entry
this
}
@@ -147,7 +148,7 @@ with scala.collection.mutable.FlatHashTable.HashUtils[T] {
val table = new AddingFlatHashTable(size, tableLoadFactor, seedvalue)
val (inserted, leftovers) = combinerTaskSupport.executeAndWaitResult(new FillBlocks(buckets, table, 0, buckets.length))
var leftinserts = 0
- for (elem <- leftovers) leftinserts += table.insertEntry(0, table.tableLength, elem.asInstanceOf[T])
+ for (entry <- leftovers) leftinserts += table.insertEntry(0, table.tableLength, entry)
table.setSize(leftinserts + inserted)
table.hashTableContents
}
@@ -159,10 +160,10 @@ with scala.collection.mutable.FlatHashTable.HashUtils[T] {
sizeMapInit(table.length)
seedvalue = ParHashSetCombiner.this.seedvalue
for {
- buffer <- buckets;
- if buffer ne null;
- elem <- buffer
- } addEntry(elem.asInstanceOf[T])
+ buffer <- buckets
+ if buffer ne null
+ entry <- buffer
+ } addEntry(entry)
}
tbl.hashTableContents
}
@@ -189,12 +190,12 @@ with scala.collection.mutable.FlatHashTable.HashUtils[T] {
def setSize(sz: Int) = tableSize = sz
/**
* The elements are added using the `insertEntry` method. This method accepts three
* arguments:
*
* @param insertAt where to add the element (set to -1 to use its hashcode)
* @param comesBefore the position before which the element should be added to
- * @param elem the element to be added
+ * @param newEntry the element to be added
*
* If the element is to be inserted at the position corresponding to its hash code,
* the table will try to add the element in such a position if possible. Collisions are resolved
@@ -206,17 +207,17 @@ with scala.collection.mutable.FlatHashTable.HashUtils[T] {
* If the element is already present in the hash table, it is not added, and this method
* returns 0. If the element is added, it returns 1.
*/
- def insertEntry(insertAt: Int, comesBefore: Int, elem: T): Int = {
+ def insertEntry(insertAt: Int, comesBefore: Int, newEntry : AnyRef): Int = {
var h = insertAt
- if (h == -1) h = index(elemHashCode(elem))
- var entry = table(h)
- while (null != entry) {
- if (entry == elem) return 0
+ if (h == -1) h = index(newEntry.hashCode)
+ var curEntry = table(h)
+ while (null != curEntry) {
+ if (curEntry == newEntry) return 0
h = h + 1 // we *do not* do `(h + 1) % table.length` here, because we'll never overflow!!
if (h >= comesBefore) return -1
- entry = table(h)
+ curEntry = table(h)
}
- table(h) = elem.asInstanceOf[AnyRef]
+ table(h) = newEntry
// this is incorrect since we set size afterwards anyway and a counter
// like this would not even work:
@@ -233,13 +234,14 @@ with scala.collection.mutable.FlatHashTable.HashUtils[T] {
/* tasks */
- class FillBlocks(buckets: Array[UnrolledBuffer[Any]], table: AddingFlatHashTable, val offset: Int, val howmany: Int)
- extends Task[(Int, UnrolledBuffer[Any]), FillBlocks] {
- var result = (Int.MinValue, new UnrolledBuffer[Any]);
- def leaf(prev: Option[(Int, UnrolledBuffer[Any])]) {
+ class FillBlocks(buckets: Array[UnrolledBuffer[AnyRef]], table: AddingFlatHashTable, val offset: Int, val howmany: Int)
+ extends Task[(Int, UnrolledBuffer[AnyRef]), FillBlocks] {
+ var result = (Int.MinValue, new UnrolledBuffer[AnyRef])
+
+ def leaf(prev: Option[(Int, UnrolledBuffer[AnyRef])]) {
var i = offset
var totalinserts = 0
- var leftover = new UnrolledBuffer[Any]()
+ var leftover = new UnrolledBuffer[AnyRef]()
while (i < (offset + howmany)) {
val (inserted, intonextblock) = fillBlock(i, buckets(i), leftover)
totalinserts += inserted
@@ -251,11 +253,11 @@ with scala.collection.mutable.FlatHashTable.HashUtils[T] {
private val blocksize = table.tableLength >> ParHashSetCombiner.discriminantbits
private def blockStart(block: Int) = block * blocksize
private def nextBlockStart(block: Int) = (block + 1) * blocksize
- private def fillBlock(block: Int, elems: UnrolledBuffer[Any], leftovers: UnrolledBuffer[Any]): (Int, UnrolledBuffer[Any]) = {
+ private def fillBlock(block: Int, elems: UnrolledBuffer[AnyRef], leftovers: UnrolledBuffer[AnyRef]): (Int, UnrolledBuffer[AnyRef]) = {
val beforePos = nextBlockStart(block)
// store the elems
- val (elemsIn, elemsLeft) = if (elems != null) insertAll(-1, beforePos, elems) else (0, UnrolledBuffer[Any]())
+ val (elemsIn, elemsLeft) = if (elems != null) insertAll(-1, beforePos, elems) else (0, UnrolledBuffer[AnyRef]())
// store the leftovers
val (leftoversIn, leftoversLeft) = insertAll(blockStart(block), beforePos, leftovers)
@@ -263,21 +265,21 @@ with scala.collection.mutable.FlatHashTable.HashUtils[T] {
// return the no. of stored elements tupled with leftovers
(elemsIn + leftoversIn, elemsLeft concat leftoversLeft)
}
- private def insertAll(atPos: Int, beforePos: Int, elems: UnrolledBuffer[Any]): (Int, UnrolledBuffer[Any]) = {
- var leftovers = new UnrolledBuffer[Any]
+ private def insertAll(atPos: Int, beforePos: Int, elems: UnrolledBuffer[AnyRef]): (Int, UnrolledBuffer[AnyRef]) = {
+ val leftovers = new UnrolledBuffer[AnyRef]
var inserted = 0
var unrolled = elems.headPtr
var i = 0
- var t = table
+ val t = table
while (unrolled ne null) {
val chunkarr = unrolled.array
val chunksz = unrolled.size
while (i < chunksz) {
- val elem = chunkarr(i)
- val res = t.insertEntry(atPos, beforePos, elem.asInstanceOf[T])
+ val entry = chunkarr(i)
+ val res = t.insertEntry(atPos, beforePos, entry)
if (res >= 0) inserted += res
- else leftovers += elem
+ else leftovers += entry
i += 1
}
i = 0
@@ -319,7 +321,7 @@ with scala.collection.mutable.FlatHashTable.HashUtils[T] {
private[parallel] object ParHashSetCombiner {
private[mutable] val discriminantbits = 5
private[mutable] val numblocks = 1 << discriminantbits
- private[mutable] val discriminantmask = ((1 << discriminantbits) - 1);
+ private[mutable] val discriminantmask = ((1 << discriminantbits) - 1)
private[mutable] val nonmasklength = 32 - discriminantbits
def apply[T] = new ParHashSetCombiner[T](FlatHashTable.defaultLoadFactor) {} //with EnvironmentPassingCombiner[T, ParHashSet[T]]
diff --git a/src/library/scala/collection/parallel/mutable/ParHashTable.scala b/src/library/scala/collection/parallel/mutable/ParHashTable.scala
index 66ddef6a1e..a6fada3d42 100644
--- a/src/library/scala/collection/parallel/mutable/ParHashTable.scala
+++ b/src/library/scala/collection/parallel/mutable/ParHashTable.scala
@@ -7,7 +7,8 @@
\* */
-package scala.collection
+package scala
+package collection
package parallel.mutable
@@ -110,7 +111,7 @@ trait ParHashTable[K, Entry >: Null <: HashEntry[K, Entry]] extends scala.collec
} else Seq(this.asInstanceOf[IterRepr])
private def convertToArrayBuffer(chainhead: Entry): mutable.ArrayBuffer[T] = {
- var buff = mutable.ArrayBuffer[Entry]()
+ val buff = mutable.ArrayBuffer[Entry]()
var curr = chainhead
while (curr ne null) {
buff += curr
diff --git a/src/library/scala/collection/parallel/mutable/ParIterable.scala b/src/library/scala/collection/parallel/mutable/ParIterable.scala
index 7090c510a0..4659149106 100644
--- a/src/library/scala/collection/parallel/mutable/ParIterable.scala
+++ b/src/library/scala/collection/parallel/mutable/ParIterable.scala
@@ -6,14 +6,12 @@
** |/ **
\* */
-package scala.collection.parallel.mutable
-
+package scala
+package collection
+package parallel.mutable
import scala.collection.generic._
-import scala.collection.parallel.ParIterableLike
-import scala.collection.parallel.Combiner
-import scala.collection.GenIterable
-
+import scala.collection.parallel.{ ParIterableLike, Combiner }
/** A template trait for mutable parallel iterable collections.
*
@@ -26,7 +24,7 @@ import scala.collection.GenIterable
* @author Aleksandar Prokopec
* @since 2.9
*/
-trait ParIterable[T] extends scala.collection/*.mutable*/.GenIterable[T]
+trait ParIterable[T] extends scala.collection.GenIterable[T]
with scala.collection.parallel.ParIterable[T]
with GenericParTemplate[T, ParIterable]
with ParIterableLike[T, ParIterable[T], Iterable[T]]
@@ -45,24 +43,8 @@ trait ParIterable[T] extends scala.collection/*.mutable*/.GenIterable[T]
/** $factoryInfo
*/
object ParIterable extends ParFactory[ParIterable] {
- implicit def canBuildFrom[T]: CanCombineFrom[Coll, T, ParIterable[T]] =
- new GenericCanCombineFrom[T]
+ implicit def canBuildFrom[T]: CanCombineFrom[Coll, T, ParIterable[T]] = new GenericCanCombineFrom[T]
def newBuilder[T]: Combiner[T, ParIterable[T]] = ParArrayCombiner[T]
-
def newCombiner[T]: Combiner[T, ParIterable[T]] = ParArrayCombiner[T]
}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/src/library/scala/collection/parallel/mutable/ParMap.scala b/src/library/scala/collection/parallel/mutable/ParMap.scala
index 2250a38466..e43e72e559 100644
--- a/src/library/scala/collection/parallel/mutable/ParMap.scala
+++ b/src/library/scala/collection/parallel/mutable/ParMap.scala
@@ -6,17 +6,13 @@
** |/ **
\* */
-
-package scala.collection.parallel.mutable
-
-
-
+package scala
+package collection
+package parallel.mutable
import scala.collection.generic._
import scala.collection.parallel.Combiner
-
-
/** A template trait for mutable parallel maps.
*
* $sideeffects
@@ -28,11 +24,11 @@ import scala.collection.parallel.Combiner
* @since 2.9
*/
trait ParMap[K, V]
-extends scala.collection/*.mutable*/.GenMap[K, V]
- with scala.collection.parallel.ParMap[K, V]
- with /* mutable */ ParIterable[(K, V)]
+extends GenMap[K, V]
+ with parallel.ParMap[K, V]
+ with ParIterable[(K, V)]
with GenericParMapTemplate[K, V, ParMap]
- with /* mutable */ ParMapLike[K, V, ParMap[K, V], scala.collection.mutable.Map[K, V]]
+ with ParMapLike[K, V, ParMap[K, V], mutable.Map[K, V]]
{
protected[this] override def newCombiner: Combiner[(K, V), ParMap[K, V]] = ParMap.newCombiner[K, V]
@@ -63,11 +59,8 @@ extends scala.collection/*.mutable*/.GenMap[K, V]
* @return a wrapper of the map with a default value
*/
def withDefaultValue(d: V): scala.collection.parallel.mutable.ParMap[K, V] = new ParMap.WithDefault[K, V](this, x => d)
-
}
-
-
object ParMap extends ParMapFactory[ParMap] {
def empty[K, V]: ParMap[K, V] = new ParHashMap[K, V]
@@ -94,22 +87,3 @@ object ParMap extends ParMapFactory[ParMap] {
override def withDefaultValue(d: V): ParMap[K, V] = new WithDefault[K, V](underlying, x => d)
}
}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/src/library/scala/collection/parallel/mutable/ParMapLike.scala b/src/library/scala/collection/parallel/mutable/ParMapLike.scala
index cdcfc59f8f..d96b5482fe 100644
--- a/src/library/scala/collection/parallel/mutable/ParMapLike.scala
+++ b/src/library/scala/collection/parallel/mutable/ParMapLike.scala
@@ -6,19 +6,17 @@
** |/ **
\* */
-package scala.collection.parallel
+package scala
+package collection.parallel
package mutable
import scala.collection.generic._
-import scala.collection.mutable.Builder
import scala.collection.mutable.Cloneable
import scala.collection.generic.Growable
import scala.collection.generic.Shrinkable
-
-
/** A template trait for mutable parallel maps. This trait is to be mixed in
* with concrete parallel maps to override the representation type.
*
diff --git a/src/library/scala/collection/parallel/mutable/ParSeq.scala b/src/library/scala/collection/parallel/mutable/ParSeq.scala
index 95a4d4a13a..35be2669f8 100644
--- a/src/library/scala/collection/parallel/mutable/ParSeq.scala
+++ b/src/library/scala/collection/parallel/mutable/ParSeq.scala
@@ -6,9 +6,8 @@
** |/ **
\* */
-
-package scala.collection.parallel.mutable
-
+package scala
+package collection.parallel.mutable
import scala.collection.generic.GenericParTemplate
import scala.collection.generic.GenericCompanion
@@ -17,12 +16,6 @@ import scala.collection.generic.CanCombineFrom
import scala.collection.generic.ParFactory
import scala.collection.parallel.ParSeqLike
import scala.collection.parallel.Combiner
-import scala.collection.GenSeq
-
-
-
-
-
/** A mutable variant of `ParSeq`.
*
@@ -57,18 +50,3 @@ object ParSeq extends ParFactory[ParSeq] {
def newCombiner[T]: Combiner[T, ParSeq[T]] = ParArrayCombiner[T]
}
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/src/library/scala/collection/parallel/mutable/ParSet.scala b/src/library/scala/collection/parallel/mutable/ParSet.scala
index d8f821746c..9367f1424d 100644
--- a/src/library/scala/collection/parallel/mutable/ParSet.scala
+++ b/src/library/scala/collection/parallel/mutable/ParSet.scala
@@ -6,18 +6,11 @@
** |/ **
\* */
-
-package scala.collection.parallel.mutable
-
-
+package scala
+package collection.parallel.mutable
import scala.collection.generic._
import scala.collection.parallel.Combiner
-import scala.collection.GenSet
-
-
-
-
/** A mutable variant of `ParSet`.
*
@@ -51,4 +44,3 @@ object ParSet extends ParSetFactory[ParSet] {
override def newCombiner[T]: Combiner[T, ParSet[T]] = ParHashSet.newCombiner
}
-
diff --git a/src/library/scala/collection/parallel/mutable/ParSetLike.scala b/src/library/scala/collection/parallel/mutable/ParSetLike.scala
index 609888f1a9..1cfc14b094 100644
--- a/src/library/scala/collection/parallel/mutable/ParSetLike.scala
+++ b/src/library/scala/collection/parallel/mutable/ParSetLike.scala
@@ -7,20 +7,15 @@
\* */
-package scala.collection
+package scala
+package collection
package parallel.mutable
-
-
-import scala.collection.mutable.Set
-import scala.collection.mutable.Builder
import scala.collection.mutable.Cloneable
import scala.collection.GenSetLike
import scala.collection.generic.Growable
import scala.collection.generic.Shrinkable
-
-
/** A template trait for mutable parallel sets. This trait is mixed in with concrete
* parallel sets to override the representation type.
*
diff --git a/src/library/scala/collection/parallel/mutable/ParTrieMap.scala b/src/library/scala/collection/parallel/mutable/ParTrieMap.scala
index 61a50a124d..82f2717132 100644
--- a/src/library/scala/collection/parallel/mutable/ParTrieMap.scala
+++ b/src/library/scala/collection/parallel/mutable/ParTrieMap.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection.parallel.mutable
+package scala
+package collection.parallel.mutable
@@ -136,7 +137,7 @@ extends TrieMapIterator[K, V](lev, ct, mustInit)
}
def dup = {
- val it = newIterator(0, ct, false)
+ val it = newIterator(0, ct, _mustInit = false)
dupTo(it)
it.iterated = this.iterated
it
diff --git a/src/library/scala/collection/parallel/mutable/ResizableParArrayCombiner.scala b/src/library/scala/collection/parallel/mutable/ResizableParArrayCombiner.scala
index dc31d1bc25..79322c85b1 100644
--- a/src/library/scala/collection/parallel/mutable/ResizableParArrayCombiner.scala
+++ b/src/library/scala/collection/parallel/mutable/ResizableParArrayCombiner.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection.parallel.mutable
+package scala
+package collection.parallel.mutable
@@ -26,7 +27,7 @@ trait ResizableParArrayCombiner[T] extends LazyCombiner[T, ParArray[T], ExposedA
override def sizeHint(sz: Int) = if (chain.length == 1) chain(0).sizeHint(sz)
// public method with private[mutable] type ExposedArrayBuffer in parameter type; cannot be overridden.
- def newLazyCombiner(c: ArrayBuffer[ExposedArrayBuffer[T]]) = ResizableParArrayCombiner(c)
+ final def newLazyCombiner(c: ArrayBuffer[ExposedArrayBuffer[T]]) = ResizableParArrayCombiner(c)
def allocateAndCopy = if (chain.size > 1) {
val arrayseq = new ArraySeq[T](size)
diff --git a/src/library/scala/collection/parallel/mutable/UnrolledParArrayCombiner.scala b/src/library/scala/collection/parallel/mutable/UnrolledParArrayCombiner.scala
index c3a379485d..6b6ea03a6d 100644
--- a/src/library/scala/collection/parallel/mutable/UnrolledParArrayCombiner.scala
+++ b/src/library/scala/collection/parallel/mutable/UnrolledParArrayCombiner.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection.parallel.mutable
+package scala
+package collection.parallel.mutable
import scala.collection.generic.Sizing
import scala.collection.mutable.ArraySeq
@@ -47,7 +48,7 @@ extends Combiner[T, ParArray[T]] {
}
def clear() {
- buff.clear
+ buff.clear()
}
override def sizeHint(sz: Int) = {
@@ -69,7 +70,8 @@ extends Combiner[T, ParArray[T]] {
class CopyUnrolledToArray(array: Array[Any], offset: Int, howmany: Int)
extends Task[Unit, CopyUnrolledToArray] {
- var result = ();
+ var result = ()
+
def leaf(prev: Option[Unit]) = if (howmany > 0) {
var totalleft = howmany
val (startnode, startpos) = findStart(offset)
diff --git a/src/library/scala/collection/parallel/mutable/package.scala b/src/library/scala/collection/parallel/mutable/package.scala
index 2494d0907e..81121d9398 100644
--- a/src/library/scala/collection/parallel/mutable/package.scala
+++ b/src/library/scala/collection/parallel/mutable/package.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection.parallel
+package scala
+package collection.parallel
import scala.collection.mutable.ArrayBuffer
import scala.collection.mutable.ArraySeq
diff --git a/src/library/scala/collection/parallel/package.scala b/src/library/scala/collection/parallel/package.scala
index 988886b4ea..b25553d2c8 100644
--- a/src/library/scala/collection/parallel/package.scala
+++ b/src/library/scala/collection/parallel/package.scala
@@ -41,9 +41,7 @@ package object parallel {
private[parallel] def outofbounds(idx: Int) = throw new IndexOutOfBoundsException(idx.toString)
- private[parallel] def getTaskSupport: TaskSupport =
- if (scala.util.Properties.isJavaAtLeast("1.6")) new ForkJoinTaskSupport
- else new ThreadPoolTaskSupport
+ private[parallel] def getTaskSupport: TaskSupport = new ForkJoinTaskSupport
val defaultTaskSupport: TaskSupport = getTaskSupport
@@ -116,7 +114,9 @@ package parallel {
def toParArray: ParArray[T]
}
+ @deprecated("This trait will be removed.", "2.11.0")
trait ThrowableOps {
+ @deprecated("This method will be removed.", "2.11.0")
def alongWith(that: Throwable): Throwable
}
@@ -135,9 +135,8 @@ package parallel {
}
/** Composite throwable - thrown when multiple exceptions are thrown at the same time. */
- final case class CompositeThrowable(
- val throwables: Set[Throwable]
- ) extends Exception(
+ @deprecated("This class will be removed.", "2.11.0")
+ final case class CompositeThrowable(throwables: Set[Throwable]) extends Exception(
"Multiple exceptions thrown during a parallel computation: " +
throwables.map(t => t + "\n" + t.getStackTrace.take(10).++("...").mkString("\n")).mkString("\n\n")
)
diff --git a/src/library/scala/collection/script/Location.scala b/src/library/scala/collection/script/Location.scala
index cd64fa2d73..e485737972 100644
--- a/src/library/scala/collection/script/Location.scala
+++ b/src/library/scala/collection/script/Location.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
package script
/** Class `Location` describes locations in messages implemented by
diff --git a/src/library/scala/collection/script/Message.scala b/src/library/scala/collection/script/Message.scala
index 2ab7ea726a..dc3e74e170 100644
--- a/src/library/scala/collection/script/Message.scala
+++ b/src/library/scala/collection/script/Message.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
package script
import mutable.ArrayBuffer
@@ -69,7 +70,7 @@ class Script[A] extends ArrayBuffer[Message[A]] with Message[A] {
override def toString(): String = {
var res = "Script("
- var it = this.iterator
+ val it = this.iterator
var i = 1
while (it.hasNext) {
if (i > 1)
diff --git a/src/library/scala/collection/script/Scriptable.scala b/src/library/scala/collection/script/Scriptable.scala
index ceaf19a464..110a0f4d82 100644
--- a/src/library/scala/collection/script/Scriptable.scala
+++ b/src/library/scala/collection/script/Scriptable.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.collection
+package scala
+package collection
package script
/** Classes that mix in the `Scriptable` class allow messages to be sent to
diff --git a/src/library/scala/compat/Platform.scala b/src/library/scala/compat/Platform.scala
index 88cb1506ae..875d811b9b 100644
--- a/src/library/scala/compat/Platform.scala
+++ b/src/library/scala/compat/Platform.scala
@@ -6,9 +6,8 @@
** |/ **
\* */
-
-
-package scala.compat
+package scala
+package compat
import java.lang.System
diff --git a/src/library/scala/concurrent/BatchingExecutor.scala b/src/library/scala/concurrent/BatchingExecutor.scala
new file mode 100644
index 0000000000..a0d7aaea47
--- /dev/null
+++ b/src/library/scala/concurrent/BatchingExecutor.scala
@@ -0,0 +1,117 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.concurrent
+
+import java.util.concurrent.Executor
+import scala.annotation.tailrec
+
+/**
+ * Mixin trait for an Executor
+ * which groups multiple nested `Runnable.run()` calls
+ * into a single Runnable passed to the original
+ * Executor. This can be a useful optimization
+ * because it bypasses the original context's task
+ * queue and keeps related (nested) code on a single
+ * thread which may improve CPU affinity. However,
+ * if tasks passed to the Executor are blocking
+ * or expensive, this optimization can prevent work-stealing
+ * and make performance worse. Also, some ExecutionContext
+ * may be fast enough natively that this optimization just
+ * adds overhead.
+ * The default ExecutionContext.global is already batching
+ * or fast enough not to benefit from it; while
+ * `fromExecutor` and `fromExecutorService` do NOT add
+ * this optimization since they don't know whether the underlying
+ * executor will benefit from it.
+ * A batching executor can create deadlocks if code does
+ * not use `scala.concurrent.blocking` when it should,
+ * because tasks created within other tasks will block
+ * on the outer task completing.
+ * This executor may run tasks in any order, including LIFO order.
+ * There are no ordering guarantees.
+ *
+ * WARNING: The underlying Executor's execute-method must not execute the submitted Runnable
+ * in the calling thread synchronously. It must enqueue/handoff the Runnable.
+ */
+private[concurrent] trait BatchingExecutor extends Executor {
+
+ // invariant: if "_tasksLocal.get ne null" then we are inside BatchingRunnable.run; if it is null, we are outside
+ private val _tasksLocal = new ThreadLocal[List[Runnable]]()
+
+ private class Batch(val initial: List[Runnable]) extends Runnable with BlockContext {
+ private var parentBlockContext: BlockContext = _
+ // this method runs in the delegate ExecutionContext's thread
+ override def run(): Unit = {
+ require(_tasksLocal.get eq null)
+
+ val prevBlockContext = BlockContext.current
+ BlockContext.withBlockContext(this) {
+ try {
+ parentBlockContext = prevBlockContext
+
+ @tailrec def processBatch(batch: List[Runnable]): Unit = batch match {
+ case Nil => ()
+ case head :: tail =>
+ _tasksLocal set tail
+ try {
+ head.run()
+ } catch {
+ case t: Throwable =>
+ // if one task throws, move the
+ // remaining tasks to another thread
+ // so we can throw the exception
+ // up to the invoking executor
+ val remaining = _tasksLocal.get
+ _tasksLocal set Nil
+ unbatchedExecute(new Batch(remaining)) //TODO what if this submission fails?
+ throw t // rethrow
+ }
+ processBatch(_tasksLocal.get) // since head.run() can add entries, always do _tasksLocal.get here
+ }
+
+ processBatch(initial)
+ } finally {
+ _tasksLocal.remove()
+ parentBlockContext = null
+ }
+ }
+ }
+
+ override def blockOn[T](thunk: => T)(implicit permission: CanAwait): T = {
+ // if we know there will be blocking, we don't want to keep tasks queued up because it could deadlock.
+ {
+ val tasks = _tasksLocal.get
+ _tasksLocal set Nil
+ if ((tasks ne null) && tasks.nonEmpty)
+ unbatchedExecute(new Batch(tasks))
+ }
+
+ // now delegate the blocking to the previous BC
+ require(parentBlockContext ne null)
+ parentBlockContext.blockOn(thunk)
+ }
+ }
+
+ protected def unbatchedExecute(r: Runnable): Unit
+
+ override def execute(runnable: Runnable): Unit = {
+ if (batchable(runnable)) { // If we can batch the runnable
+ _tasksLocal.get match {
+ case null => unbatchedExecute(new Batch(List(runnable))) // If we aren't in batching mode yet, enqueue batch
+ case some => _tasksLocal.set(runnable :: some) // If we are already in batching mode, add to batch
+ }
+ } else unbatchedExecute(runnable) // If not batchable, just delegate to underlying
+ }
+
+ /** Override this to define which runnables will be batched. */
+ def batchable(runnable: Runnable): Boolean = runnable match {
+ case _: OnCompleteRunnable => true
+ case _ => false
+ }
+}
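The batching idea documented above can be sketched as a standalone wrapper; NaiveBatchingExecutor is a made-up name and this illustrates only the ThreadLocal batching technique, not the private[concurrent] trait itself (BlockContext integration is omitted):
  import java.util.concurrent.Executor
  class NaiveBatchingExecutor(underlying: Executor) extends Executor {
    private val tasks = new ThreadLocal[List[Runnable]]()   // null = not batching on this thread
    def execute(runnable: Runnable): Unit = tasks.get match {
      case null =>
        // Not batching yet: submit one Runnable that drains every nested submission.
        underlying.execute(new Runnable {
          def run(): Unit = {
            tasks.set(List(runnable))
            try {
              while (tasks.get.nonEmpty) {
                val head = tasks.get.head
                tasks.set(tasks.get.tail)
                head.run()                  // may call execute() again, growing the batch
              }
            } finally tasks.remove()
          }
        })
      case queued =>
        tasks.set(runnable :: queued)       // already inside a batch: just queue locally
    }
  }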
diff --git a/src/library/scala/concurrent/ExecutionContext.scala b/src/library/scala/concurrent/ExecutionContext.scala
index 8928724b34..68513f9c80 100644
--- a/src/library/scala/concurrent/ExecutionContext.scala
+++ b/src/library/scala/concurrent/ExecutionContext.scala
@@ -25,7 +25,7 @@ trait ExecutionContext {
/** Reports that an asynchronous computation failed.
*/
- def reportFailure(t: Throwable): Unit
+ def reportFailure(@deprecatedName('t) cause: Throwable): Unit
/** Prepares for the execution of a task. Returns the prepared
* execution context. A valid implementation of `prepare` is one
@@ -83,7 +83,7 @@ object ExecutionContext {
/** The default reporter simply prints the stack trace of the `Throwable` to System.err.
*/
- def defaultReporter: Throwable => Unit = (t: Throwable) => t.printStackTrace()
+ def defaultReporter: Throwable => Unit = _.printStackTrace()
}
diff --git a/src/library/scala/concurrent/Future.scala b/src/library/scala/concurrent/Future.scala
index 6b6ad29074..2e4c72cd71 100644
--- a/src/library/scala/concurrent/Future.scala
+++ b/src/library/scala/concurrent/Future.scala
@@ -71,7 +71,7 @@ import scala.reflect.ClassTag
* val g = future { 3 }
* val h = for {
* x: Int <- f // returns Future(5)
- * y: Int <- g // returns Future(5)
+ * y: Int <- g // returns Future(3)
* } yield x + y
* }}}
*
@@ -133,8 +133,8 @@ trait Future[+T] extends Awaitable[T] {
* $multipleCallbacks
* $callbackInContext
*/
- def onFailure[U](callback: PartialFunction[Throwable, U])(implicit executor: ExecutionContext): Unit = onComplete {
- case Failure(t) if NonFatal(t) && callback.isDefinedAt(t) => callback(t)
+ def onFailure[U](@deprecatedName('callback) pf: PartialFunction[Throwable, U])(implicit executor: ExecutionContext): Unit = onComplete {
+ case Failure(t) if NonFatal(t) && pf.isDefinedAt(t) => pf(t)
case _ =>
}(executor)
@@ -147,7 +147,7 @@ trait Future[+T] extends Awaitable[T] {
* $multipleCallbacks
* $callbackInContext
*/
- def onComplete[U](func: Try[T] => U)(implicit executor: ExecutionContext): Unit
+ def onComplete[U](@deprecatedName('func) f: Try[T] => U)(implicit executor: ExecutionContext): Unit
/* Miscellaneous */
@@ -303,21 +303,21 @@ trait Future[+T] extends Awaitable[T] {
* Await.result(h, Duration.Zero) // throw a NoSuchElementException
* }}}
*/
- def filter(pred: T => Boolean)(implicit executor: ExecutionContext): Future[T] = {
- val p = Promise[T]()
+ def filter(@deprecatedName('pred) p: T => Boolean)(implicit executor: ExecutionContext): Future[T] = {
+ val promise = Promise[T]()
onComplete {
- case f: Failure[_] => p complete f.asInstanceOf[Failure[T]]
+ case f: Failure[_] => promise complete f.asInstanceOf[Failure[T]]
case Success(v) =>
try {
- if (pred(v)) p success v
- else p failure new NoSuchElementException("Future.filter predicate is not satisfied")
+ if (p(v)) promise success v
+ else promise failure new NoSuchElementException("Future.filter predicate is not satisfied")
} catch {
- case NonFatal(t) => p failure t
+ case NonFatal(t) => promise failure t
}
}(executor)
- p.future
+ promise.future
}
/** Used by for-comprehensions.
@@ -565,18 +565,18 @@ object Future {
*
* @tparam T the type of the result
* @param body the asynchronous computation
- * @param execctx the execution context on which the future is run
+ * @param executor the execution context on which the future is run
* @return the `Future` holding the result of the computation
*/
- def apply[T](body: =>T)(implicit execctx: ExecutionContext): Future[T] = impl.Future(body)
+ def apply[T](body: =>T)(implicit @deprecatedName('execctx) executor: ExecutionContext): Future[T] = impl.Future(body)
/** Simple version of `Futures.traverse`. Transforms a `TraversableOnce[Future[A]]` into a `Future[TraversableOnce[A]]`.
* Useful for reducing many `Future`s into a single `Future`.
*/
def sequence[A, M[_] <: TraversableOnce[_]](in: M[Future[A]])(implicit cbf: CanBuildFrom[M[Future[A]], A, M[A]], executor: ExecutionContext): Future[M[A]] = {
- in.foldLeft(Promise.successful(cbf(in)).future) {
+ in.foldLeft(successful(cbf(in))) {
(fr, fa) => for (r <- fr; a <- fa.asInstanceOf[Future[A]]) yield (r += a)
- } map (_.result)
+ } map (_.result())
}
/** Returns a `Future` to the result of the first future in the list that is completed.
@@ -592,15 +592,15 @@ object Future {
/** Returns a `Future` that will hold the optional result of the first `Future` with a result that matches the predicate.
*/
- def find[T](futurestravonce: TraversableOnce[Future[T]])(predicate: T => Boolean)(implicit executor: ExecutionContext): Future[Option[T]] = {
- val futures = futurestravonce.toBuffer
- if (futures.isEmpty) Promise.successful[Option[T]](None).future
+ def find[T](@deprecatedName('futurestravonce) futures: TraversableOnce[Future[T]])(@deprecatedName('predicate) p: T => Boolean)(implicit executor: ExecutionContext): Future[Option[T]] = {
+ val futuresBuffer = futures.toBuffer
+ if (futuresBuffer.isEmpty) successful[Option[T]](None)
else {
val result = Promise[Option[T]]()
- val ref = new AtomicInteger(futures.size)
+ val ref = new AtomicInteger(futuresBuffer.size)
val search: Try[T] => Unit = v => try {
v match {
- case Success(r) => if (predicate(r)) result tryComplete Success(Some(r))
+ case Success(r) if p(r) => result tryComplete Success(Some(r))
case _ =>
}
} finally {
@@ -609,7 +609,7 @@ object Future {
}
}
- futures.foreach(_ onComplete search)
+ futuresBuffer.foreach(_ onComplete search)
result.future
}
@@ -625,9 +625,9 @@ object Future {
* val result = Await.result(Future.fold(futures)(0)(_ + _), 5 seconds)
* }}}
*/
- def fold[T, R](futures: TraversableOnce[Future[T]])(zero: R)(foldFun: (R, T) => R)(implicit executor: ExecutionContext): Future[R] = {
- if (futures.isEmpty) Promise.successful(zero).future
- else sequence(futures).map(_.foldLeft(zero)(foldFun))
+ def fold[T, R](futures: TraversableOnce[Future[T]])(zero: R)(@deprecatedName('foldFun) op: (R, T) => R)(implicit executor: ExecutionContext): Future[R] = {
+ if (futures.isEmpty) successful(zero)
+ else sequence(futures).map(_.foldLeft(zero)(op))
}
/** Initiates a fold over the supplied futures where the fold-zero is the result value of the `Future` that's completed first.
@@ -638,7 +638,7 @@ object Future {
* }}}
*/
def reduce[T, R >: T](futures: TraversableOnce[Future[T]])(op: (R, T) => R)(implicit executor: ExecutionContext): Future[R] = {
- if (futures.isEmpty) Promise[R].failure(new NoSuchElementException("reduce attempted on empty collection")).future
+ if (futures.isEmpty) failed(new NoSuchElementException("reduce attempted on empty collection"))
else sequence(futures).map(_ reduceLeft op)
}
@@ -651,10 +651,10 @@ object Future {
* }}}
*/
def traverse[A, B, M[_] <: TraversableOnce[_]](in: M[A])(fn: A => Future[B])(implicit cbf: CanBuildFrom[M[A], B, M[B]], executor: ExecutionContext): Future[M[B]] =
- in.foldLeft(Promise.successful(cbf(in)).future) { (fr, a) =>
+ in.foldLeft(successful(cbf(in))) { (fr, a) =>
val fb = fn(a.asInstanceOf[A])
for (r <- fr; b <- fb) yield (r += b)
- }.map(_.result)
+ }.map(_.result())
// This is used to run callbacks which are internal
// to scala.concurrent; our own callbacks are only
@@ -675,111 +675,11 @@ object Future {
// by just not ever using it itself. scala.concurrent
// doesn't need to create defaultExecutionContext as
// a side effect.
- private[concurrent] object InternalCallbackExecutor extends ExecutionContext with java.util.concurrent.Executor {
+ private[concurrent] object InternalCallbackExecutor extends ExecutionContext with BatchingExecutor {
+ override protected def unbatchedExecute(r: Runnable): Unit =
+ r.run()
override def reportFailure(t: Throwable): Unit =
throw new IllegalStateException("problem in scala.concurrent internal callback", t)
-
- /**
- * The BatchingExecutor trait had to be inlined into InternalCallbackExecutor for binary compatibility.
- *
- * BatchingExecutor is a trait for an Executor
- * which groups multiple nested `Runnable.run()` calls
- * into a single Runnable passed to the original
- * Executor. This can be a useful optimization
- * because it bypasses the original context's task
- * queue and keeps related (nested) code on a single
- * thread which may improve CPU affinity. However,
- * if tasks passed to the Executor are blocking
- * or expensive, this optimization can prevent work-stealing
- * and make performance worse. Also, some ExecutionContext
- * may be fast enough natively that this optimization just
- * adds overhead.
- * The default ExecutionContext.global is already batching
- * or fast enough not to benefit from it; while
- * `fromExecutor` and `fromExecutorService` do NOT add
- * this optimization since they don't know whether the underlying
- * executor will benefit from it.
- * A batching executor can create deadlocks if code does
- * not use `scala.concurrent.blocking` when it should,
- * because tasks created within other tasks will block
- * on the outer task completing.
- * This executor may run tasks in any order, including LIFO order.
- * There are no ordering guarantees.
- *
- * WARNING: The underlying Executor's execute-method must not execute the submitted Runnable
- * in the calling thread synchronously. It must enqueue/handoff the Runnable.
- */
- // invariant: if "_tasksLocal.get ne null" then we are inside BatchingRunnable.run; if it is null, we are outside
- private val _tasksLocal = new ThreadLocal[List[Runnable]]()
-
- private class Batch(val initial: List[Runnable]) extends Runnable with BlockContext {
- private[this] var parentBlockContext: BlockContext = _
- // this method runs in the delegate ExecutionContext's thread
- override def run(): Unit = {
- require(_tasksLocal.get eq null)
-
- val prevBlockContext = BlockContext.current
- BlockContext.withBlockContext(this) {
- try {
- parentBlockContext = prevBlockContext
-
- @tailrec def processBatch(batch: List[Runnable]): Unit = batch match {
- case Nil => ()
- case head :: tail =>
- _tasksLocal set tail
- try {
- head.run()
- } catch {
- case t: Throwable =>
- // if one task throws, move the
- // remaining tasks to another thread
- // so we can throw the exception
- // up to the invoking executor
- val remaining = _tasksLocal.get
- _tasksLocal set Nil
- unbatchedExecute(new Batch(remaining)) //TODO what if this submission fails?
- throw t // rethrow
- }
- processBatch(_tasksLocal.get) // since head.run() can add entries, always do _tasksLocal.get here
- }
-
- processBatch(initial)
- } finally {
- _tasksLocal.remove()
- parentBlockContext = null
- }
- }
- }
-
- override def blockOn[T](thunk: => T)(implicit permission: CanAwait): T = {
- // if we know there will be blocking, we don't want to keep tasks queued up because it could deadlock.
- {
- val tasks = _tasksLocal.get
- _tasksLocal set Nil
- if ((tasks ne null) && tasks.nonEmpty)
- unbatchedExecute(new Batch(tasks))
- }
-
- // now delegate the blocking to the previous BC
- require(parentBlockContext ne null)
- parentBlockContext.blockOn(thunk)
- }
- }
-
- override def execute(runnable: Runnable): Unit = runnable match {
- // If we can batch the runnable
- case _: OnCompleteRunnable =>
- _tasksLocal.get match {
- case null => unbatchedExecute(new Batch(List(runnable))) // If we aren't in batching mode yet, enqueue batch
- case some => _tasksLocal.set(runnable :: some) // If we are already in batching mode, add to batch
- }
-
- // If not batchable, just delegate to underlying
- case _ =>
- unbatchedExecute(runnable)
- }
-
- private def unbatchedExecute(r: Runnable): Unit = r.run()
}
}
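A short usage sketch of the combinators touched in this file; the concrete futures and the one-second timeout are illustrative:
  import scala.concurrent.{ Await, Future }
  import scala.concurrent.ExecutionContext.Implicits.global
  import scala.concurrent.duration._
  val futures = List(Future(1), Future(2), Future(3))
  val firstEven = Future.find(futures)(_ % 2 == 0)   // Future[Option[Int]]
  val sum       = Future.fold(futures)(0)(_ + _)     // Future[Int]
  Await.result(firstEven, 1.second)                  // typically Some(2)
  Await.result(sum, 1.second)                        // 6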
diff --git a/src/library/scala/concurrent/FutureTaskRunner.scala b/src/library/scala/concurrent/FutureTaskRunner.scala
index eeadaddb5e..089e67cedd 100644
--- a/src/library/scala/concurrent/FutureTaskRunner.scala
+++ b/src/library/scala/concurrent/FutureTaskRunner.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2009-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2009-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -10,13 +10,13 @@ package scala.concurrent
import scala.language.{implicitConversions, higherKinds}
-/** The `FutureTaskRunner</code> trait is a base trait of task runners
+/** The `FutureTaskRunner` trait is a base trait of task runners
* that provide some sort of future abstraction.
*
* @author Philipp Haller
*/
@deprecated("Use `ExecutionContext` instead.", "2.10.0")
-trait FutureTaskRunner extends TaskRunner {
+private[scala] trait FutureTaskRunner extends TaskRunner {
/** The type of the futures that the underlying task runner supports.
*/
diff --git a/src/library/scala/concurrent/JavaConversions.scala b/src/library/scala/concurrent/JavaConversions.scala
index d6a7c1f1bb..3d0597ca22 100644
--- a/src/library/scala/concurrent/JavaConversions.scala
+++ b/src/library/scala/concurrent/JavaConversions.scala
@@ -18,38 +18,6 @@ import scala.language.implicitConversions
*/
object JavaConversions {
- @deprecated("Use `asExecutionContext` instead.", "2.10.0")
- implicit def asTaskRunner(exec: ExecutorService): FutureTaskRunner =
- new ThreadPoolRunner {
- override protected def executor =
- exec
-
- def shutdown() =
- exec.shutdown()
- }
-
- @deprecated("Use `asExecutionContext` instead.", "2.10.0")
- implicit def asTaskRunner(exec: Executor): TaskRunner =
- new TaskRunner {
- type Task[T] = Runnable
-
- implicit def functionAsTask[T](fun: () => T): Task[T] = new Runnable {
- def run() { fun() }
- }
-
- def execute[S](task: Task[S]) {
- exec.execute(task)
- }
-
- def managedBlock(blocker: ManagedBlocker) {
- blocker.block()
- }
-
- def shutdown() {
- // do nothing
- }
- }
-
/**
* Creates a new `ExecutionContext` which uses the provided `ExecutorService`.
*/
diff --git a/src/library/scala/concurrent/ManagedBlocker.scala b/src/library/scala/concurrent/ManagedBlocker.scala
index 7b2966c663..b5a6e21893 100644
--- a/src/library/scala/concurrent/ManagedBlocker.scala
+++ b/src/library/scala/concurrent/ManagedBlocker.scala
@@ -13,7 +13,7 @@ package scala.concurrent
* @author Philipp Haller
*/
@deprecated("Use `blocking` instead.", "2.10.0")
-trait ManagedBlocker {
+private[scala] trait ManagedBlocker {
/**
* Possibly blocks the current thread, for example waiting for
diff --git a/src/library/scala/concurrent/Promise.scala b/src/library/scala/concurrent/Promise.scala
index 8355a73a1f..f950b13b78 100644
--- a/src/library/scala/concurrent/Promise.scala
+++ b/src/library/scala/concurrent/Promise.scala
@@ -86,7 +86,7 @@ trait Promise[T] {
*
* $promiseCompletion
*/
- def success(v: T): this.type = complete(Success(v))
+ def success(@deprecatedName('v) value: T): this.type = complete(Success(value))
/** Tries to complete the promise with a value.
*
@@ -104,7 +104,7 @@ trait Promise[T] {
*
* $promiseCompletion
*/
- def failure(t: Throwable): this.type = complete(Failure(t))
+ def failure(@deprecatedName('t) cause: Throwable): this.type = complete(Failure(cause))
/** Tries to complete the promise with an exception.
*
@@ -112,7 +112,7 @@ trait Promise[T] {
*
* @return If the promise has already been completed returns `false`, or `true` otherwise.
*/
- def tryFailure(t: Throwable): Boolean = tryComplete(Failure(t))
+ def tryFailure(@deprecatedName('t) cause: Throwable): Boolean = tryComplete(Failure(cause))
}
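A minimal sketch of the Promise methods whose parameter names change above:
  import scala.concurrent.Promise
  val p = Promise[Int]()
  p.success(42)                            // completes the promise with Success(42)
  p.future.value                           // Some(Success(42))
  p.tryFailure(new RuntimeException("x"))  // false: the promise is already completed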
diff --git a/src/library/scala/concurrent/SyncVar.scala b/src/library/scala/concurrent/SyncVar.scala
index 9ab7bcc572..76d21c3dbf 100644
--- a/src/library/scala/concurrent/SyncVar.scala
+++ b/src/library/scala/concurrent/SyncVar.scala
@@ -41,9 +41,9 @@ class SyncVar[A] {
* @return `None` if variable is undefined after `timeout`, `Some(value)` otherwise
*/
def get(timeout: Long): Option[A] = synchronized {
- /** Defending against the system clock going backward
- * by counting time elapsed directly. Loop required
- * to deal with spurious wakeups.
+ /* Defending against the system clock going backward
+ * by counting time elapsed directly. Loop required
+ * to deal with spurious wakeups.
*/
var rest = timeout
while (!isDefined && rest > 0) {
@@ -79,6 +79,7 @@ class SyncVar[A] {
// whether or not the SyncVar is already defined. So, set has been
// deprecated in order to eventually be able to make "setting" private
@deprecated("Use `put` instead, as `set` is potentially error-prone", "2.10.0")
+ // NOTE: Used by SBT 0.13.0-M2 and below
def set(x: A): Unit = setVal(x)
/** Places a value in the SyncVar. If the SyncVar already has a stored value,
@@ -98,6 +99,7 @@ class SyncVar[A] {
// whether or not the SyncVar is already defined. So, unset has been
// deprecated in order to eventually be able to make "unsetting" private
@deprecated("Use `take` instead, as `unset` is potentially error-prone", "2.10.0")
+ // NOTE: Used by SBT 0.13.0-M2 and below
def unset(): Unit = synchronized {
isDefined = false
value = None
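The clock-robust wait described in the comment above can be sketched as follows; this is a standalone illustration of that description, not the helper SyncVar actually uses:
  // Waits on `lock` for up to `timeoutMs` and reports elapsed time measured
  // directly, so a system-clock reset cannot stretch the wait; callers loop
  // around this to cope with spurious wakeups.
  def timedWait(lock: AnyRef, timeoutMs: Long): Long = {
    val start = System.nanoTime()
    lock.synchronized { lock.wait(timeoutMs) }
    (System.nanoTime() - start) / 1000000L
  }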
diff --git a/src/library/scala/concurrent/TaskRunner.scala b/src/library/scala/concurrent/TaskRunner.scala
index a939a3f070..98c212d9fa 100644
--- a/src/library/scala/concurrent/TaskRunner.scala
+++ b/src/library/scala/concurrent/TaskRunner.scala
@@ -15,7 +15,7 @@ import scala.language.{higherKinds, implicitConversions}
* @author Philipp Haller
*/
@deprecated("Use `ExecutionContext` instead.", "2.10.0")
-trait TaskRunner {
+private[scala] trait TaskRunner {
type Task[T]
diff --git a/src/library/scala/concurrent/TaskRunners.scala b/src/library/scala/concurrent/TaskRunners.scala
deleted file mode 100644
index e109a8abf9..0000000000
--- a/src/library/scala/concurrent/TaskRunners.scala
+++ /dev/null
@@ -1,36 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala.concurrent
-
-import java.util.concurrent.{ThreadPoolExecutor, LinkedBlockingQueue, TimeUnit}
-
-/** The `TaskRunners` object...
- *
- * @author Philipp Haller
- */
-@deprecated("Use `ExecutionContext` instead.", "2.10.0")
-object TaskRunners {
-
- implicit val threadRunner: FutureTaskRunner =
- new ThreadRunner
-
- implicit val threadPoolRunner: FutureTaskRunner = {
- val numCores = Runtime.getRuntime().availableProcessors()
- val keepAliveTime = 60000L
- val workQueue = new LinkedBlockingQueue[Runnable]
- val exec = new ThreadPoolExecutor(numCores,
- numCores,
- keepAliveTime,
- TimeUnit.MILLISECONDS,
- workQueue,
- new ThreadPoolExecutor.CallerRunsPolicy)
- JavaConversions.asTaskRunner(exec)
- }
-
-}
diff --git a/src/library/scala/concurrent/ThreadPoolRunner.scala b/src/library/scala/concurrent/ThreadPoolRunner.scala
index afa14ed2fa..7784681f71 100644
--- a/src/library/scala/concurrent/ThreadPoolRunner.scala
+++ b/src/library/scala/concurrent/ThreadPoolRunner.scala
@@ -17,7 +17,7 @@ import scala.language.implicitConversions
* @author Philipp Haller
*/
@deprecated("Use `ExecutionContext` instead.", "2.10.0")
-trait ThreadPoolRunner extends FutureTaskRunner {
+private[scala] trait ThreadPoolRunner extends FutureTaskRunner {
type Task[T] = Callable[T] with Runnable
type Future[T] = java.util.concurrent.Future[T]
diff --git a/src/library/scala/concurrent/ThreadRunner.scala b/src/library/scala/concurrent/ThreadRunner.scala
deleted file mode 100644
index cd92db9486..0000000000
--- a/src/library/scala/concurrent/ThreadRunner.scala
+++ /dev/null
@@ -1,60 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala.concurrent
-
-import java.lang.Thread
-import scala.language.implicitConversions
-
-/** The `ThreadRunner` trait...
- *
- * @author Philipp Haller
- */
-@deprecated("Use `ExecutionContext` instead.", "2.10.0")
-class ThreadRunner extends FutureTaskRunner {
-
- type Task[T] = () => T
- type Future[T] = () => T
-
- implicit def functionAsTask[S](fun: () => S): Task[S] = fun
- implicit def futureAsFunction[S](x: Future[S]): () => S = x
-
- /* If expression computed successfully return it in `Right`,
- * otherwise return exception in `Left`.
- */
- private def tryCatch[A](body: => A): Either[Exception, A] =
- try Right(body) catch {
- case ex: Exception => Left(ex)
- }
-
- def execute[S](task: Task[S]) {
- val runnable = new Runnable {
- def run() { tryCatch(task()) }
- }
- (new Thread(runnable)).start()
- }
-
- def submit[S](task: Task[S]): Future[S] = {
- val result = new SyncVar[Either[Exception, S]]
- val runnable = new Runnable {
- def run() { result set tryCatch(task()) }
- }
- (new Thread(runnable)).start()
- () => result.get.fold[S](throw _, identity _)
- }
-
- @deprecated("Use `blocking` instead.", "2.10.0")
- def managedBlock(blocker: ManagedBlocker) {
- blocker.block()
- }
-
- def shutdown() {
- // do nothing
- }
-
-}
diff --git a/src/library/scala/concurrent/duration/Duration.scala b/src/library/scala/concurrent/duration/Duration.scala
index 0353d61b22..a24266bf19 100644
--- a/src/library/scala/concurrent/duration/Duration.scala
+++ b/src/library/scala/concurrent/duration/Duration.scala
@@ -10,6 +10,7 @@ package scala.concurrent.duration
import java.lang.{ Double => JDouble, Long => JLong }
import scala.language.implicitConversions
+import scala.language.postfixOps
object Duration {
@@ -103,7 +104,7 @@ object Duration {
* Extract length and time unit out of a duration, if it is finite.
*/
def unapply(d: Duration): Option[(Long, TimeUnit)] =
- if (d.isFinite) Some((d.length, d.unit)) else None
+ if (d.isFinite()) Some((d.length, d.unit)) else None
/**
* Construct a possibly infinite or undefined Duration from the given number of nanoseconds.
@@ -623,7 +624,7 @@ final class FiniteDuration(val length: Long, val unit: TimeUnit) extends Duratio
// if this is made a constant, then scalac will elide the conditional and always return +0.0, SI-6331
private[this] def minusZero = -0d
def /(divisor: Duration): Double =
- if (divisor.isFinite) toNanos.toDouble / divisor.toNanos
+ if (divisor.isFinite()) toNanos.toDouble / divisor.toNanos
else if (divisor eq Undefined) Double.NaN
else if ((length < 0) ^ (divisor > Zero)) 0d
else minusZero
@@ -663,8 +664,8 @@ final class FiniteDuration(val length: Long, val unit: TimeUnit) extends Duratio
* Long.MinValue is not a legal `length` anyway.
*/
private def safeMul(_a: Long, _b: Long): Long = {
- val a = math.abs(_a)
- val b = math.abs(_b)
+ val a = scala.math.abs(_a)
+ val b = scala.math.abs(_b)
import java.lang.Long.{ numberOfLeadingZeros => leading }
if (leading(a) + leading(b) < 64) throw new IllegalArgumentException("multiplication overflow")
val product = a * b
diff --git a/src/library/scala/concurrent/duration/package.scala b/src/library/scala/concurrent/duration/package.scala
index 2fd735f19e..b32d2b20cb 100644
--- a/src/library/scala/concurrent/duration/package.scala
+++ b/src/library/scala/concurrent/duration/package.scala
@@ -36,12 +36,12 @@ package object duration {
final val NANOSECONDS = java.util.concurrent.TimeUnit.NANOSECONDS
final val SECONDS = java.util.concurrent.TimeUnit.SECONDS
- implicit def pairIntToDuration(p: (Int, TimeUnit)): Duration = Duration(p._1, p._2)
+ implicit def pairIntToDuration(p: (Int, TimeUnit)): Duration = Duration(p._1.toLong, p._2)
implicit def pairLongToDuration(p: (Long, TimeUnit)): FiniteDuration = Duration(p._1, p._2)
implicit def durationToPair(d: Duration): (Long, TimeUnit) = (d.length, d.unit)
implicit final class DurationInt(val n: Int) extends AnyVal with DurationConversions {
- override protected def durationIn(unit: TimeUnit): FiniteDuration = Duration(n, unit)
+ override protected def durationIn(unit: TimeUnit): FiniteDuration = Duration(n.toLong, unit)
}
implicit final class DurationLong(val n: Long) extends AnyVal with DurationConversions {
@@ -60,16 +60,16 @@ package object duration {
* Avoid reflection based invocation by using non-duck type
*/
implicit final class IntMult(val i: Int) extends AnyVal {
- def *(d: Duration) = d * i
- def *(d: FiniteDuration) = d * i
+ def *(d: Duration) = d * i.toDouble
+ def *(d: FiniteDuration) = d * i.toLong
}
implicit final class LongMult(val i: Long) extends AnyVal {
- def *(d: Duration) = d * i
- def *(d: FiniteDuration) = d * i
+ def *(d: Duration) = d * i.toDouble
+ def *(d: FiniteDuration) = d * i.toLong
}
implicit final class DoubleMult(val f: Double) extends AnyVal {
- def *(d: Duration) = d * f
+ def *(d: Duration) = d * f.toDouble
}
}
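A short sketch of the duration DSL these implicit classes provide; the values are arbitrary:
  import scala.concurrent.duration._
  val d1: FiniteDuration = 5.seconds              // DurationInt
  val d2: FiniteDuration = 3 * 500.millis         // IntMult with a FiniteDuration
  val ratio: Double      = 1.minute / 30.seconds  // Duration division, 2.0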
diff --git a/src/library/scala/concurrent/impl/ExecutionContextImpl.scala b/src/library/scala/concurrent/impl/ExecutionContextImpl.scala
index 0aa6b37ffc..479720287c 100644
--- a/src/library/scala/concurrent/impl/ExecutionContextImpl.scala
+++ b/src/library/scala/concurrent/impl/ExecutionContextImpl.scala
@@ -30,7 +30,7 @@ private[scala] class ExecutionContextImpl private[impl] (es: Executor, reporter:
}
// Implement BlockContext on FJP threads
- class DefaultThreadFactory(daemonic: Boolean) extends ThreadFactory with ForkJoinPool.ForkJoinWorkerThreadFactory {
+ class DefaultThreadFactory(daemonic: Boolean) extends ThreadFactory with ForkJoinPool.ForkJoinWorkerThreadFactory {
def wire[T <: Thread](thread: T): T = {
thread.setDaemon(daemonic)
thread.setUncaughtExceptionHandler(uncaughtExceptionHandler)
@@ -57,22 +57,22 @@ private[scala] class ExecutionContextImpl private[impl] (es: Executor, reporter:
def createExecutorService: ExecutorService = {
- def getInt(name: String, f: String => Int): Int =
- try f(System.getProperty(name)) catch { case e: Exception => Runtime.getRuntime.availableProcessors }
- def range(floor: Int, desired: Int, ceiling: Int): Int =
- if (ceiling < floor) range(ceiling, desired, floor) else scala.math.min(scala.math.max(desired, floor), ceiling)
+ def getInt(name: String, default: String) = (try System.getProperty(name, default) catch {
+ case e: SecurityException => default
+ }) match {
+ case s if s.charAt(0) == 'x' => (Runtime.getRuntime.availableProcessors * s.substring(1).toDouble).ceil.toInt
+ case other => other.toInt
+ }
+
+ def range(floor: Int, desired: Int, ceiling: Int) = scala.math.min(scala.math.max(floor, desired), ceiling)
val desiredParallelism = range(
- getInt("scala.concurrent.context.minThreads", _.toInt),
- getInt("scala.concurrent.context.numThreads", {
- case null | "" => Runtime.getRuntime.availableProcessors
- case s if s.charAt(0) == 'x' => (Runtime.getRuntime.availableProcessors * s.substring(1).toDouble).ceil.toInt
- case other => other.toInt
- }),
- getInt("scala.concurrent.context.maxThreads", _.toInt))
+ getInt("scala.concurrent.context.minThreads", "1"),
+ getInt("scala.concurrent.context.numThreads", "x1"),
+ getInt("scala.concurrent.context.maxThreads", "x1"))
val threadFactory = new DefaultThreadFactory(daemonic = true)
-
+
try {
new ForkJoinPool(
desiredParallelism,
@@ -96,12 +96,26 @@ private[scala] class ExecutionContextImpl private[impl] (es: Executor, reporter:
}
}
-
def execute(runnable: Runnable): Unit = executor match {
case fj: ForkJoinPool =>
- val fjt = runnable match {
+ val fjt: ForkJoinTask[_] = runnable match {
case t: ForkJoinTask[_] => t
- case runnable => new ForkJoinTask[Unit] {
+ case r => new ExecutionContextImpl.AdaptedForkJoinTask(r)
+ }
+ Thread.currentThread match {
+ case fjw: ForkJoinWorkerThread if fjw.getPool eq fj => fjt.fork()
+ case _ => fj execute fjt
+ }
+ case generic => generic execute runnable
+ }
+
+ def reportFailure(t: Throwable) = reporter(t)
+}
+
+
+private[concurrent] object ExecutionContextImpl {
+
+ final class AdaptedForkJoinTask(runnable: Runnable) extends ForkJoinTask[Unit] {
final override def setRawResult(u: Unit): Unit = ()
final override def getRawResult(): Unit = ()
final override def exec(): Boolean = try { runnable.run(); true } catch {
@@ -114,18 +128,7 @@ private[scala] class ExecutionContextImpl private[impl] (es: Executor, reporter:
throw anything
}
}
- }
- Thread.currentThread match {
- case fjw: ForkJoinWorkerThread if fjw.getPool eq fj => fjt.fork()
- case _ => fj execute fjt
- }
- case generic => generic execute runnable
- }
-
- def reportFailure(t: Throwable) = reporter(t)
-}
-private[concurrent] object ExecutionContextImpl {
def fromExecutor(e: Executor, reporter: Throwable => Unit = ExecutionContext.defaultReporter): ExecutionContextImpl = new ExecutionContextImpl(e, reporter)
def fromExecutorService(es: ExecutorService, reporter: Throwable => Unit = ExecutionContext.defaultReporter): ExecutionContextImpl with ExecutionContextExecutorService =
new ExecutionContextImpl(es, reporter) with ExecutionContextExecutorService {
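The property parsing introduced above can be exercised in isolation; parseThreads is a made-up helper that mirrors the "xN"-versus-integer convention from the patch:
  def parseThreads(value: String, cores: Int): Int =
    if (value.charAt(0) == 'x') (cores * value.substring(1).toDouble).ceil.toInt
    else value.toInt
  parseThreads("x2", cores = 8)   // 16: scale the available processors
  parseThreads("6",  cores = 8)   // 6: use the literal thread count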
diff --git a/src/library/scala/concurrent/impl/Future.scala b/src/library/scala/concurrent/impl/Future.scala
index 89d10e5c47..042d32c234 100644
--- a/src/library/scala/concurrent/impl/Future.scala
+++ b/src/library/scala/concurrent/impl/Future.scala
@@ -12,7 +12,7 @@ package scala.concurrent.impl
import scala.concurrent.ExecutionContext
import scala.util.control.NonFatal
-import scala.util.{Try, Success, Failure}
+import scala.util.{ Success, Failure }
private[concurrent] object Future {
diff --git a/src/library/scala/concurrent/impl/Promise.scala b/src/library/scala/concurrent/impl/Promise.scala
index ed4039e2a5..ffaea8de96 100644
--- a/src/library/scala/concurrent/impl/Promise.scala
+++ b/src/library/scala/concurrent/impl/Promise.scala
@@ -55,7 +55,7 @@ private[concurrent] object Promise {
case e: Error => Failure(new ExecutionException("Boxed Error", e))
case t => Failure(t)
}
-
+
/*
* Inspired by: http://gee.cs.oswego.edu/cgi-bin/viewcvs.cgi/jsr166/src/main/java/util/concurrent/locks/AbstractQueuedSynchronizer.java
* Written by Doug Lea with assistance from members of JCP JSR-166
@@ -134,7 +134,7 @@ private[concurrent] object Promise {
}
def onComplete[U](func: Try[T] => U)(implicit executor: ExecutionContext): Unit = {
- val preparedEC = executor.prepare
+ val preparedEC = executor.prepare()
val runnable = new CallbackRunnable[T](preparedEC, func)
@tailrec //Tries to add the callback, if already completed, it dispatches the callback to be executed
@@ -161,7 +161,7 @@ private[concurrent] object Promise {
def onComplete[U](func: Try[T] => U)(implicit executor: ExecutionContext): Unit = {
val completedAs = value.get
- val preparedEC = executor.prepare
+ val preparedEC = executor.prepare()
(new CallbackRunnable(preparedEC, func)).executeWithValue(completedAs)
}
diff --git a/src/library/scala/concurrent/ops.scala b/src/library/scala/concurrent/ops.scala
deleted file mode 100644
index 4c91e78dc7..0000000000
--- a/src/library/scala/concurrent/ops.scala
+++ /dev/null
@@ -1,73 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala.concurrent
-
-import java.lang.Thread
-import scala.util.control.Exception.allCatch
-
-/** The object `ops` ...
- *
- * @author Martin Odersky, Stepan Koltsov, Philipp Haller
- */
-@deprecated("Use `Future` instead.", "2.10.0")
-object ops
-{
- val defaultRunner: FutureTaskRunner = TaskRunners.threadRunner
-
- /**
- * If expression computed successfully return it in `Right`,
- * otherwise return exception in `Left`.
- */
- private def tryCatch[A](body: => A): Either[Throwable, A] =
- allCatch[A] either body
-
- private def getOrThrow[T <: Throwable, A](x: Either[T, A]): A =
- x.fold[A](throw _, identity _)
-
- /** Evaluates an expression asynchronously.
- *
- * @param p the expression to evaluate
- */
- def spawn(p: => Unit)(implicit runner: TaskRunner = defaultRunner): Unit = {
- runner execute runner.functionAsTask(() => p)
- }
-
- /** Evaluates an expression asynchronously, and returns a closure for
- * retrieving the result.
- *
- * @param p the expression to evaluate
- * @return a closure which returns the result once it has been computed
- */
- def future[A](p: => A)(implicit runner: FutureTaskRunner = defaultRunner): () => A = {
- runner.futureAsFunction(runner submit runner.functionAsTask(() => p))
- }
-
- /** Evaluates two expressions in parallel. Invoking `par` blocks the current
- * thread until both expressions have been evaluated.
- *
- * @param xp the first expression to evaluate
- * @param yp the second expression to evaluate
- *
- * @return a pair holding the evaluation results
- */
- def par[A, B](xp: => A, yp: => B)(implicit runner: TaskRunner = defaultRunner): (A, B) = {
- val y = new SyncVar[Either[Throwable, B]]
- spawn { y set tryCatch(yp) }
- (xp, getOrThrow(y.get))
- }
-
-/*
- def parMap[a,b](f: a => b, xs: Array[a]): Array[b] = {
- val results = new Array[b](xs.length);
- replicate(0, xs.length) { i => results(i) = f(xs(i)) }
- results
- }
-*/
-
-}
diff --git a/src/library/scala/concurrent/package.scala b/src/library/scala/concurrent/package.scala
index 3e849f1722..50a66a622a 100644
--- a/src/library/scala/concurrent/package.scala
+++ b/src/library/scala/concurrent/package.scala
@@ -24,10 +24,10 @@ package object concurrent {
*
* @tparam T the type of the result
* @param body the asynchronous computation
- * @param execctx the execution context on which the future is run
+ * @param executor the execution context on which the future is run
* @return the `Future` holding the result of the computation
*/
- def future[T](body: =>T)(implicit execctx: ExecutionContext): Future[T] = Future[T](body)
+ def future[T](body: =>T)(implicit @deprecatedName('execctx) executor: ExecutionContext): Future[T] = Future[T](body)
/** Creates a promise object which can be completed with a value or an exception.
*
diff --git a/src/library/scala/deprecatedInheritance.scala b/src/library/scala/deprecatedInheritance.scala
index 70065560b1..7d20219d4d 100644
--- a/src/library/scala/deprecatedInheritance.scala
+++ b/src/library/scala/deprecatedInheritance.scala
@@ -11,7 +11,8 @@ package scala
/** An annotation that designates that inheriting from a class is deprecated.
*
* This is usually done to warn about a non-final class being made final in a future version.
- * Sub-classing such a class then generates a warning.
+ * Sub-classing such a class then generates a warning. No warnings are generated if the
+ * subclass is in the same compilation unit.
*
* @param message the message to print during compilation if the class was sub-classed
* @param since a string identifying the first version in which inheritance was deprecated
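A brief example of the annotation documented here; the class names are invented:
  @deprecatedInheritance("FinalSoon will be made final.", "2.11.0")
  class FinalSoon
  // Warns when compiled in a separate compilation unit; per the note above,
  // no warning is issued when the subclass sits in the same unit.
  class Sub extends FinalSoon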
diff --git a/src/library/scala/io/AnsiColor.scala b/src/library/scala/io/AnsiColor.scala
new file mode 100644
index 0000000000..6b00eb283f
--- /dev/null
+++ b/src/library/scala/io/AnsiColor.scala
@@ -0,0 +1,53 @@
+package scala
+package io
+
+trait AnsiColor {
+ /** Foreground color for ANSI black */
+ final val BLACK = "\033[30m"
+ /** Foreground color for ANSI red */
+ final val RED = "\033[31m"
+ /** Foreground color for ANSI green */
+ final val GREEN = "\033[32m"
+ /** Foreground color for ANSI yellow */
+ final val YELLOW = "\033[33m"
+ /** Foreground color for ANSI blue */
+ final val BLUE = "\033[34m"
+ /** Foreground color for ANSI magenta */
+ final val MAGENTA = "\033[35m"
+ /** Foreground color for ANSI cyan */
+ final val CYAN = "\033[36m"
+ /** Foreground color for ANSI white */
+ final val WHITE = "\033[37m"
+
+ /** Background color for ANSI black */
+ final val BLACK_B = "\033[40m"
+ /** Background color for ANSI red */
+ final val RED_B = "\033[41m"
+ /** Background color for ANSI green */
+ final val GREEN_B = "\033[42m"
+ /** Background color for ANSI yellow */
+ final val YELLOW_B = "\033[43m"
+ /** Background color for ANSI blue */
+ final val BLUE_B = "\033[44m"
+ /** Background color for ANSI magenta */
+ final val MAGENTA_B = "\033[45m"
+ /** Background color for ANSI cyan */
+ final val CYAN_B = "\033[46m"
+ /** Background color for ANSI white */
+ final val WHITE_B = "\033[47m"
+
+ /** Reset ANSI styles */
+ final val RESET = "\033[0m"
+ /** ANSI bold */
+ final val BOLD = "\033[1m"
+ /** ANSI underlines */
+ final val UNDERLINED = "\033[4m"
+ /** ANSI blink */
+ final val BLINK = "\033[5m"
+ /** ANSI reversed */
+ final val REVERSED = "\033[7m"
+ /** ANSI invisible */
+ final val INVISIBLE = "\033[8m"
+}
+
+object AnsiColor extends AnsiColor { }
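A usage sketch of the new trait; the message text is arbitrary:
  import scala.io.AnsiColor._
  // Prints "error" in bold red on terminals that honour ANSI escape codes.
  println(BOLD + RED + "error" + RESET)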
diff --git a/src/library/scala/io/BufferedSource.scala b/src/library/scala/io/BufferedSource.scala
index 767f06fd3f..c170d28127 100644
--- a/src/library/scala/io/BufferedSource.scala
+++ b/src/library/scala/io/BufferedSource.scala
@@ -55,7 +55,7 @@ class BufferedSource(inputStream: InputStream, bufferSize: Int)(implicit val cod
// immediately on the source.
if (charReaderCreated && iter.hasNext) {
val pb = new PushbackReader(charReader)
- pb unread iter.next()
+ pb unread iter.next().toInt
new BufferedReader(pb, bufferSize)
}
else charReader
@@ -73,7 +73,7 @@ class BufferedSource(inputStream: InputStream, bufferSize: Int)(implicit val cod
if (nextLine == null) lineReader.readLine
else try nextLine finally nextLine = null
}
- if (result == null) Iterator.empty.next
+ if (result == null) Iterator.empty.next()
else result
}
}
diff --git a/src/library/scala/io/BytePickle.scala b/src/library/scala/io/BytePickle.scala
deleted file mode 100644
index 2c4a0bd2da..0000000000
--- a/src/library/scala/io/BytePickle.scala
+++ /dev/null
@@ -1,318 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala.io
-
-import scala.collection.mutable
-
-/**
- * Pickler combinators.
- * Based on a Haskell library by Andrew Kennedy,
- * see <a href="http://research.microsoft.com/~akenn/fun/"
- * target="_top">http://research.microsoft.com/~akenn/fun/</a>.
- *
- * @author Philipp Haller
- * @version 1.1
- */
-@deprecated("This class will be removed.", "2.10.0")
-object BytePickle {
- abstract class SPU[T] {
- def appP(a: T, state: PicklerState): PicklerState
- def appU(state: UnPicklerState): (T, UnPicklerState)
- }
-
- def pickle[T](p: SPU[T], a: T): Array[Byte] =
- p.appP(a, new PicklerState(new Array[Byte](0), new PicklerEnv)).stream
-
- def unpickle[T](p: SPU[T], stream: Array[Byte]): T =
- p.appU(new UnPicklerState(stream, new UnPicklerEnv))._1
-
- abstract class PU[T] {
- def appP(a: T, state: Array[Byte]): Array[Byte]
- def appU(state: Array[Byte]): (T, Array[Byte])
- }
-
- def upickle[T](p: PU[T], a: T): Array[Byte] =
- p.appP(a, new Array[Byte](0))
-
- def uunpickle[T](p: PU[T], stream: Array[Byte]): T =
- p.appU(stream)._1
-
- class PicklerEnv extends mutable.HashMap[Any, Int] {
- private var cnt: Int = 64
- def nextLoc() = { cnt += 1; cnt }
- }
-
- class UnPicklerEnv extends mutable.HashMap[Int, Any] {
- private var cnt: Int = 64
- def nextLoc() = { cnt += 1; cnt }
- }
-
- class PicklerState(val stream: Array[Byte], val dict: PicklerEnv)
- class UnPicklerState(val stream: Array[Byte], val dict: UnPicklerEnv)
-
- abstract class RefDef
- case class Ref() extends RefDef
- case class Def() extends RefDef
-
- def refDef: PU[RefDef] = new PU[RefDef] {
- def appP(b: RefDef, s: Array[Byte]): Array[Byte] =
- b match {
- case Ref() => Array.concat(s, Array[Byte](0))
- case Def() => Array.concat(s, Array[Byte](1))
- };
- def appU(s: Array[Byte]): (RefDef, Array[Byte]) =
- if (s(0) == (0: Byte)) (Ref(), s.slice(1, s.length))
- else (Def(), s.slice(1, s.length));
- }
-
- val REF = 0
- val DEF = 1
-
- def unat: PU[Int] = new PU[Int] {
- def appP(n: Int, s: Array[Byte]): Array[Byte] =
- Array.concat(s, nat2Bytes(n));
- def appU(s: Array[Byte]): (Int, Array[Byte]) = {
- var num = 0
- def readNat: Int = {
- var b = 0;
- var x = 0;
- do {
- b = s(num)
- num += 1
- x = (x << 7) + (b & 0x7f);
- } while ((b & 0x80) != 0);
- x
- }
- (readNat, s.slice(num, s.length))
- }
- }
-
- def share[a](pa: SPU[a]): SPU[a] = new SPU[a] {
- def appP(v: a, state: PicklerState): PicklerState = {
- /*
- - is there some value equal to v associated with a location l in the pickle environment?
- - yes: write REF-tag to outstream together with l
- - no:
- write DEF-tag to outstream
- record current location l of outstream
- --> serialize value
- add entry to pickle environment, mapping v onto l
- */
- val pe = state.dict
- pe.get(v) match {
- case None =>
- val sPrime = refDef.appP(Def(), state.stream)
- val l = pe.nextLoc()
-
- val sPrimePrime = pa.appP(v, new PicklerState(sPrime, pe))
-
- pe.update(v, l)
-
- return sPrimePrime
- case Some(l) =>
- val sPrime = refDef.appP(Ref(), state.stream)
-
- return new PicklerState(unat.appP(l, sPrime), pe)
- }
- }
- def appU(state: UnPicklerState): (a, UnPicklerState) = {
- /*
- - first, read tag (i.e. DEF or REF)
- - if REF:
- read location l
- look up resulting value in unpickler environment
- - if DEF:
- record location l of input stream
- --> deserialize value v with argument deserializer
- add entry to unpickler environment, mapping l onto v
- */
- val upe = state.dict
- val res = refDef.appU(state.stream)
- res._1 match {
- case Def() =>
- val l = upe.nextLoc
- val res2 = pa.appU(new UnPicklerState(res._2, upe))
- upe.update(l, res2._1)
- return res2
- case Ref() =>
- val res2 = unat.appU(res._2) // read location
- upe.get(res2._1) match { // lookup value in unpickler env
- case None => throw new IllegalArgumentException("invalid unpickler environment")
- case Some(v) => return (v.asInstanceOf[a], new UnPicklerState(res2._2, upe))
- }
- }
- }
- }
-
- def ulift[t](x: t): PU[t] = new PU[t] {
- def appP(a: t, state: Array[Byte]): Array[Byte] =
- if (x != a) throw new IllegalArgumentException("value to be pickled (" + a + ") != " + x)
- else state;
- def appU(state: Array[Byte]) = (x, state)
- }
-
- def lift[t](x: t): SPU[t] = new SPU[t] {
- def appP(a: t, state: PicklerState): PicklerState =
- if (x != a) { /*throw new IllegalArgumentException("value to be pickled (" + a + ") != " + x);*/ state }
- else state;
- def appU(state: UnPicklerState) = (x, state)
- }
-
- def usequ[t,u](f: u => t, pa: PU[t], k: t => PU[u]): PU[u] = new PU[u] {
- def appP(b: u, s: Array[Byte]): Array[Byte] = {
- val a = f(b)
- val sPrime = pa.appP(a, s)
- val pb = k(a)
- val sPrimePrime = pb.appP(b, sPrime)
- sPrimePrime
- }
- def appU(s: Array[Byte]): (u, Array[Byte]) = {
- val resPa = pa.appU(s)
- val a = resPa._1
- val sPrime = resPa._2
- val pb = k(a)
- pb.appU(sPrime)
- }
- }
-
- def sequ[t,u](f: u => t, pa: SPU[t], k: t => SPU[u]): SPU[u] = new SPU[u] {
- def appP(b: u, s: PicklerState): PicklerState = {
- val a = f(b)
- val sPrime = pa.appP(a, s)
- val pb = k(a)
- pb.appP(b, sPrime)
- }
- def appU(s: UnPicklerState): (u, UnPicklerState) = {
- val resPa = pa.appU(s)
- val a = resPa._1
- val sPrime = resPa._2
- val pb = k(a)
- pb.appU(sPrime)
- }
- }
-
- def upair[a,b](pa: PU[a], pb: PU[b]): PU[(a,b)] = {
- def fst(p: (a,b)): a = p._1
- def snd(p: (a,b)): b = p._2
- usequ(fst, pa, (x: a) => usequ(snd, pb, (y: b) => ulift((x, y))))
- }
-
- def pair[a,b](pa: SPU[a], pb: SPU[b]): SPU[(a,b)] = {
- def fst(p: (a,b)): a = p._1
- def snd(p: (a,b)): b = p._2
- sequ(fst, pa, (x: a) => sequ(snd, pb, (y: b) => lift((x, y))))
- }
-
- def triple[a,b,c](pa: SPU[a], pb: SPU[b], pc: SPU[c]): SPU[(a,b,c)] = {
- def fst(p: (a,b,c)): a = p._1
- def snd(p: (a,b,c)): b = p._2
- def trd(p: (a,b,c)): c = p._3
-
- sequ(fst, pa,
- (x: a) => sequ(snd, pb,
- (y: b) => sequ(trd, pc,
- (z: c) => lift((x, y, z)))))
- }
-
- def uwrap[a,b](i: a => b, j: b => a, pa: PU[a]): PU[b] =
- usequ(j, pa, (x: a) => ulift(i(x)))
-
- def wrap[a,b](i: a => b, j: b => a, pa: SPU[a]): SPU[b] =
- sequ(j, pa, (x: a) => lift(i(x)))
-
- def appendByte(a: Array[Byte], b: Int): Array[Byte] =
- Array.concat(a, Array(b.toByte))
-
- def nat2Bytes(x: Int): Array[Byte] = {
- val buf = new mutable.ArrayBuffer[Byte]
- def writeNatPrefix(x: Int) {
- val y = x >>> 7;
- if (y != 0) writeNatPrefix(y);
- buf += ((x & 0x7f) | 0x80).asInstanceOf[Byte];
- }
- val y = x >>> 7;
- if (y != 0) writeNatPrefix(y);
- buf += (x & 0x7f).asInstanceOf[Byte];
- buf.toArray
- }
-
- def nat: SPU[Int] = new SPU[Int] {
- def appP(n: Int, s: PicklerState): PicklerState = {
- new PicklerState(Array.concat(s.stream, nat2Bytes(n)), s.dict);
- }
- def appU(s: UnPicklerState): (Int,UnPicklerState) = {
- var num = 0
- def readNat: Int = {
- var b = 0
- var x = 0
- do {
- b = s.stream(num)
- num += 1
- x = (x << 7) + (b & 0x7f);
- } while ((b & 0x80) != 0);
- x
- }
- (readNat, new UnPicklerState(s.stream.slice(num, s.stream.length), s.dict))
- }
- }
-
- def byte: SPU[Byte] = new SPU[Byte] {
- def appP(b: Byte, s: PicklerState): PicklerState =
- new PicklerState(Array.concat(s.stream, Array(b)), s.dict)
- def appU(s: UnPicklerState): (Byte, UnPicklerState) =
- (s.stream(0), new UnPicklerState(s.stream.slice(1, s.stream.length), s.dict));
- }
-
- def string: SPU[String] = share(wrap(
- (a: Array[Byte]) => (Codec fromUTF8 a).mkString,
- (s: String) => Codec toUTF8 s,
- bytearray
- ))
-
- def bytearray: SPU[Array[Byte]] = {
- wrap((l:List[Byte]) => l.toArray, (_.toList), list(byte))
- }
-
- def bool: SPU[Boolean] = {
- def toEnum(b: Boolean) = if (b) 1 else 0
- def fromEnum(n: Int) = if (n == 0) false else true
- wrap(fromEnum, toEnum, nat)
- }
-
- def ufixedList[A](pa: PU[A])(n: Int): PU[List[A]] = {
- def pairToList(p: (A, List[A])): List[A] =
- p._1 :: p._2;
- def listToPair(l: List[A]): (A, List[A]) =
- (l: @unchecked) match { case x :: xs => (x, xs) }
-
- if (n == 0) ulift(Nil)
- else
- uwrap(pairToList, listToPair, upair(pa, ufixedList(pa)(n-1)))
- }
-
- def fixedList[a](pa: SPU[a])(n: Int): SPU[List[a]] = {
- def pairToList(p: (a,List[a])): List[a] =
- p._1 :: p._2;
- def listToPair(l: List[a]): (a,List[a]) =
- (l: @unchecked) match { case x :: xs => (x, xs) }
-
- if (n == 0) lift(Nil)
- else
- wrap(pairToList, listToPair, pair(pa, fixedList(pa)(n-1)))
- }
-
- def list[a](pa: SPU[a]): SPU[List[a]] =
- sequ((l: List[a])=>l.length, nat, fixedList(pa));
-
- def ulist[a](pa: PU[a]): PU[List[a]] =
- usequ((l:List[a]) => l.length, unat, ufixedList(pa));
-
- def data[a](tag: a => Int, ps: List[()=>SPU[a]]): SPU[a] =
- sequ(tag, nat, (x: Int)=> ps.apply(x)());
-}
diff --git a/src/library/scala/io/Codec.scala b/src/library/scala/io/Codec.scala
index 5d046e48b0..60f99199cb 100644
--- a/src/library/scala/io/Codec.scala
+++ b/src/library/scala/io/Codec.scala
@@ -6,8 +6,8 @@
** |/ **
\* */
-
-package scala.io
+package scala
+package io
import java.nio.charset.{ Charset, CharsetDecoder, CharsetEncoder, CharacterCodingException, CodingErrorAction => Action }
import scala.annotation.migration
@@ -43,42 +43,37 @@ class Codec(val charSet: Charset) {
override def toString = name
// these methods can be chained to configure the variables above
- def onMalformedInput(newAction: Action): this.type = { _onMalformedInput = newAction ; this }
- def onUnmappableCharacter(newAction: Action): this.type = { _onUnmappableCharacter = newAction ; this }
- def decodingReplaceWith(newReplacement: String): this.type = { _decodingReplacement = newReplacement ; this }
+ def onMalformedInput(newAction: Action): this.type = { _onMalformedInput = newAction ; this }
+ def onUnmappableCharacter(newAction: Action): this.type = { _onUnmappableCharacter = newAction ; this }
+ def decodingReplaceWith(newReplacement: String): this.type = { _decodingReplacement = newReplacement ; this }
def encodingReplaceWith(newReplacement: Array[Byte]): this.type = { _encodingReplacement = newReplacement ; this }
- def onCodingException(handler: Handler): this.type = { _onCodingException = handler ; this }
+ def onCodingException(handler: Handler): this.type = { _onCodingException = handler ; this }
def name = charSet.name
- def encoder =
- applyFunctions[CharsetEncoder](charSet.newEncoder(),
- (_ onMalformedInput _onMalformedInput, _onMalformedInput != null),
- (_ onUnmappableCharacter _onUnmappableCharacter, _onUnmappableCharacter != null),
- (_ replaceWith _encodingReplacement, _encodingReplacement != null)
- )
-
- def decoder =
- applyFunctions[CharsetDecoder](charSet.newDecoder(),
- (_ onMalformedInput _onMalformedInput, _onMalformedInput != null),
- (_ onUnmappableCharacter _onUnmappableCharacter, _onUnmappableCharacter != null),
- (_ replaceWith _decodingReplacement, _decodingReplacement != null)
- )
+ def encoder: CharsetEncoder = {
+ val enc = charSet.newEncoder()
+ if (_onMalformedInput ne null) enc onMalformedInput _onMalformedInput
+ if (_onUnmappableCharacter ne null) enc onUnmappableCharacter _onUnmappableCharacter
+ if (_encodingReplacement ne null) enc replaceWith _encodingReplacement
+ enc
+ }
+ def decoder: CharsetDecoder = {
+ val dec = charSet.newDecoder()
+ if (_onMalformedInput ne null) dec onMalformedInput _onMalformedInput
+ if (_onUnmappableCharacter ne null) dec onUnmappableCharacter _onUnmappableCharacter
+ if (_decodingReplacement ne null) dec replaceWith _decodingReplacement
+ dec
+ }
def wrap(body: => Int): Int =
try body catch { case e: CharacterCodingException => _onCodingException(e) }
-
- // call a series of side effecting methods on an object, finally returning the object
- private def applyFunctions[T](x: T, fs: Configure[T]*) =
- fs.foldLeft(x)((x, pair) => pair match {
- case (f, cond) => if (cond) f(x) else x
- })
}
trait LowPriorityCodecImplicits {
self: Codec.type =>
/** The Codec of Last Resort. */
- implicit def fallbackSystemCodec: Codec = defaultCharsetCodec
+ implicit lazy val fallbackSystemCodec: Codec = defaultCharsetCodec
}
object Codec extends LowPriorityCodecImplicits {
@@ -90,9 +85,9 @@ object Codec extends LowPriorityCodecImplicits {
* the fact that you can influence anything at all via -Dfile.encoding
* as an accident, with any anomalies considered "not a bug".
*/
- def defaultCharsetCodec = apply(Charset.defaultCharset)
- def fileEncodingCodec = apply(scala.util.Properties.encodingString)
- def default = defaultCharsetCodec
+ def defaultCharsetCodec = apply(Charset.defaultCharset)
+ def fileEncodingCodec = apply(scala.util.Properties.encodingString)
+ def default = defaultCharsetCodec
def apply(encoding: String): Codec = new Codec(Charset forName encoding)
def apply(charSet: Charset): Codec = new Codec(charSet)
@@ -130,7 +125,7 @@ object Codec extends LowPriorityCodecImplicits {
bytes
}
- implicit def string2codec(s: String) = apply(s)
- implicit def charset2codec(c: Charset) = apply(c)
- implicit def decoder2codec(cd: CharsetDecoder) = apply(cd)
+ implicit def string2codec(s: String): Codec = apply(s)
+ implicit def charset2codec(c: Charset): Codec = apply(c)
+ implicit def decoder2codec(cd: CharsetDecoder): Codec = apply(cd)
}
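A configuration sketch for the restructured encoder/decoder builders; the charset choice and file name are assumptions:
  import java.nio.charset.CodingErrorAction
  import scala.io.{ Codec, Source }
  implicit val codec: Codec = Codec("UTF-8")
    .onMalformedInput(CodingErrorAction.REPLACE)
    .onUnmappableCharacter(CodingErrorAction.REPLACE)
  val text = Source.fromFile("data.txt").mkString   // decoded using the codec above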
diff --git a/src/library/scala/io/Position.scala b/src/library/scala/io/Position.scala
index daa4e103be..85149223ee 100644
--- a/src/library/scala/io/Position.scala
+++ b/src/library/scala/io/Position.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.io
+package scala
+package io
/** The object Position provides convenience methods to encode
* line and column number in one single integer. The encoded line
@@ -68,14 +69,6 @@ abstract class Position {
}
object Position extends Position {
- /** The undefined position */
- @deprecated("This will be removed", "2.9.0")
- final val NOPOS = 0
-
- /** The first position in a source file */
- @deprecated("This will be removed", "2.9.0")
- final val FIRSTPOS = encode(1, 1)
-
def checkInput(line: Int, column: Int) {
if (line < 0)
throw new IllegalArgumentException(line + " < 0")
diff --git a/src/library/scala/io/ReadStdin.scala b/src/library/scala/io/ReadStdin.scala
new file mode 100644
index 0000000000..e82c26ef7a
--- /dev/null
+++ b/src/library/scala/io/ReadStdin.scala
@@ -0,0 +1,228 @@
+package scala
+package io
+
+import java.text.MessageFormat
+
+/** private[scala] because this is not functionality we should be providing
+ * in the standard library, at least not in this idiosyncratic form.
+ * Factored into trait because it is better code structure regardless.
+ */
+private[scala] trait ReadStdin {
+ import scala.Console._
+
+ /** Read a full line from the default input. Returns `null` if the end of the
+ * input stream has been reached.
+ *
+ * @return the string read from the terminal or null if the end of stream was reached.
+ */
+ def readLine(): String = in.readLine()
+
+ /** Print formatted text to the default output and read a full line from the default input.
+ * Returns `null` if the end of the input stream has been reached.
+ *
+ * @param text the format of the text to print out, as in `printf`.
+ * @param args the parameters used to instantiate the format, as in `printf`.
+ * @return the string read from the default input
+ */
+ def readLine(text: String, args: Any*): String = {
+ printf(text, args: _*)
+ readLine()
+ }
+
+ /** Reads a boolean value from an entire line of the default input.
+ * Has a fairly liberal interpretation of the input.
+ *
+ * @return the boolean value read, or false if it couldn't be converted to a boolean
+ * @throws java.io.EOFException if the end of the input stream has been reached.
+ */
+ def readBoolean(): Boolean = {
+ val s = readLine()
+ if (s == null)
+ throw new java.io.EOFException("Console has reached end of input")
+ else
+ s.toLowerCase() match {
+ case "true" => true
+ case "t" => true
+ case "yes" => true
+ case "y" => true
+ case _ => false
+ }
+ }
+
+ /** Reads a byte value from an entire line of the default input.
+ *
+ * @return the Byte that was read
+ * @throws java.io.EOFException if the end of the
+ * input stream has been reached.
+ * @throws java.lang.NumberFormatException if the value couldn't be converted to a Byte
+ */
+ def readByte(): Byte = {
+ val s = readLine()
+ if (s == null)
+ throw new java.io.EOFException("Console has reached end of input")
+ else
+ s.toByte
+ }
+
+ /** Reads a short value from an entire line of the default input.
+ *
+ * @return the short that was read
+ * @throws java.io.EOFException if the end of the
+ * input stream has been reached.
+ * @throws java.lang.NumberFormatException if the value couldn't be converted to a Short
+ */
+ def readShort(): Short = {
+ val s = readLine()
+ if (s == null)
+ throw new java.io.EOFException("Console has reached end of input")
+ else
+ s.toShort
+ }
+
+ /** Reads a char value from an entire line of the default input.
+ *
+ * @return the Char that was read
+ * @throws java.io.EOFException if the end of the
+ * input stream has been reached.
+ * @throws java.lang.StringIndexOutOfBoundsException if the line read from default input was empty
+ */
+ def readChar(): Char = {
+ val s = readLine()
+ if (s == null)
+ throw new java.io.EOFException("Console has reached end of input")
+ else
+ s charAt 0
+ }
+
+ /** Reads an int value from an entire line of the default input.
+ *
+ * @return the Int that was read
+ * @throws java.io.EOFException if the end of the
+ * input stream has been reached.
+ * @throws java.lang.NumberFormatException if the value couldn't be converted to an Int
+ */
+ def readInt(): Int = {
+ val s = readLine()
+ if (s == null)
+ throw new java.io.EOFException("Console has reached end of input")
+ else
+ s.toInt
+ }
+
+ /** Reads a long value from an entire line of the default input.
+ *
+ * @return the Long that was read
+ * @throws java.io.EOFException if the end of the
+ * input stream has been reached.
+ * @throws java.lang.NumberFormatException if the value couldn't be converted to a Long
+ */
+ def readLong(): Long = {
+ val s = readLine()
+ if (s == null)
+ throw new java.io.EOFException("Console has reached end of input")
+ else
+ s.toLong
+ }
+
+ /** Reads a float value from an entire line of the default input.
+ * @return the Float that was read.
+ * @throws java.io.EOFException if the end of the
+ * input stream has been reached.
+ * @throws java.lang.NumberFormatException if the value couldn't be converted to a Float
+ *
+ */
+ def readFloat(): Float = {
+ val s = readLine()
+ if (s == null)
+ throw new java.io.EOFException("Console has reached end of input")
+ else
+ s.toFloat
+ }
+
+ /** Reads a double value from an entire line of the default input.
+ *
+ * @return the Double that was read.
+ * @throws java.io.EOFException if the end of the
+ * input stream has been reached.
+ * @throws java.lang.NumberFormatException if the value couldn't be converted to a Double
+ */
+ def readDouble(): Double = {
+ val s = readLine()
+ if (s == null)
+ throw new java.io.EOFException("Console has reached end of input")
+ else
+ s.toDouble
+ }
+
+ /** Reads in some structured input (from the default input), specified by
+ * a format specifier. See class `java.text.MessageFormat` for details of
+ * the format specification.
+ *
+ * @param format the format of the input.
+ * @return a list of all extracted values.
+ * @throws java.io.EOFException if the end of the input stream has been
+ * reached.
+ */
+ def readf(format: String): List[Any] = {
+ val s = readLine()
+ if (s == null)
+ throw new java.io.EOFException("Console has reached end of input")
+ else
+ textComponents(new MessageFormat(format).parse(s))
+ }
+
+ /** Reads in some structured input (from the default input), specified by
+ * a format specifier, returning only the first value extracted, according
+ * to the format specification.
+ *
+ * @param format format string, as accepted by `readf`.
+ * @return The first value that was extracted from the input
+ */
+ def readf1(format: String): Any = readf(format).head
+
+ /** Reads in some structured input (from the default input), specified
+ * by a format specifier, returning only the first two values extracted,
+ * according to the format specification.
+ *
+ * @param format format string, as accepted by `readf`.
+ * @return A [[scala.Tuple2]] containing the first two values extracted
+ */
+ def readf2(format: String): (Any, Any) = {
+ val res = readf(format)
+ (res.head, res.tail.head)
+ }
+
+ /** Reads in some structured input (from the default input), specified
+ * by a format specifier, returning only the first three values extracted,
+ * according to the format specification.
+ *
+ * @param format format string, as accepted by `readf`.
+ * @return A [[scala.Tuple3]] containing the first three values extracted
+ */
+ def readf3(format: String): (Any, Any, Any) = {
+ val res = readf(format)
+ (res.head, res.tail.head, res.tail.tail.head)
+ }
+
+ private def textComponents(a: Array[AnyRef]): List[Any] = {
+ var i: Int = a.length - 1
+ var res: List[Any] = Nil
+ while (i >= 0) {
+ res = (a(i) match {
+ case x: java.lang.Boolean => x.booleanValue()
+ case x: java.lang.Byte => x.byteValue()
+ case x: java.lang.Short => x.shortValue()
+ case x: java.lang.Character => x.charValue()
+ case x: java.lang.Integer => x.intValue()
+ case x: java.lang.Long => x.longValue()
+ case x: java.lang.Float => x.floatValue()
+ case x: java.lang.Double => x.doubleValue()
+ case x => x
+ }) :: res
+ i -= 1
+ }
+ res
+ }
+}
+
+object ReadStdin extends ReadStdin { }
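
A short usage sketch for the trait added above. Because it is private[scala], only
library code (for instance the Console readers) can call it directly, so treat the
snippet as illustrative of the behaviour rather than as an application-level API;
the prompt text and inputs are made up.

    import scala.io.ReadStdin

    // printf-style prompt, then a line of input; null signals end of stream
    val name = ReadStdin.readLine("What is your name? ")

    // typed readers throw java.io.EOFException at end of stream and
    // NumberFormatException when the line cannot be parsed
    val age = ReadStdin.readInt()

    // MessageFormat-style extraction: {0} and {1} mark the values to pull out
    val fields = ReadStdin.readf("{0} {1}")   // List of the extracted values
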
diff --git a/src/library/scala/io/Source.scala b/src/library/scala/io/Source.scala
index b13729aefe..74c3e06839 100644
--- a/src/library/scala/io/Source.scala
+++ b/src/library/scala/io/Source.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.io
+package scala
+package io
import scala.collection.AbstractIterator
import java.io.{ FileInputStream, InputStream, PrintStream, File => JFile }
@@ -194,11 +195,11 @@ abstract class Source extends Iterator[Char] {
lazy val iter: BufferedIterator[Char] = Source.this.iter.buffered
def isNewline(ch: Char) = ch == '\r' || ch == '\n'
def getc() = iter.hasNext && {
- val ch = iter.next
+ val ch = iter.next()
if (ch == '\n') false
else if (ch == '\r') {
if (iter.hasNext && iter.head == '\n')
- iter.next
+ iter.next()
false
}
@@ -209,7 +210,7 @@ abstract class Source extends Iterator[Char] {
}
def hasNext = iter.hasNext
def next = {
- sb.clear
+ sb.clear()
while (getc()) { }
sb.toString
}
@@ -227,7 +228,7 @@ abstract class Source extends Iterator[Char] {
/** Returns next character.
*/
- def next(): Char = positioner.next
+ def next(): Char = positioner.next()
class Positioner(encoder: Position) {
def this() = this(RelaxedPosition)
@@ -245,7 +246,7 @@ abstract class Source extends Iterator[Char] {
var tabinc = 4
def next(): Char = {
- ch = iter.next
+ ch = iter.next()
pos = encoder.encode(cline, ccol)
ch match {
case '\n' =>
@@ -267,7 +268,7 @@ abstract class Source extends Iterator[Char] {
}
object RelaxedPositioner extends Positioner(RelaxedPosition) { }
object NoPositioner extends Positioner(Position) {
- override def next(): Char = iter.next
+ override def next(): Char = iter.next()
}
def ch = positioner.ch
def pos = positioner.pos
diff --git a/src/library/scala/io/UTF8Codec.scala b/src/library/scala/io/UTF8Codec.scala
deleted file mode 100644
index e4c2145153..0000000000
--- a/src/library/scala/io/UTF8Codec.scala
+++ /dev/null
@@ -1,32 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-package scala.io
-
-/**
- * @author Martin Odersky
- * @version 1.0, 04/10/2004
- */
-@deprecated("This class will be removed.", "2.10.0")
-object UTF8Codec {
- final val UNI_REPLACEMENT_CHAR: Int = 0x0000FFFD
- final val UNI_REPLACEMENT_BYTES = Array[Byte](-17, -65, -67)
-
- // Note, from http://unicode.org/faq/utf_bom.html#utf8-5
- //
- // A different issue arises if an unpaired surrogate is encountered when converting
- // ill-formed UTF-16 data. By representing such an unpaired surrogate on its own as a
- // 3-byte sequence, the resulting UTF-8 data stream would become ill-formed.
- // While it faithfully reflects the nature of the input, Unicode conformance
- // requires that encoding form conversion always results in a valid data stream.
- // Therefore a converter must treat this as an error.
- //
- // Some useful locations:
- // http://www.cl.cam.ac.uk/~mgk25/ucs/examples/UTF-8-test.txt
-}
diff --git a/src/library/scala/math/BigDecimal.scala b/src/library/scala/math/BigDecimal.scala
index 7c14ed3a9e..00a3686585 100644
--- a/src/library/scala/math/BigDecimal.scala
+++ b/src/library/scala/math/BigDecimal.scala
@@ -7,7 +7,8 @@
\* */
-package scala.math
+package scala
+package math
import java.{ lang => jl }
import java.math.{ MathContext, BigDecimal => BigDec }
@@ -25,12 +26,6 @@ object BigDecimal {
private val maxCached = 512
val defaultMathContext = MathContext.DECIMAL128
- @deprecated("Use Long.MinValue", "2.9.0")
- val MinLong = new BigDecimal(BigDec valueOf Long.MinValue, defaultMathContext)
-
- @deprecated("Use Long.MaxValue", "2.9.0")
- val MaxLong = new BigDecimal(BigDec valueOf Long.MaxValue, defaultMathContext)
-
/** Cache only for defaultMathContext using BigDecimals in a small range. */
private lazy val cache = new Array[BigDecimal](maxCached - minCached + 1)
@@ -49,6 +44,7 @@ object BigDecimal {
*/
def valueOf(d: Double): BigDecimal = apply(BigDec valueOf d)
def valueOf(d: Double, mc: MathContext): BigDecimal = apply(BigDec valueOf d, mc)
+ def valueOf(x: Long): BigDecimal = apply(x.toDouble)
/** Constructs a `BigDecimal` whose value is equal to that of the
* specified `Integer` value.
@@ -61,10 +57,10 @@ object BigDecimal {
if (mc == defaultMathContext && minCached <= i && i <= maxCached) {
val offset = i - minCached
var n = cache(offset)
- if (n eq null) { n = new BigDecimal(BigDec.valueOf(i), mc); cache(offset) = n }
+ if (n eq null) { n = new BigDecimal(BigDec.valueOf(i.toLong), mc); cache(offset) = n }
n
}
- else new BigDecimal(BigDec.valueOf(i), mc)
+ else new BigDecimal(BigDec.valueOf(i.toLong), mc)
/** Constructs a `BigDecimal` whose value is equal to that of the
* specified long value.
@@ -104,6 +100,9 @@ object BigDecimal {
def apply(d: Double, mc: MathContext): BigDecimal =
new BigDecimal(new BigDec(jl.Double.toString(d), mc), mc)
+ def apply(x: Float): BigDecimal = apply(x.toDouble)
+ def apply(x: Float, mc: MathContext): BigDecimal = apply(x.toDouble, mc)
+
/** Translates a character array representation of a `BigDecimal`
* into a `BigDecimal`.
*/
@@ -177,7 +176,7 @@ extends ScalaNumber with ScalaNumericConversions with Serializable {
* with unequal hashCodes.
*/
override def hashCode(): Int =
- if (isWhole) unifiedPrimitiveHashcode
+ if (isWhole()) unifiedPrimitiveHashcode()
else doubleValue.##
/** Compares this BigDecimal with the specified value for equality.
@@ -198,7 +197,7 @@ extends ScalaNumber with ScalaNumericConversions with Serializable {
*/
def isValidFloat = {
val f = toFloat
- !f.isInfinity && bigDecimal.compareTo(new java.math.BigDecimal(f)) == 0
+ !f.isInfinity && bigDecimal.compareTo(new java.math.BigDecimal(f.toDouble)) == 0
}
/** Returns `true` iff this can be represented exactly by [[scala.Double]]; otherwise returns `false`.
*/
@@ -339,21 +338,21 @@ extends ScalaNumber with ScalaNumericConversions with Serializable {
override def byteValue = intValue.toByte
/** Converts this BigDecimal to a Short.
- * If the BigDecimal is too big to fit in a Byte, only the low-order 16 bits are returned.
+ * If the BigDecimal is too big to fit in a Short, only the low-order 16 bits are returned.
* Note that this conversion can lose information about the overall magnitude of the
* BigDecimal value as well as return a result with the opposite sign.
*/
override def shortValue = intValue.toShort
/** Converts this BigDecimal to a Char.
- * If the BigDecimal is too big to fit in a char, only the low-order 16 bits are returned.
+ * If the BigDecimal is too big to fit in a Char, only the low-order 16 bits are returned.
* Note that this conversion can lose information about the overall magnitude of the
* BigDecimal value and that it always returns a positive result.
*/
def charValue = intValue.toChar
/** Converts this BigDecimal to an Int.
- * If the BigDecimal is too big to fit in a char, only the low-order 32 bits
+ * If the BigDecimal is too big to fit in an Int, only the low-order 32 bits
* are returned. Note that this conversion can lose information about the
* overall magnitude of the BigDecimal value as well as return a result with
* the opposite sign.
@@ -361,7 +360,7 @@ extends ScalaNumber with ScalaNumericConversions with Serializable {
def intValue = this.bigDecimal.intValue
/** Converts this BigDecimal to a Long.
- * If the BigDecimal is too big to fit in a char, only the low-order 64 bits
+ * If the BigDecimal is too big to fit in a Long, only the low-order 64 bits
* are returned. Note that this conversion can lose information about the
* overall magnitude of the BigDecimal value as well as return a result with
* the opposite sign.
diff --git a/src/library/scala/math/BigInt.scala b/src/library/scala/math/BigInt.scala
index fdba0ec716..afc4d5c535 100644
--- a/src/library/scala/math/BigInt.scala
+++ b/src/library/scala/math/BigInt.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.math
+package scala
+package math
import java.math.BigInteger
import scala.language.implicitConversions
@@ -23,12 +24,6 @@ object BigInt {
private val cache = new Array[BigInt](maxCached - minCached + 1)
private val minusOne = BigInteger.valueOf(-1)
- @deprecated("Use Long.MinValue", "2.9.0")
- val MinLong = BigInt(Long.MinValue)
-
- @deprecated("Use Long.MaxValue", "2.9.0")
- val MaxLong = BigInt(Long.MaxValue)
-
/** Constructs a `BigInt` whose value is equal to that of the
* specified integer value.
*
@@ -39,9 +34,9 @@ object BigInt {
if (minCached <= i && i <= maxCached) {
val offset = i - minCached
var n = cache(offset)
- if (n eq null) { n = new BigInt(BigInteger.valueOf(i)); cache(offset) = n }
+ if (n eq null) { n = new BigInt(BigInteger.valueOf(i.toLong)); cache(offset) = n }
n
- } else new BigInt(BigInteger.valueOf(i))
+ } else new BigInt(BigInteger.valueOf(i.toLong))
/** Constructs a `BigInt` whose value is equal to that of the
* specified long value.
@@ -118,7 +113,7 @@ object BigInt {
class BigInt(val bigInteger: BigInteger) extends ScalaNumber with ScalaNumericConversions with Serializable {
/** Returns the hash code for this BigInt. */
override def hashCode(): Int =
- if (isValidLong) unifiedPrimitiveHashcode
+ if (isValidLong) unifiedPrimitiveHashcode()
else bigInteger.##
/** Compares this BigInt with the specified value for equality.
@@ -295,9 +290,6 @@ class BigInt(val bigInteger: BigInteger) extends ScalaNumber with ScalaNumericCo
*/
def signum: Int = this.bigInteger.signum()
- @deprecated("Use ~bigInt (the unary_~ method) instead", "2.10.0")
- def ~ : BigInt = ~this
-
/** Returns the bitwise complement of this BigInt
*/
def unary_~ : BigInt = new BigInt(this.bigInteger.not())
@@ -364,7 +356,7 @@ class BigInt(val bigInteger: BigInteger) extends ScalaNumber with ScalaNumericCo
def charValue = intValue.toChar
/** Converts this BigInt to an <tt>int</tt>.
- * If the BigInt is too big to fit in a int, only the low-order 32 bits
+ * If the BigInt is too big to fit in an int, only the low-order 32 bits
* are returned. Note that this conversion can lose information about the
* overall magnitude of the BigInt value as well as return a result with
* the opposite sign.
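
The cache hunk above (the BigInteger.valueOf(i.toLong) change) only makes the
Int-to-Long widening explicit; the small-value cache still hands out one shared
instance per cached Int. A rough illustration, assuming 1 falls inside the cached
range and 1000000 does not:

    val cached = BigInt(1) eq BigInt(1)               // true: same cached instance
    val fresh  = BigInt(1000000) eq BigInt(1000000)   // false: freshly allocated
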
diff --git a/src/library/scala/math/Equiv.scala b/src/library/scala/math/Equiv.scala
index 5f5e049941..45b2b3629d 100644
--- a/src/library/scala/math/Equiv.scala
+++ b/src/library/scala/math/Equiv.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.math
+package scala
+package math
import java.util.Comparator
diff --git a/src/library/scala/math/Fractional.scala b/src/library/scala/math/Fractional.scala
index ca33675b0a..b7e0ed5471 100644
--- a/src/library/scala/math/Fractional.scala
+++ b/src/library/scala/math/Fractional.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.math
+package scala
+package math
import scala.language.implicitConversions
diff --git a/src/library/scala/math/Integral.scala b/src/library/scala/math/Integral.scala
index f3684c4e5d..ff1f695f6d 100644
--- a/src/library/scala/math/Integral.scala
+++ b/src/library/scala/math/Integral.scala
@@ -8,7 +8,8 @@
-package scala.math
+package scala
+package math
import scala.language.implicitConversions
diff --git a/src/library/scala/math/Numeric.scala b/src/library/scala/math/Numeric.scala
index 5a76f4f5f2..e6644c0dfc 100644
--- a/src/library/scala/math/Numeric.scala
+++ b/src/library/scala/math/Numeric.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.math
+package scala
+package math
import scala.language.implicitConversions
@@ -50,9 +51,9 @@ object Numeric {
def negate(x: Int): Int = -x
def fromInt(x: Int): Int = x
def toInt(x: Int): Int = x
- def toLong(x: Int): Long = x
- def toFloat(x: Int): Float = x
- def toDouble(x: Int): Double = x
+ def toLong(x: Int): Long = x.toLong
+ def toFloat(x: Int): Float = x.toFloat
+ def toDouble(x: Int): Double = x.toDouble
}
implicit object IntIsIntegral extends IntIsIntegral with Ordering.IntOrdering
@@ -108,11 +109,11 @@ object Numeric {
def quot(x: Long, y: Long): Long = x / y
def rem(x: Long, y: Long): Long = x % y
def negate(x: Long): Long = -x
- def fromInt(x: Int): Long = x
+ def fromInt(x: Int): Long = x.toLong
def toInt(x: Long): Int = x.toInt
def toLong(x: Long): Long = x
- def toFloat(x: Long): Float = x
- def toDouble(x: Long): Double = x
+ def toFloat(x: Long): Float = x.toFloat
+ def toDouble(x: Long): Double = x.toDouble
}
implicit object LongIsIntegral extends LongIsIntegral with Ordering.LongOrdering
@@ -121,11 +122,11 @@ object Numeric {
def minus(x: Float, y: Float): Float = x - y
def times(x: Float, y: Float): Float = x * y
def negate(x: Float): Float = -x
- def fromInt(x: Int): Float = x
+ def fromInt(x: Int): Float = x.toFloat
def toInt(x: Float): Int = x.toInt
def toLong(x: Float): Long = x.toLong
def toFloat(x: Float): Float = x
- def toDouble(x: Float): Double = x
+ def toDouble(x: Float): Double = x.toDouble
}
trait FloatIsFractional extends FloatIsConflicted with Fractional[Float] {
def div(x: Float, y: Float): Float = x / y
@@ -143,7 +144,7 @@ object Numeric {
def minus(x: Double, y: Double): Double = x - y
def times(x: Double, y: Double): Double = x * y
def negate(x: Double): Double = -x
- def fromInt(x: Int): Double = x
+ def fromInt(x: Int): Double = x.toDouble
def toInt(x: Double): Int = x.toInt
def toLong(x: Double): Long = x.toLong
def toFloat(x: Double): Float = x.toFloat
diff --git a/src/library/scala/math/Ordered.scala b/src/library/scala/math/Ordered.scala
index e8be92eb4a..51f2765a63 100644
--- a/src/library/scala/math/Ordered.scala
+++ b/src/library/scala/math/Ordered.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.math
+package scala
+package math
import scala.language.implicitConversions
diff --git a/src/library/scala/math/Ordering.scala b/src/library/scala/math/Ordering.scala
index 11b12050c9..d1a4e7c35c 100644
--- a/src/library/scala/math/Ordering.scala
+++ b/src/library/scala/math/Ordering.scala
@@ -26,14 +26,14 @@ import scala.language.{implicitConversions, higherKinds}
* val pairs = Array(("a", 5, 2), ("c", 3, 1), ("b", 1, 3))
*
* // sort by 2nd element
- * Sorting.quickSort(pairs)(Ordering.by[(String, Int, Int), Int](_._2))
+ * Sorting.quickSort(pairs)(Ordering.by[(String, Int, Int), Int](_._2))
*
* // sort by the 3rd element, then 1st
* Sorting.quickSort(pairs)(Ordering[(Int, String)].on(x => (x._3, x._1)))
* }}}
*
* An Ordering[T] is implemented by specifying compare(a:T, b:T), which
- * decides how to order to instances a and b. Instances of Ordering[T] can be
+ * decides how to order two instances a and b. Instances of Ordering[T] can be
* used by things like scala.util.Sorting to sort collections like Array[T].
*
* For example:
@@ -173,7 +173,7 @@ object Ordering extends LowPriorityOrderingImplicits {
val ye = y.iterator
while (xe.hasNext && ye.hasNext) {
- val res = ord.compare(xe.next, ye.next)
+ val res = ord.compare(xe.next(), ye.next())
if (res != 0) return res
}
@@ -347,7 +347,7 @@ object Ordering extends LowPriorityOrderingImplicits {
val ye = y.iterator
while (xe.hasNext && ye.hasNext) {
- val res = ord.compare(xe.next, ye.next)
+ val res = ord.compare(xe.next(), ye.next())
if (res != 0) return res
}
diff --git a/src/library/scala/math/PartialOrdering.scala b/src/library/scala/math/PartialOrdering.scala
index a9e317d536..9e35381528 100644
--- a/src/library/scala/math/PartialOrdering.scala
+++ b/src/library/scala/math/PartialOrdering.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.math
+package scala
+package math
/** A trait for representing partial orderings. It is important to
* distinguish between a type that has a partial order and a representation
diff --git a/src/library/scala/math/PartiallyOrdered.scala b/src/library/scala/math/PartiallyOrdered.scala
index 7823e5b396..f58210d6a7 100644
--- a/src/library/scala/math/PartiallyOrdered.scala
+++ b/src/library/scala/math/PartiallyOrdered.scala
@@ -8,7 +8,8 @@
-package scala.math
+package scala
+package math
/** A class for partially ordered data.
*
diff --git a/src/library/scala/math/ScalaNumber.java b/src/library/scala/math/ScalaNumber.java
index 7345147b0d..f03ba7bf08 100644
--- a/src/library/scala/math/ScalaNumber.java
+++ b/src/library/scala/math/ScalaNumber.java
@@ -6,8 +6,6 @@
** |/ **
\* */
-
-
package scala.math;
/** A marker class for Number types introduced by Scala
diff --git a/src/library/scala/math/ScalaNumericConversions.scala b/src/library/scala/math/ScalaNumericConversions.scala
index 6ddf48d03b..336991781e 100644
--- a/src/library/scala/math/ScalaNumericConversions.scala
+++ b/src/library/scala/math/ScalaNumericConversions.scala
@@ -6,9 +6,8 @@
** |/ **
\* */
-package scala.math
-
-import java.{ lang => jl }
+package scala
+package math
/** A slightly more specific conversion trait for classes which
* extend ScalaNumber (which excludes value classes.)
@@ -34,37 +33,37 @@ trait ScalaNumericAnyConversions extends Any {
/** Returns the value of this as a [[scala.Char]]. This may involve
* rounding or truncation.
*/
- def toChar = intValue.toChar
+ def toChar = intValue().toChar
/** Returns the value of this as a [[scala.Byte]]. This may involve
* rounding or truncation.
*/
- def toByte = byteValue
+ def toByte = byteValue()
/** Returns the value of this as a [[scala.Short]]. This may involve
* rounding or truncation.
*/
- def toShort = shortValue
+ def toShort = shortValue()
/** Returns the value of this as an [[scala.Int]]. This may involve
* rounding or truncation.
*/
- def toInt = intValue
+ def toInt = intValue()
/** Returns the value of this as a [[scala.Long]]. This may involve
* rounding or truncation.
*/
- def toLong = longValue
+ def toLong = longValue()
/** Returns the value of this as a [[scala.Float]]. This may involve
* rounding or truncation.
*/
- def toFloat = floatValue
+ def toFloat = floatValue()
/** Returns the value of this as a [[scala.Double]]. This may involve
* rounding or truncation.
*/
- def toDouble = doubleValue
+ def toDouble = doubleValue()
/** Returns `true` iff this has a zero fractional part, and is within the
* range of [[scala.Byte]] MinValue and MaxValue; otherwise returns `false`.
diff --git a/src/library/scala/math/package.scala b/src/library/scala/math/package.scala
index cb033bda2c..af5dad5c38 100644
--- a/src/library/scala/math/package.scala
+++ b/src/library/scala/math/package.scala
@@ -16,12 +16,12 @@ package object math {
/** The `double` value that is closer than any other to `e`, the base of
* the natural logarithms.
*/
- val E = java.lang.Math.E
+ @inline final val E = java.lang.Math.E
/** The `double` value that is closer than any other to `pi`, the ratio of
* the circumference of a circle to its diameter.
*/
- val Pi = java.lang.Math.PI
+ @inline final val Pi = java.lang.Math.PI
/** Returns a `double` value with a positive sign, greater than or equal
* to `0.0` and less than `1.0`.
@@ -62,7 +62,7 @@ package object math {
def sqrt(x: Double): Double = java.lang.Math.sqrt(x)
def IEEEremainder(x: Double, y: Double): Double = java.lang.Math.IEEEremainder(x, y)
- def ceil(x: Double): Double = java.lang.Math.ceil(x)
+ def ceil(x: Double): Double = java.lang.Math.ceil(x)
def floor(x: Double): Double = java.lang.Math.floor(x)
/** Returns the `double` value that is closest in value to the
@@ -100,24 +100,30 @@ package object math {
*/
def round(x: Float): Int = java.lang.Math.round(x)
def round(x: Double): Long = java.lang.Math.round(x)
- def abs(x: Int): Int = java.lang.Math.abs(x)
- def abs(x: Long): Long = java.lang.Math.abs(x)
- def abs(x: Float): Float = java.lang.Math.abs(x)
+
+ def abs(x: Int): Int = java.lang.Math.abs(x)
+ def abs(x: Long): Long = java.lang.Math.abs(x)
+ def abs(x: Float): Float = java.lang.Math.abs(x)
def abs(x: Double): Double = java.lang.Math.abs(x)
- def max(x: Int, y: Int): Int = java.lang.Math.max(x, y)
- def max(x: Long, y: Long): Long = java.lang.Math.max(x, y)
- def max(x: Float, y: Float): Float = java.lang.Math.max(x, y)
+ def max(x: Int, y: Int): Int = java.lang.Math.max(x, y)
+ def max(x: Long, y: Long): Long = java.lang.Math.max(x, y)
+ def max(x: Float, y: Float): Float = java.lang.Math.max(x, y)
def max(x: Double, y: Double): Double = java.lang.Math.max(x, y)
- def min(x: Int, y: Int): Int = java.lang.Math.min(x, y)
- def min(x: Long, y: Long): Long = java.lang.Math.min(x, y)
- def min(x: Float, y: Float): Float = java.lang.Math.min(x, y)
+ def min(x: Int, y: Int): Int = java.lang.Math.min(x, y)
+ def min(x: Long, y: Long): Long = java.lang.Math.min(x, y)
+ def min(x: Float, y: Float): Float = java.lang.Math.min(x, y)
def min(x: Double, y: Double): Double = java.lang.Math.min(x, y)
- def signum(x: Int): Int = java.lang.Integer.signum(x)
- def signum(x: Long): Long = java.lang.Long.signum(x)
- def signum(x: Float): Float = java.lang.Math.signum(x)
+ /** Note that these are not pure forwarders to the java versions.
+ * In particular, the return type of java.lang.Long.signum is Int,
+ * but here it is widened to Long so that each overloaded variant
+ * will return the same numeric type it is passed.
+ */
+ def signum(x: Int): Int = java.lang.Integer.signum(x)
+ def signum(x: Long): Long = java.lang.Long.signum(x)
+ def signum(x: Float): Float = java.lang.Math.signum(x)
def signum(x: Double): Double = java.lang.Math.signum(x)
// -----------------------------------------------------------------------
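
To make the widening note above concrete (a tiny sketch, not part of the diff):
java.lang.Long.signum returns an Int, while the scala.math overload keeps the
argument's numeric type.

    val j: Int  = java.lang.Long.signum(-42L)   // the Java method returns Int
    val s: Long = math.signum(-42L)             // the Scala overload widens to Long
    assert(j == -1 && s == -1L)
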
diff --git a/src/library/scala/package.scala b/src/library/scala/package.scala
index 84f6f0be9c..224112c11c 100644
--- a/src/library/scala/package.scala
+++ b/src/library/scala/package.scala
@@ -34,12 +34,6 @@ package object scala {
override def toString = "object AnyRef"
}
- @deprecated("instead of `@serializable class C`, use `class C extends Serializable`", "2.9.0")
- type serializable = annotation.serializable
-
- @deprecated("instead of `@cloneable class C`, use `class C extends Cloneable`", "2.10.0")
- type cloneable = annotation.cloneable
-
type TraversableOnce[+A] = scala.collection.TraversableOnce[A]
type Traversable[+A] = scala.collection.Traversable[A]
@@ -95,7 +89,10 @@ package object scala {
val Equiv = scala.math.Equiv
type Fractional[T] = scala.math.Fractional[T]
+ val Fractional = scala.math.Fractional
+
type Integral[T] = scala.math.Integral[T]
+ val Integral = scala.math.Integral
type Numeric[T] = scala.math.Numeric[T]
val Numeric = scala.math.Numeric
@@ -121,14 +118,12 @@ package object scala {
// Annotations which we might move to annotation.*
/*
type SerialVersionUID = annotation.SerialVersionUID
- type cloneable = annotation.cloneable
type deprecated = annotation.deprecated
type deprecatedName = annotation.deprecatedName
type inline = annotation.inline
type native = annotation.native
- type noinline = noannotation.inline
+ type noinline = annotation.noinline
type remote = annotation.remote
- type serializable = annotation.serializable
type specialized = annotation.specialized
type transient = annotation.transient
type throws = annotation.throws
diff --git a/src/library/scala/parallel/Future.scala b/src/library/scala/parallel/Future.scala
deleted file mode 100644
index e255a5772b..0000000000
--- a/src/library/scala/parallel/Future.scala
+++ /dev/null
@@ -1,39 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2005-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala.parallel
-
-
-
-/** A future is a function without parameters that will block the caller if
- * the parallel computation associated with the function is not completed.
- *
- * @tparam R the type of the result
- *
- * @since 2.9
- */
-@deprecated("Use `scala.concurrent.Future` instead.", "2.10.0")
-trait Future[@specialized +R] extends (() => R) {
- /** Returns a result once the parallel computation completes. If the
- * computation produced an exception, an exception is forwarded.
- *
- * '''Note:''' creating a circular dependency between futures by calling
- * this method will result in a deadlock.
- *
- * @return the result
- * @throws the exception that was thrown during a parallel computation
- */
- def apply(): R
-
- /** Returns `true` if the parallel computation is completed.
- *
- * @return `true` if the parallel computation is completed, `false` otherwise
- */
- def isDone(): Boolean
-}
-
diff --git a/src/library/scala/parallel/package.scala.disabled b/src/library/scala/parallel/package.scala.disabled
deleted file mode 100644
index 45f5470d03..0000000000
--- a/src/library/scala/parallel/package.scala.disabled
+++ /dev/null
@@ -1,178 +0,0 @@
-package scala
-
-
-
-import scala.concurrent.forkjoin._
-
-
-/** This package object contains various parallel operations.
- *
- * @define invokingPar
- * Invoking a parallel computation creates a future which will
- * hold the result of the computation once it completes. Querying
- * the result of a future before its parallel computation has completed
- * will block the caller. For all practical concerns, the dependency
- * chain obtained by querying results of unfinished futures can have
- * arbitrary lengths. However, care must be taken not to create a
- * circular dependency, as this will result in a deadlock.
- *
- * Additionally, if the parallel computation performs a blocking call
- * (e.g. an I/O operation or waiting for a lock) other than waiting for a future,
- * it should do so by invoking the `block` method. This is another
- * form of waiting that could potentially create a circular dependency,
- * and the user should take care not to do this.
- *
- * Users should be aware that invoking a parallel computation has a
- * certain overhead. Parallel computations should not be invoked for
- * small computations, as this can lead to bad performance. A rule of the
- * thumb is having parallel computations equivalent to a loop
- * with 50000 arithmetic operations (at least). If a parallel computation
- * is invoked within another parallel computation, then it should be
- * computationally equivalent to a loop with 10000 arithmetic operations.
- */
-package object parallel {
-
- private[scala] val forkjoinpool = new ForkJoinPool()
-
- private class Task[T](body: =>T) extends RecursiveTask[T] with Future[T] {
- def compute = body
- def apply() = join()
- }
-
- private final def newTask[T](body: =>T) = new Task[T](body)
-
- private final def executeTask[T](task: RecursiveTask[T]) {
- if (Thread.currentThread().isInstanceOf[ForkJoinWorkerThread]) task.fork
- else forkjoinpool.execute(task)
- }
-
- /* public methods */
-
- /** Performs a call which can potentially block execution.
- *
- * Example:
- * {{{
- * val lock = new ReentrantLock
- *
- * // ... do something ...
- *
- * blocking {
- * if (!lock.hasLock) lock.lock()
- * }
- * }}}
- *
- * '''Note:''' calling methods that wait arbitrary amounts of time
- * (e.g. for I/O operations or locks) may severely decrease performance
- * or even result in deadlocks. This does not include waiting for
- * results of futures.
- */
- def blocking[T](body: =>T): T = {
- if (Thread.currentThread().isInstanceOf[ForkJoinWorkerThread]) {
- val blocker = new ForkJoinPool.ManagedBlocker {
- @volatile var done = false
- @volatile var result: Any = _
- def block() = {
- result = body
- done = true
- true
- }
- def isReleasable() = done
- }
- ForkJoinPool.managedBlock(blocker, true)
- blocker.result.asInstanceOf[T]
- } else body
- }
-
- /** Starts a parallel computation and returns a future.
- *
- * $invokingPar
- *
- * @tparam T the type of the result of the parallel computation
- * @param body the computation to be invoked in parallel
- * @return a future with the result
- */
- def par[T](body: =>T): Future[T] = {
- val task = newTask(body)
- executeTask(task)
- task
- }
-
- /** Starts 2 parallel computations and returns a future.
- *
- * $invokingPar
- *
- * @tparam T1 the type of the result of 1st the parallel computation
- * @tparam T2 the type of the result of 2nd the parallel computation
- * @param b1 the 1st computation to be invoked in parallel
- * @param b2 the 2nd computation to be invoked in parallel
- * @return a tuple of futures corresponding to parallel computations
- */
- def par[T1, T2](b1: =>T1, b2: =>T2): (Future[T1], Future[T2]) = {
- val t1 = newTask(b1)
- executeTask(t1)
- val t2 = newTask(b2)
- executeTask(t2)
- (t1, t2)
- }
-
- /** Starts 3 parallel computations and returns a future.
- *
- * $invokingPar
- *
- * @tparam T1 the type of the result of 1st the parallel computation
- * @tparam T2 the type of the result of 2nd the parallel computation
- * @tparam T3 the type of the result of 3rd the parallel computation
- * @param b1 the 1st computation to be invoked in parallel
- * @param b2 the 2nd computation to be invoked in parallel
- * @param b3 the 3rd computation to be invoked in parallel
- * @return a tuple of futures corresponding to parallel computations
- */
- def par[T1, T2, T3](b1: =>T1, b2: =>T2, b3: =>T3): (Future[T1], Future[T2], Future[T3]) = {
- val t1 = newTask(b1)
- executeTask(t1)
- val t2 = newTask(b2)
- executeTask(t2)
- val t3 = newTask(b3)
- executeTask(t3)
- (t1, t2, t3)
- }
-
- /** Starts 4 parallel computations and returns a future.
- *
- * $invokingPar
- *
- * @tparam T1 the type of the result of 1st the parallel computation
- * @tparam T2 the type of the result of 2nd the parallel computation
- * @tparam T3 the type of the result of 3rd the parallel computation
- * @tparam T4 the type of the result of 4th the parallel computation
- * @param b1 the 1st computation to be invoked in parallel
- * @param b2 the 2nd computation to be invoked in parallel
- * @param b3 the 3rd computation to be invoked in parallel
- * @param b4 the 4th computation to be invoked in parallel
- * @return a tuple of futures corresponding to parallel computations
- */
- def par[T1, T2, T3, T4](b1: =>T1, b2: =>T2, b3: =>T3, b4: =>T4): (Future[T1], Future[T2], Future[T3], Future[T4]) = {
- val t1 = newTask(b1)
- executeTask(t1)
- val t2 = newTask(b2)
- executeTask(t2)
- val t3 = newTask(b3)
- executeTask(t3)
- val t4 = newTask(b4)
- executeTask(t4)
- (t1, t2, t3, t4)
- }
-
-}
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/src/library/scala/ref/SoftReference.scala b/src/library/scala/ref/SoftReference.scala
index b414db6e97..e4ce667981 100644
--- a/src/library/scala/ref/SoftReference.scala
+++ b/src/library/scala/ref/SoftReference.scala
@@ -13,7 +13,8 @@ package scala.ref
* @author Sean McDirmid
*/
class SoftReference[+T <: AnyRef](value : T, queue : ReferenceQueue[T]) extends ReferenceWrapper[T] {
- def this(value : T) = this(value, null);
+ def this(value : T) = this(value, null)
+
val underlying: java.lang.ref.SoftReference[_ <: T] =
new SoftReferenceWithWrapper[T](value, queue, this)
}
diff --git a/src/library/scala/reflect/ClassManifestDeprecatedApis.scala b/src/library/scala/reflect/ClassManifestDeprecatedApis.scala
index 0a3d818fb9..798746851a 100644
--- a/src/library/scala/reflect/ClassManifestDeprecatedApis.scala
+++ b/src/library/scala/reflect/ClassManifestDeprecatedApis.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.reflect
+package scala
+package reflect
import scala.collection.mutable.{ WrappedArray, ArrayBuilder }
import java.lang.{ Class => jClass }
diff --git a/src/library/scala/reflect/Manifest.scala b/src/library/scala/reflect/Manifest.scala
index f62d0ecd16..3bc76da295 100644
--- a/src/library/scala/reflect/Manifest.scala
+++ b/src/library/scala/reflect/Manifest.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.reflect
+package scala
+package reflect
import scala.collection.mutable.{ ArrayBuilder, WrappedArray }
diff --git a/src/library/scala/reflect/NameTransformer.scala b/src/library/scala/reflect/NameTransformer.scala
index 384ebc6134..a8430548f5 100755
--- a/src/library/scala/reflect/NameTransformer.scala
+++ b/src/library/scala/reflect/NameTransformer.scala
@@ -15,9 +15,12 @@ package reflect
object NameTransformer {
// XXX Short term: providing a way to alter these without having to recompile
// the compiler before recompiling the compiler.
- val MODULE_SUFFIX_STRING = sys.props.getOrElse("SCALA_MODULE_SUFFIX_STRING", "$")
- val NAME_JOIN_STRING = sys.props.getOrElse("SCALA_NAME_JOIN_STRING", "$")
- val MODULE_INSTANCE_NAME = "MODULE$"
+ val MODULE_SUFFIX_STRING = sys.props.getOrElse("SCALA_MODULE_SUFFIX_STRING", "$")
+ val NAME_JOIN_STRING = sys.props.getOrElse("SCALA_NAME_JOIN_STRING", "$")
+ val MODULE_INSTANCE_NAME = "MODULE$"
+ val LOCAL_SUFFIX_STRING = " "
+ val SETTER_SUFFIX_STRING = "_$eq"
+ val TRAIT_SETTER_SEPARATOR_STRING = "$_setter_$"
private val nops = 128
private val ncodes = 26 * 26
@@ -27,9 +30,9 @@ object NameTransformer {
private val op2code = new Array[String](nops)
private val code2op = new Array[OpCodes](ncodes)
private def enterOp(op: Char, code: String) = {
- op2code(op) = code
+ op2code(op.toInt) = code
val c = (code.charAt(1) - 'a') * 26 + code.charAt(2) - 'a'
- code2op(c) = new OpCodes(op, code, code2op(c))
+ code2op(c.toInt) = new OpCodes(op, code, code2op(c))
}
/* Note: decoding assumes opcodes are only ever lowercase. */
@@ -63,12 +66,12 @@ object NameTransformer {
var i = 0
while (i < len) {
val c = name charAt i
- if (c < nops && (op2code(c) ne null)) {
+ if (c < nops && (op2code(c.toInt) ne null)) {
if (buf eq null) {
buf = new StringBuilder()
buf.append(name.substring(0, i))
}
- buf.append(op2code(c))
+ buf.append(op2code(c.toInt))
/* Handle glyphs that are not valid Java/JVM identifiers */
}
else if (!Character.isJavaIdentifierPart(c)) {
@@ -93,8 +96,8 @@ object NameTransformer {
*/
def decode(name0: String): String = {
//System.out.println("decode: " + name);//DEBUG
- val name = if (name0.endsWith("<init>")) name0.substring(0, name0.length() - ("<init>").length()) + "this"
- else name0;
+ val name = if (name0.endsWith("<init>")) name0.stripSuffix("<init>") + "this"
+ else name0
var buf: StringBuilder = null
val len = name.length()
var i = 0
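
The encode/decode changes above are behaviour-preserving: the .toInt calls only make
the Char-to-Int array indexing explicit, and stripSuffix replaces the manual
substring arithmetic. A quick round-trip sketch:

    import scala.reflect.NameTransformer

    val enc = NameTransformer.encode("::")   // "$colon$colon"
    val dec = NameTransformer.decode(enc)    // "::"
    assert(dec == "::" && NameTransformer.encode("+") == "$plus")
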
diff --git a/src/library/scala/reflect/NoManifest.scala b/src/library/scala/reflect/NoManifest.scala
index 61bc5e28d3..2ef946c80c 100644
--- a/src/library/scala/reflect/NoManifest.scala
+++ b/src/library/scala/reflect/NoManifest.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.reflect
+package scala
+package reflect
/** One of the branches of an [[scala.reflect.OptManifest]].
*/
diff --git a/src/library/scala/reflect/OptManifest.scala b/src/library/scala/reflect/OptManifest.scala
index 5e373c7318..b69f55483c 100644
--- a/src/library/scala/reflect/OptManifest.scala
+++ b/src/library/scala/reflect/OptManifest.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.reflect
+package scala
+package reflect
/** A `OptManifest[T]` is an optional [[scala.reflect.Manifest]].
*
diff --git a/src/library/scala/reflect/package.scala b/src/library/scala/reflect/package.scala
index 10e6d7d9a4..74c1f2172c 100644
--- a/src/library/scala/reflect/package.scala
+++ b/src/library/scala/reflect/package.scala
@@ -1,5 +1,7 @@
package scala
+import java.lang.reflect.{ AccessibleObject => jAccessibleObject }
+
package object reflect {
// in the new scheme of things ClassManifests are aliased to ClassTags
@@ -42,11 +44,23 @@ package object reflect {
def classTag[T](implicit ctag: ClassTag[T]) = ctag
+ /** Make a java reflection object accessible, if it is not already
+ * and it is possible to do so. If a SecurityException is thrown in the
+ * attempt, it is caught and discarded.
+ */
+ def ensureAccessible[T <: jAccessibleObject](m: T): T = {
+ if (!m.isAccessible) {
+ try m setAccessible true
+ catch { case _: SecurityException => } // does nothing
+ }
+ m
+ }
+
// anchor for the class tag materialization macro emitted during tag materialization in Implicits.scala
// implementation is hardwired into `scala.reflect.reify.Taggers`
// using the mechanism implemented in `scala.tools.reflect.FastTrack`
// todo. once we have implicit macros for tag generation, we can remove this anchor
- private[scala] def materializeClassTag[T](): ClassTag[T] = ??? // macro
+ private[scala] def materializeClassTag[T](): ClassTag[T] = macro ???
@deprecated("Use `@scala.beans.BeanDescription` instead", "2.10.0")
type BeanDescription = scala.beans.BeanDescription
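
A minimal sketch of the new ensureAccessible helper above, using a hypothetical
class with a private field; because a SecurityException is swallowed, the call
degrades to a no-op where reflection is restricted.

    class Secret { private val token = 42 }   // hypothetical example class

    val field = classOf[Secret].getDeclaredField("token")
    scala.reflect.ensureAccessible(field)     // best-effort setAccessible(true)
    val value = field.get(new Secret)         // boxed java.lang.Integer 42 once accessible
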
diff --git a/src/library/scala/runtime/AbstractFunction0.scala b/src/library/scala/runtime/AbstractFunction0.scala
index 1b351c62ae..1e677e8008 100644
--- a/src/library/scala/runtime/AbstractFunction0.scala
+++ b/src/library/scala/runtime/AbstractFunction0.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/runtime/AbstractFunction1.scala b/src/library/scala/runtime/AbstractFunction1.scala
index a68a82e6a2..8d68017a6f 100644
--- a/src/library/scala/runtime/AbstractFunction1.scala
+++ b/src/library/scala/runtime/AbstractFunction1.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/runtime/AbstractFunction10.scala b/src/library/scala/runtime/AbstractFunction10.scala
index 72c0a2e69d..776f52238d 100644
--- a/src/library/scala/runtime/AbstractFunction10.scala
+++ b/src/library/scala/runtime/AbstractFunction10.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/runtime/AbstractFunction11.scala b/src/library/scala/runtime/AbstractFunction11.scala
index 031f3044a1..76cd8fbb3c 100644
--- a/src/library/scala/runtime/AbstractFunction11.scala
+++ b/src/library/scala/runtime/AbstractFunction11.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/runtime/AbstractFunction12.scala b/src/library/scala/runtime/AbstractFunction12.scala
index 9823edbc60..10066ed4b3 100644
--- a/src/library/scala/runtime/AbstractFunction12.scala
+++ b/src/library/scala/runtime/AbstractFunction12.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/runtime/AbstractFunction13.scala b/src/library/scala/runtime/AbstractFunction13.scala
index 528719b216..6c3a45734c 100644
--- a/src/library/scala/runtime/AbstractFunction13.scala
+++ b/src/library/scala/runtime/AbstractFunction13.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/runtime/AbstractFunction14.scala b/src/library/scala/runtime/AbstractFunction14.scala
index ecae45a107..bf2b6736f4 100644
--- a/src/library/scala/runtime/AbstractFunction14.scala
+++ b/src/library/scala/runtime/AbstractFunction14.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/runtime/AbstractFunction15.scala b/src/library/scala/runtime/AbstractFunction15.scala
index 5f5e8afcf7..5136f666c8 100644
--- a/src/library/scala/runtime/AbstractFunction15.scala
+++ b/src/library/scala/runtime/AbstractFunction15.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/runtime/AbstractFunction16.scala b/src/library/scala/runtime/AbstractFunction16.scala
index c0093c4fbb..dbafab8301 100644
--- a/src/library/scala/runtime/AbstractFunction16.scala
+++ b/src/library/scala/runtime/AbstractFunction16.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/runtime/AbstractFunction17.scala b/src/library/scala/runtime/AbstractFunction17.scala
index caae343a79..9c36dbf5d8 100644
--- a/src/library/scala/runtime/AbstractFunction17.scala
+++ b/src/library/scala/runtime/AbstractFunction17.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/runtime/AbstractFunction18.scala b/src/library/scala/runtime/AbstractFunction18.scala
index 9a2bdffee1..30eee9586f 100644
--- a/src/library/scala/runtime/AbstractFunction18.scala
+++ b/src/library/scala/runtime/AbstractFunction18.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/runtime/AbstractFunction19.scala b/src/library/scala/runtime/AbstractFunction19.scala
index 1dbbd61004..14baf5f1eb 100644
--- a/src/library/scala/runtime/AbstractFunction19.scala
+++ b/src/library/scala/runtime/AbstractFunction19.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/runtime/AbstractFunction2.scala b/src/library/scala/runtime/AbstractFunction2.scala
index 0905ea178c..223ade9983 100644
--- a/src/library/scala/runtime/AbstractFunction2.scala
+++ b/src/library/scala/runtime/AbstractFunction2.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/runtime/AbstractFunction20.scala b/src/library/scala/runtime/AbstractFunction20.scala
index eb4c085427..f5c29571bf 100644
--- a/src/library/scala/runtime/AbstractFunction20.scala
+++ b/src/library/scala/runtime/AbstractFunction20.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/runtime/AbstractFunction21.scala b/src/library/scala/runtime/AbstractFunction21.scala
index 98e32b22f1..15feea3a66 100644
--- a/src/library/scala/runtime/AbstractFunction21.scala
+++ b/src/library/scala/runtime/AbstractFunction21.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/runtime/AbstractFunction22.scala b/src/library/scala/runtime/AbstractFunction22.scala
index 67b13399d1..d77369ff01 100644
--- a/src/library/scala/runtime/AbstractFunction22.scala
+++ b/src/library/scala/runtime/AbstractFunction22.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/runtime/AbstractFunction3.scala b/src/library/scala/runtime/AbstractFunction3.scala
index 3a45cdcea5..f863509214 100644
--- a/src/library/scala/runtime/AbstractFunction3.scala
+++ b/src/library/scala/runtime/AbstractFunction3.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/runtime/AbstractFunction4.scala b/src/library/scala/runtime/AbstractFunction4.scala
index fbf55344f6..5927015ef8 100644
--- a/src/library/scala/runtime/AbstractFunction4.scala
+++ b/src/library/scala/runtime/AbstractFunction4.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/runtime/AbstractFunction5.scala b/src/library/scala/runtime/AbstractFunction5.scala
index 949bae8ab7..411e1e14bf 100644
--- a/src/library/scala/runtime/AbstractFunction5.scala
+++ b/src/library/scala/runtime/AbstractFunction5.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/runtime/AbstractFunction6.scala b/src/library/scala/runtime/AbstractFunction6.scala
index 337fd9f3fc..411c30d480 100644
--- a/src/library/scala/runtime/AbstractFunction6.scala
+++ b/src/library/scala/runtime/AbstractFunction6.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/runtime/AbstractFunction7.scala b/src/library/scala/runtime/AbstractFunction7.scala
index 24458678ad..498f98633a 100644
--- a/src/library/scala/runtime/AbstractFunction7.scala
+++ b/src/library/scala/runtime/AbstractFunction7.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/runtime/AbstractFunction8.scala b/src/library/scala/runtime/AbstractFunction8.scala
index 6d3dac849e..c6d320b887 100644
--- a/src/library/scala/runtime/AbstractFunction8.scala
+++ b/src/library/scala/runtime/AbstractFunction8.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/runtime/AbstractFunction9.scala b/src/library/scala/runtime/AbstractFunction9.scala
index 43cf3d2a74..34bd9d7107 100644
--- a/src/library/scala/runtime/AbstractFunction9.scala
+++ b/src/library/scala/runtime/AbstractFunction9.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
diff --git a/src/library/scala/runtime/AbstractPartialFunction.scala b/src/library/scala/runtime/AbstractPartialFunction.scala
index 57f8e2603b..e3516bc4d9 100644
--- a/src/library/scala/runtime/AbstractPartialFunction.scala
+++ b/src/library/scala/runtime/AbstractPartialFunction.scala
@@ -6,7 +6,10 @@
** |/ **
\* */
-package scala.runtime
+package scala
+package runtime
+
+import scala.annotation.unspecialized
/** `AbstractPartialFunction` reformulates all operations of its supertrait `PartialFunction`
* in terms of `isDefinedAt` and `applyOrElse`.
diff --git a/src/library/scala/runtime/BooleanRef.java b/src/library/scala/runtime/BooleanRef.java
index 889db31e2a..92e8055351 100644
--- a/src/library/scala/runtime/BooleanRef.java
+++ b/src/library/scala/runtime/BooleanRef.java
@@ -17,4 +17,7 @@ public class BooleanRef implements java.io.Serializable {
public boolean elem;
public BooleanRef(boolean elem) { this.elem = elem; }
public String toString() { return String.valueOf(elem); }
+
+ public static BooleanRef create(boolean e) { return new BooleanRef(e); }
+ public static BooleanRef zero() { return new BooleanRef(false); }
}
diff --git a/src/library/scala/runtime/Boxed.scala b/src/library/scala/runtime/Boxed.scala
index 8b531076ac..855f0ff41a 100644
--- a/src/library/scala/runtime/Boxed.scala
+++ b/src/library/scala/runtime/Boxed.scala
@@ -8,7 +8,9 @@
-package scala.runtime
+package scala
+package runtime
+
trait Boxed {
diff --git a/src/library/scala/runtime/ByteRef.java b/src/library/scala/runtime/ByteRef.java
index cc10611e26..27d3259db3 100644
--- a/src/library/scala/runtime/ByteRef.java
+++ b/src/library/scala/runtime/ByteRef.java
@@ -17,4 +17,7 @@ public class ByteRef implements java.io.Serializable {
public byte elem;
public ByteRef(byte elem) { this.elem = elem; }
public String toString() { return java.lang.Byte.toString(elem); }
+
+ public static ByteRef create(byte e) { return new ByteRef(e); }
+ public static ByteRef zero() { return new ByteRef((byte)0); }
}
diff --git a/src/library/scala/runtime/CharRef.java b/src/library/scala/runtime/CharRef.java
index 03d3337b3d..31956f5b55 100644
--- a/src/library/scala/runtime/CharRef.java
+++ b/src/library/scala/runtime/CharRef.java
@@ -17,4 +17,7 @@ public class CharRef implements java.io.Serializable {
public char elem;
public CharRef(char elem) { this.elem = elem; }
public String toString() { return java.lang.Character.toString(elem); }
+
+ public static CharRef create(char e) { return new CharRef(e); }
+ public static CharRef zero() { return new CharRef((char)0); }
}
diff --git a/src/library/scala/runtime/DoubleRef.java b/src/library/scala/runtime/DoubleRef.java
index 317198ee48..0c7d9156d6 100644
--- a/src/library/scala/runtime/DoubleRef.java
+++ b/src/library/scala/runtime/DoubleRef.java
@@ -17,4 +17,7 @@ public class DoubleRef implements java.io.Serializable {
public double elem;
public DoubleRef(double elem) { this.elem = elem; }
public String toString() { return java.lang.Double.toString(elem); }
+
+ public static DoubleRef create(double e) { return new DoubleRef(e); }
+ public static DoubleRef zero() { return new DoubleRef(0); }
}
diff --git a/src/library/scala/runtime/FloatRef.java b/src/library/scala/runtime/FloatRef.java
index e26b89be60..f0e1d5f8f3 100644
--- a/src/library/scala/runtime/FloatRef.java
+++ b/src/library/scala/runtime/FloatRef.java
@@ -17,4 +17,7 @@ public class FloatRef implements java.io.Serializable {
public float elem;
public FloatRef(float elem) { this.elem = elem; }
public String toString() { return java.lang.Float.toString(elem); }
+
+ public static FloatRef create(float e) { return new FloatRef(e); }
+ public static FloatRef zero() { return new FloatRef(0); }
}
diff --git a/src/library/scala/runtime/IntRef.java b/src/library/scala/runtime/IntRef.java
index edb6fafe94..adcf474aae 100644
--- a/src/library/scala/runtime/IntRef.java
+++ b/src/library/scala/runtime/IntRef.java
@@ -17,4 +17,7 @@ public class IntRef implements java.io.Serializable {
public int elem;
public IntRef(int elem) { this.elem = elem; }
public String toString() { return java.lang.Integer.toString(elem); }
+
+ public static IntRef create(int e) { return new IntRef(e); }
+ public static IntRef zero() { return new IntRef(0); }
}
diff --git a/src/library/scala/runtime/LongRef.java b/src/library/scala/runtime/LongRef.java
index 12004b5bb5..51426ab8f6 100644
--- a/src/library/scala/runtime/LongRef.java
+++ b/src/library/scala/runtime/LongRef.java
@@ -17,4 +17,7 @@ public class LongRef implements java.io.Serializable {
public long elem;
public LongRef(long elem) { this.elem = elem; }
public String toString() { return java.lang.Long.toString(elem); }
+
+ public static LongRef create(long e) { return new LongRef(e); }
+ public static LongRef zero() { return new LongRef(0); }
}
diff --git a/src/library/scala/runtime/MethodCache.scala b/src/library/scala/runtime/MethodCache.scala
index 217b51893b..bbf80593db 100644
--- a/src/library/scala/runtime/MethodCache.scala
+++ b/src/library/scala/runtime/MethodCache.scala
@@ -6,7 +6,9 @@
** |/ **
\* */
-package scala.runtime
+package scala
+package runtime
+
import java.lang.reflect.{ Method => JMethod }
import java.lang.{ Class => JClass }
diff --git a/src/library/scala/runtime/NonLocalReturnControl.scala b/src/library/scala/runtime/NonLocalReturnControl.scala
index b9525ef419..16b2fec6d7 100644
--- a/src/library/scala/runtime/NonLocalReturnControl.scala
+++ b/src/library/scala/runtime/NonLocalReturnControl.scala
@@ -6,7 +6,9 @@
** |/ **
\* */
-package scala.runtime
+package scala
+package runtime
+
import scala.util.control.ControlThrowable
diff --git a/src/library/scala/runtime/Nothing$.scala b/src/library/scala/runtime/Nothing$.scala
index 04fcc55a1c..4ecc536223 100644
--- a/src/library/scala/runtime/Nothing$.scala
+++ b/src/library/scala/runtime/Nothing$.scala
@@ -6,7 +6,9 @@
** |/ **
\* */
-package scala.runtime
+package scala
+package runtime
+
/**
* Dummy class which exist only to satisfy the JVM. It corresponds
diff --git a/src/library/scala/runtime/Null$.scala b/src/library/scala/runtime/Null$.scala
index 797b31583d..1b7685c507 100644
--- a/src/library/scala/runtime/Null$.scala
+++ b/src/library/scala/runtime/Null$.scala
@@ -6,11 +6,14 @@
** |/ **
\* */
-package scala.runtime
+package scala
+package runtime
+
/**
* Dummy class which exist only to satisfy the JVM. It corresponds to
* `scala.Null`. If such type appears in method signatures, it is erased
- * to this one.
+ * to this one. A private constructor ensures that Java code can't create
+ * subclasses. The only value of type Null$ should be null.
*/
-sealed abstract class Null$
+sealed abstract class Null$ private ()
diff --git a/src/library/scala/runtime/ObjectRef.java b/src/library/scala/runtime/ObjectRef.java
index c8298b8b21..c553c780a8 100644
--- a/src/library/scala/runtime/ObjectRef.java
+++ b/src/library/scala/runtime/ObjectRef.java
@@ -17,4 +17,7 @@ public class ObjectRef<T> implements java.io.Serializable {
public T elem;
public ObjectRef(T elem) { this.elem = elem; }
public String toString() { return String.valueOf(elem); }
+
+ public static <U> ObjectRef create(U e) { return new ObjectRef(e); }
+ public static ObjectRef zero() { return new ObjectRef(null); }
}
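For illustration, a minimal sketch of calling the new create/zero factories added to the runtime Ref classes above (usage is hypothetical; these boxes are ordinarily produced by compiled code for captured mutable locals):

  import scala.runtime.IntRef

  val counter = IntRef.create(41)   // same as new IntRef(41)
  counter.elem += 1                 // counter.elem is now 42
  val fresh   = IntRef.zero()       // elem initialised to 0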
diff --git a/src/library/scala/runtime/RichBoolean.scala b/src/library/scala/runtime/RichBoolean.scala
index 97e2b77f96..4f867960a0 100644
--- a/src/library/scala/runtime/RichBoolean.scala
+++ b/src/library/scala/runtime/RichBoolean.scala
@@ -6,7 +6,9 @@
** |/ **
\* */
-package scala.runtime
+package scala
+package runtime
+
final class RichBoolean(val self: Boolean) extends AnyVal with OrderedProxy[Boolean] {
protected def ord = scala.math.Ordering.Boolean
diff --git a/src/library/scala/runtime/RichByte.scala b/src/library/scala/runtime/RichByte.scala
index ca578620cf..ea23cb8867 100644
--- a/src/library/scala/runtime/RichByte.scala
+++ b/src/library/scala/runtime/RichByte.scala
@@ -6,7 +6,9 @@
** |/ **
\* */
-package scala.runtime
+package scala
+package runtime
+
final class RichByte(val self: Byte) extends AnyVal with ScalaWholeNumberProxy[Byte] {
protected def num = scala.math.Numeric.ByteIsIntegral
diff --git a/src/library/scala/runtime/RichChar.scala b/src/library/scala/runtime/RichChar.scala
index 5124ca00de..c069fd1fef 100644
--- a/src/library/scala/runtime/RichChar.scala
+++ b/src/library/scala/runtime/RichChar.scala
@@ -6,7 +6,9 @@
** |/ **
\* */
-package scala.runtime
+package scala
+package runtime
+
import java.lang.Character
diff --git a/src/library/scala/runtime/RichException.scala b/src/library/scala/runtime/RichException.scala
index 94c4137674..f01788a4e9 100644
--- a/src/library/scala/runtime/RichException.scala
+++ b/src/library/scala/runtime/RichException.scala
@@ -6,10 +6,12 @@
** |/ **
\* */
-package scala.runtime
+package scala
+package runtime
import scala.compat.Platform.EOL
+@deprecated("Use Throwable#getStackTrace", "2.11.0")
final class RichException(exc: Throwable) {
def getStackTraceString = exc.getStackTrace().mkString("", EOL, EOL)
}
diff --git a/src/library/scala/runtime/RichFloat.scala b/src/library/scala/runtime/RichFloat.scala
index cb0681bc19..0bca033b7b 100644
--- a/src/library/scala/runtime/RichFloat.scala
+++ b/src/library/scala/runtime/RichFloat.scala
@@ -15,22 +15,22 @@ final class RichFloat(val self: Float) extends AnyVal with FractionalProxy[Float
protected def integralNum = scala.math.Numeric.FloatAsIfIntegral
def round: Int = math.round(self)
- def ceil: Float = math.ceil(self).toFloat
- def floor: Float = math.floor(self).toFloat
+ def ceil: Float = math.ceil(self.toDouble).toFloat
+ def floor: Float = math.floor(self.toDouble).toFloat
/** Converts an angle measured in degrees to an approximately equivalent
* angle measured in radians.
*
* @return the measurement of the angle `x` in radians.
*/
- def toRadians: Float = math.toRadians(self).toFloat
+ def toRadians: Float = math.toRadians(self.toDouble).toFloat
/** Converts an angle measured in radians to an approximately equivalent
* angle measured in degrees.
*
* @return the measurement of the angle `x` in degrees.
*/
- def toDegrees: Float = math.toDegrees(self).toFloat
+ def toDegrees: Float = math.toDegrees(self.toDouble).toFloat
// isNaN is provided by the implicit conversion to java.lang.Float
// def isNaN: Boolean = java.lang.Float.isNaN(self)
diff --git a/src/library/scala/runtime/RichInt.scala b/src/library/scala/runtime/RichInt.scala
index 192f94f939..a39710b146 100644
--- a/src/library/scala/runtime/RichInt.scala
+++ b/src/library/scala/runtime/RichInt.scala
@@ -6,7 +6,9 @@
** |/ **
\* */
-package scala.runtime
+package scala
+package runtime
+
import scala.collection.immutable.Range
diff --git a/src/library/scala/runtime/RichLong.scala b/src/library/scala/runtime/RichLong.scala
index ce2d1fdcbd..e5b39b1c09 100644
--- a/src/library/scala/runtime/RichLong.scala
+++ b/src/library/scala/runtime/RichLong.scala
@@ -6,7 +6,9 @@
** |/ **
\* */
-package scala.runtime
+package scala
+package runtime
+
final class RichLong(val self: Long) extends AnyVal with IntegralProxy[Long] {
protected def num = scala.math.Numeric.LongIsIntegral
diff --git a/src/library/scala/runtime/RichShort.scala b/src/library/scala/runtime/RichShort.scala
index aa24dd2ba6..3e5d8781ff 100644
--- a/src/library/scala/runtime/RichShort.scala
+++ b/src/library/scala/runtime/RichShort.scala
@@ -6,7 +6,9 @@
** |/ **
\* */
-package scala.runtime
+package scala
+package runtime
+
final class RichShort(val self: Short) extends AnyVal with ScalaWholeNumberProxy[Short] {
protected def num = scala.math.Numeric.ShortIsIntegral
diff --git a/src/library/scala/runtime/ScalaNumberProxy.scala b/src/library/scala/runtime/ScalaNumberProxy.scala
index 76fc38b267..6ea6448b1a 100644
--- a/src/library/scala/runtime/ScalaNumberProxy.scala
+++ b/src/library/scala/runtime/ScalaNumberProxy.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.runtime
+package scala
+package runtime
import scala.collection.{ mutable, immutable }
import scala.math.{ ScalaNumericConversions, ScalaNumericAnyConversions }
@@ -28,8 +29,8 @@ trait ScalaNumberProxy[T] extends Any with ScalaNumericAnyConversions with Typed
def floatValue() = num.toFloat(self)
def longValue() = num.toLong(self)
def intValue() = num.toInt(self)
- def byteValue() = intValue.toByte
- def shortValue() = intValue.toShort
+ def byteValue() = intValue().toByte
+ def shortValue() = intValue().toShort
def min(that: T): T = num.min(self, that)
def max(that: T): T = num.max(self, that)
diff --git a/src/library/scala/runtime/ScalaRunTime.scala b/src/library/scala/runtime/ScalaRunTime.scala
index 1d8fe5e9ad..3a85207235 100644
--- a/src/library/scala/runtime/ScalaRunTime.scala
+++ b/src/library/scala/runtime/ScalaRunTime.scala
@@ -26,8 +26,7 @@ import java.lang.reflect.{ Modifier, Method => JMethod }
* outside the API and subject to change or removal without notice.
*/
object ScalaRunTime {
- def isArray(x: AnyRef): Boolean = isArray(x, 1)
- def isArray(x: Any, atLevel: Int): Boolean =
+ def isArray(x: Any, atLevel: Int = 1): Boolean =
x != null && isArrayClass(x.getClass, atLevel)
private def isArrayClass(clazz: jClass[_], atLevel: Int): Boolean =
@@ -159,13 +158,7 @@ object ScalaRunTime {
// Java bug: http://bugs.sun.com/bugdatabase/view_bug.do?bug_id=4071957
// More background at ticket #2318.
- def ensureAccessible(m: JMethod): JMethod = {
- if (!m.isAccessible) {
- try m setAccessible true
- catch { case _: SecurityException => () }
- }
- m
- }
+ def ensureAccessible(m: JMethod): JMethod = scala.reflect.ensureAccessible(m)
def checkInitialized[T <: AnyRef](x: T): T =
if (x == null) throw new UninitializedError else x
@@ -228,7 +221,7 @@ object ScalaRunTime {
if (iv == fv) return iv
val lv = fv.toLong
- if (lv == fv) return hash(lv)
+ if (lv == fv) hash(lv)
else fv.hashCode
}
def hash(lv: Long): Int = {
@@ -357,4 +350,17 @@ object ScalaRunTime {
}
}
}
+
+ def box[T](clazz: jClass[T]): jClass[_] = clazz match {
+ case java.lang.Byte.TYPE => classOf[java.lang.Byte]
+ case java.lang.Short.TYPE => classOf[java.lang.Short]
+ case java.lang.Character.TYPE => classOf[java.lang.Character]
+ case java.lang.Integer.TYPE => classOf[java.lang.Integer]
+ case java.lang.Long.TYPE => classOf[java.lang.Long]
+ case java.lang.Float.TYPE => classOf[java.lang.Float]
+ case java.lang.Double.TYPE => classOf[java.lang.Double]
+ case java.lang.Void.TYPE => classOf[scala.runtime.BoxedUnit]
+ case java.lang.Boolean.TYPE => classOf[java.lang.Boolean]
+ case _ => clazz
+ }
}
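A quick illustrative sketch of what the box helper added above returns, following its pattern match (values shown in comments are assumptions derived from that match):

  import scala.runtime.ScalaRunTime

  ScalaRunTime.box(classOf[Int])     // classOf[java.lang.Integer]
  ScalaRunTime.box(classOf[Unit])    // classOf[scala.runtime.BoxedUnit]
  ScalaRunTime.box(classOf[String])  // classOf[String]; non-primitive classes pass through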
diff --git a/src/library/scala/runtime/SeqCharSequence.scala b/src/library/scala/runtime/SeqCharSequence.scala
index d2084a6598..ce7d7afc9e 100644
--- a/src/library/scala/runtime/SeqCharSequence.scala
+++ b/src/library/scala/runtime/SeqCharSequence.scala
@@ -11,6 +11,7 @@ package runtime
import java.util.Arrays.copyOfRange
+@deprecated("Use Predef.SeqCharSequence", "2.11.0")
final class SeqCharSequence(val xs: scala.collection.IndexedSeq[Char]) extends CharSequence {
def length: Int = xs.length
def charAt(index: Int): Char = xs(index)
@@ -18,6 +19,8 @@ final class SeqCharSequence(val xs: scala.collection.IndexedSeq[Char]) extends C
override def toString = xs.mkString("")
}
+// Still need this one since the implicit class ArrayCharSequence only converts
+// a single argument.
final class ArrayCharSequence(val xs: Array[Char], start: Int, end: Int) extends CharSequence {
// yikes
// java.lang.VerifyError: (class: scala/runtime/ArrayCharSequence, method: <init> signature: ([C)V)
diff --git a/src/library/scala/runtime/ShortRef.java b/src/library/scala/runtime/ShortRef.java
index 461b521e5f..e5e8de3d8b 100644
--- a/src/library/scala/runtime/ShortRef.java
+++ b/src/library/scala/runtime/ShortRef.java
@@ -17,4 +17,7 @@ public class ShortRef implements java.io.Serializable {
public short elem;
public ShortRef(short elem) { this.elem = elem; }
public String toString() { return java.lang.Short.toString(elem); }
+
+ public static ShortRef create(short e) { return new ShortRef(e); }
+ public static ShortRef zero() { return new ShortRef((short)0); }
}
diff --git a/src/library/scala/runtime/StringAdd.scala b/src/library/scala/runtime/StringAdd.scala
index 9d848f0ba7..d5b51a6e92 100644
--- a/src/library/scala/runtime/StringAdd.scala
+++ b/src/library/scala/runtime/StringAdd.scala
@@ -6,9 +6,12 @@
** **
\* */
-package scala.runtime
+package scala
+package runtime
+
/** A wrapper class that adds string concatenation `+` to any value */
+@deprecated("Use Predef.StringAdd", "2.11.0")
final class StringAdd(val self: Any) extends AnyVal {
def +(other: String) = String.valueOf(self) + other
}
diff --git a/src/library/scala/runtime/StringFormat.scala b/src/library/scala/runtime/StringFormat.scala
index 983ae2fc54..de32ac7e86 100644
--- a/src/library/scala/runtime/StringFormat.scala
+++ b/src/library/scala/runtime/StringFormat.scala
@@ -6,10 +6,13 @@
** **
\* */
-package scala.runtime
+package scala
+package runtime
+
/** A wrapper class that adds a `formatted` operation to any value
*/
+@deprecated("Use Predef.StringFormat", "2.11.0")
final class StringFormat(val self: Any) extends AnyVal {
/** Returns string formatted according to given `format` string.
* Format strings are as for `String.format`
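A minimal sketch of the wrappers shown in the StringAdd and StringFormat diffs above, reached through the Predef implicits their deprecation messages point to (the values are illustrative):

  val pi = 3.14159
  pi.formatted("%.2f")       // "3.14"  (StringFormat)

  case class Point(x: Int, y: Int)
  Point(1, 2) + " !"         // "Point(1,2) !"  (string concatenation + on any value)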
diff --git a/src/library/scala/runtime/Tuple2Zipped.scala b/src/library/scala/runtime/Tuple2Zipped.scala
index ef29075ac3..b28f6d4269 100644
--- a/src/library/scala/runtime/Tuple2Zipped.scala
+++ b/src/library/scala/runtime/Tuple2Zipped.scala
@@ -6,7 +6,9 @@
** |/ **
\* */
-package scala.runtime
+package scala
+package runtime
+
import scala.collection.{ TraversableLike, IterableLike }
import scala.collection.generic.{ CanBuildFrom => CBF }
@@ -37,12 +39,12 @@ final class Tuple2Zipped[El1, Repr1, El2, Repr2](val colls: (TraversableLike[El1
for (el1 <- colls._1) {
if (elems2.hasNext)
- b += f(el1, elems2.next)
+ b += f(el1, elems2.next())
else
- return b.result
+ return b.result()
}
- b.result
+ b.result()
}
def flatMap[B, To](f: (El1, El2) => TraversableOnce[B])(implicit cbf: CBF[Repr1, B, To]): To = {
@@ -51,12 +53,12 @@ final class Tuple2Zipped[El1, Repr1, El2, Repr2](val colls: (TraversableLike[El1
for (el1 <- colls._1) {
if (elems2.hasNext)
- b ++= f(el1, elems2.next)
+ b ++= f(el1, elems2.next())
else
- return b.result
+ return b.result()
}
- b.result
+ b.result()
}
def filter[To1, To2](f: (El1, El2) => Boolean)(implicit cbf1: CBF[Repr1, El1, To1], cbf2: CBF[Repr2, El2, To2]): (To1, To2) = {
@@ -66,16 +68,16 @@ final class Tuple2Zipped[El1, Repr1, El2, Repr2](val colls: (TraversableLike[El1
for (el1 <- colls._1) {
if (elems2.hasNext) {
- val el2 = elems2.next
+ val el2 = elems2.next()
if (f(el1, el2)) {
b1 += el1
b2 += el2
}
}
- else return (b1.result, b2.result)
+ else return (b1.result(), b2.result())
}
- (b1.result, b2.result)
+ (b1.result(), b2.result())
}
def exists(f: (El1, El2) => Boolean): Boolean = {
@@ -83,7 +85,7 @@ final class Tuple2Zipped[El1, Repr1, El2, Repr2](val colls: (TraversableLike[El1
for (el1 <- colls._1) {
if (elems2.hasNext) {
- if (f(el1, elems2.next))
+ if (f(el1, elems2.next()))
return true
}
else return false
@@ -99,7 +101,7 @@ final class Tuple2Zipped[El1, Repr1, El2, Repr2](val colls: (TraversableLike[El1
for (el1 <- colls._1) {
if (elems2.hasNext)
- f(el1, elems2.next)
+ f(el1, elems2.next())
else
return
}
@@ -117,9 +119,9 @@ object Tuple2Zipped {
val it1 = x._1.toIterator
val it2 = x._2.toIterator
while (it1.hasNext && it2.hasNext)
- buf += ((it1.next, it2.next))
+ buf += ((it1.next(), it2.next()))
- buf.result
+ buf.result()
}
def zipped[El1, Repr1, El2, Repr2]
diff --git a/src/library/scala/runtime/Tuple3Zipped.scala b/src/library/scala/runtime/Tuple3Zipped.scala
index 3f2afaf772..7c501380a3 100644
--- a/src/library/scala/runtime/Tuple3Zipped.scala
+++ b/src/library/scala/runtime/Tuple3Zipped.scala
@@ -6,7 +6,9 @@
** |/ **
\* */
-package scala.runtime
+package scala
+package runtime
+
import scala.collection.{ TraversableLike, IterableLike }
import scala.collection.generic.{ CanBuildFrom => CBF }
@@ -34,11 +36,11 @@ final class Tuple3Zipped[El1, Repr1, El2, Repr2, El3, Repr3](val colls: (Travers
for (el1 <- colls._1) {
if (elems2.hasNext && elems3.hasNext)
- b += f(el1, elems2.next, elems3.next)
+ b += f(el1, elems2.next(), elems3.next())
else
- return b.result
+ return b.result()
}
- b.result
+ b.result()
}
def flatMap[B, To](f: (El1, El2, El3) => TraversableOnce[B])(implicit cbf: CBF[Repr1, B, To]): To = {
@@ -48,11 +50,11 @@ final class Tuple3Zipped[El1, Repr1, El2, Repr2, El3, Repr3](val colls: (Travers
for (el1 <- colls._1) {
if (elems2.hasNext && elems3.hasNext)
- b ++= f(el1, elems2.next, elems3.next)
+ b ++= f(el1, elems2.next(), elems3.next())
else
- return b.result
+ return b.result()
}
- b.result
+ b.result()
}
def filter[To1, To2, To3](f: (El1, El2, El3) => Boolean)(
@@ -64,12 +66,12 @@ final class Tuple3Zipped[El1, Repr1, El2, Repr2, El3, Repr3](val colls: (Travers
val b3 = cbf3(colls._3.repr)
val elems2 = colls._2.iterator
val elems3 = colls._3.iterator
- def result = (b1.result, b2.result, b3.result)
+ def result = (b1.result(), b2.result(), b3.result())
for (el1 <- colls._1) {
if (elems2.hasNext && elems3.hasNext) {
- val el2 = elems2.next
- val el3 = elems3.next
+ val el2 = elems2.next()
+ val el3 = elems3.next()
if (f(el1, el2, el3)) {
b1 += el1
@@ -89,7 +91,7 @@ final class Tuple3Zipped[El1, Repr1, El2, Repr2, El3, Repr3](val colls: (Travers
for (el1 <- colls._1) {
if (elems2.hasNext && elems3.hasNext) {
- if (f(el1, elems2.next, elems3.next))
+ if (f(el1, elems2.next(), elems3.next()))
return true
}
else return false
@@ -106,7 +108,7 @@ final class Tuple3Zipped[El1, Repr1, El2, Repr2, El3, Repr3](val colls: (Travers
for (el1 <- colls._1) {
if (elems2.hasNext && elems3.hasNext)
- f(el1, elems2.next, elems3.next)
+ f(el1, elems2.next(), elems3.next())
else
return
}
@@ -126,9 +128,9 @@ object Tuple3Zipped {
val it2 = x._2.toIterator
val it3 = x._3.toIterator
while (it1.hasNext && it2.hasNext && it3.hasNext)
- buf += ((it1.next, it2.next, it3.next))
+ buf += ((it1.next(), it2.next(), it3.next()))
- buf.result
+ buf.result()
}
def zipped[El1, Repr1, El2, Repr2, El3, Repr3]
diff --git a/src/library/scala/runtime/VolatileBooleanRef.java b/src/library/scala/runtime/VolatileBooleanRef.java
index e3bd182345..ef5b691118 100755
--- a/src/library/scala/runtime/VolatileBooleanRef.java
+++ b/src/library/scala/runtime/VolatileBooleanRef.java
@@ -17,4 +17,7 @@ public class VolatileBooleanRef implements java.io.Serializable {
volatile public boolean elem;
public VolatileBooleanRef(boolean elem) { this.elem = elem; }
public String toString() { return String.valueOf(elem); }
+
+ public static VolatileBooleanRef create(boolean e) { return new VolatileBooleanRef(e); }
+ public static VolatileBooleanRef zero() { return new VolatileBooleanRef(false); }
}
diff --git a/src/library/scala/runtime/VolatileByteRef.java b/src/library/scala/runtime/VolatileByteRef.java
index 034b003017..d792b0a386 100755
--- a/src/library/scala/runtime/VolatileByteRef.java
+++ b/src/library/scala/runtime/VolatileByteRef.java
@@ -17,4 +17,7 @@ public class VolatileByteRef implements java.io.Serializable {
volatile public byte elem;
public VolatileByteRef(byte elem) { this.elem = elem; }
public String toString() { return java.lang.Byte.toString(elem); }
+
+ public static VolatileByteRef create(byte e) { return new VolatileByteRef(e); }
+ public static VolatileByteRef zero() { return new VolatileByteRef((byte)0); }
}
diff --git a/src/library/scala/runtime/VolatileCharRef.java b/src/library/scala/runtime/VolatileCharRef.java
index f90648c5e9..555b171283 100755
--- a/src/library/scala/runtime/VolatileCharRef.java
+++ b/src/library/scala/runtime/VolatileCharRef.java
@@ -17,4 +17,7 @@ public class VolatileCharRef implements java.io.Serializable {
volatile public char elem;
public VolatileCharRef(char elem) { this.elem = elem; }
public String toString() { return java.lang.Character.toString(elem); }
+
+ public static VolatileCharRef create(char e) { return new VolatileCharRef(e); }
+ public static VolatileCharRef zero() { return new VolatileCharRef((char)0); }
}
diff --git a/src/library/scala/runtime/VolatileDoubleRef.java b/src/library/scala/runtime/VolatileDoubleRef.java
index d47c9578c6..1932055c6a 100755
--- a/src/library/scala/runtime/VolatileDoubleRef.java
+++ b/src/library/scala/runtime/VolatileDoubleRef.java
@@ -16,4 +16,7 @@ public class VolatileDoubleRef implements java.io.Serializable {
volatile public double elem;
public VolatileDoubleRef(double elem) { this.elem = elem; }
public String toString() { return java.lang.Double.toString(elem); }
+
+ public static VolatileDoubleRef create(double e) { return new VolatileDoubleRef(e); }
+ public static VolatileDoubleRef zero() { return new VolatileDoubleRef(0); }
}
diff --git a/src/library/scala/runtime/VolatileFloatRef.java b/src/library/scala/runtime/VolatileFloatRef.java
index 97da95f7cc..3a81be1146 100755
--- a/src/library/scala/runtime/VolatileFloatRef.java
+++ b/src/library/scala/runtime/VolatileFloatRef.java
@@ -17,4 +17,7 @@ public class VolatileFloatRef implements java.io.Serializable {
volatile public float elem;
public VolatileFloatRef(float elem) { this.elem = elem; }
public String toString() { return java.lang.Float.toString(elem); }
+
+ public static VolatileFloatRef create(float e) { return new VolatileFloatRef(e); }
+ public static VolatileFloatRef zero() { return new VolatileFloatRef(0); }
}
diff --git a/src/library/scala/runtime/VolatileIntRef.java b/src/library/scala/runtime/VolatileIntRef.java
index e8a68a1a9a..ae015bc8b1 100755
--- a/src/library/scala/runtime/VolatileIntRef.java
+++ b/src/library/scala/runtime/VolatileIntRef.java
@@ -16,4 +16,7 @@ public class VolatileIntRef implements java.io.Serializable {
volatile public int elem;
public VolatileIntRef(int elem) { this.elem = elem; }
public String toString() { return java.lang.Integer.toString(elem); }
+
+ public static VolatileIntRef create(int e) { return new VolatileIntRef(e); }
+ public static VolatileIntRef zero() { return new VolatileIntRef(0); }
}
diff --git a/src/library/scala/runtime/VolatileLongRef.java b/src/library/scala/runtime/VolatileLongRef.java
index 80e627cd4b..e596f5aa69 100755
--- a/src/library/scala/runtime/VolatileLongRef.java
+++ b/src/library/scala/runtime/VolatileLongRef.java
@@ -17,4 +17,7 @@ public class VolatileLongRef implements java.io.Serializable {
volatile public long elem;
public VolatileLongRef(long elem) { this.elem = elem; }
public String toString() { return java.lang.Long.toString(elem); }
+
+ public static VolatileLongRef create(long e) { return new VolatileLongRef(e); }
+ public static VolatileLongRef zero() { return new VolatileLongRef(0); }
}
diff --git a/src/library/scala/runtime/VolatileObjectRef.java b/src/library/scala/runtime/VolatileObjectRef.java
index 848b0632ea..9f1f3ac0cf 100755
--- a/src/library/scala/runtime/VolatileObjectRef.java
+++ b/src/library/scala/runtime/VolatileObjectRef.java
@@ -17,4 +17,7 @@ public class VolatileObjectRef<T> implements java.io.Serializable {
volatile public T elem;
public VolatileObjectRef(T elem) { this.elem = elem; }
public String toString() { return String.valueOf(elem); }
+
+ public static <U> VolatileObjectRef create(U e) { return new VolatileObjectRef(e); }
+ public static VolatileObjectRef zero() { return new VolatileObjectRef(null); }
}
diff --git a/src/library/scala/runtime/VolatileShortRef.java b/src/library/scala/runtime/VolatileShortRef.java
index 4e91d0dc70..0a2825941f 100755
--- a/src/library/scala/runtime/VolatileShortRef.java
+++ b/src/library/scala/runtime/VolatileShortRef.java
@@ -17,4 +17,7 @@ public class VolatileShortRef implements java.io.Serializable {
volatile public short elem;
public VolatileShortRef(short elem) { this.elem = elem; }
public String toString() { return java.lang.Short.toString(elem); }
+
+ public static VolatileShortRef create(short e) { return new VolatileShortRef(e); }
+ public static VolatileShortRef zero() { return new VolatileShortRef((short)0); }
}
diff --git a/src/library/scala/runtime/WorksheetSupport.scala b/src/library/scala/runtime/WorksheetSupport.scala
index 016a0d04e0..2a0064494b 100644
--- a/src/library/scala/runtime/WorksheetSupport.scala
+++ b/src/library/scala/runtime/WorksheetSupport.scala
@@ -21,7 +21,7 @@ object WorksheetSupport {
private var lastFlush: Long = 0L
private var col = -1
override def write(b: Array[Byte], off: Int, len: Int) = {
- for (idx <- off until (off + len min b.length)) writeOne(b(idx))
+ for (idx <- off until (off + len min b.length)) writeOne(b(idx).toInt)
flush()
}
override def write(c: Int) {
diff --git a/src/library/scala/sys/BooleanProp.scala b/src/library/scala/sys/BooleanProp.scala
index e3c25bbd11..74b0a9077b 100644
--- a/src/library/scala/sys/BooleanProp.scala
+++ b/src/library/scala/sys/BooleanProp.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.sys
+package scala
+package sys
import scala.language.implicitConversions
diff --git a/src/library/scala/sys/PropImpl.scala b/src/library/scala/sys/PropImpl.scala
index b50e0e18a7..3b451ab1d9 100644
--- a/src/library/scala/sys/PropImpl.scala
+++ b/src/library/scala/sys/PropImpl.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.sys
+package scala
+package sys
import scala.collection.mutable
diff --git a/src/library/scala/sys/ShutdownHookThread.scala b/src/library/scala/sys/ShutdownHookThread.scala
index a8f4871870..6018ac852b 100644
--- a/src/library/scala/sys/ShutdownHookThread.scala
+++ b/src/library/scala/sys/ShutdownHookThread.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.sys
+package scala
+package sys
/** A minimal Thread wrapper to enhance shutdown hooks. It knows
* how to unregister itself.
diff --git a/src/library/scala/sys/SystemProperties.scala b/src/library/scala/sys/SystemProperties.scala
index da9adb3dc2..39f66f5030 100644
--- a/src/library/scala/sys/SystemProperties.scala
+++ b/src/library/scala/sys/SystemProperties.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.sys
+package scala
+package sys
import scala.collection.{ mutable, Iterator }
import scala.collection.JavaConverters._
@@ -64,7 +65,6 @@ object SystemProperties {
propertyHelp(p.key) = helpText
p
}
- private def str(key: String, helpText: String) = addHelp(Prop[String](key), helpText)
private def bool(key: String, helpText: String): BooleanProp = addHelp[BooleanProp](
if (key startsWith "java.") BooleanProp.valueIsTrue(key) else BooleanProp.keyExists(key),
helpText
diff --git a/src/library/scala/sys/process/BasicIO.scala b/src/library/scala/sys/process/BasicIO.scala
index 0003df6c52..b31bbf0540 100644
--- a/src/library/scala/sys/process/BasicIO.scala
+++ b/src/library/scala/sys/process/BasicIO.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.sys
+package scala
+package sys
package process
import processInternal._
@@ -46,7 +47,7 @@ object BasicIO {
def next(): Stream[T] = q.take match {
case Left(0) => Stream.empty
case Left(code) => if (nonzeroException) scala.sys.error("Nonzero exit code: " + code) else Stream.empty
- case Right(s) => Stream.cons(s, next)
+ case Right(s) => Stream.cons(s, next())
}
new Streamed((s: T) => q put Right(s), code => q put Left(code), () => next())
}
@@ -161,21 +162,29 @@ object BasicIO {
*/
def processFully(processLine: String => Unit): InputStream => Unit = in => {
val reader = new BufferedReader(new InputStreamReader(in))
- processLinesFully(processLine)(reader.readLine)
- reader.close()
+ try processLinesFully(processLine)(reader.readLine)
+ finally reader.close()
}
/** Calls `processLine` with the result of `readLine` until the latter returns
- * `null`.
- */
+ * `null` or the current thread is interrupted.
+ */
def processLinesFully(processLine: String => Unit)(readLine: () => String) {
- def readFully() {
- val line = readLine()
- if (line != null) {
- processLine(line)
- readFully()
+ def working = (Thread.currentThread.isInterrupted == false)
+ def halting = { Thread.currentThread.interrupt(); null }
+ def readFully(): Unit =
+ if (working) {
+ val line =
+ try readLine()
+ catch {
+ case _: InterruptedException => halting
+ case e: IOException if !working => halting
+ }
+ if (line != null) {
+ processLine(line)
+ readFully()
+ }
}
- }
readFully()
}
diff --git a/src/library/scala/sys/process/Process.scala b/src/library/scala/sys/process/Process.scala
index 715b364e08..402183a1f0 100644
--- a/src/library/scala/sys/process/Process.scala
+++ b/src/library/scala/sys/process/Process.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.sys
+package scala
+package sys
package process
import processInternal._
diff --git a/src/library/scala/sys/process/ProcessBuilder.scala b/src/library/scala/sys/process/ProcessBuilder.scala
index d0b2ecfe73..c8e548c76b 100644
--- a/src/library/scala/sys/process/ProcessBuilder.scala
+++ b/src/library/scala/sys/process/ProcessBuilder.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.sys
+package scala
+package sys
package process
import processInternal._
@@ -23,7 +24,7 @@ import ProcessBuilder._
* based on these factories made available in the package object
* [[scala.sys.process]]. Here are some examples:
* {{{
- * import.scala.sys.process._
+ * import scala.sys.process._
*
* // Executes "ls" and sends output to stdout
* "ls".!
@@ -46,14 +47,14 @@ import ProcessBuilder._
*
* Two existing `ProcessBuilder` can be combined in the following ways:
*
- * * They can be executed in parallel, with the output of the first being fed
- * as input to the second, like Unix pipes. This is achieved with the `#|`
- * method.
- * * They can be executed in sequence, with the second starting as soon as
- * the first ends. This is done by the `###` method.
- * * The execution of the second one can be conditioned by the return code
- * (exit status) of the first, either only when it's zero, or only when it's
- * not zero. The methods `#&&` and `#||` accomplish these tasks.
+ * - They can be executed in parallel, with the output of the first being fed
+ * as input to the second, like Unix pipes. This is achieved with the `#|`
+ * method.
+ * - They can be executed in sequence, with the second starting as soon as
+ * the first ends. This is done by the `###` method.
+ * - The execution of the second one can be conditioned by the return code
+ * (exit status) of the first, either only when it's zero, or only when it's
+ * not zero. The methods `#&&` and `#||` accomplish these tasks.
*
* ==Redirecting Input/Output==
*
@@ -74,18 +75,18 @@ import ProcessBuilder._
* overloads and variations to enable further control over the I/O. These
* methods are:
*
- * * `run`: the most general method, it returns a
- * [[scala.sys.process.Process]] immediately, and the external command
- * executes concurrently.
- * * `!`: blocks until all external commands exit, and returns the exit code
- * of the last one in the chain of execution.
- * * `!!`: blocks until all external commands exit, and returns a `String`
- * with the output generated.
- * * `lines`: returns immediately like `run`, and the output being generared
- * is provided through a `Stream[String]`. Getting the next element of that
- * `Stream` may block until it becomes available. This method will throw an
- * exception if the return code is different than zero -- if this is not
- * desired, use the `lines_!` method.
+ * - `run`: the most general method, it returns a
+ * [[scala.sys.process.Process]] immediately, and the external command
+ * executes concurrently.
+ * - `!`: blocks until all external commands exit, and returns the exit code
+ * of the last one in the chain of execution.
+ * - `!!`: blocks until all external commands exit, and returns a `String`
+ * with the output generated.
+ * - `lines`: returns immediately like `run`, and the output being generated
+ * is provided through a `Stream[String]`. Getting the next element of that
+ * `Stream` may block until it becomes available. This method will throw an
+ * exception if the return code is different than zero -- if this is not
+ * desired, use the `lines_!` method.
*
* ==Handling Input and Output==
*
@@ -305,10 +306,10 @@ object ProcessBuilder extends ProcessBuilderImpl {
protected def toSource: ProcessBuilder
/** Writes the output stream of this process to the given file. */
- def #> (f: File): ProcessBuilder = toFile(f, false)
+ def #> (f: File): ProcessBuilder = toFile(f, append = false)
/** Appends the output stream of this process to the given file. */
- def #>> (f: File): ProcessBuilder = toFile(f, true)
+ def #>> (f: File): ProcessBuilder = toFile(f, append = true)
/** Writes the output stream of this process to the given OutputStream. The
* argument is call-by-name, so the stream is recreated, written, and closed each
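To make the combinators described in the scaladoc above concrete, a small illustrative sketch (the commands themselves are arbitrary examples, not part of the change):

  import scala.sys.process._

  val exitCode = ("ls" #| "grep .scala").!   // pipe two commands, block, return the exit code
  val output   = Seq("echo", "hello").!!     // block and capture stdout as a String
  val process  = "sleep 5".run()             // start asynchronously, returns a Process
  val builder  = ("make" #&& "echo built") #|| "echo build failed"  // not yet started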
diff --git a/src/library/scala/sys/process/ProcessBuilderImpl.scala b/src/library/scala/sys/process/ProcessBuilderImpl.scala
index 49fea6f464..adf6d1e724 100644
--- a/src/library/scala/sys/process/ProcessBuilderImpl.scala
+++ b/src/library/scala/sys/process/ProcessBuilderImpl.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.sys
+package scala
+package sys
package process
import processInternal._
@@ -69,7 +70,7 @@ private[process] trait ProcessBuilderImpl {
import io._
// spawn threads that process the input, output, and error streams using the functions defined in `io`
- val inThread = Spawn(writeInput(process.getOutputStream), true)
+ val inThread = Spawn(writeInput(process.getOutputStream), daemon = true)
val outThread = Spawn(processOutput(process.getInputStream), daemonizeThreads)
val errorThread =
if (p.redirectErrorStream) Nil
@@ -93,26 +94,26 @@ private[process] trait ProcessBuilderImpl {
def #&&(other: ProcessBuilder): ProcessBuilder = new AndBuilder(this, other)
def ###(other: ProcessBuilder): ProcessBuilder = new SequenceBuilder(this, other)
- def run(): Process = run(false)
+ def run(): Process = run(connectInput = false)
def run(connectInput: Boolean): Process = run(BasicIO.standard(connectInput))
- def run(log: ProcessLogger): Process = run(log, false)
+ def run(log: ProcessLogger): Process = run(log, connectInput = false)
def run(log: ProcessLogger, connectInput: Boolean): Process = run(BasicIO(connectInput, log))
- def !! = slurp(None, false)
- def !!(log: ProcessLogger) = slurp(Some(log), false)
- def !!< = slurp(None, true)
- def !!<(log: ProcessLogger) = slurp(Some(log), true)
+ def !! = slurp(None, withIn = false)
+ def !!(log: ProcessLogger) = slurp(Some(log), withIn = false)
+ def !!< = slurp(None, withIn = true)
+ def !!<(log: ProcessLogger) = slurp(Some(log), withIn = true)
- def lines: Stream[String] = lines(false, true, None)
- def lines(log: ProcessLogger): Stream[String] = lines(false, true, Some(log))
- def lines_! : Stream[String] = lines(false, false, None)
- def lines_!(log: ProcessLogger): Stream[String] = lines(false, false, Some(log))
+ def lines: Stream[String] = lines(withInput = false, nonZeroException = true, None)
+ def lines(log: ProcessLogger): Stream[String] = lines(withInput = false, nonZeroException = true, Some(log))
+ def lines_! : Stream[String] = lines(withInput = false, nonZeroException = false, None)
+ def lines_!(log: ProcessLogger): Stream[String] = lines(withInput = false, nonZeroException = false, Some(log))
- def ! = run(false).exitValue()
+ def ! = run(connectInput = false).exitValue()
def !(io: ProcessIO) = run(io).exitValue()
- def !(log: ProcessLogger) = runBuffered(log, false)
- def !< = run(true).exitValue()
- def !<(log: ProcessLogger) = runBuffered(log, true)
+ def !(log: ProcessLogger) = runBuffered(log, connectInput = false)
+ def !< = run(connectInput = true).exitValue()
+ def !<(log: ProcessLogger) = runBuffered(log, connectInput = true)
/** Constructs a new builder which runs this command with all input/output threads marked
* as daemon threads. This allows the creation of a long running process while still
diff --git a/src/library/scala/sys/process/ProcessIO.scala b/src/library/scala/sys/process/ProcessIO.scala
index f5b26680d9..eedf667c88 100644
--- a/src/library/scala/sys/process/ProcessIO.scala
+++ b/src/library/scala/sys/process/ProcessIO.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.sys
+package scala
+package sys
package process
import processInternal._
diff --git a/src/library/scala/sys/process/ProcessImpl.scala b/src/library/scala/sys/process/ProcessImpl.scala
index c21c0daa5e..2b7fcdeb73 100644
--- a/src/library/scala/sys/process/ProcessImpl.scala
+++ b/src/library/scala/sys/process/ProcessImpl.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.sys
+package scala
+package sys
package process
import processInternal._
@@ -17,7 +18,7 @@ private[process] trait ProcessImpl {
/** Runs provided code in a new Thread and returns the Thread instance. */
private[process] object Spawn {
- def apply(f: => Unit): Thread = apply(f, false)
+ def apply(f: => Unit): Thread = apply(f, daemon = false)
def apply(f: => Unit, daemon: Boolean): Thread = {
val thread = new Thread() { override def run() = { f } }
thread.setDaemon(daemon)
@@ -32,7 +33,7 @@ private[process] trait ProcessImpl {
try result set Right(f)
catch { case e: Exception => result set Left(e) }
- Spawn(run)
+ Spawn(run())
() => result.get match {
case Right(value) => value
@@ -68,10 +69,10 @@ private[process] trait ProcessImpl {
protected[this] override def runAndExitValue() = {
val first = a.run(io)
- runInterruptible(first.exitValue)(first.destroy()) flatMap { codeA =>
+ runInterruptible(first.exitValue())(first.destroy()) flatMap { codeA =>
if (evaluateSecondProcess(codeA)) {
val second = b.run(io)
- runInterruptible(second.exitValue)(second.destroy())
+ runInterruptible(second.exitValue())(second.destroy())
}
else Some(codeA)
}
@@ -132,10 +133,10 @@ private[process] trait ProcessImpl {
val first = a.run(firstIO)
try {
runInterruptible {
- val exit1 = first.exitValue
+ val exit1 = first.exitValue()
currentSource put None
currentSink put None
- val exit2 = second.exitValue
+ val exit2 = second.exitValue()
// Since file redirection (e.g. #>) is implemented as a piped process,
// we ignore its exit value so cmd #> file doesn't always return 0.
if (b.hasExitValue) exit2 else exit1
@@ -222,8 +223,8 @@ private[process] trait ProcessImpl {
p.exitValue()
}
override def destroy() = {
- try{
- outputThreads foreach (_.stop())
+ try {
+ outputThreads foreach (_.interrupt()) // on destroy, don't bother consuming any more output
p.destroy()
}
finally inputThread.interrupt()
diff --git a/src/library/scala/sys/process/ProcessLogger.scala b/src/library/scala/sys/process/ProcessLogger.scala
index a4acb065d0..ae347221ef 100644
--- a/src/library/scala/sys/process/ProcessLogger.scala
+++ b/src/library/scala/sys/process/ProcessLogger.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.sys
+package scala
+package sys
package process
import java.io._
diff --git a/src/library/scala/sys/process/package.scala b/src/library/scala/sys/process/package.scala
index ed436febc0..902543665f 100644
--- a/src/library/scala/sys/process/package.scala
+++ b/src/library/scala/sys/process/package.scala
@@ -25,7 +25,7 @@ package scala.sys {
*
* {{{
* import scala.sys.process._
- * "ls" #| "grep .scala" #&& "scalac *.scala" #|| "echo nothing found" lines
+ * "ls" #| "grep .scala" #&& Seq("sh", "-c", "scalac *.scala") #|| "echo nothing found" lines
* }}}
*
* We describe below the general concepts and architecture of the package,
diff --git a/src/library/scala/testing/Benchmark.scala b/src/library/scala/testing/Benchmark.scala
deleted file mode 100644
index 66d7d448eb..0000000000
--- a/src/library/scala/testing/Benchmark.scala
+++ /dev/null
@@ -1,114 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala.testing
-
-import scala.compat.Platform
-
-/** `Benchmark` can be used to quickly turn an existing class into a
- * benchmark. Here is a short example:
- * {{{
- * object sort1 extends Sorter with Benchmark {
- * def run = sort(List.range(1, 1000))
- * }
- * }}}
- * The `run` method has to be defined by the user, who will perform the
- * timed operation there. Run the benchmark as follows:
- * {{{
- * > scala sort1 5
- * }}}
- * This will run the benchmark 5 times, forcing a garbage collection
- * between runs, and printing the execution times to stdout.
- *
- * It is also possible to add a multiplier, so
- * {{{
- * > scala sort1 5 10
- * }}}
- * will run the entire benchmark 10 times, each time for 5 runs.
- *
- * @author Iulian Dragos, Burak Emir
- */
-@deprecated("This class will be removed.", "2.10.0")
-trait Benchmark {
-
- /** this method should be implemented by the concrete benchmark.
- * This method is called by the benchmarking code for a number of times.
- * The GC is called between "multiplier" calls to run, right after tear
- * down.
- *
- * @see setUp
- * @see tearDown
- */
- def run()
-
- var multiplier = 1
-
- /** Run the benchmark the specified number of times and return a list with
- * the execution times in milliseconds in reverse order of the execution.
- */
- def runBenchmark(noTimes: Int): List[Long] =
- for (i <- List.range(1, noTimes + 1)) yield {
- setUp
- val startTime = Platform.currentTime
- var i = 0; while (i < multiplier) {
- run()
- i += 1
- }
- val stopTime = Platform.currentTime
- tearDown
- Platform.collectGarbage
-
- stopTime - startTime
- }
-
- /** Prepare any data needed by the benchmark, but whose execution time
- * should not be measured. This method is run before each call to the
- * benchmark payload, 'run'.
- */
- def setUp() {}
-
- /** Perform cleanup operations after each 'run'. For micro benchmarks,
- * think about using the result of 'run' in a way that prevents the JVM
- * to dead-code eliminate the whole 'run' method. For instance, print or
- * write the results to a file. The execution time of this method is not
- * measured.
- */
- def tearDown() {}
-
- /** a string that is written at the beginning of the output line
- * that contains the timings. By default, this is the class name.
- */
- def prefix: String = getClass().getName()
-
- /**
- * The entry point. It takes two arguments:
- * - argument `n` is the number of consecutive runs
- * - optional argument `mult` specifies that the `n` runs are repeated
- * `mult` times.
- */
- def main(args: Array[String]) {
- if (args.length > 0) {
- val logFile = new java.io.OutputStreamWriter(System.out)
- if (args.length > 1) multiplier = args(1).toInt
- logFile.write(prefix)
- for (t <- runBenchmark(args(0).toInt))
- logFile.write("\t" + t)
-
- logFile.write(Platform.EOL)
- logFile.flush()
- } else {
- println("Usage: scala benchmarks.program <runs> ")
- println(" or: scala benchmarks.program <runs> <multiplier>")
- println("""
- The benchmark is run <runs> times, forcing a garbage collection between runs. The optional
- <multiplier> causes the benchmark to be repeated <multiplier> times, each time for <runs>
- executions.
- """)
- }
- }
-}
diff --git a/src/library/scala/testing/Show.scala b/src/library/scala/testing/Show.scala
deleted file mode 100644
index 9376e26db4..0000000000
--- a/src/library/scala/testing/Show.scala
+++ /dev/null
@@ -1,75 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.testing
-
-/** Classes inheriting trait `Show` can test their member methods using the
- * notation `meth(arg,,1,,, ..., arg,,n,,)`, where `meth` is the name of
- * the method and `arg,,1,,,...,arg,,n,,` are the arguments.
- *
- * The only difference to a normal method call is the leading quote
- * character (`'`). A quoted method call like the one above will produces
- * a legible diagnostic to be printed on [[scala.Console]].
- *
- * It is of the form
- *
- * `meth(arg,,1,,, ..., arg,,n,,)` gives `&lt;result&gt;`
- *
- * where `&lt;result&gt;` is the result of evaluating the call.
- *
- */
-@deprecated("This class will be removed.", "2.10.0")
-trait Show {
-
- /** An implicit definition that adds an apply method to Symbol which forwards to `test`.
- * Prints out diagnostics of method applications.
- */
- implicit class SymApply(f: Symbol) {
- def apply[A](args: A*) {
- println(test(f, args: _*))
- }
- }
-
- @deprecated("use SymApply instead", "2.10.0")
- def symApply(sym: Symbol): SymApply = new SymApply(sym)
-
- /** Apply method with name of given symbol `f` to given arguments and return
- * a result diagnostics.
- */
- def test[A](f: Symbol, args: A*): String = {
- val args1 = args map (_.asInstanceOf[AnyRef])
- def testMethod(meth: java.lang.reflect.Method): String =
- f.name+"("+(args mkString ",")+") gives "+
- {
- try {
- meth.invoke(this, args1: _*)
- } catch {
- case ex: IllegalAccessException => ex
- case ex: IllegalArgumentException => ex
- case ex: java.lang.reflect.InvocationTargetException => ex
- }
- }
- getClass.getMethods.toList filter (_.getName == f.name) match {
- case List() =>
- f.name+" is not defined"
- case List(m) =>
- testMethod(m)
- case ms => // multiple methods, disambiguate by number of arguments
- ms filter (_.getParameterTypes.length == args.length) match {
- case List() =>
- testMethod(ms.head) // go ahead anyway, to get an exception
- case List(m) =>
- testMethod(m)
- case ms =>
- "cannot disambiguate between multiple implementations of "+f.name
- }
- }
- }
-}
diff --git a/src/library/scala/text/Document.scala b/src/library/scala/text/Document.scala
index b74fd152b5..59d5b1bf93 100644
--- a/src/library/scala/text/Document.scala
+++ b/src/library/scala/text/Document.scala
@@ -80,7 +80,7 @@ abstract class Document {
fmt(k, (i + ii, b, d) :: z)
case (i, true, DocBreak) :: z =>
writer write "\n"
- spaces(i);
+ spaces(i)
fmt(i, z)
case (i, false, DocBreak) :: z =>
writer write " "
diff --git a/src/library/scala/throws.scala b/src/library/scala/throws.scala
index 159f1f02f4..5a5dd9a1f5 100644
--- a/src/library/scala/throws.scala
+++ b/src/library/scala/throws.scala
@@ -24,5 +24,5 @@ package scala
* @since 2.1
*/
class throws[T <: Throwable](cause: String = "") extends scala.annotation.StaticAnnotation {
- def this(clazz: Class[T]) = this()
+ def this(clazz: Class[T]) = this("")
}
diff --git a/src/library/scala/util/DynamicVariable.scala b/src/library/scala/util/DynamicVariable.scala
index 52cba6850d..963fe1c497 100644
--- a/src/library/scala/util/DynamicVariable.scala
+++ b/src/library/scala/util/DynamicVariable.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.util
+package scala
+package util
import java.lang.InheritableThreadLocal
diff --git a/src/library/scala/util/Either.scala b/src/library/scala/util/Either.scala
index dba11ed73c..1ed3f4becb 100644
--- a/src/library/scala/util/Either.scala
+++ b/src/library/scala/util/Either.scala
@@ -8,7 +8,8 @@
-package scala.util
+package scala
+package util
import scala.language.implicitConversions
@@ -21,7 +22,7 @@ import scala.language.implicitConversions
* [[scala.util.Right]] takes the place of [[scala.Some]]. Convention dictates
* that Left is used for failure and Right is used for success.
*
- * For example, you could use ``Either[String, Int]`` to detect whether a
+ * For example, you could use `Either[String, Int]` to detect whether a
* received input is a String or an Int.
*
* {{{
@@ -205,7 +206,7 @@ final case class Right[+A, +B](b: B) extends Either[A, B] {
object Either {
/**
- * Allows use of a ``merge`` method to extract values from Either instances
+ * Allows use of a `merge` method to extract values from Either instances
* regardless of whether they are Left or Right.
*
* {{{
@@ -221,8 +222,6 @@ object Either {
case Right(a) => a
}
}
- @deprecated("use MergeableEither instead", "2.10.0")
- def either2mergeable[A](x: Either[A, A]): MergeableEither[A] = new MergeableEither(x)
/**
* Projects an `Either` into a `Left`.
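A short illustrative sketch of the merge operation described above:

  val l: Either[String, Int] = Left("flower")
  val r: Either[String, Int] = Right(12)
  l.merge   // "flower" (typed as Any)
  r.merge   // 12       (typed as Any)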
diff --git a/src/library/scala/util/Marshal.scala b/src/library/scala/util/Marshal.scala
deleted file mode 100644
index b78ed2140e..0000000000
--- a/src/library/scala/util/Marshal.scala
+++ /dev/null
@@ -1,50 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2008-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.util
-
-/**
- * Marshalling of Scala objects using Scala tags.
- *
- * @author Stephane Micheloud
- * @version 1.0
- */
-@deprecated("This class will be removed", "2.10.0")
-object Marshal {
- import java.io._
- import scala.reflect.ClassTag
-
- def dump[A](o: A)(implicit t: ClassTag[A]): Array[Byte] = {
- val ba = new ByteArrayOutputStream(512)
- val out = new ObjectOutputStream(ba)
- out.writeObject(t)
- out.writeObject(o)
- out.close()
- ba.toByteArray()
- }
-
- @throws(classOf[IOException])
- @throws(classOf[ClassCastException])
- @throws(classOf[ClassNotFoundException])
- def load[A](buffer: Array[Byte])(implicit expected: ClassTag[A]): A = {
- val in = new ObjectInputStream(new ByteArrayInputStream(buffer))
- val found = in.readObject.asInstanceOf[ClassTag[_]]
- try {
- found.runtimeClass.asSubclass(expected.runtimeClass)
- in.readObject.asInstanceOf[A]
- } catch {
- case _: ClassCastException =>
- in.close()
- throw new ClassCastException("type mismatch;"+
- "\n found : "+found+
- "\n required: "+expected)
- }
- }
-}
diff --git a/src/library/scala/util/MurmurHash.scala b/src/library/scala/util/MurmurHash.scala
index a5bc8faf8d..e05fe0875b 100644
--- a/src/library/scala/util/MurmurHash.scala
+++ b/src/library/scala/util/MurmurHash.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.util
+package scala
+package util
/** An implementation of Austin Appleby's MurmurHash 3.0 algorithm
* (32 bit version); reference: http://code.google.com/p/smhasher
@@ -81,6 +82,7 @@ class MurmurHash[@specialized(Int,Long,Float,Double) T](seed: Int) extends (T =>
* needs to be called to finalize the hash.
*/
@deprecated("Use the object MurmurHash3 instead.", "2.10.0")
+// NOTE: Used by SBT 0.13.0-M2 and below
object MurmurHash {
// Magic values used for MurmurHash's 32 bit hash.
// Don't change these without consulting a hashing expert!
@@ -164,13 +166,13 @@ object MurmurHash {
var k = hiddenMagicB
var j = 0
while (j+1 < s.length) {
- val i = (s.charAt(j)<<16) + s.charAt(j+1);
+ val i = (s.charAt(j)<<16) + s.charAt(j+1)
h = extendHash(h,i,c,k)
c = nextMagicA(c)
k = nextMagicB(k)
j += 2
}
- if (j < s.length) h = extendHash(h,s.charAt(j),c,k)
+ if (j < s.length) h = extendHash(h,s.charAt(j).toInt,c,k)
finalizeHash(h)
}
diff --git a/src/library/scala/util/Properties.scala b/src/library/scala/util/Properties.scala
index 3cd6eb8659..2240dde360 100644
--- a/src/library/scala/util/Properties.scala
+++ b/src/library/scala/util/Properties.scala
@@ -7,7 +7,8 @@
\* */
-package scala.util
+package scala
+package util
import java.io.{ IOException, PrintWriter }
import java.util.jar.Attributes.{ Name => AttributeName }
@@ -59,6 +60,8 @@ private[scala] trait PropertiesTrait {
def envOrElse(name: String, alt: String) = Option(System getenv name) getOrElse alt
def envOrNone(name: String) = Option(System getenv name)
+ def envOrSome(name: String, alt: Option[String]) = envOrNone(name) orElse alt
+
// for values based on propFilename
def scalaPropOrElse(name: String, alt: String): String = scalaProps.getProperty(name, alt)
def scalaPropOrEmpty(name: String): String = scalaPropOrElse(name, "")
diff --git a/src/library/scala/util/Random.scala b/src/library/scala/util/Random.scala
index 24c4cd7a32..8d68c5be38 100644
--- a/src/library/scala/util/Random.scala
+++ b/src/library/scala/util/Random.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.util
+package scala
+package util
import scala.collection.mutable.ArrayBuffer
import scala.collection.generic.CanBuildFrom
@@ -17,7 +18,7 @@ import scala.language.{implicitConversions, higherKinds}
* @author Stephane Micheloud
*
*/
-class Random(val self: java.util.Random) {
+class Random(val self: java.util.Random) extends AnyRef with Serializable {
/** Creates a new random number generator using a single long seed. */
def this(seed: Long) = this(new java.util.Random(seed))
@@ -117,7 +118,7 @@ class Random(val self: java.util.Random) {
swap(n - 1, k)
}
- (bf(xs) ++= buf).result
+ (bf(xs) ++= buf).result()
}
/** Returns a Stream of pseudorandomly chosen alphanumeric characters,
diff --git a/src/library/scala/util/Try.scala b/src/library/scala/util/Try.scala
index 7749543caa..89db57a55e 100644
--- a/src/library/scala/util/Try.scala
+++ b/src/library/scala/util/Try.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.util
+package scala
+package util
import scala.collection.Seq
import scala.util.control.NonFatal
@@ -164,7 +165,7 @@ object Try {
}
-final case class Failure[+T](val exception: Throwable) extends Try[T] {
+final case class Failure[+T](exception: Throwable) extends Try[T] {
def isFailure: Boolean = true
def isSuccess: Boolean = false
def recoverWith[U >: T](f: PartialFunction[Throwable, Try[U]]): Try[U] =
diff --git a/src/library/scala/util/control/Breaks.scala b/src/library/scala/util/control/Breaks.scala
index 89e1b58d95..5524b10afa 100644
--- a/src/library/scala/util/control/Breaks.scala
+++ b/src/library/scala/util/control/Breaks.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.util.control
+package scala
+package util.control
/** A class that can be instantiated for the break control abstraction.
* Example usage:
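A minimal illustrative sketch of the break control abstraction referred to above:

  import scala.util.control.Breaks.{ breakable, break }

  breakable {
    for (x <- 1 to 10) {
      if (x > 3) break()
      println(x)
    }
  }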
diff --git a/src/library/scala/util/control/ControlThrowable.scala b/src/library/scala/util/control/ControlThrowable.scala
index 33c90c5815..7ed3d95cd3 100644
--- a/src/library/scala/util/control/ControlThrowable.scala
+++ b/src/library/scala/util/control/ControlThrowable.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.util.control
+package scala
+package util.control
/** A marker trait indicating that the `Throwable` it is mixed into is
* intended for flow control.
diff --git a/src/library/scala/util/control/Exception.scala b/src/library/scala/util/control/Exception.scala
index b97914c4b1..be6d03a145 100644
--- a/src/library/scala/util/control/Exception.scala
+++ b/src/library/scala/util/control/Exception.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.util
+package scala
+package util
package control
import scala.collection.immutable.List
diff --git a/src/library/scala/util/control/NonFatal.scala b/src/library/scala/util/control/NonFatal.scala
index 0d8cdfbace..11fb988e8e 100644
--- a/src/library/scala/util/control/NonFatal.scala
+++ b/src/library/scala/util/control/NonFatal.scala
@@ -6,12 +6,13 @@
** |/ **
\* */
-package scala.util.control
+package scala
+package util.control
/**
* Extractor of non-fatal Throwables. Will not match fatal errors like `VirtualMachineError`
* (for example, `OutOfMemoryError`, a subclass of `VirtualMachineError`), `ThreadDeath`,
- * `LinkageError`, `InterruptedException`, `ControlThrowable`, or `NotImplementedError`.
+ * `LinkageError`, `InterruptedException`, `ControlThrowable`.
* However, `StackOverflowError` is matched, i.e. considered non-fatal.
*
* Note that [[scala.util.control.ControlThrowable]], an internal Throwable, is not matched by
@@ -35,7 +36,7 @@ object NonFatal {
def apply(t: Throwable): Boolean = t match {
case _: StackOverflowError => true // StackOverflowError ok even though it is a VirtualMachineError
// VirtualMachineError includes OutOfMemoryError and other fatal errors
- case _: VirtualMachineError | _: ThreadDeath | _: InterruptedException | _: LinkageError | _: ControlThrowable | _: NotImplementedError => false
+ case _: VirtualMachineError | _: ThreadDeath | _: InterruptedException | _: LinkageError | _: ControlThrowable => false
case _ => true
}
/**
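Note: a short sketch of the intended use of the extractor documented above (illustrative only):

    import scala.util.control.NonFatal

    object NonFatalDemo {
      def main(args: Array[String]): Unit = {
        try "not-a-number".toInt
        catch {
          case NonFatal(e) =>
            // Ordinary exceptions are caught; fatal errors such as
            // VirtualMachineError or LinkageError would propagate.
            println(s"recovered from: ${e.getClass.getSimpleName}")
        }
      }
    }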
diff --git a/src/library/scala/util/control/TailCalls.scala b/src/library/scala/util/control/TailCalls.scala
index 955cee7657..ba3044c718 100644
--- a/src/library/scala/util/control/TailCalls.scala
+++ b/src/library/scala/util/control/TailCalls.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.util.control
+package scala
+package util.control
/** Methods exported by this object implement tail calls via trampolining.
* Tail calling methods have to return their result using `done` or call the
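Note: the scaladoc is cut off by the hunk; for context, a minimal sketch of trampolining with `done`/`tailcall`/`result` (illustrative only):

    import scala.util.control.TailCalls._

    object TailCallsDemo {
      // Mutual recursion that would overflow the stack if written naively.
      def isEven(n: Int): TailRec[Boolean] =
        if (n == 0) done(true) else tailcall(isOdd(n - 1))

      def isOdd(n: Int): TailRec[Boolean] =
        if (n == 0) done(false) else tailcall(isEven(n - 1))

      def main(args: Array[String]): Unit =
        println(isEven(100000).result) // true, evaluated iteratively on the heap
    }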
diff --git a/src/library/scala/util/grammar/HedgeRHS.scala b/src/library/scala/util/grammar/HedgeRHS.scala
deleted file mode 100644
index d1c11a2f99..0000000000
--- a/src/library/scala/util/grammar/HedgeRHS.scala
+++ /dev/null
@@ -1,26 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.util.grammar
-
-@deprecated("This class will be removed", "2.10.0")
-abstract class HedgeRHS
-
-/** Right hand side of a hedge production, deriving a single tree. */
-@deprecated("This class will be removed", "2.10.0")
-case class ConsRHS(tnt: Int, hnt: Int) extends HedgeRHS
-
-/** Right hand side of a hedge production, deriving any hedge. */
-@deprecated("This class will be removed", "2.10.0")
-case object AnyHedgeRHS extends HedgeRHS
-
-/** Right hand side of a hedge production, deriving the empty hedge. */
-@deprecated("This class will be removed", "2.10.0")
-case object EmptyHedgeRHS extends HedgeRHS
diff --git a/src/library/scala/util/grammar/TreeRHS.scala b/src/library/scala/util/grammar/TreeRHS.scala
deleted file mode 100644
index ee72ea982d..0000000000
--- a/src/library/scala/util/grammar/TreeRHS.scala
+++ /dev/null
@@ -1,22 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.util.grammar
-
-/** Right hand side of a tree production. */
-@deprecated("This class will be removed", "2.10.0")
-abstract class TreeRHS
-
-/** Right hand side of a tree production, labelled with a letter from an alphabet. */
-@deprecated("This class will be removed", "2.10.0")
-case class LabelledRHS[A](label: A, hnt: Int) extends TreeRHS
-
-@deprecated("This class will be removed", "2.10.0")
-case object AnyTreeRHS extends TreeRHS
diff --git a/src/library/scala/util/hashing/ByteswapHashing.scala b/src/library/scala/util/hashing/ByteswapHashing.scala
index a96945788c..470479725b 100644
--- a/src/library/scala/util/hashing/ByteswapHashing.scala
+++ b/src/library/scala/util/hashing/ByteswapHashing.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.util.hashing
+package scala
+package util.hashing
@@ -16,20 +17,20 @@ package scala.util.hashing
/** A fast multiplicative hash by Phil Bagwell.
*/
final class ByteswapHashing[T] extends Hashing[T] {
-
+
def hash(v: T) = byteswap32(v.##)
-
+
}
object ByteswapHashing {
-
+
private class Chained[T](h: Hashing[T]) extends Hashing[T] {
def hash(v: T) = byteswap32(h.hash(v))
}
-
+
/** Composes another `Hashing` with the Byteswap hash.
*/
def chain[T](h: Hashing[T]): Hashing[T] = new Chained(h)
-
+
}
diff --git a/src/library/scala/util/hashing/Hashing.scala b/src/library/scala/util/hashing/Hashing.scala
index b57f858bed..2b72c1dbe3 100644
--- a/src/library/scala/util/hashing/Hashing.scala
+++ b/src/library/scala/util/hashing/Hashing.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.util.hashing
+package scala
+package util.hashing
import scala.annotation.implicitNotFound
diff --git a/src/library/scala/util/hashing/MurmurHash3.scala b/src/library/scala/util/hashing/MurmurHash3.scala
index 0aa7e6f1cb..c85664349e 100644
--- a/src/library/scala/util/hashing/MurmurHash3.scala
+++ b/src/library/scala/util/hashing/MurmurHash3.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.util.hashing
+package scala
+package util.hashing
import java.lang.Integer.{ rotateLeft => rotl }
@@ -76,7 +77,7 @@ private[hashing] class MurmurHash3 {
h = mix(h, data)
i += 2
}
- if (i < str.length) h = mixLast(h, str.charAt(i))
+ if (i < str.length) h = mixLast(h, str.charAt(i).toInt)
finalizeHash(h, str.length)
}
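Note: a small sketch of the public entry points that drive the loop above (assumes the seedless overloads on the MurmurHash3 companion object; illustrative only):

    import scala.util.hashing.MurmurHash3

    object MurmurDemo {
      def main(args: Array[String]): Unit = {
        // stringHash consumes two chars per step and, as in the hunk above,
        // mixes a trailing odd character in via mixLast.
        println(MurmurHash3.stringHash("abc"))
        println(MurmurHash3.arrayHash(Array(1, 2, 3)))
      }
    }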
diff --git a/src/library/scala/util/hashing/package.scala b/src/library/scala/util/hashing/package.scala
index 7d38f151f9..2c8e0154fc 100644
--- a/src/library/scala/util/hashing/package.scala
+++ b/src/library/scala/util/hashing/package.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.util
+package scala
+package util
@@ -14,7 +15,7 @@ package scala.util
package object hashing {
-
+
/** Fast multiplicative hash with a nice distribution.
*/
def byteswap32(v: Int): Int = {
@@ -22,7 +23,7 @@ package object hashing {
hc = java.lang.Integer.reverseBytes(hc)
hc * 0x9e3775cd
}
-
+
/** Fast multiplicative hash with a nice distribution
* for 64-bit values.
*/
@@ -31,5 +32,5 @@ package object hashing {
hc = java.lang.Long.reverseBytes(hc)
hc * 0x9e3775cd9e3775cdL
}
-
+
}
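Note: a small sketch of the helpers defined in this package object, combined with `ByteswapHashing.chain` from the previous file (assumes the `Hashing.default` instance; illustrative only):

    import scala.util.hashing.{byteswap32, byteswap64, ByteswapHashing, Hashing}

    object ByteswapDemo {
      def main(args: Array[String]): Unit = {
        // Post-process hash codes so that consecutive values spread apart.
        println(byteswap32(1))
        println(byteswap32(2))
        println(byteswap64(1L))
        // Compose an existing Hashing with the byteswap finalizer.
        val h: Hashing[String] = ByteswapHashing.chain(Hashing.default[String])
        println(h.hash("abc"))
      }
    }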
diff --git a/src/library/scala/util/logging/ConsoleLogger.scala b/src/library/scala/util/logging/ConsoleLogger.scala
deleted file mode 100644
index 74f058b4ec..0000000000
--- a/src/library/scala/util/logging/ConsoleLogger.scala
+++ /dev/null
@@ -1,26 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-
-package scala.util.logging
-
-/**
- * The trait `ConsoleLogger` is mixed into a concrete class who
- * has class `Logged` among its base classes.
- *
- * @author Burak Emir
- * @version 1.0
- */
-@deprecated("This class will be removed.", "2.10.0")
-trait ConsoleLogger extends Logged {
-
- /** logs argument to Console using [[scala.Console.println]]
- */
- override def log(msg: String): Unit = Console.println(msg)
-}
diff --git a/src/library/scala/util/logging/Logged.scala b/src/library/scala/util/logging/Logged.scala
deleted file mode 100644
index f2661d3206..0000000000
--- a/src/library/scala/util/logging/Logged.scala
+++ /dev/null
@@ -1,33 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala.util.logging
-
-/** Mixing in Logged indicates that a class provides support for logging.
- *
- * For instance:
- * {{{
- * // The developer of the library writes:
- * class MyClass extends Logged {
- * // do stuff, call log
- * }
- *
- * // The user of the library instantiates:
- * val x = new MyClass() with ConsoleLogger
- * }}}
- * and the logging is sent to the [[scala.util.logging.ConsoleLogger]] object.
- */
-@deprecated("This class will be removed.", "2.10.0")
-trait Logged {
- /** This method should log the message given as argument somewhere
- * as a side-effect.
- *
- * @param msg message to be logged
- */
- def log(msg: String): Unit = {}
-}
diff --git a/src/library/scala/util/matching/Regex.scala b/src/library/scala/util/matching/Regex.scala
index 716d746552..8eac0a2520 100644
--- a/src/library/scala/util/matching/Regex.scala
+++ b/src/library/scala/util/matching/Regex.scala
@@ -28,7 +28,8 @@
* into a [[java.lang.String]].
*
*/
-package scala.util.matching
+package scala
+package util.matching
import scala.collection.AbstractIterator
import java.util.regex.{ Pattern, Matcher }
@@ -131,7 +132,7 @@ import java.util.regex.{ Pattern, Matcher }
* @author Martin Odersky
* @version 1.1, 29/01/2008
*
- * @param regex A string representing a regular expression
+ * @param pattern The compiled pattern
* @param groupNames A mapping from names to indices in capture groups
*
* @define replacementString
@@ -144,49 +145,81 @@ import java.util.regex.{ Pattern, Matcher }
* to automatically escape these characters.
*/
@SerialVersionUID(-2094783597747625537L)
-class Regex(regex: String, groupNames: String*) extends Serializable {
+class Regex private[matching](val pattern: Pattern, groupNames: String*) extends Serializable {
outer =>
import Regex._
- /** The compiled pattern */
- val pattern = Pattern.compile(regex)
+ /**
+ * @param regex A string representing a regular expression
+ * @param groupNames A mapping from names to indices in capture groups
+ */
+ def this(regex: String, groupNames: String*) = this(Pattern.compile(regex), groupNames: _*)
- /** Tries to match target (whole match) and returns the matching subgroups.
- * if the pattern has no subgroups, then it returns an empty list on a
- * successful match.
- *
- * Note, however, that if some subgroup has not been matched, a `null` will
- * be returned for that subgroup.
+ /** Tries to match a [[java.lang.CharSequence]].
+ * If the match succeeds, the result is a list of the matching
+ * groups (or a `null` element if a group did not match any input).
+ * If the pattern specifies no groups, then the result will be an empty list
+ * on a successful match.
*
+ * This method attempts to match the entire input by default; to find the next
+ * matching subsequence, use an unanchored Regex.
+ *
* For example:
*
* {{{
* val p1 = "ab*c".r
- * val p2 = "a(b*)c".r
- *
* val p1Matches = "abbbc" match {
* case p1() => true
* case _ => false
* }
- *
+ * val p2 = "a(b*)c".r
* val numberOfB = "abbbc" match {
* case p2(b) => Some(b.length)
* case _ => None
* }
+ * val p3 = "b*".r.unanchored
+ * val p3Matches = "abbbc" match {
+ * case p3() => true
+ * case _ => false
+ * }
* }}}
*
+ * @param s The string to match
+ * @return The matches
+ */
+ def unapplySeq(s: CharSequence): Option[Seq[String]] = {
+ val m = pattern matcher s
+ if (runMatcher(m)) Some(1 to m.groupCount map m.group)
+ else None
+ }
+
+ /** Tries to match on a [[scala.util.matching.Regex.Match]].
+ * A previously failed match results in None.
+ * If a successful match was made against the current pattern, then that result is used.
+ * Otherwise, this Regex is applied to the previously matched input,
+ * and the result of that match is used.
+ */
+ def unapplySeq(m: Match): Option[Seq[String]] =
+ if (m.matched == null) None
+ else if (m.matcher.pattern == this.pattern) Some(1 to m.groupCount map m.group)
+ else unapplySeq(m.matched)
+
+ /** Tries to match target.
* @param target The string to match
* @return The matches
*/
+ @deprecated("Extracting a match result from anything but a CharSequence or Match is deprecated", "2.11.0")
def unapplySeq(target: Any): Option[List[String]] = target match {
case s: CharSequence =>
val m = pattern matcher s
if (runMatcher(m)) Some((1 to m.groupCount).toList map m.group)
else None
- case m: Match => unapplySeq(m.matched)
- case _ => None
+ case m: Match => unapplySeq(m.matched)
+ case _ => None
}
+
+ // @see UnanchoredRegex
protected def runMatcher(m: Matcher) = m.matches()
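Note: a short sketch of the two extraction paths added above, anchored string extraction and extraction from an existing Match (illustrative only):

    import scala.util.matching.Regex

    object RegexExtractDemo {
      def main(args: Array[String]): Unit = {
        val date: Regex = """(\d{4})-(\d{2})-(\d{2})""".r

        // unapplySeq(s: CharSequence): anchored, so the whole string must match.
        "2013-05-01" match {
          case date(y, m, d) => println(s"$y/$m/$d")
          case _             => println("no match")
        }

        // unapplySeq(m: Match): re-extract groups from a Match without re-running the regex.
        val year: Option[String] =
          date.findFirstMatchIn("released 2013-05-01") collect { case date(y, _, _) => y }
        println(year) // Some(2013)
      }
    }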
/** Return all matches of this regexp in given character sequence as a [[scala.util.matching.Regex.MatchIterator]],
@@ -200,7 +233,7 @@ class Regex(regex: String, groupNames: String*) extends Serializable {
* @return A [[scala.util.matching.Regex.MatchIterator]] of all matches.
* @example {{{for (words <- """\w+""".r findAllIn "A simple example.") yield words}}}
*/
- def findAllIn(source: java.lang.CharSequence) = new Regex.MatchIterator(source, this, groupNames)
+ def findAllIn(source: CharSequence) = new Regex.MatchIterator(source, this, groupNames)
/** Return all matches of this regexp in given character sequence as a
@@ -210,12 +243,12 @@ class Regex(regex: String, groupNames: String*) extends Serializable {
* @return A [[scala.collection.Iterator]] of [[scala.util.matching.Regex.Match]] for all matches.
* @example {{{for (words <- """\w+""".r findAllMatchIn "A simple example.") yield words.start}}}
*/
- def findAllMatchIn(source: java.lang.CharSequence): Iterator[Match] = {
+ def findAllMatchIn(source: CharSequence): Iterator[Match] = {
val matchIterator = findAllIn(source)
new Iterator[Match] {
def hasNext = matchIterator.hasNext
def next: Match = {
- matchIterator.next;
+ matchIterator.next()
new Match(matchIterator.source, matchIterator.matcher, matchIterator.groupNames).force
}
}
@@ -228,7 +261,7 @@ class Regex(regex: String, groupNames: String*) extends Serializable {
* @return An [[scala.Option]] of the first matching string in the text.
* @example {{{"""\w+""".r findFirstIn "A simple example." foreach println // prints "A"}}}
*/
- def findFirstIn(source: java.lang.CharSequence): Option[String] = {
+ def findFirstIn(source: CharSequence): Option[String] = {
val m = pattern.matcher(source)
if (m.find) Some(m.group) else None
}
@@ -245,7 +278,7 @@ class Regex(regex: String, groupNames: String*) extends Serializable {
* @return A [[scala.Option]] of [[scala.util.matching.Regex.Match]] of the first matching string in the text.
* @example {{{("""[a-z]""".r findFirstMatchIn "A simple example.") map (_.start) // returns Some(2), the index of the first match in the text}}}
*/
- def findFirstMatchIn(source: java.lang.CharSequence): Option[Match] = {
+ def findFirstMatchIn(source: CharSequence): Option[Match] = {
val m = pattern.matcher(source)
if (m.find) Some(new Match(source, m, groupNames)) else None
}
@@ -262,7 +295,7 @@ class Regex(regex: String, groupNames: String*) extends Serializable {
* @return A [[scala.Option]] of the matched prefix.
* @example {{{"""[a-z]""".r findPrefixOf "A simple example." // returns None, since the text does not begin with a lowercase letter}}}
*/
- def findPrefixOf(source: java.lang.CharSequence): Option[String] = {
+ def findPrefixOf(source: CharSequence): Option[String] = {
val m = pattern.matcher(source)
if (m.lookingAt) Some(m.group) else None
}
@@ -279,7 +312,7 @@ class Regex(regex: String, groupNames: String*) extends Serializable {
* @return A [[scala.Option]] of the [[scala.util.matching.Regex.Match]] of the matched string.
* @example {{{"""\w+""".r findPrefixMatchOf "A simple example." map (_.after) // returns Some(" simple example.")}}}
*/
- def findPrefixMatchOf(source: java.lang.CharSequence): Option[Match] = {
+ def findPrefixMatchOf(source: CharSequence): Option[Match] = {
val m = pattern.matcher(source)
if (m.lookingAt) Some(new Match(source, m, groupNames)) else None
}
@@ -293,7 +326,7 @@ class Regex(regex: String, groupNames: String*) extends Serializable {
* @return The resulting string
* @example {{{"""\d+""".r replaceAllIn ("July 15", "<NUMBER>") // returns "July <NUMBER>"}}}
*/
- def replaceAllIn(target: java.lang.CharSequence, replacement: String): String = {
+ def replaceAllIn(target: CharSequence, replacement: String): String = {
val m = pattern.matcher(target)
m.replaceAll(replacement)
}
@@ -316,7 +349,7 @@ class Regex(regex: String, groupNames: String*) extends Serializable {
* @param replacer The function which maps a match to another string.
* @return The target string after replacements.
*/
- def replaceAllIn(target: java.lang.CharSequence, replacer: Match => String): String = {
+ def replaceAllIn(target: CharSequence, replacer: Match => String): String = {
val it = new Regex.MatchIterator(target, this, groupNames).replacementData
it foreach (md => it replace replacer(md))
it.replaced
@@ -343,7 +376,7 @@ class Regex(regex: String, groupNames: String*) extends Serializable {
* @param replacer The function which optionally maps a match to another string.
* @return The target string after replacements.
*/
- def replaceSomeIn(target: java.lang.CharSequence, replacer: Match => Option[String]): String = {
+ def replaceSomeIn(target: CharSequence, replacer: Match => Option[String]): String = {
val it = new Regex.MatchIterator(target, this, groupNames).replacementData
for (matchdata <- it ; replacement <- replacer(matchdata))
it replace replacement
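Note: a brief sketch of functional replacement with the two methods above (illustrative only):

    object ReplaceDemo {
      def main(args: Array[String]): Unit = {
        val num = """\d+""".r

        // replaceAllIn with a Match => String function rewrites every match.
        println(num.replaceAllIn("7 apples, 12 pears", m => (m.matched.toInt * 2).toString))

        // replaceSomeIn: returning None leaves that particular match untouched.
        println(num.replaceSomeIn("7 apples, 12 pears",
          m => if (m.matched.toInt > 10) Some("many") else None))
      }
    }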
@@ -359,7 +392,7 @@ class Regex(regex: String, groupNames: String*) extends Serializable {
* @param replacement The string that will replace the match
* @return The resulting string
*/
- def replaceFirstIn(target: java.lang.CharSequence, replacement: String): String = {
+ def replaceFirstIn(target: CharSequence, replacement: String): String = {
val m = pattern.matcher(target)
m.replaceFirst(replacement)
}
@@ -370,7 +403,7 @@ class Regex(regex: String, groupNames: String*) extends Serializable {
* @return The array of strings computed by splitting the
* input around matches of this regexp
*/
- def split(toSplit: java.lang.CharSequence): Array[String] =
+ def split(toSplit: CharSequence): Array[String] =
pattern.split(toSplit)
/** Create a new Regex with the same pattern, but no requirement that
@@ -390,9 +423,11 @@ class Regex(regex: String, groupNames: String*) extends Serializable {
*
* @return The new unanchored regex
*/
- def unanchored: UnanchoredRegex = new Regex(regex, groupNames: _*) with UnanchoredRegex { override def anchored = outer }
+ def unanchored: UnanchoredRegex = new Regex(pattern, groupNames: _*) with UnanchoredRegex { override def anchored = outer }
def anchored: Regex = this
+ def regex: String = pattern.pattern
+
/** The string defining the regular expression */
override def toString = regex
}
@@ -421,7 +456,7 @@ object Regex {
trait MatchData {
/** The source from where the match originated */
- val source: java.lang.CharSequence
+ val source: CharSequence
/** The names of the groups, or an empty sequence if none are defined */
val groupNames: Seq[String]
@@ -459,25 +494,25 @@ object Regex {
/** The char sequence before first character of match,
* or `null` if nothing was matched */
- def before: java.lang.CharSequence =
+ def before: CharSequence =
if (start >= 0) source.subSequence(0, start)
else null
/** The char sequence before first character of match in group `i`,
* or `null` if nothing was matched for that group */
- def before(i: Int): java.lang.CharSequence =
+ def before(i: Int): CharSequence =
if (start(i) >= 0) source.subSequence(0, start(i))
else null
/** Returns char sequence after last character of match,
* or `null` if nothing was matched */
- def after: java.lang.CharSequence =
+ def after: CharSequence =
if (end >= 0) source.subSequence(end, source.length)
else null
/** The char sequence after last character of match in group `i`,
* or `null` if nothing was matched for that group */
- def after(i: Int): java.lang.CharSequence =
+ def after(i: Int): CharSequence =
if (end(i) >= 0) source.subSequence(end(i), source.length)
else null
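Note: a small sketch of the `before`/`after` accessors touched above (illustrative only):

    object MatchDataDemo {
      def main(args: Array[String]): Unit = {
        val word = """\w+""".r
        word.findFirstMatchIn("  hello, world") foreach { m =>
          println(m.before)  // "  "       -- char sequence before the match
          println(m.matched) // "hello"
          println(m.after)   // ", world"  -- char sequence after the match
          println(m.start)   // 2
        }
      }
    }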
@@ -501,8 +536,8 @@ object Regex {
/** Provides information about a successful match.
*/
- class Match(val source: java.lang.CharSequence,
- matcher: Matcher,
+ class Match(val source: CharSequence,
+ private[matching] val matcher: Matcher,
val groupNames: Seq[String]) extends MatchData {
/** The index of the first matched character */
@@ -563,7 +598,7 @@ object Regex {
/** A class to step through a sequence of regex matches
*/
- class MatchIterator(val source: java.lang.CharSequence, val regex: Regex, val groupNames: Seq[String])
+ class MatchIterator(val source: CharSequence, val regex: Regex, val groupNames: Seq[String])
extends AbstractIterator[String] with Iterator[String] with MatchData { self =>
protected[Regex] val matcher = regex.pattern.matcher(source)
@@ -602,14 +637,14 @@ object Regex {
/** Convert to an iterator that yields MatchData elements instead of Strings */
def matchData: Iterator[Match] = new AbstractIterator[Match] {
def hasNext = self.hasNext
- def next = { self.next; new Match(source, matcher, groupNames).force }
+ def next = { self.next(); new Match(source, matcher, groupNames).force }
}
/** Convert to an iterator that yields MatchData elements instead of Strings and has replacement support */
private[matching] def replacementData = new AbstractIterator[Match] with Replacement {
def matcher = self.matcher
def hasNext = self.hasNext
- def next = { self.next; new Match(source, matcher, groupNames).force }
+ def next = { self.next(); new Match(source, matcher, groupNames).force }
}
}
@@ -620,7 +655,7 @@ object Regex {
private[matching] trait Replacement {
protected def matcher: Matcher
- private var sb = new java.lang.StringBuffer
+ private val sb = new java.lang.StringBuffer
def replaced = {
val newsb = new java.lang.StringBuffer(sb)
diff --git a/src/library/scala/util/parsing/ast/AbstractSyntax.scala b/src/library/scala/util/parsing/ast/AbstractSyntax.scala
deleted file mode 100644
index 30b20d71c6..0000000000
--- a/src/library/scala/util/parsing/ast/AbstractSyntax.scala
+++ /dev/null
@@ -1,32 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala.util.parsing.ast
-
-import scala.util.parsing.input.Positional
-
-/** This component provides the core abstractions for representing an Abstract Syntax Tree
- *
- * @author Adriaan Moors
- */
-@deprecated("This class will be removed", "2.10.0")
-trait AbstractSyntax {
- /** The base class for elements of the abstract syntax tree.
- */
- trait Element extends Positional
-
- /** The base class for elements in the AST that represent names [[scala.util.parsing.ast.Binders]].
- */
- trait NameElement extends Element {
- def name: String
- override def equals(that: Any): Boolean = that match {
- case n: NameElement => n.name == name
- case _ => false
- }
- }
-}
diff --git a/src/library/scala/util/parsing/ast/Binders.scala b/src/library/scala/util/parsing/ast/Binders.scala
deleted file mode 100644
index a6ad1907c2..0000000000
--- a/src/library/scala/util/parsing/ast/Binders.scala
+++ /dev/null
@@ -1,347 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala.util.parsing.ast
-
-import scala.collection.AbstractIterable
-import scala.collection.mutable
-import scala.language.implicitConversions
-
-//DISCLAIMER: this code is highly experimental!
-
- // TODO: avoid clashes when substituting
- // TODO: check binders in the same scope are distinct
-
-/** This trait provides the core ''Scrap-Your-Boilerplate'' abstractions as
- * well as implementations for common datatypes.
- *
- * (Based on Ralf Lämmel's [[http://homepages.cwi.nl/~ralf/syb3/ SYB papers]].)
- *
- * @author Adriaan Moors
- */
-@deprecated("This class will be removed", "2.10.0")
-trait Mappable {
- trait Mapper { def apply[T <% Mappable[T]](x: T): T } /* TODO: having type `Forall T. T => T` is too strict:
- sometimes we want to allow `Forall T >: precision. T => T` for some type `precision`, so that,
- beneath a certain threshold, we have some leeway.
- concretely: to use gmap for substitution, we simply require that ast nodes are mapped to ast nodes,
- we can't require that the type is preserved precisely: a Name may map to e.g., a MethodCall
- */
-
- trait Mappable[T] {
- // one-layer traversal
- def gmap(f: Mapper): T
- // everywhere f x = f (gmapT (everywhere f) x)
- def everywhere(f: Mapper)(implicit c: T => Mappable[T]): T =
- f(gmap(new Mapper { def apply[T <% Mappable[T]](x: T): T = x.everywhere(f)}))
- }
-
- implicit def StringIsMappable(s: String): Mappable[String] =
- new Mappable[String] {
- def gmap(f: Mapper): String = f(s)
- }
-
- implicit def ListIsMappable[t <% Mappable[t]](xs: List[t]): Mappable[List[t]] =
- new Mappable[List[t]] {
- def gmap(f: Mapper): List[t] = (for (x <- xs) yield f(x)).toList
- }
-
- implicit def OptionIsMappable[t <% Mappable[t]](xs: Option[t]): Mappable[Option[t]] =
- new Mappable[Option[t]] {
- def gmap(f: Mapper): Option[t] = (for (x <- xs) yield f(x))
- }
-}
-
-/** This component provides functionality for enforcing variable binding
- * during parse-time.
- *
- * When parsing simple languages, like Featherweight Scala, these parser
- * combinators will fully enforce the binding discipline. When names are
- * allowed to be left unqualified, these mechanisms would have to be
- * complemented by an extra phase that resolves names that couldn't be
- * resolved using the naive binding rules. (Maybe some machinery to
- * model `implicit` binders (e.g., `this` and imported qualifiers)
- * and selection on a binder will suffice?)
- *
- * @author Adriaan Moors
- */
-trait Binders extends AbstractSyntax with Mappable {
- /** A `Scope` keeps track of one or more syntactic elements that represent bound names.
- * The elements it contains share the same scope and must all be distinct, as determined by `==`.
- *
- * A `NameElement` `n` in the AST that is conceptually bound by a `Scope` `s`, is replaced by a
- * `BoundElement(n, s)`. (For example, in `val x:Int=x+1`, the first `x` is modelled by a
- * Scope `s` that contains `x` and the second `x` is represented by a `BoundElement(x, s)`)
- * The term (`x+1`) in scope of the Scope becomes an `UnderBinder(s, x+1)`.
- *
- * A `NameElement` `n` is bound by a `Scope` `s` if it is wrapped as a `BoundElement(n, s)`, and
- * `s` has a binder element that is semantically equal (`equals` or `==`) to `n`.
- *
- * A `Scope` is represented textually by its list of binder elements, followed by the scope's `id`.
- * For example: `[x, y]!1` represents the scope with `id` `1` and binder elements `x` and `y`.
- * (`id` is solely used for this textual representation.)
- */
- class Scope[binderType <: NameElement] extends AbstractIterable[binderType] with Iterable[binderType] {
- private val substitution: mutable.Map[binderType, Element] =
- new mutable.LinkedHashMap[binderType, Element] // a LinkedHashMap is ordered by insertion order -- important!
-
- /** Returns a unique number identifying this Scope (only used for representation purposes). */
- val id: Int = _Binder.genId
-
- /** Returns the binders in this scope.
- * For a typical let-binding, this is just the variable name. For an argument list to a method body,
- * there is one binder per formal argument.
- */
- def iterator = substitution.keysIterator
-
- /** Return the `i`th binder in this scope. */
- def apply(i: Int): binderType = this.iterator.toList(i)
-
- /** Returns true if this container has a binder equal (as determined by `==`) to `b`. */
- def binds(b: binderType): Boolean = substitution.contains(b)
-
- def indexFor(b: binderType): Option[Int] = {
- val iter = this.iterator.zipWithIndex
- for ((that, count) <- iter) {
- if (that.name == b.name) // TODO: why do name equals and structural equals differ?
- return Some(count + 1)
- else
- Console.println(that+"!="+b)
- }
-
- None
- }
-
- /** Adds a new binder, for example the variable name in a local variable declaration.
- *
- * @param b a new binder that is distinct from the existing binders in this scope,
- * and shares their conceptual scope. `canAddBinder(b)` must hold.
- * @return `binds(b)` and `getElementFor(b) eq b` will hold.
- */
- def addBinder(b: binderType) { substitution += Pair(b, b) }
-
- // TODO: strengthen this condition so that no binders may be added after this scope has been
- // linked to its `UnderBinder` (i.e., while parsing, BoundElements may be added to the Scope
- // associated to the UnderBinder, but after that, no changes are allowed, except for substitution)?
- /** `canAddElement` indicates whether `b` may be added to this scope.
- *
- *
- * @return true if `b` had not been added yet
- */
- def canAddBinder(b: binderType): Boolean = !binds(b)
-
- /** ''Replaces'' the bound occurrences of a contained binder by their new value.
- * The bound occurrences of `b` are not actually replaced; the scope keeps track
- * of a substitution that maps every binder to its current value. Since a `BoundElement` is
- * a proxy for the element it is bound to by its binder, `substitute` may thus be thought of
- * as replacing all the bound occurrences of the given binder `b` by their new value `value`.
- *
- * @param b the binder whose bound occurrences should be given a new value. `binds(b)` must hold.
- * @param value the new value for the bound occurrences of `b`
- * @return `getElementFor(b) eq value` will hold.
- */
- def substitute(b: binderType, value: Element): Unit = substitution(b) = value
-
- /** Returns the current value for the bound occurrences of `b`.
- *
- * @param b the contained binder whose current value should be returned `binds(b)` must hold.
- */
- def getElementFor(b: binderType): Element = substitution(b)
-
- override def toString: String = this.iterator.toList.mkString("[",", ","]")+"!"+id // TODO show substitution?
-
- /** Returns a list of strings that represent the binder elements, each tagged with this scope's id. */
- def bindersToString: List[String] = (for(b <- this.iterator) yield b+"!"+id).toList
-
- /** Return a new inheriting scope that won't check whether binding is respected until the scope is left (so as to support forward references). */
- def allowForwardRef: Scope[binderType] = this // TODO
-
- /** Return a nested scope -- binders entered into it won't be visible in this scope, but if this scope allows forward references,
- * the binding in the returned scope also does, and thus the check that all variables are bound is deferred until this scope is left.
- */
- def nested: Scope[binderType] = this // TODO
-
- def onEnter() {}
- def onLeft() {}
- }
-
-
- trait BindingSensitive {
- // would like to specify this as one method:
- // def alpha_==[t <: NameElement](other: BoundElement[t]): Boolean
- // def alpha_==[bt <: binderType, st <: elementT](other: UnderBinder[bt, st]): Boolean
- }
-
- /** A `BoundElement` is bound in a certain scope `scope`, which keeps track of the actual element that
- * `el` stands for.
- *
- * A `BoundElement` is represented textually by its bound element, followed by its scope's `id`.
- * For example: `x@1` represents the variable `x` that is bound in the scope with `id` `1`.
- *
- * @note `scope.binds(el)` holds before and after.
- */
- case class BoundElement[boundElement <: NameElement](el: boundElement, scope: Scope[boundElement]) extends NameElement with Proxy with BindingSensitive {
- /** Returns the element this `BoundElement` stands for.
- * The `Proxy` trait ensures `equals`, `hashCode` and `toString` are forwarded to
- * the result of this method.
- */
- def self: Element = scope.getElementFor(el)
-
- def name = self.asInstanceOf[NameElement].name // TODO: this is only safe when substituted to a NameElement, which certainly isn't required -- I want dynamic inheritance! :)
-
- // decorate element's representation with the id of the scope it's bound in
- override def toString: String = super.toString+"@"+scope.id
-
- def alpha_==[t <: NameElement](other: BoundElement[t]): Boolean = scope.indexFor(el) == other.scope.indexFor(other.el)
- }
-
- /** A variable that escaped its scope (i.e., a free variable) -- we don't deal very well with these yet. */
- class UnboundElement[N <: NameElement](private val el: N) extends NameElement {
- def name = el.name+"@??"
- }
-
- // this is useless, as Element is a supertype of BoundElement --> the coercion will never be inferred
- // if we knew a more specific type for the element that the bound element represents, this could make sense
- // implicit def BoundElementProxy[t <: NameElement](e: BoundElement[t]): Element = e.self
-
- /** Represents an element with variables that are bound in a certain scope. */
- class UnderBinder[binderType <: NameElement, elementT <% Mappable[elementT]](val scope: Scope[binderType], private[Binders] val element: elementT) extends Element with BindingSensitive {
- override def toString: String = "(" + scope.toString + ") in { "+element.toString+" }"
-
- /** Alpha-equivalence -- TODO
- * Returns true if the `element` of the `other` `UnderBinder` is equal to this `element` up to alpha-conversion.
- *
- * That is, regular equality is used for all elements but `BoundElement`s: such an element is
- * equal to a `BoundElement` in `other` if their binders are equal. Binders are equal if they
- * are at the same index in their respective scope.
- *
- * Example:
- * {{{
- * UnderBinder([x, y]!1, x@1) alpha_== UnderBinder([a, b]!2, a@2)
- * ! (UnderBinder([x, y]!1, y@1) alpha_== UnderBinder([a, b]!2, a@2))
- * }}}
- */
- /*def alpha_==[bt <: binderType, st <: elementT](other: UnderBinder[bt, st]): Boolean = {
- var result = true
-
- // TODO: generic zip or gmap2
- element.gmap2(other.element, new Mapper2 {
- def apply[s <% Mappable[s], t <% Mappable[t]](x :{s, t}): {s, t} = x match {
- case {be1: BoundElement[_], be2: BoundElement[_]} => result == result && be1.alpha_==(be2) // monadic gmap (cheating using state directly)
- case {ub1: UnderBinder[_, _], ub2: UnderBinder[_, _]} => result == result && be1.alpha_==(be2)
- case {a, b} => result == result && a.equals(b)
- }; x
- })
- }*/
-
- def cloneElementWithSubst(subst: Map[NameElement, NameElement]) = element.gmap(new Mapper { def apply[t <% Mappable[t]](x :t): t = x match{
- case substable: NameElement if subst.contains(substable) => subst.get(substable).asInstanceOf[t] // TODO: wrong... substitution is not (necessarily) the identity function
- //Console.println("substed: "+substable+"-> "+subst.get(substable)+")");
- case x => x // Console.println("subst: "+x+"(keys: "+subst.keys+")");x
- }})
-
- // TODO
- def cloneElementNoBoundElements = element.gmap(new Mapper { def apply[t <% Mappable[t]](x :t): t = x match{
- case BoundElement(el, _) => new UnboundElement(el).asInstanceOf[t] // TODO: precision stuff
- case x => x
- }})
-
- def extract: elementT = cloneElementNoBoundElements
- def extract(subst: Map[NameElement, NameElement]): elementT = cloneElementWithSubst(subst)
-
- /** Get a string representation of element, normally we don't allow direct access to element, but just getting a string representation is ok. */
- def elementToString: String = element.toString
- }
-
- //SYB type class instances
- implicit def UnderBinderIsMappable[bt <: NameElement <% Mappable[bt], st <% Mappable[st]](ub: UnderBinder[bt, st]): Mappable[UnderBinder[bt, st]] =
- new Mappable[UnderBinder[bt, st]] {
- def gmap(f: Mapper): UnderBinder[bt, st] = UnderBinder(f(ub.scope), f(ub.element))
- }
-
- implicit def ScopeIsMappable[bt <: NameElement <% Mappable[bt]](scope: Scope[bt]): Mappable[Scope[bt]] =
- new Mappable[Scope[bt]] {
- def gmap(f: Mapper): Scope[bt] = { val newScope = new Scope[bt]()
- for(b <- scope) newScope.addBinder(f(b))
- newScope
- }
- }
-
- implicit def NameElementIsMappable(self: NameElement): Mappable[NameElement] = new Mappable[NameElement] {
- def gmap(f: Mapper): NameElement = self match {
- case BoundElement(el, scope) => BoundElement(f(el), f(scope))
- case _ => UserNameElementIsMappable(self).gmap(f)
- }
- }
-
- def UserNameElementIsMappable[t <: NameElement](self: t): Mappable[t]
-
- object UnderBinder {
- def apply[binderType <: NameElement, elementT <% Mappable[elementT]](scope: Scope[binderType], element: elementT) = new UnderBinder(scope, element)
- def unit[bt <: NameElement, elementT <% Mappable[elementT]](x: elementT) = UnderBinder(new Scope[bt](), x)
- }
-
- /** If a list of `UnderBinder`s all have the same scope, they can be turned in to an `UnderBinder`
- * containing a list of the elements in the original `UnderBinder`.
- *
- * The name `sequence` comes from the fact that this method's type is equal to the type of monadic sequence.
- *
- * @note `!orig.isEmpty` implies `orig.forall(ub => ub.scope eq orig(0).scope)`
- *
- */
- def sequence[bt <: NameElement, st <% Mappable[st]](orig: List[UnderBinder[bt, st]]): UnderBinder[bt, List[st]] =
- if(orig.isEmpty) UnderBinder.unit(Nil)
- else UnderBinder(orig(0).scope, orig.map(_.element))
-
- // couldn't come up with a better name...
- def unsequence[bt <: NameElement, st <% Mappable[st]](orig: UnderBinder[bt, List[st]]): List[UnderBinder[bt, st]] =
- orig.element.map(sc => UnderBinder(orig.scope, sc))
-
- //TODO: more documentation
- /** An environment that maps a `NameElement` to the scope in which it is bound.
- * This can be used to model scoping during parsing.
- *
- * @note This class uses similar techniques as described by ''Burak Emir'' in
- * [[http://library.epfl.ch/theses/?nr=3899 Object-oriented pattern matching]],
- * but uses `==` instead of `eq`, thus types can't be unified in general.
- */
- abstract class BinderEnv {
- def apply[A <: NameElement](v: A): Option[Scope[A]]
- def extend[a <: NameElement](v : a, x : Scope[a]) = new BinderEnv {
- def apply[b <: NameElement](w : b): Option[Scope[b]] =
- if(w == v) Some(x.asInstanceOf[Scope[b]])
- else BinderEnv.this.apply(w)
- }
- }
-
- object EmptyBinderEnv extends BinderEnv {
- def apply[A <: NameElement](v: A): Option[Scope[A]] = None
- }
-
- // TODO: move this to some utility object higher in the scala hierarchy?
- /** Returns a given result, but executes the supplied closure before returning.
- * (The effect of this closure does not influence the returned value.)
- */
- trait ReturnAndDo[T]{
- /**
- * @param block code to be executed, purely for its side-effects
- */
- def andDo(block: => Unit): T
- }
-
- def return_[T](result: T): ReturnAndDo[T] =
- new ReturnAndDo[T] {
- val r = result
- def andDo(block: => Unit): T = {block; r}
- }
-
- private object _Binder {
- private var currentId = 0
- private[Binders] def genId = return_(currentId) andDo {currentId=currentId+1}
- }
-}
diff --git a/src/library/scala/util/parsing/combinator/ImplicitConversions.scala b/src/library/scala/util/parsing/combinator/ImplicitConversions.scala
index ad06749c0d..0683ea927d 100644
--- a/src/library/scala/util/parsing/combinator/ImplicitConversions.scala
+++ b/src/library/scala/util/parsing/combinator/ImplicitConversions.scala
@@ -7,7 +7,8 @@
\* */
-package scala.util.parsing.combinator
+package scala
+package util.parsing.combinator
import scala.language.implicitConversions
diff --git a/src/library/scala/util/parsing/combinator/JavaTokenParsers.scala b/src/library/scala/util/parsing/combinator/JavaTokenParsers.scala
index 89832d3fb2..01288a182e 100644
--- a/src/library/scala/util/parsing/combinator/JavaTokenParsers.scala
+++ b/src/library/scala/util/parsing/combinator/JavaTokenParsers.scala
@@ -7,7 +7,8 @@
\* */
-package scala.util.parsing.combinator
+package scala
+package util.parsing.combinator
import scala.annotation.migration
diff --git a/src/library/scala/util/parsing/combinator/PackratParsers.scala b/src/library/scala/util/parsing/combinator/PackratParsers.scala
index 16705d45f9..a11dd18e62 100644
--- a/src/library/scala/util/parsing/combinator/PackratParsers.scala
+++ b/src/library/scala/util/parsing/combinator/PackratParsers.scala
@@ -6,9 +6,9 @@
** |/ **
\* */
-package scala.util.parsing.combinator
+package scala
+package util.parsing.combinator
-import scala.util.parsing.combinator._
import scala.util.parsing.input.{ Reader, Position }
import scala.collection.mutable
import scala.language.implicitConversions
diff --git a/src/library/scala/util/parsing/combinator/Parsers.scala b/src/library/scala/util/parsing/combinator/Parsers.scala
index ead444653e..4602c3cc53 100644
--- a/src/library/scala/util/parsing/combinator/Parsers.scala
+++ b/src/library/scala/util/parsing/combinator/Parsers.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.util.parsing.combinator
+package scala
+package util.parsing.combinator
import scala.util.parsing.input._
import scala.collection.mutable.ListBuffer
@@ -157,12 +158,6 @@ trait Parsers {
private lazy val lastNoSuccessVar = new DynamicVariable[Option[NoSuccess]](None)
- @deprecated("lastNoSuccess was not thread-safe and will be removed in 2.11.0", "2.10.0")
- def lastNoSuccess: NoSuccess = lastNoSuccessVar.value.orNull
-
- @deprecated("lastNoSuccess was not thread-safe and will be removed in 2.11.0", "2.10.0")
- def lastNoSuccess_=(x: NoSuccess): Unit = lastNoSuccessVar.value = Option(x)
-
/** A common super-class for unsuccessful parse results. */
sealed abstract class NoSuccess(val msg: String, override val next: Input) extends ParseResult[Nothing] { // when we don't care about the difference between Failure and Error
val successful = false
@@ -758,7 +753,7 @@ trait Parsers {
if (elems.length == num) Success(elems.toList, in0)
else p0(in0) match {
case Success(x, rest) => elems += x ; applyp(rest)
- case ns: NoSuccess => return ns
+ case ns: NoSuccess => ns
}
applyp(in)
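Note: for context, a small sketch exercising `repN`, whose early `return` is removed above (illustrative only):

    import scala.util.parsing.combinator.RegexParsers

    object RepNDemo extends RegexParsers {
      val digit: Parser[String] = """\d""".r

      // Exactly three digits, via the repN combinator from the hunk above.
      def threeDigits: Parser[List[String]] = repN(3, digit)

      def main(args: Array[String]): Unit = {
        println(parseAll(threeDigits, "123")) // [1.4] parsed: List(1, 2, 3)
        println(parseAll(threeDigits, "12"))  // failure: ran out of input before three digits
      }
    }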
diff --git a/src/library/scala/util/parsing/combinator/RegexParsers.scala b/src/library/scala/util/parsing/combinator/RegexParsers.scala
index d17d0cac8d..8ebbc573ad 100644
--- a/src/library/scala/util/parsing/combinator/RegexParsers.scala
+++ b/src/library/scala/util/parsing/combinator/RegexParsers.scala
@@ -7,7 +7,8 @@
\* */
-package scala.util.parsing.combinator
+package scala
+package util.parsing.combinator
import java.util.regex.Pattern
import scala.util.matching.Regex
diff --git a/src/library/scala/util/parsing/combinator/lexical/Lexical.scala b/src/library/scala/util/parsing/combinator/lexical/Lexical.scala
index c25c97278f..d8029d068f 100644
--- a/src/library/scala/util/parsing/combinator/lexical/Lexical.scala
+++ b/src/library/scala/util/parsing/combinator/lexical/Lexical.scala
@@ -8,7 +8,8 @@
-package scala.util.parsing
+package scala
+package util.parsing
package combinator
package lexical
diff --git a/src/library/scala/util/parsing/combinator/lexical/Scanners.scala b/src/library/scala/util/parsing/combinator/lexical/Scanners.scala
index 5c23ad70cd..2e12915bb8 100644
--- a/src/library/scala/util/parsing/combinator/lexical/Scanners.scala
+++ b/src/library/scala/util/parsing/combinator/lexical/Scanners.scala
@@ -6,13 +6,11 @@
** |/ **
\* */
-
-
-package scala.util.parsing
+package scala
+package util.parsing
package combinator
package lexical
-import token._
import input._
/** This component provides core functionality for lexical parsers.
diff --git a/src/library/scala/util/parsing/combinator/lexical/StdLexical.scala b/src/library/scala/util/parsing/combinator/lexical/StdLexical.scala
index f3491c096f..32d7502cda 100644
--- a/src/library/scala/util/parsing/combinator/lexical/StdLexical.scala
+++ b/src/library/scala/util/parsing/combinator/lexical/StdLexical.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.util.parsing
+package scala
+package util.parsing
package combinator
package lexical
@@ -50,7 +51,7 @@ class StdLexical extends Lexical with StdTokens {
def identChar = letter | elem('_')
// see `whitespace` in `Scanners`
- def whitespace: Parser[Any] = rep(
+ def whitespace: Parser[Any] = rep[Any](
whitespaceChar
| '/' ~ '*' ~ comment
| '/' ~ '/' ~ rep( chrExcept(EofCh, '\n') )
diff --git a/src/library/scala/util/parsing/combinator/syntactical/StandardTokenParsers.scala b/src/library/scala/util/parsing/combinator/syntactical/StandardTokenParsers.scala
index d3ae0ea54a..5b9d14c9a7 100644
--- a/src/library/scala/util/parsing/combinator/syntactical/StandardTokenParsers.scala
+++ b/src/library/scala/util/parsing/combinator/syntactical/StandardTokenParsers.scala
@@ -7,7 +7,8 @@
\* */
-package scala.util.parsing
+package scala
+package util.parsing
package combinator
package syntactical
diff --git a/src/library/scala/util/parsing/combinator/syntactical/StdTokenParsers.scala b/src/library/scala/util/parsing/combinator/syntactical/StdTokenParsers.scala
index 7283b01da4..adcf85da7a 100644
--- a/src/library/scala/util/parsing/combinator/syntactical/StdTokenParsers.scala
+++ b/src/library/scala/util/parsing/combinator/syntactical/StdTokenParsers.scala
@@ -8,7 +8,8 @@
-package scala.util.parsing
+package scala
+package util.parsing
package combinator
package syntactical
diff --git a/src/library/scala/util/parsing/combinator/syntactical/TokenParsers.scala b/src/library/scala/util/parsing/combinator/syntactical/TokenParsers.scala
index 1c4b25b999..b06babcd7e 100644
--- a/src/library/scala/util/parsing/combinator/syntactical/TokenParsers.scala
+++ b/src/library/scala/util/parsing/combinator/syntactical/TokenParsers.scala
@@ -7,7 +7,8 @@
\* */
-package scala.util.parsing
+package scala
+package util.parsing
package combinator
package syntactical
diff --git a/src/library/scala/util/parsing/combinator/testing/RegexTest.scala b/src/library/scala/util/parsing/combinator/testing/RegexTest.scala
deleted file mode 100644
index 80e9b0df39..0000000000
--- a/src/library/scala/util/parsing/combinator/testing/RegexTest.scala
+++ /dev/null
@@ -1,27 +0,0 @@
-
-package scala.util.parsing.combinator.testing
-
-import scala.util.parsing.combinator._
-import scala.util.parsing.input._
-import scala.language.postfixOps
-
-@deprecated("This class will be removed", "2.10.0")
-case class Ident(s: String)
-@deprecated("This class will be removed", "2.10.0")
-case class Number(n: Int)
-@deprecated("This class will be removed", "2.10.0")
-case class Str(s: String)
-
-@deprecated("This class will be removed", "2.10.0")
-object RegexTest extends RegexParsers {
- val ident: Parser[Any] = """[a-zA-Z_]\w*""".r ^^ (s => Ident(s))
- val number: Parser[Any] = """\d\d*""".r ^^ (s => Number(s.toInt))
- val string: Parser[Any] = "\".*\"".r ^^ (s => Str(s.substring(1, s.length - 1)))
- val parser = (ident | number | string)*
-
- def main(args: Array[String]) = {
- val in = args mkString " "
- println("\nin : "+in)
- println(phrase[Any](parser)(new CharSequenceReader(in)))
- }
-}
diff --git a/src/library/scala/util/parsing/combinator/testing/Tester.scala b/src/library/scala/util/parsing/combinator/testing/Tester.scala
deleted file mode 100644
index 95730ee292..0000000000
--- a/src/library/scala/util/parsing/combinator/testing/Tester.scala
+++ /dev/null
@@ -1,45 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala.util.parsing.combinator.testing
-import scala.util.parsing.combinator._
-
-import scala.util.parsing.combinator.lexical.Lexical
-import scala.util.parsing.combinator.syntactical.TokenParsers
-
-/** Facilitates testing a given parser on various input strings.
- *
- * Example use:
- * {{{
- * val syntactic = new MyParsers
- * }}}
- * and
- * {{{
- * val parser = syntactic.term
- * }}}
- * (If `MyParsers` extends [[scala.util.parsing.combinator.syntactical.TokenParsers]]
- * with a parser called `term`.)
- *
- * @author Martin Odersky
- * @author Adriaan Moors
- */
-@deprecated("This class will be removed", "2.10.0")
-abstract class Tester {
-
- val syntactic: TokenParsers { val lexical: Lexical }
- val parser: syntactic.Parser[Any]
-
- /** Scans a String (using a `syntactic.lexical.Scanner`), parses it using
- * `phrase(parser)`, and prints the input and the parsed result to the
- * console.
- */
- def test(in: String) {
- Console.println("\nin : "+in)
- Console.println(syntactic.phrase[Any](parser)(new syntactic.lexical.Scanner(in)))
- }
-}
diff --git a/src/library/scala/util/parsing/combinator/token/StdTokens.scala b/src/library/scala/util/parsing/combinator/token/StdTokens.scala
index 605f53bf1d..a102d1541e 100644
--- a/src/library/scala/util/parsing/combinator/token/StdTokens.scala
+++ b/src/library/scala/util/parsing/combinator/token/StdTokens.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.util.parsing
+package scala
+package util.parsing
package combinator
package token
diff --git a/src/library/scala/util/parsing/combinator/token/Tokens.scala b/src/library/scala/util/parsing/combinator/token/Tokens.scala
index ff92802d77..5c3f1f95b5 100644
--- a/src/library/scala/util/parsing/combinator/token/Tokens.scala
+++ b/src/library/scala/util/parsing/combinator/token/Tokens.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.util.parsing
+package scala
+package util.parsing
package combinator
package token
diff --git a/src/library/scala/util/parsing/input/CharArrayReader.scala b/src/library/scala/util/parsing/input/CharArrayReader.scala
index 3ba69b229b..22530cb9aa 100644
--- a/src/library/scala/util/parsing/input/CharArrayReader.scala
+++ b/src/library/scala/util/parsing/input/CharArrayReader.scala
@@ -7,7 +7,8 @@
\* */
-package scala.util.parsing.input
+package scala
+package util.parsing.input
/** An object encapsulating basic character constants.
*
diff --git a/src/library/scala/util/parsing/input/CharSequenceReader.scala b/src/library/scala/util/parsing/input/CharSequenceReader.scala
index 02aa2ab7b8..8e7751cc82 100644
--- a/src/library/scala/util/parsing/input/CharSequenceReader.scala
+++ b/src/library/scala/util/parsing/input/CharSequenceReader.scala
@@ -7,7 +7,8 @@
\* */
-package scala.util.parsing.input
+package scala
+package util.parsing.input
/** An object encapsulating basic character constants.
*
diff --git a/src/library/scala/util/parsing/input/NoPosition.scala b/src/library/scala/util/parsing/input/NoPosition.scala
index 40584b3293..4a32264b79 100644
--- a/src/library/scala/util/parsing/input/NoPosition.scala
+++ b/src/library/scala/util/parsing/input/NoPosition.scala
@@ -8,7 +8,8 @@
-package scala.util.parsing.input
+package scala
+package util.parsing.input
/** Undefined position.
*
diff --git a/src/library/scala/util/parsing/input/OffsetPosition.scala b/src/library/scala/util/parsing/input/OffsetPosition.scala
index 01d9ea5cb8..23f79c74d1 100644
--- a/src/library/scala/util/parsing/input/OffsetPosition.scala
+++ b/src/library/scala/util/parsing/input/OffsetPosition.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.util.parsing.input
+package scala
+package util.parsing.input
import scala.collection.mutable.ArrayBuffer
@@ -22,7 +23,7 @@ case class OffsetPosition(source: java.lang.CharSequence, offset: Int) extends P
/** An index that contains all line starts, including first line, and eof. */
private lazy val index: Array[Int] = {
- var lineStarts = new ArrayBuffer[Int]
+ val lineStarts = new ArrayBuffer[Int]
lineStarts += 0
for (i <- 0 until source.length)
if (source.charAt(i) == '\n') lineStarts += (i + 1)
diff --git a/src/library/scala/util/parsing/input/PagedSeqReader.scala b/src/library/scala/util/parsing/input/PagedSeqReader.scala
index 9140bf2a4f..468f1f9a5f 100644
--- a/src/library/scala/util/parsing/input/PagedSeqReader.scala
+++ b/src/library/scala/util/parsing/input/PagedSeqReader.scala
@@ -7,7 +7,8 @@
\* */
-package scala.util.parsing.input
+package scala
+package util.parsing.input
import scala.collection.immutable.PagedSeq
diff --git a/src/library/scala/util/parsing/input/Position.scala b/src/library/scala/util/parsing/input/Position.scala
index 31715bd8da..b7995a6471 100644
--- a/src/library/scala/util/parsing/input/Position.scala
+++ b/src/library/scala/util/parsing/input/Position.scala
@@ -6,15 +6,16 @@
** |/ **
\* */
-package scala.util.parsing.input
+package scala
+package util.parsing.input
-/** `Position` is the base trait for objects describing a position in a ``document''.
+/** `Position` is the base trait for objects describing a position in a `document`.
*
* It provides functionality for:
* - generating a visual representation of this position (`longString`);
* - comparing two positions (`<`).
*
- * To use this class for a concrete kind of ``document'', implement the `lineContents` method.
+ * To use this class for a concrete kind of `document`, implement the `lineContents` method.
*
* @author Martin Odersky
* @author Adriaan Moors
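Note: as the comment says, a concrete position only needs `line`, `column` and `lineContents`; a minimal sketch with a hypothetical StringPosition helper (illustrative only):

    import scala.util.parsing.input.Position

    // A self-contained Position over a single line of text (hypothetical, for illustration).
    class StringPosition(val line: Int, val column: Int, text: String) extends Position {
      protected def lineContents: String = text
    }

    object PositionDemo {
      def main(args: Array[String]): Unit = {
        val pos = new StringPosition(1, 5, "val x = 1")
        println(pos.longString)                          // the line plus a caret under column 5
        println(pos < new StringPosition(2, 1, "val y")) // true: earlier line compares smaller
      }
    }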
diff --git a/src/library/scala/util/parsing/input/Positional.scala b/src/library/scala/util/parsing/input/Positional.scala
index 87cb16eac5..cfde67cadd 100644
--- a/src/library/scala/util/parsing/input/Positional.scala
+++ b/src/library/scala/util/parsing/input/Positional.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.util.parsing.input
+package scala
+package util.parsing.input
/** A trait for objects that have a source position.
*
diff --git a/src/library/scala/util/parsing/input/Reader.scala b/src/library/scala/util/parsing/input/Reader.scala
index bded57bee1..9dbf08a7ca 100644
--- a/src/library/scala/util/parsing/input/Reader.scala
+++ b/src/library/scala/util/parsing/input/Reader.scala
@@ -8,7 +8,8 @@
-package scala.util.parsing.input
+package scala
+package util.parsing.input
/** An interface for streams of values that have positions.
diff --git a/src/library/scala/util/parsing/input/StreamReader.scala b/src/library/scala/util/parsing/input/StreamReader.scala
index ba7ab65845..30eb097fd7 100644
--- a/src/library/scala/util/parsing/input/StreamReader.scala
+++ b/src/library/scala/util/parsing/input/StreamReader.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.util.parsing.input
+package scala
+package util.parsing.input
import java.io.BufferedReader
import scala.collection.immutable.PagedSeq
@@ -22,7 +23,7 @@ object StreamReader {
*
* @param in the `java.io.Reader` that provides the underlying
* stream of characters for this Reader.
- */
+ */
def apply(in: java.io.Reader): StreamReader = {
new StreamReader(PagedSeq.fromReader(in), 0, 1)
}
diff --git a/src/library/scala/util/parsing/json/JSON.scala b/src/library/scala/util/parsing/json/JSON.scala
index 2f450ed864..b06dddf532 100644
--- a/src/library/scala/util/parsing/json/JSON.scala
+++ b/src/library/scala/util/parsing/json/JSON.scala
@@ -6,10 +6,8 @@
** |/ **
\* */
-package scala.util.parsing.json
-import scala.util.parsing.combinator._
-import scala.util.parsing.combinator.syntactical._
-import scala.util.parsing.combinator.lexical._
+package scala
+package util.parsing.json
/**
* This object provides a simple interface to the JSON parser class.
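Note: for context, a minimal sketch of this (now deprecated) interface, assuming `JSON.parseFull` (illustrative only):

    import scala.util.parsing.json.JSON

    object JsonDemo {
      def main(args: Array[String]): Unit = {
        // parseFull yields Some(nested Maps/Lists/primitives) or None on a parse error.
        JSON.parseFull("""{"name": "scala", "version": 2.11}""") match {
          case Some(m: Map[_, _]) => println(m)
          case _                  => println("could not parse")
        }
      }
    }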
@@ -30,6 +28,7 @@ import scala.util.parsing.combinator.lexical._
*
* @author Derek Chen-Becker <"java"+@+"chen-becker"+"."+"org">
*/
+@deprecated("This object will be removed.", "2.11.0")
object JSON extends Parser {
/**
diff --git a/src/library/scala/util/parsing/json/Lexer.scala b/src/library/scala/util/parsing/json/Lexer.scala
index 991b5d5c6c..7fc4e0bab6 100644
--- a/src/library/scala/util/parsing/json/Lexer.scala
+++ b/src/library/scala/util/parsing/json/Lexer.scala
@@ -8,16 +8,17 @@
-package scala.util.parsing.json
+package scala
+package util.parsing.json
import scala.util.parsing.combinator._
-import scala.util.parsing.combinator.syntactical._
import scala.util.parsing.combinator.lexical._
import scala.util.parsing.input.CharArrayReader.EofCh
/**
* @author Derek Chen-Becker <"java"+@+"chen-becker"+"."+"org">
*/
+@deprecated("This class will be removed.", "2.11.0")
class Lexer extends StdLexical with ImplicitConversions {
override def token: Parser[Token] =
diff --git a/src/library/scala/util/parsing/json/Parser.scala b/src/library/scala/util/parsing/json/Parser.scala
index cb87866f07..521dfc6612 100644
--- a/src/library/scala/util/parsing/json/Parser.scala
+++ b/src/library/scala/util/parsing/json/Parser.scala
@@ -8,17 +8,18 @@
-package scala.util.parsing.json
+package scala
+package util.parsing.json
import scala.util.parsing.combinator._
import scala.util.parsing.combinator.syntactical._
-import scala.util.parsing.combinator.lexical._
/**
* A marker class for the JSON result types.
*
* @author Derek Chen-Becker <"java"+@+"chen-becker"+"."+"org">
*/
+@deprecated("This class will be removed.", "2.11.0")
sealed abstract class JSONType {
/**
* This version of toString allows you to provide your own value
@@ -40,6 +41,7 @@ sealed abstract class JSONType {
*
* @author Derek Chen-Becker <"java"+@+"chen-becker"+"."+"org">
*/
+@deprecated("This object will be removed.", "2.11.0")
object JSONFormat {
/**
* This type defines a function that can be used to
@@ -81,7 +83,7 @@ object JSONFormat {
* Per RFC4627, section 2.5, we're not technically required to
* encode the C1 codes, but we do to be safe.
*/
- case c if ((c >= '\u0000' && c <= '\u001f') || (c >= '\u007f' && c <= '\u009f')) => "\\u%04x".format(c: Int)
+ case c if ((c >= '\u0000' && c <= '\u001f') || (c >= '\u007f' && c <= '\u009f')) => "\\u%04x".format(c.toInt)
case c => c
}.mkString
}
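Note: a small sketch of the escaping helper this hunk adjusts, plus the default formatter (assumes `JSONFormat.defaultFormatter`; illustrative only):

    import scala.util.parsing.json.{JSONFormat, JSONObject}

    object JsonFormatDemo {
      def main(args: Array[String]): Unit = {
        // Control characters are escaped, e.g. the tab below becomes \t.
        println(JSONFormat.quoteString("tab:\there"))
        // The default ValueFormatter quotes strings and recurses into JSON types.
        println(JSONObject(Map("k" -> "line1\nline2")).toString(JSONFormat.defaultFormatter))
      }
    }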
@@ -91,6 +93,7 @@ object JSONFormat {
*
* @author Derek Chen-Becker <"java"+@+"chen-becker"+"."+"org">
*/
+@deprecated("This class will be removed.", "2.11.0")
case class JSONObject (obj : Map[String,Any]) extends JSONType {
def toString (formatter : JSONFormat.ValueFormatter) =
"{" + obj.map({ case (k,v) => formatter(k.toString) + " : " + formatter(v) }).mkString(", ") + "}"
@@ -100,6 +103,7 @@ case class JSONObject (obj : Map[String,Any]) extends JSONType {
* Represents a JSON Array (list).
* @author Derek Chen-Becker <"java"+@+"chen-becker"+"."+"org">
*/
+@deprecated("This class will be removed.", "2.11.0")
case class JSONArray (list : List[Any]) extends JSONType {
def toString (formatter : JSONFormat.ValueFormatter) =
"[" + list.map(formatter).mkString(", ") + "]"
@@ -110,6 +114,7 @@ case class JSONArray (list : List[Any]) extends JSONType {
*
* @author Derek Chen-Becker <"java"+@+"chen-becker"+"."+"org">
*/
+@deprecated("This class will be removed.", "2.11.0")
class Parser extends StdTokenParsers with ImplicitConversions {
// Fill in abstract defs
type Tokens = Lexer
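
The hunks above deprecate the whole scala.util.parsing.json surface for 2.11.0. A minimal usage sketch of the deprecated entry points, for anyone still migrating off them (JSON.parseFull and JSONFormat.defaultFormatter are assumed from the library, not shown in this diff; parsed numbers default to Double):

    import scala.util.parsing.json._

    // Parsing returns Option[Any]: Map for objects, List for arrays, Double for numbers.
    JSON.parseFull("""{"name": "scala", "major": 2}""")
    // Some(Map("name" -> "scala", "major" -> 2.0))

    // Formatting goes through a ValueFormatter (see JSONObject.toString above).
    val obj = JSONObject(Map("name" -> "scala", "tags" -> JSONArray(List("xml", "json"))))
    obj.toString(JSONFormat.defaultFormatter)
    // {"name" : "scala", "tags" : ["xml", "json"]}
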
diff --git a/src/library/scala/xml/Atom.scala b/src/library/scala/xml/Atom.scala
index cba0b96875..33e58ba7e7 100644
--- a/src/library/scala/xml/Atom.scala
+++ b/src/library/scala/xml/Atom.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.xml
+package scala
+package xml
/** The class `Atom` provides an XML node for text (`PCDATA`).
* It is used in both non-bound and bound XML representations.
diff --git a/src/library/scala/xml/Attribute.scala b/src/library/scala/xml/Attribute.scala
index 0224913cf6..e4b2b69fc6 100644
--- a/src/library/scala/xml/Attribute.scala
+++ b/src/library/scala/xml/Attribute.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.xml
+package scala
+package xml
/** This singleton object contains the `apply` and `unapply` methods for
* convenient construction and deconstruction.
@@ -94,7 +95,7 @@ abstract trait Attribute extends MetaData {
sb append key append '='
val sb2 = new StringBuilder()
- Utility.sequenceToXML(value, TopScope, sb2, true)
+ Utility.sequenceToXML(value, TopScope, sb2, stripComments = true)
Utility.appendQuoted(sb2.toString, sb)
}
}
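
The hunk above only modernizes a call inside Attribute's serialization; a short construction sketch to place it (the Attribute.apply overload and the % operator on Elem are assumed from the library, not shown in this hunk):

    import scala.xml._

    // Build an unprefixed attribute and attach it to an element.
    val cls = Attribute(None, "class", Text("big"), Null)
    val div = <div/> % cls
    div.toString   // <div class="big"/>
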
diff --git a/src/library/scala/xml/Comment.scala b/src/library/scala/xml/Comment.scala
index ff4280d691..b8dccdcb16 100644
--- a/src/library/scala/xml/Comment.scala
+++ b/src/library/scala/xml/Comment.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.xml
+package scala
+package xml
/** The class `Comment` implements an XML node for comments.
*
diff --git a/src/library/scala/xml/Document.scala b/src/library/scala/xml/Document.scala
index a064c4d8e8..9a725014fc 100644
--- a/src/library/scala/xml/Document.scala
+++ b/src/library/scala/xml/Document.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.xml
+package scala
+package xml
/** A document information item (according to InfoSet spec). The comments
* are copied from the Infoset spec, only augmented with some information
diff --git a/src/library/scala/xml/Elem.scala b/src/library/scala/xml/Elem.scala
index b9e665e292..4200b7046a 100755
--- a/src/library/scala/xml/Elem.scala
+++ b/src/library/scala/xml/Elem.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.xml
+package scala
+package xml
/** This singleton object contains the `apply` and `unapplySeq` methods for
* convenient construction and deconstruction. It is possible to deconstruct
@@ -17,7 +18,7 @@ package scala.xml
* @author Burak Emir <bqe@google.com>
*/
object Elem {
- /** Build an Elem, setting its minimizeEmpty property to <code>true</code> if it has no children. Note that this
+ /** Build an Elem, setting its minimizeEmpty property to `true` if it has no children. Note that this
* default may not be exactly what you want, as some XML dialects don't permit some elements to be minimized.
*
* @deprecated This factory method is retained for backward compatibility; please use the other one, with which you
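
The scaladoc change above is cosmetic, but the deprecation note concerns the factory without minimizeEmpty; a sketch of the preferred overload (its full signature is assumed from the library, not shown in this hunk):

    import scala.xml._

    // A childless element built with minimizeEmpty = true serializes as <br/>;
    // with false it stays expanded as <br></br>.
    val br     = Elem(null, "br", Null, TopScope, minimizeEmpty = true)
    val brLong = Elem(null, "br", Null, TopScope, minimizeEmpty = false)
    (br.toString, brLong.toString)   // ("<br/>", "<br></br>")
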
diff --git a/src/library/scala/xml/EntityRef.scala b/src/library/scala/xml/EntityRef.scala
index a7b9835a7e..7a58831075 100644
--- a/src/library/scala/xml/EntityRef.scala
+++ b/src/library/scala/xml/EntityRef.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.xml
+package scala
+package xml
/** The class `EntityRef` implements an XML node for entity references.
*
diff --git a/src/library/scala/xml/Equality.scala b/src/library/scala/xml/Equality.scala
index 02db22a78a..021d185812 100644
--- a/src/library/scala/xml/Equality.scala
+++ b/src/library/scala/xml/Equality.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.xml
+package scala
+package xml
/** In an attempt to contain the damage being inflicted on consistency by the
* ad hoc `equals` methods spread around `xml`, the logic is centralized and
@@ -86,8 +87,8 @@ trait Equality extends scala.Equals {
* to maintain a semblance of order.
*/
override def hashCode() = basisForHashCode.##
- override def equals(other: Any) = doComparison(other, false)
- final def xml_==(other: Any) = doComparison(other, true)
+ override def equals(other: Any) = doComparison(other, blithe = false)
+ final def xml_==(other: Any) = doComparison(other, blithe = true)
final def xml_!=(other: Any) = !xml_==(other)
/** The "blithe" parameter expresses the caller's unconcerned attitude
diff --git a/src/library/scala/xml/Group.scala b/src/library/scala/xml/Group.scala
index 92da2f993f..e3af615008 100644
--- a/src/library/scala/xml/Group.scala
+++ b/src/library/scala/xml/Group.scala
@@ -6,14 +6,15 @@
** |/ **
\* */
-package scala.xml
+package scala
+package xml
/** A hack to group XML nodes in one node for output.
*
* @author Burak Emir
* @version 1.0
*/
-final case class Group(val nodes: Seq[Node]) extends Node {
+final case class Group(nodes: Seq[Node]) extends Node {
override def theSeq = nodes
override def canEqual(other: Any) = other match {
diff --git a/src/library/scala/xml/MalformedAttributeException.scala b/src/library/scala/xml/MalformedAttributeException.scala
index 3431cb6765..d499ad3e10 100644
--- a/src/library/scala/xml/MalformedAttributeException.scala
+++ b/src/library/scala/xml/MalformedAttributeException.scala
@@ -8,7 +8,8 @@
-package scala.xml
+package scala
+package xml
case class MalformedAttributeException(msg: String) extends RuntimeException(msg)
diff --git a/src/library/scala/xml/MetaData.scala b/src/library/scala/xml/MetaData.scala
index 3bf3ebb1c0..8b5ea187cb 100644
--- a/src/library/scala/xml/MetaData.scala
+++ b/src/library/scala/xml/MetaData.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.xml
+package scala
+package xml
import Utility.sbToString
import scala.annotation.tailrec
diff --git a/src/library/scala/xml/NamespaceBinding.scala b/src/library/scala/xml/NamespaceBinding.scala
index c7cd9e6b6c..b320466976 100644
--- a/src/library/scala/xml/NamespaceBinding.scala
+++ b/src/library/scala/xml/NamespaceBinding.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.xml
+package scala
+package xml
import Utility.sbToString
@@ -38,6 +39,20 @@ case class NamespaceBinding(prefix: String, uri: String, parent: NamespaceBindin
override def toString(): String = sbToString(buildString(_, TopScope))
+ private def shadowRedefined(stop: NamespaceBinding): NamespaceBinding = {
+ def prefixList(x: NamespaceBinding): List[String] =
+ if ((x == null) || (x eq stop)) Nil
+ else x.prefix :: prefixList(x.parent)
+ def fromPrefixList(l: List[String]): NamespaceBinding = l match {
+ case Nil => stop
+ case x :: xs => new NamespaceBinding(x, this.getURI(x), fromPrefixList(xs))
+ }
+ val ps0 = prefixList(this).reverse
+ val ps = ps0.distinct
+ if (ps.size == ps0.size) this
+ else fromPrefixList(ps)
+ }
+
override def canEqual(other: Any) = other match {
case _: NamespaceBinding => true
case _ => false
@@ -53,12 +68,16 @@ case class NamespaceBinding(prefix: String, uri: String, parent: NamespaceBindin
def buildString(stop: NamespaceBinding): String = sbToString(buildString(_, stop))
def buildString(sb: StringBuilder, stop: NamespaceBinding) {
- if (this eq stop) return // contains?
+ shadowRedefined(stop).doBuildString(sb, stop)
+ }
+
+ private def doBuildString(sb: StringBuilder, stop: NamespaceBinding) {
+ if ((this == null) || (this eq stop)) return // contains?
val s = " xmlns%s=\"%s\"".format(
(if (prefix != null) ":" + prefix else ""),
(if (uri != null) uri else "")
)
- parent.buildString(sb append s, stop) // copy(ignore)
+ parent.doBuildString(sb append s, stop) // copy(ignore)
}
}
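
The new shadowRedefined pass above drops prefixes that are re-bound between this and stop, so buildString emits each visible prefix once instead of repeating shadowed bindings. A small illustration (the output is inferred from doBuildString above):

    import scala.xml.{ NamespaceBinding, TopScope }

    val outer = NamespaceBinding("ns", "http://example.org/old", TopScope)
    val inner = NamespaceBinding("ns", "http://example.org/new", outer)

    // Previously this printed xmlns:ns twice; now only the innermost,
    // visible binding survives:  xmlns:ns="http://example.org/new"
    inner.buildString(TopScope)
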
diff --git a/src/library/scala/xml/Node.scala b/src/library/scala/xml/Node.scala
index 6b6c962692..e121284252 100755
--- a/src/library/scala/xml/Node.scala
+++ b/src/library/scala/xml/Node.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.xml
+package scala
+package xml
/** This singleton object contains the `unapplySeq` method for
* convenient deconstruction.
@@ -55,7 +56,7 @@ abstract class Node extends NodeSeq {
def scope: NamespaceBinding = TopScope
/**
- * convenience, same as <code>getNamespace(this.prefix)</code>
+ * convenience, same as `getNamespace(this.prefix)`
*/
def namespace = getNamespace(this.prefix)
@@ -64,8 +65,8 @@ abstract class Node extends NodeSeq {
* checks if scope is `'''null'''`.
*
* @param pre the prefix whose namespace name we would like to obtain
- * @return the namespace if <code>scope != null</code> and prefix was
- * found, else <code>null</code>
+ * @return the namespace if `scope != null` and prefix was
+ * found, else `null`
*/
def getNamespace(pre: String): String = if (scope eq null) null else scope.getURI(pre)
@@ -74,8 +75,8 @@ abstract class Node extends NodeSeq {
* Same as `attributes.getValue(key)`
*
* @param key of queried attribute.
- * @return value of <code>UnprefixedAttribute</code> with given key
- * in attributes, if it exists, otherwise <code>null</code>.
+ * @return value of `UnprefixedAttribute` with given key
+ * in attributes, if it exists, otherwise `null`.
*/
final def attribute(key: String): Option[Seq[Node]] = attributes.get(key)
@@ -163,7 +164,7 @@ abstract class Node extends NodeSeq {
/**
* Same as `toString('''false''')`.
*/
- override def toString(): String = buildString(false)
+ override def toString(): String = buildString(stripComments = false)
/**
* Appends qualified name of this node to `StringBuilder`.
diff --git a/src/library/scala/xml/NodeBuffer.scala b/src/library/scala/xml/NodeBuffer.scala
index 2db4338fb2..ae7c7b2bf8 100644
--- a/src/library/scala/xml/NodeBuffer.scala
+++ b/src/library/scala/xml/NodeBuffer.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.xml
+package scala
+package xml
/**
* This class acts as a Buffer for nodes. If it is used as a sequence of
diff --git a/src/library/scala/xml/NodeSeq.scala b/src/library/scala/xml/NodeSeq.scala
index decf60dad7..b8022472fb 100644
--- a/src/library/scala/xml/NodeSeq.scala
+++ b/src/library/scala/xml/NodeSeq.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.xml
+package scala
+package xml
import scala.collection.{ mutable, immutable, generic, SeqLike, AbstractSeq }
import mutable.{ Builder, ListBuffer }
@@ -145,6 +146,11 @@ abstract class NodeSeq extends AbstractSeq[Node] with immutable.Seq[Node] with S
}
}
+ /** Convenience method which returns string text of the named attribute. Use:
+ * - `that \@ "foo"` to get the string text of attribute `"foo"`;
+ */
+ def \@(attributeName: String): String = (this \ ("@" + attributeName)).text
+
override def toString(): String = theSeq.mkString
def text: String = (this map (_.text)).mkString
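
A quick illustration of the new \@ shorthand above, next to the \ "@attr" selector it delegates to:

    import scala.xml._

    val link = <a href="http://scala-lang.org">Scala</a>

    (link \ "@href").text   // "http://scala-lang.org"  (existing selector)
    link \@ "href"          // "http://scala-lang.org"  (new shorthand)
    link \@ "missing"       // ""  (absent attributes yield the empty string)
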
diff --git a/src/library/scala/xml/Null.scala b/src/library/scala/xml/Null.scala
index b39ef5dc67..f763c023c4 100644
--- a/src/library/scala/xml/Null.scala
+++ b/src/library/scala/xml/Null.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.xml
+package scala
+package xml
import Utility.isNameStart
import scala.collection.Iterator
diff --git a/src/library/scala/xml/PCData.scala b/src/library/scala/xml/PCData.scala
index 64818a9c00..31eea2b6d7 100644
--- a/src/library/scala/xml/PCData.scala
+++ b/src/library/scala/xml/PCData.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.xml
+package scala
+package xml
/** This class (which is not used by all XML parsers, but always used by the
* XHTML one) represents parseable character data, which appeared as CDATA
diff --git a/src/library/scala/xml/PrefixedAttribute.scala b/src/library/scala/xml/PrefixedAttribute.scala
index 429cd682d6..4ab79c8677 100644
--- a/src/library/scala/xml/PrefixedAttribute.scala
+++ b/src/library/scala/xml/PrefixedAttribute.scala
@@ -7,7 +7,8 @@
\* */
-package scala.xml
+package scala
+package xml
/** prefixed attributes always have a non-null namespace.
*
diff --git a/src/library/scala/xml/PrettyPrinter.scala b/src/library/scala/xml/PrettyPrinter.scala
index 39ff8c35ec..9e01905357 100755
--- a/src/library/scala/xml/PrettyPrinter.scala
+++ b/src/library/scala/xml/PrettyPrinter.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.xml
+package scala
+package xml
import Utility.sbToString
@@ -47,7 +48,6 @@ class PrettyPrinter(width: Int, step: Int) {
val tmp = width - cur
if (s.length <= tmp)
return List(Box(ind, s))
- val sb = new StringBuilder()
var i = s indexOf ' '
if (i > tmp || i == -1) throw new BrokenException() // cannot break
@@ -142,13 +142,13 @@ class PrettyPrinter(width: Int, step: Int) {
case Text(s) if s.trim() == "" =>
;
case _:Atom[_] | _:Comment | _:EntityRef | _:ProcInstr =>
- makeBox( ind, node.toString.trim() )
+ makeBox( ind, node.toString().trim() )
case g @ Group(xs) =>
traverse(xs.iterator, pscope, ind)
case _ =>
val test = {
val sb = new StringBuilder()
- Utility.serialize(node, pscope, sb, false)
+ Utility.serialize(node, pscope, sb, stripComments = false)
if (doPreserve(node)) sb.toString
else TextBuffer.fromString(sb.toString).toText(0).data
}
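
The changes above are internal only (named boolean arguments, an unused builder removed); the public interface stays as declared in the hunk header. For reference, a minimal call (format is assumed from the library, not shown in this hunk):

    import scala.xml.PrettyPrinter

    val pp = new PrettyPrinter(width = 40, step = 2)
    pp.format(<root><child attr="v">some text</child></root>)
    // reflows the serialized element to at most 40 columns, indenting by 2
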
diff --git a/src/library/scala/xml/ProcInstr.scala b/src/library/scala/xml/ProcInstr.scala
index 64a9dd5ca3..189c1c6878 100644
--- a/src/library/scala/xml/ProcInstr.scala
+++ b/src/library/scala/xml/ProcInstr.scala
@@ -7,7 +7,8 @@
\* */
-package scala.xml
+package scala
+package xml
/** an XML node for processing instructions (PI)
*
diff --git a/src/library/scala/xml/QNode.scala b/src/library/scala/xml/QNode.scala
index d4d3872181..f9e3f1854b 100644
--- a/src/library/scala/xml/QNode.scala
+++ b/src/library/scala/xml/QNode.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.xml
+package scala
+package xml
/** This object provides an extractor method to match a qualified node with
* its namespace URI
diff --git a/src/library/scala/xml/SpecialNode.scala b/src/library/scala/xml/SpecialNode.scala
index 4c1b81c7ff..5fef8ef66c 100644
--- a/src/library/scala/xml/SpecialNode.scala
+++ b/src/library/scala/xml/SpecialNode.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.xml
+package scala
+package xml
/** `SpecialNode` is a special XML node which represents either text
* `(PCDATA)`, a comment, a `PI`, or an entity ref.
diff --git a/src/library/scala/xml/Text.scala b/src/library/scala/xml/Text.scala
index 782c80f100..debea0c025 100644
--- a/src/library/scala/xml/Text.scala
+++ b/src/library/scala/xml/Text.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.xml
+package scala
+package xml
/** The class `Text` implements an XML node for text (PCDATA).
* It is used in both non-bound and bound XML representations.
diff --git a/src/library/scala/xml/TextBuffer.scala b/src/library/scala/xml/TextBuffer.scala
index 0b96379d85..514b1701af 100644
--- a/src/library/scala/xml/TextBuffer.scala
+++ b/src/library/scala/xml/TextBuffer.scala
@@ -7,7 +7,8 @@
\* */
-package scala.xml
+package scala
+package xml
import Utility.isSpace
diff --git a/src/library/scala/xml/TopScope.scala b/src/library/scala/xml/TopScope.scala
index 1ed1d50e10..474fbbbdb5 100644
--- a/src/library/scala/xml/TopScope.scala
+++ b/src/library/scala/xml/TopScope.scala
@@ -7,7 +7,8 @@
\* */
-package scala.xml
+package scala
+package xml
/** top level namespace scope. only contains the predefined binding
* for the &quot;xml&quot; prefix which is bound to
diff --git a/src/library/scala/xml/TypeSymbol.scala b/src/library/scala/xml/TypeSymbol.scala
index f02c0263c0..fb371ee340 100644
--- a/src/library/scala/xml/TypeSymbol.scala
+++ b/src/library/scala/xml/TypeSymbol.scala
@@ -8,7 +8,8 @@
-package scala.xml
+package scala
+package xml
abstract class TypeSymbol
diff --git a/src/library/scala/xml/Unparsed.scala b/src/library/scala/xml/Unparsed.scala
index ef80823611..bc190eb724 100644
--- a/src/library/scala/xml/Unparsed.scala
+++ b/src/library/scala/xml/Unparsed.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.xml
+package scala
+package xml
/** An XML node for unparsed content. It will be output verbatim, all bets
* are off regarding wellformedness etc.
diff --git a/src/library/scala/xml/UnprefixedAttribute.scala b/src/library/scala/xml/UnprefixedAttribute.scala
index 2985591c95..6fa827da5f 100644
--- a/src/library/scala/xml/UnprefixedAttribute.scala
+++ b/src/library/scala/xml/UnprefixedAttribute.scala
@@ -7,7 +7,8 @@
\* */
-package scala.xml
+package scala
+package xml
/** Unprefixed attributes have the null namespace, and no prefix field
*
diff --git a/src/library/scala/xml/Utility.scala b/src/library/scala/xml/Utility.scala
index 030a89773e..9134476401 100755
--- a/src/library/scala/xml/Utility.scala
+++ b/src/library/scala/xml/Utility.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.xml
+package scala
+package xml
import scala.collection.mutable
import parsing.XhtmlEntities
@@ -245,10 +246,10 @@ object Utility extends AnyRef with parsing.TokenTests {
if (children.isEmpty) return
else if (children forall isAtomAndNotText) { // add space
val it = children.iterator
- val f = it.next
+ val f = it.next()
serialize(f, pscope, sb, stripComments, decodeEntities, preserveWhitespace, minimizeTags)
while (it.hasNext) {
- val x = it.next
+ val x = it.next()
sb.append(' ')
serialize(x, pscope, sb, stripComments, decodeEntities, preserveWhitespace, minimizeTags)
}
@@ -311,14 +312,14 @@ object Utility extends AnyRef with parsing.TokenTests {
while (i < value.length) {
value.charAt(i) match {
case '<' =>
- return "< not allowed in attribute value";
+ return "< not allowed in attribute value"
case '&' =>
val n = getName(value, i+1)
if (n eq null)
- return "malformed entity reference in attribute value ["+value+"]";
+ return "malformed entity reference in attribute value ["+value+"]"
i = i + n.length + 1
if (i >= value.length || value.charAt(i) != ';')
- return "malformed entity reference in attribute value ["+value+"]";
+ return "malformed entity reference in attribute value ["+value+"]"
case _ =>
}
i = i + 1
@@ -333,22 +334,22 @@ object Utility extends AnyRef with parsing.TokenTests {
val it = value.iterator
while (it.hasNext) {
- var c = it.next
+ var c = it.next()
// entity! flush buffer into text node
if (c == '&') {
- c = it.next
+ c = it.next()
if (c == '#') {
- c = it.next
- val theChar = parseCharRef ({ ()=> c },{ () => c = it.next },{s => throw new RuntimeException(s)}, {s => throw new RuntimeException(s)})
+ c = it.next()
+ val theChar = parseCharRef ({ ()=> c },{ () => c = it.next() },{s => throw new RuntimeException(s)}, {s => throw new RuntimeException(s)})
sb.append(theChar)
}
else {
if (rfb eq null) rfb = new StringBuilder()
rfb append c
- c = it.next
+ c = it.next()
while (c != ';') {
rfb.append(c)
- c = it.next
+ c = it.next()
}
val ref = rfb.toString()
rfb.clear()
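
The Utility edits above are mechanical (explicit () on next, dropped semicolons). For orientation, the most commonly used entry point of this object, assumed from the library and also called from XIncluder later in this diff:

    import scala.xml.Utility

    Utility.escape("fish & chips <today>")   // "fish &amp; chips &lt;today&gt;"
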
diff --git a/src/library/scala/xml/XML.scala b/src/library/scala/xml/XML.scala
index d101684459..020264e509 100755
--- a/src/library/scala/xml/XML.scala
+++ b/src/library/scala/xml/XML.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.xml
+package scala
+package xml
import parsing.NoBindingFactoryAdapter
import factory.XMLLoader
@@ -45,8 +46,6 @@ object MinimizeMode extends Enumeration {
val Never = Value
}
-import Source._
-
/** The object `XML` provides constants, and functions to load
* and save XML elements. Use this when data binding is not desired, i.e.
* when XML is handled using `Symbol` nodes.
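
MinimizeMode (whose Never value closes the enumeration above) controls how empty elements are serialized. A hedged sketch, assuming the XML.write overload that accepts a minimizeTags argument (its signature is not shown in this hunk):

    import java.io.StringWriter
    import scala.xml._

    val sw = new StringWriter
    XML.write(sw, <root><child></child></root>, "UTF-8", xmlDecl = false,
              doctype = null, minimizeTags = MinimizeMode.Always)
    sw.toString   // <root><child/></root>
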
diff --git a/src/library/scala/xml/Xhtml.scala b/src/library/scala/xml/Xhtml.scala
index 6730548b73..6a12c1a89a 100644
--- a/src/library/scala/xml/Xhtml.scala
+++ b/src/library/scala/xml/Xhtml.scala
@@ -1,5 +1,6 @@
-package scala.xml
+package scala
+package xml
import parsing.XhtmlEntities
import Utility.{ sbToString, isAtomAndNotText }
diff --git a/src/library/scala/xml/dtd/ContentModel.scala b/src/library/scala/xml/dtd/ContentModel.scala
index abc71f55bd..4007985dce 100644
--- a/src/library/scala/xml/dtd/ContentModel.scala
+++ b/src/library/scala/xml/dtd/ContentModel.scala
@@ -6,13 +6,11 @@
** |/ **
\* */
-
-
-package scala.xml
+package scala
+package xml
package dtd
-import scala.util.regexp.WordExp
-import scala.util.automata._
+import scala.xml.dtd.impl._
import scala.xml.Utility.sbToString
import PartialFunction._
diff --git a/src/library/scala/xml/dtd/ContentModelParser.scala b/src/library/scala/xml/dtd/ContentModelParser.scala
index ace02193da..71b391c422 100644
--- a/src/library/scala/xml/dtd/ContentModelParser.scala
+++ b/src/library/scala/xml/dtd/ContentModelParser.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.xml
+package scala
+package xml
package dtd
/** Parser for regexps (content models in DTD element declarations) */
@@ -21,19 +22,19 @@ object ContentModelParser extends Scanner { // a bit too permissive concerning #
if (token != tok) {
if ((tok == STAR) && (token == END)) // common mistake
scala.sys.error("in DTDs, \n"+
- "mixed content models must be like (#PCDATA|Name|Name|...)*");
+ "mixed content models must be like (#PCDATA|Name|Name|...)*")
else
scala.sys.error("expected "+token2string(tok)+
- ", got unexpected token:"+token2string(token));
+ ", got unexpected token:"+token2string(token))
}
- nextToken
+ nextToken()
}
// s [ '+' | '*' | '?' ]
def maybeSuffix(s: RegExp) = token match {
- case STAR => nextToken; Star(s)
- case PLUS => nextToken; Sequ(s, Star(s))
- case OPT => nextToken; Alt(Eps, s)
+ case STAR => nextToken(); Star(s)
+ case PLUS => nextToken(); Sequ(s, Star(s))
+ case OPT => nextToken(); Alt(Eps, s)
case _ => s
}
@@ -44,45 +45,45 @@ object ContentModelParser extends Scanner { // a bit too permissive concerning #
case NAME => value match {
case "ANY" => ANY
case "EMPTY" => EMPTY
- case _ => scala.sys.error("expected ANY, EMPTY or '(' instead of " + value );
+ case _ => scala.sys.error("expected ANY, EMPTY or '(' instead of " + value )
}
case LPAREN =>
- nextToken;
- sOpt;
+ nextToken()
+ sOpt()
if (token != TOKEN_PCDATA)
- ELEMENTS(regexp);
+ ELEMENTS(regexp)
else {
- nextToken;
+ nextToken()
token match {
case RPAREN =>
PCDATA
case CHOICE =>
- val res = MIXED(choiceRest(Eps));
- sOpt;
- accept( RPAREN );
- accept( STAR );
+ val res = MIXED(choiceRest(Eps))
+ sOpt()
+ accept( RPAREN )
+ accept( STAR )
res
case _ =>
- scala.sys.error("unexpected token:" + token2string(token) );
+ scala.sys.error("unexpected token:" + token2string(token) )
}
}
case _ =>
- scala.sys.error("unexpected token:" + token2string(token) );
- }
+ scala.sys.error("unexpected token:" + token2string(token) )
+ }
// sopt ::= S?
- def sOpt() = if( token == S ) nextToken;
+ def sOpt() = if( token == S ) nextToken()
// (' S? mixed ::= '#PCDATA' S? ')'
// | '#PCDATA' (S? '|' S? atom)* S? ')*'
// '(' S? regexp ::= cp S? [seqRest|choiceRest] ')' [ '+' | '*' | '?' ]
def regexp: RegExp = {
- val p = particle;
- sOpt;
+ val p = particle
+ sOpt()
maybeSuffix(token match {
- case RPAREN => nextToken; p
+ case RPAREN => nextToken(); p
case CHOICE => val q = choiceRest( p );accept( RPAREN ); q
case COMMA => val q = seqRest( p ); accept( RPAREN ); q
})
@@ -90,24 +91,24 @@ object ContentModelParser extends Scanner { // a bit too permissive concerning #
// seqRest ::= (',' S? cp S?)+
def seqRest(p: RegExp) = {
- var k = List(p);
+ var k = List(p)
while( token == COMMA ) {
- nextToken;
- sOpt;
- k = particle::k;
- sOpt;
+ nextToken()
+ sOpt()
+ k = particle::k
+ sOpt()
}
Sequ( k.reverse:_* )
}
// choiceRest ::= ('|' S? cp S?)+
def choiceRest( p:RegExp ) = {
- var k = List( p );
+ var k = List( p )
while( token == CHOICE ) {
- nextToken;
- sOpt;
- k = particle::k;
- sOpt;
+ nextToken()
+ sOpt()
+ k = particle::k
+ sOpt()
}
Alt( k.reverse:_* )
}
@@ -115,14 +116,14 @@ object ContentModelParser extends Scanner { // a bit too permissive concerning #
// particle ::= '(' S? regexp
// | name [ '+' | '*' | '?' ]
def particle = token match {
- case LPAREN => nextToken; sOpt; regexp;
- case NAME => val a = Letter(ElemName(value)); nextToken; maybeSuffix(a)
- case _ => scala.sys.error("expected '(' or Name, got:"+token2string(token));
+ case LPAREN => nextToken(); sOpt(); regexp
+ case NAME => val a = Letter(ElemName(value)); nextToken(); maybeSuffix(a)
+ case _ => scala.sys.error("expected '(' or Name, got:"+token2string(token))
}
// atom ::= name
def atom = token match {
- case NAME => val a = Letter(ElemName(value)); nextToken; a
- case _ => scala.sys.error("expected Name, got:"+token2string(token));
+ case NAME => val a = Letter(ElemName(value)); nextToken(); a
+ case _ => scala.sys.error("expected Name, got:"+token2string(token))
}
}
diff --git a/src/library/scala/xml/dtd/DTD.scala b/src/library/scala/xml/dtd/DTD.scala
index 1f8af3b59e..16a824fe2c 100644
--- a/src/library/scala/xml/dtd/DTD.scala
+++ b/src/library/scala/xml/dtd/DTD.scala
@@ -7,7 +7,8 @@
\* */
-package scala.xml
+package scala
+package xml
package dtd
import scala.collection.mutable
diff --git a/src/library/scala/xml/dtd/Decl.scala b/src/library/scala/xml/dtd/Decl.scala
index dc4cb93ddf..e6804478bd 100644
--- a/src/library/scala/xml/dtd/Decl.scala
+++ b/src/library/scala/xml/dtd/Decl.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.xml
+package scala
+package xml
package dtd
import Utility.sbToString
@@ -123,7 +124,7 @@ case class ExtDef(extID:ExternalID) extends EntityDef {
/** a parsed entity reference */
case class PEReference(ent:String) extends MarkupDecl {
if( !Utility.isName( ent ))
- throw new IllegalArgumentException("ent must be an XML Name");
+ throw new IllegalArgumentException("ent must be an XML Name")
override def buildString(sb: StringBuilder): StringBuilder =
sb append '%' append ent append ';'
diff --git a/src/library/scala/xml/dtd/DocType.scala b/src/library/scala/xml/dtd/DocType.scala
index 79f8f9fe8b..849d560cc9 100644
--- a/src/library/scala/xml/dtd/DocType.scala
+++ b/src/library/scala/xml/dtd/DocType.scala
@@ -7,7 +7,8 @@
\* */
-package scala.xml
+package scala
+package xml
package dtd
/** An XML node for document type declaration.
@@ -15,11 +16,10 @@ package dtd
* @author Burak Emir
*
* @param name name of this DOCTYPE
- * @param extID None, or Some(external ID of this doctype)
+ * @param extID NoExternalID or the external ID of this doctype
* @param intSubset sequence of internal subset declarations
*/
-case class DocType(name: String, extID: ExternalID, intSubset: Seq[dtd.Decl])
-{
+case class DocType(name: String, extID: ExternalID, intSubset: Seq[dtd.Decl]) {
if (!Utility.isName(name))
throw new IllegalArgumentException(name+" must be an XML Name")
@@ -29,6 +29,11 @@ case class DocType(name: String, extID: ExternalID, intSubset: Seq[dtd.Decl])
if (intSubset.isEmpty) ""
else intSubset.mkString("[", "", "]")
- """<!DOCTYPE %s %s%s>""".format(name, extID.toString, intString)
+ """<!DOCTYPE %s %s%s>""".format(name, extID.toString(), intString)
}
}
+
+object DocType {
+ /** Creates a doctype with no external id, nor internal subset declarations. */
+ def apply(name: String): DocType = apply(name, NoExternalID, Nil)
+}
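
With the new single-argument factory and the NoExternalID marker (added in the ExternalID hunk below), the common no-subset doctype becomes a one-liner; the rendering follows the format string shown above:

    import scala.xml.dtd.DocType

    DocType("html").toString   // "<!DOCTYPE html >"  (no external id, no internal subset)
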
diff --git a/src/library/scala/xml/dtd/ElementValidator.scala b/src/library/scala/xml/dtd/ElementValidator.scala
index bfc85f48a9..4830769a7d 100644
--- a/src/library/scala/xml/dtd/ElementValidator.scala
+++ b/src/library/scala/xml/dtd/ElementValidator.scala
@@ -8,14 +8,17 @@
-package scala.xml
+package scala
+package xml
package dtd
import PartialFunction._
+import scala.collection.mutable
+
import ContentModel.ElemName
import MakeValidationException._ // @todo other exceptions
-import scala.util.automata._
-import scala.collection.mutable
+
+import impl._
/** validate children and/or attributes of an element
* exceptions are created but not thrown.
@@ -61,7 +64,7 @@ class ElementValidator() extends Function1[Node,Boolean] {
*/
def check(md: MetaData): Boolean = {
val len: Int = exc.length
- var ok = new mutable.BitSet(adecls.length)
+ val ok = new mutable.BitSet(adecls.length)
for (attr <- md) {
def attrStr = attr.value.toString
@@ -97,21 +100,21 @@ class ElementValidator() extends Function1[Node,Boolean] {
*/
def check(nodes: Seq[Node]): Boolean = contentModel match {
case ANY => true
- case EMPTY => getIterable(nodes, false).isEmpty
- case PCDATA => getIterable(nodes, true).isEmpty
+ case EMPTY => getIterable(nodes, skipPCDATA = false).isEmpty
+ case PCDATA => getIterable(nodes, skipPCDATA = true).isEmpty
case MIXED(ContentModel.Alt(branches @ _*)) => // @todo
val j = exc.length
def find(Key: String): Boolean =
branches exists { case ContentModel.Letter(ElemName(Key)) => true ; case _ => false }
- getIterable(nodes, true) map (_.name) filterNot find foreach {
+ getIterable(nodes, skipPCDATA = true) map (_.name) filterNot find foreach {
exc ::= MakeValidationException fromUndefinedElement _
}
(exc.length == j) // - true if no new exception
case _: ELEMENTS =>
dfa isFinal {
- getIterable(nodes, false).foldLeft(0) { (q, e) =>
+ getIterable(nodes, skipPCDATA = false).foldLeft(0) { (q, e) =>
(dfa delta q).getOrElse(e, throw ValidationException("element %s not allowed here" format e))
}
}
diff --git a/src/library/scala/xml/dtd/ExternalID.scala b/src/library/scala/xml/dtd/ExternalID.scala
index 7a7463569e..5a1b5d1a19 100644
--- a/src/library/scala/xml/dtd/ExternalID.scala
+++ b/src/library/scala/xml/dtd/ExternalID.scala
@@ -7,15 +7,15 @@
\* */
-package scala.xml
+package scala
+package xml
package dtd
/** an ExternalIDs - either PublicID or SystemID
*
* @author Burak Emir
*/
-abstract class ExternalID extends parsing.TokenTests
-{
+abstract class ExternalID extends parsing.TokenTests {
def quoted(s: String) = {
val c = if (s contains '"') '\'' else '"'
c + s + c
@@ -73,3 +73,14 @@ case class PublicID(publicId: String, systemId: String) extends ExternalID {
/** always empty */
def child = Nil
}
+
+/** A marker used when a `DocType` contains no external id.
+ *
+ * @author Michael Bayne
+ */
+object NoExternalID extends ExternalID {
+ val publicId = null
+ val systemId = null
+
+ override def toString = ""
+}
diff --git a/src/library/scala/xml/dtd/Scanner.scala b/src/library/scala/xml/dtd/Scanner.scala
index 9b64cc61e2..5f9d1ccaed 100644
--- a/src/library/scala/xml/dtd/Scanner.scala
+++ b/src/library/scala/xml/dtd/Scanner.scala
@@ -7,7 +7,8 @@
\* */
-package scala.xml
+package scala
+package xml
package dtd
/** Scanner for regexps (content models in DTD element declarations)
@@ -28,8 +29,8 @@ class Scanner extends Tokens with parsing.TokenTests {
value = ""
it = (s).iterator
token = 1+END
- next
- nextToken
+ next()
+ nextToken()
}
/** scans the next token */
@@ -39,29 +40,29 @@ class Scanner extends Tokens with parsing.TokenTests {
// todo: see XML specification... probably isLetter,isDigit is fine
final def isIdentChar = ( ('a' <= c && c <= 'z')
- || ('A' <= c && c <= 'Z'));
+ || ('A' <= c && c <= 'Z'))
- final def next() = if (it.hasNext) c = it.next else c = ENDCH
+ final def next() = if (it.hasNext) c = it.next() else c = ENDCH
final def acc(d: Char) {
- if (c == d) next else scala.sys.error("expected '"+d+"' found '"+c+"' !");
+ if (c == d) next() else scala.sys.error("expected '"+d+"' found '"+c+"' !")
}
final def accS(ds: Seq[Char]) { ds foreach acc }
final def readToken: Int =
if (isSpace(c)) {
- while (isSpace(c)) c = it.next
+ while (isSpace(c)) c = it.next()
S
} else c match {
- case '(' => next; LPAREN
- case ')' => next; RPAREN
- case ',' => next; COMMA
- case '*' => next; STAR
- case '+' => next; PLUS
- case '?' => next; OPT
- case '|' => next; CHOICE
- case '#' => next; accS( "PCDATA" ); TOKEN_PCDATA
+ case '(' => next(); LPAREN
+ case ')' => next(); RPAREN
+ case ',' => next(); COMMA
+ case '*' => next(); STAR
+ case '+' => next(); PLUS
+ case '?' => next(); OPT
+ case '|' => next(); CHOICE
+ case '#' => next(); accS( "PCDATA" ); TOKEN_PCDATA
case ENDCH => END
case _ =>
if (isNameStart(c)) name; // NAME
@@ -70,7 +71,7 @@ class Scanner extends Tokens with parsing.TokenTests {
final def name = {
val sb = new StringBuilder()
- do { sb.append(c); next } while (isNameChar(c));
+ do { sb.append(c); next() } while (isNameChar(c))
value = sb.toString()
NAME
}
diff --git a/src/library/scala/xml/dtd/Tokens.scala b/src/library/scala/xml/dtd/Tokens.scala
index eaffba99a4..07e888e77a 100644
--- a/src/library/scala/xml/dtd/Tokens.scala
+++ b/src/library/scala/xml/dtd/Tokens.scala
@@ -8,7 +8,8 @@
-package scala.xml
+package scala
+package xml
package dtd
diff --git a/src/library/scala/xml/dtd/ValidationException.scala b/src/library/scala/xml/dtd/ValidationException.scala
index 243db69ab7..1bfae55286 100644
--- a/src/library/scala/xml/dtd/ValidationException.scala
+++ b/src/library/scala/xml/dtd/ValidationException.scala
@@ -8,7 +8,8 @@
-package scala.xml
+package scala
+package xml
package dtd
@@ -33,7 +34,7 @@ object MakeValidationException {
def fromMissingAttribute(allKeys: Set[String]) = {
val sb = new StringBuilder("missing value for REQUIRED attribute")
- if (allKeys.size > 1) sb.append('s');
+ if (allKeys.size > 1) sb.append('s')
allKeys foreach (k => sb append "'%s'".format(k))
new ValidationException(sb.toString())
}
diff --git a/src/library/scala/util/regexp/Base.scala b/src/library/scala/xml/dtd/impl/Base.scala
index 7dbe60a34e..91ff03a93a 100644
--- a/src/library/scala/util/regexp/Base.scala
+++ b/src/library/scala/xml/dtd/impl/Base.scala
@@ -8,7 +8,8 @@
-package scala.util.regexp
+package scala
+package xml.dtd.impl
/** Basic regular expressions.
*
@@ -17,7 +18,7 @@ package scala.util.regexp
*/
@deprecated("This class will be removed", "2.10.0")
-abstract class Base {
+private[dtd] abstract class Base {
type _regexpT <: RegExp
abstract class RegExp {
diff --git a/src/library/scala/util/automata/BaseBerrySethi.scala b/src/library/scala/xml/dtd/impl/BaseBerrySethi.scala
index 3f6f4507a9..f30309b037 100644
--- a/src/library/scala/util/automata/BaseBerrySethi.scala
+++ b/src/library/scala/xml/dtd/impl/BaseBerrySethi.scala
@@ -6,9 +6,9 @@
** |/ **
\* */
-package scala.util.automata
+package scala
+package xml.dtd.impl
-import scala.util.regexp.{ Base }
import scala.collection.{ mutable, immutable }
// todo: replace global variable pos with acc
@@ -18,7 +18,7 @@ import scala.collection.{ mutable, immutable }
* position automata construction (also called ''Berry-Sethi'' or ''Glushkov'').
*/
@deprecated("This class will be removed", "2.10.0")
-abstract class BaseBerrySethi {
+private[dtd] abstract class BaseBerrySethi {
val lang: Base
import lang.{ Alt, Eps, Meta, RegExp, Sequ, Star }
diff --git a/src/library/scala/util/automata/DetWordAutom.scala b/src/library/scala/xml/dtd/impl/DetWordAutom.scala
index 5d709106f8..6f8ba4de72 100644
--- a/src/library/scala/util/automata/DetWordAutom.scala
+++ b/src/library/scala/xml/dtd/impl/DetWordAutom.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.util.automata
+package scala
+package xml.dtd.impl
import scala.collection.{ mutable, immutable }
@@ -21,7 +22,7 @@ import scala.collection.{ mutable, immutable }
* @version 1.0
*/
@deprecated("This class will be removed", "2.10.0")
-abstract class DetWordAutom[T <: AnyRef] {
+private[dtd] abstract class DetWordAutom[T <: AnyRef] {
val nstates: Int
val finals: Array[Int]
val delta: Array[mutable.Map[T, Int]]
diff --git a/src/library/scala/util/automata/Inclusion.scala b/src/library/scala/xml/dtd/impl/Inclusion.scala
index 91441bd3a8..07b6afaeba 100644
--- a/src/library/scala/util/automata/Inclusion.scala
+++ b/src/library/scala/xml/dtd/impl/Inclusion.scala
@@ -8,7 +8,8 @@
-package scala.util.automata
+package scala
+package xml.dtd.impl
/** A fast test of language inclusion between minimal automata.
@@ -18,7 +19,7 @@ package scala.util.automata
* @version 1.0
*/
@deprecated("This class will be removed", "2.10.0")
-trait Inclusion[A <: AnyRef] {
+private[dtd] trait Inclusion[A <: AnyRef] {
val labels: Seq[A]
diff --git a/src/library/scala/util/automata/NondetWordAutom.scala b/src/library/scala/xml/dtd/impl/NondetWordAutom.scala
index 24c6612d0f..0bb19a7e3e 100644
--- a/src/library/scala/util/automata/NondetWordAutom.scala
+++ b/src/library/scala/xml/dtd/impl/NondetWordAutom.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.util.automata
+package scala
+package xml.dtd.impl
import scala.collection.{ immutable, mutable }
@@ -18,7 +19,7 @@ import scala.collection.{ immutable, mutable }
* the partial function `finals` is defined.
*/
@deprecated("This class will be removed", "2.10.0")
-abstract class NondetWordAutom[T <: AnyRef] {
+private[dtd] abstract class NondetWordAutom[T <: AnyRef] {
val nstates: Int
val labels: Seq[T]
val finals: Array[Int] // 0 means not final
@@ -37,10 +38,10 @@ abstract class NondetWordAutom[T <: AnyRef] {
/** @return true if there are no accepting states */
final def isEmpty = (0 until nstates) forall (x => !isFinal(x))
- /** @return a immutable.BitSet with the next states for given state and label */
+ /** @return an immutable.BitSet with the next states for given state and label */
def next(q: Int, a: T): immutable.BitSet = delta(q).getOrElse(a, default(q))
- /** @return a immutable.BitSet with the next states for given state and label */
+ /** @return an immutable.BitSet with the next states for given state and label */
def next(Q: immutable.BitSet, a: T): immutable.BitSet = next(Q, next(_, a))
def nextDefault(Q: immutable.BitSet): immutable.BitSet = next(Q, default)
@@ -51,7 +52,7 @@ abstract class NondetWordAutom[T <: AnyRef] {
override def toString = {
val finalString = Map(finalStates map (j => j -> finals(j)) : _*).toString
- val deltaString = (0 until nstates)
+ val deltaString = (0 until nstates)
.map(i => " %d->%s\n _>%s\n".format(i, delta(i), default(i))).mkString
"[NondetWordAutom nstates=%d finals=%s delta=\n%s".format(nstates, finalString, deltaString)
diff --git a/src/library/scala/util/regexp/PointedHedgeExp.scala b/src/library/scala/xml/dtd/impl/PointedHedgeExp.scala
index 5c0379b6f8..1720604132 100644
--- a/src/library/scala/util/regexp/PointedHedgeExp.scala
+++ b/src/library/scala/xml/dtd/impl/PointedHedgeExp.scala
@@ -8,7 +8,8 @@
-package scala.util.regexp
+package scala
+package xml.dtd.impl
/** Pointed regular hedge expressions, a useful subclass of regular hedge expressions.
*
@@ -16,7 +17,7 @@ package scala.util.regexp
* @version 1.0
*/
@deprecated("This class will be removed", "2.10.0")
-abstract class PointedHedgeExp extends Base {
+private[dtd] abstract class PointedHedgeExp extends Base {
type _regexpT <: RegExp
type _labelT
diff --git a/src/library/scala/util/automata/SubsetConstruction.scala b/src/library/scala/xml/dtd/impl/SubsetConstruction.scala
index 0ee768587c..632ca1eb18 100644
--- a/src/library/scala/util/automata/SubsetConstruction.scala
+++ b/src/library/scala/xml/dtd/impl/SubsetConstruction.scala
@@ -6,12 +6,13 @@
** |/ **
\* */
-package scala.util.automata
+package scala
+package xml.dtd.impl
import scala.collection.{ mutable, immutable }
@deprecated("This class will be removed", "2.10.0")
-class SubsetConstruction[T <: AnyRef](val nfa: NondetWordAutom[T]) {
+private[dtd] class SubsetConstruction[T <: AnyRef](val nfa: NondetWordAutom[T]) {
import nfa.labels
def selectTag(Q: immutable.BitSet, finals: Array[Int]) =
@@ -50,7 +51,7 @@ class SubsetConstruction[T <: AnyRef](val nfa: NondetWordAutom[T]) {
addFinal(q0) // initial state may also be a final state
while (!rest.isEmpty) {
- val P = rest.pop
+ val P = rest.pop()
// assign a number to this bitset
indexMap = indexMap.updated(P, ix)
invIndexMap = invIndexMap.updated(ix, P)
diff --git a/src/library/scala/util/regexp/SyntaxError.scala b/src/library/scala/xml/dtd/impl/SyntaxError.scala
index 1788fdfb84..a5b8a5aba0 100644
--- a/src/library/scala/util/regexp/SyntaxError.scala
+++ b/src/library/scala/xml/dtd/impl/SyntaxError.scala
@@ -8,7 +8,8 @@
-package scala.util.regexp
+package scala
+package xml.dtd.impl
/** This runtime exception is thrown if an attempt to instantiate a
* syntactically incorrect expression is detected.
@@ -17,4 +18,4 @@ package scala.util.regexp
* @version 1.0
*/
@deprecated("This class will be removed", "2.10.0")
-class SyntaxError(e: String) extends RuntimeException(e)
+private[dtd] class SyntaxError(e: String) extends RuntimeException(e)
diff --git a/src/library/scala/util/automata/WordBerrySethi.scala b/src/library/scala/xml/dtd/impl/WordBerrySethi.scala
index 12448f595d..9bf3fa518b 100644
--- a/src/library/scala/util/automata/WordBerrySethi.scala
+++ b/src/library/scala/xml/dtd/impl/WordBerrySethi.scala
@@ -6,10 +6,10 @@
** |/ **
\* */
-package scala.util.automata
+package scala
+package xml.dtd.impl
import scala.collection.{ immutable, mutable }
-import scala.util.regexp.WordExp
/** This class turns a regular expression into a [[scala.util.automata.NondetWordAutom]]
* celebrated position automata construction (also called ''Berry-Sethi'' or ''Glushkov'').
@@ -18,10 +18,10 @@ import scala.util.regexp.WordExp
* @version 1.0
*/
@deprecated("This class will be removed", "2.10.0")
-abstract class WordBerrySethi extends BaseBerrySethi {
+private[dtd] abstract class WordBerrySethi extends BaseBerrySethi {
override val lang: WordExp
- import lang.{ Alt, Eps, Letter, Meta, RegExp, Sequ, Star, _labelT }
+ import lang.{ Alt, Eps, Letter, RegExp, Sequ, Star, _labelT }
protected var labels: mutable.HashSet[_labelT] = _
// don't let this fool you, only labelAt is a real, surjective mapping
@@ -140,7 +140,6 @@ abstract class WordBerrySethi extends BaseBerrySethi {
val delta1 = immutable.Map(deltaq.zipWithIndex map (_.swap): _*)
val finalsArr = (0 until pos map (k => finals.getOrElse(k, 0))).toArray // 0 == not final
- val initialsArr = initials.toArray
val deltaArr: Array[mutable.Map[_labelT, immutable.BitSet]] =
(0 until pos map { x =>
@@ -152,7 +151,6 @@ abstract class WordBerrySethi extends BaseBerrySethi {
new NondetWordAutom[_labelT] {
val nstates = pos
val labels = WordBerrySethi.this.labels.toList
- val initials = initialsArr
val finals = finalsArr
val delta = deltaArr
val default = defaultArr
diff --git a/src/library/scala/util/regexp/WordExp.scala b/src/library/scala/xml/dtd/impl/WordExp.scala
index 3c0c2ec156..a4bb54c1ea 100644
--- a/src/library/scala/util/regexp/WordExp.scala
+++ b/src/library/scala/xml/dtd/impl/WordExp.scala
@@ -8,7 +8,8 @@
-package scala.util.regexp
+package scala
+package xml.dtd.impl
/**
* The class `WordExp` provides regular word expressions.
@@ -39,7 +40,7 @@ package scala.util.regexp
* @version 1.0
*/
@deprecated("This class will be removed", "2.10.0")
-abstract class WordExp extends Base {
+private[dtd] abstract class WordExp extends Base {
abstract class Label
diff --git a/src/library/scala/xml/factory/Binder.scala b/src/library/scala/xml/factory/Binder.scala
index bad4a4ea09..947f99e6a4 100755
--- a/src/library/scala/xml/factory/Binder.scala
+++ b/src/library/scala/xml/factory/Binder.scala
@@ -8,7 +8,8 @@
-package scala.xml
+package scala
+package xml
package factory
import parsing.ValidatingMarkupHandler
@@ -48,7 +49,7 @@ abstract class Binder(val preserveWS: Boolean) extends ValidatingMarkupHandler {
val old = result
result = new NodeBuffer()
for (m <- x.child) traverse(m)
- result = old &+ elem(0, x.prefix, x.label, x.attributes, x.scope, x.minimizeEmpty, NodeSeq.fromSeq(result)).toList;
+ result = old &+ elem(0, x.prefix, x.label, x.attributes, x.scope, x.minimizeEmpty, NodeSeq.fromSeq(result)).toList
elemEnd(0, x.prefix, x.label)
}
diff --git a/src/library/scala/xml/factory/LoggedNodeFactory.scala b/src/library/scala/xml/factory/LoggedNodeFactory.scala
index cac61acc39..bc074bfc83 100644
--- a/src/library/scala/xml/factory/LoggedNodeFactory.scala
+++ b/src/library/scala/xml/factory/LoggedNodeFactory.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.xml
+package scala
+package xml
package factory
/** This class logs what the nodefactory is actually doing.
@@ -14,8 +15,9 @@ package factory
{{{
object testLogged extends App {
val x = new scala.xml.parsing.NoBindingFactoryAdapter
- with scala.xml.factory.LoggedNodeFactory[scala.xml.Elem]
- with scala.util.logging.ConsoleLogger
+ with scala.xml.factory.LoggedNodeFactory[scala.xml.Elem] {
+ override def log(s: String) = println(s)
+ }
Console.println("Start")
val doc = x.load(new java.net.URL("http://example.com/file.xml"))
@@ -27,7 +29,8 @@ object testLogged extends App {
* @author Burak Emir
* @version 1.0
*/
-trait LoggedNodeFactory[A <: Node] extends NodeFactory[A] with scala.util.logging.Logged {
+@deprecated("This trait will be removed.", "2.11")
+trait LoggedNodeFactory[A <: Node] extends NodeFactory[A] {
// configuration values
val logNode = true
val logText = false
@@ -46,7 +49,7 @@ trait LoggedNodeFactory[A <: Node] extends NodeFactory[A] with scala.util.loggin
override def makeNode(pre: String, label: String, attrSeq: MetaData,
scope: NamespaceBinding, children: Seq[Node]): A = {
if (logNode)
- log("[makeNode for "+label+"]");
+ log("[makeNode for "+label+"]")
val hash = Utility.hashCode(pre, label, attrSeq.##, scope.##, children)
@@ -59,27 +62,29 @@ trait LoggedNodeFactory[A <: Node] extends NodeFactory[A] with scala.util.loggin
}
*/
if (!cache.get( hash ).isEmpty && (logCompressLevel >= CACHE))
- log("[cache hit !]");
+ log("[cache hit !]")
super.makeNode(pre, label, attrSeq, scope, children)
}
override def makeText(s: String) = {
if (logText)
- log("[makeText:\""+s+"\"]");
+ log("[makeText:\""+s+"\"]")
super.makeText(s)
}
override def makeComment(s: String): Seq[Comment] = {
if (logComment)
- log("[makeComment:\""+s+"\"]");
+ log("[makeComment:\""+s+"\"]")
super.makeComment(s)
}
override def makeProcInstr(t: String, s: String): Seq[ProcInstr] = {
if (logProcInstr)
- log("[makeProcInstr:\""+t+" "+ s+"\"]");
+ log("[makeProcInstr:\""+t+" "+ s+"\"]")
super.makeProcInstr(t, s)
}
+ @deprecated("This method and its usages will be removed. Use a debugger to debug code.", "2.11")
+ def log(msg: String): Unit = {}
}
diff --git a/src/library/scala/xml/factory/NodeFactory.scala b/src/library/scala/xml/factory/NodeFactory.scala
index 28a1b6fff4..94801bb554 100644
--- a/src/library/scala/xml/factory/NodeFactory.scala
+++ b/src/library/scala/xml/factory/NodeFactory.scala
@@ -7,7 +7,8 @@
\* */
-package scala.xml
+package scala
+package xml
package factory
import parsing.{ FactoryAdapter, NoBindingFactoryAdapter }
diff --git a/src/library/scala/xml/factory/XMLLoader.scala b/src/library/scala/xml/factory/XMLLoader.scala
index 72e4c51b11..b69f187039 100644
--- a/src/library/scala/xml/factory/XMLLoader.scala
+++ b/src/library/scala/xml/factory/XMLLoader.scala
@@ -7,12 +7,13 @@
\* */
-package scala.xml
+package scala
+package xml
package factory
import javax.xml.parsers.SAXParserFactory
import parsing.{ FactoryAdapter, NoBindingFactoryAdapter }
-import java.io.{ InputStream, Reader, StringReader, File, FileDescriptor, FileInputStream }
+import java.io.{ InputStream, Reader, File, FileDescriptor }
import java.net.URL
/** Presents collection of XML loading methods which use the parser
@@ -38,7 +39,7 @@ trait XMLLoader[T <: Node]
newAdapter.scopeStack push TopScope
parser.parse(source, newAdapter)
- newAdapter.scopeStack.pop
+ newAdapter.scopeStack.pop()
newAdapter.rootElem.asInstanceOf[T]
}
diff --git a/src/library/scala/xml/include/CircularIncludeException.scala b/src/library/scala/xml/include/CircularIncludeException.scala
index 5e74967d54..351f403008 100644
--- a/src/library/scala/xml/include/CircularIncludeException.scala
+++ b/src/library/scala/xml/include/CircularIncludeException.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.xml
+package scala
+package xml
package include
/**
diff --git a/src/library/scala/xml/include/UnavailableResourceException.scala b/src/library/scala/xml/include/UnavailableResourceException.scala
index f00cc58699..47b176e0f3 100644
--- a/src/library/scala/xml/include/UnavailableResourceException.scala
+++ b/src/library/scala/xml/include/UnavailableResourceException.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.xml
+package scala
+package xml
package include
/**
diff --git a/src/library/scala/xml/include/XIncludeException.scala b/src/library/scala/xml/include/XIncludeException.scala
index 84033f853f..11e1644d83 100644
--- a/src/library/scala/xml/include/XIncludeException.scala
+++ b/src/library/scala/xml/include/XIncludeException.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.xml
+package scala
+package xml
package include
/**
diff --git a/src/library/scala/xml/include/sax/EncodingHeuristics.scala b/src/library/scala/xml/include/sax/EncodingHeuristics.scala
index 1340689cae..57ab5ed91c 100644
--- a/src/library/scala/xml/include/sax/EncodingHeuristics.scala
+++ b/src/library/scala/xml/include/sax/EncodingHeuristics.scala
@@ -6,10 +6,9 @@
** |/ **
\* */
-
-package scala.xml
+package scala
+package xml
package include.sax
-import scala.xml.include._
import java.io.InputStream
import scala.util.matching.Regex
diff --git a/src/library/scala/xml/include/sax/Main.scala b/src/library/scala/xml/include/sax/Main.scala
deleted file mode 100644
index 92d4d6ea73..0000000000
--- a/src/library/scala/xml/include/sax/Main.scala
+++ /dev/null
@@ -1,82 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-
-package scala.xml
-package include.sax
-
-import scala.util.control.Exception.{ catching, ignoring }
-import org.xml.sax.XMLReader
-import org.xml.sax.helpers.XMLReaderFactory
-
-@deprecated("Code example will be moved to documentation.", "2.10.0")
-object Main {
- private val namespacePrefixes = "http://xml.org/sax/features/namespace-prefixes"
- private val lexicalHandler = "http://xml.org/sax/properties/lexical-handler"
-
- /**
- * The driver method for xinc
- * Output is written to System.out via Conolse
- * </p>
- *
- * @param args contains the URLs and/or filenames
- * of the documents to be processed.
- */
- def main(args: Array[String]) {
- def saxe[T](body: => T) = catching[T](classOf[SAXException]) opt body
- def fail(msg: String) = System.err.println(msg)
-
- val parser: XMLReader =
- saxe[XMLReader](XMLReaderFactory.createXMLReader()) getOrElse (
- saxe[XMLReader](XMLReaderFactory.createXMLReader(XercesClassName)) getOrElse (
- return fail("Could not find an XML parser")
- )
- )
-
- // Need better namespace handling
- try parser.setFeature(namespacePrefixes, true)
- catch { case e: SAXException => return System.err.println(e) }
-
- if (args.isEmpty)
- return
-
- def dashR = args.size >= 2 && args(0) == "-r"
- val args2 = if (dashR) args drop 2 else args
- val resolver: Option[EntityResolver] =
- if (dashR) None
- else catching(classOf[Exception]) opt {
- val r = Class.forName(args(1)).newInstance().asInstanceOf[EntityResolver]
- parser setEntityResolver r
- r
- } orElse (return fail("Could not load requested EntityResolver"))
-
- for (arg <- args2) {
- try {
- val includer = new XIncludeFilter()
- includer setParent parser
- val s = new XIncluder(System.out, "UTF-8")
- includer setContentHandler s
-
- resolver map (includer setEntityResolver _)
- // SAXException here means will not support comments
- ignoring(classOf[SAXException]) {
- includer.setProperty(lexicalHandler, s)
- s setFilter includer
- }
- includer parse arg
- }
- catch {
- case e: SAXParseException =>
- fail(e.toString)
- fail("Problem in %s at line %d".format(e.getSystemId, e.getLineNumber))
- case e: SAXException =>
- fail(e.toString)
- }
- }
- }
-}
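
Since Main is deleted here and its deprecation note promises the example will move to documentation, a trimmed sketch of the same wiring, lifted from the removed code (error handling and the optional EntityResolver are omitted):

    import org.xml.sax.helpers.XMLReaderFactory
    import scala.xml.include.sax.{ XIncludeFilter, XIncluder }

    val parser = XMLReaderFactory.createXMLReader()
    parser.setFeature("http://xml.org/sax/features/namespace-prefixes", true)

    val includer = new XIncludeFilter()
    includer setParent parser

    val output = new XIncluder(System.out, "UTF-8")
    includer setContentHandler output
    // a SAXException here would only mean comments are unsupported
    includer.setProperty("http://xml.org/sax/properties/lexical-handler", output)
    output setFilter includer

    includer parse "document.xml"
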
diff --git a/src/library/scala/xml/include/sax/XIncludeFilter.scala b/src/library/scala/xml/include/sax/XIncludeFilter.scala
index 729769366e..3fa3beefb0 100644
--- a/src/library/scala/xml/include/sax/XIncludeFilter.scala
+++ b/src/library/scala/xml/include/sax/XIncludeFilter.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.xml
+package scala
+package xml
package include.sax
import scala.xml.include._
@@ -147,10 +148,10 @@ class XIncludeFilter extends XMLFilterImpl {
if (parse equals "text") {
val encoding = atts getValue "encoding"
- includeTextDocument(href, encoding);
+ includeTextDocument(href, encoding)
}
else if (parse equals "xml") {
- includeXMLDocument(href);
+ includeXMLDocument(href)
}
// Need to check this also in DOM and JDOM????
else {
@@ -184,7 +185,7 @@ class XIncludeFilter extends XMLFilterImpl {
}
}
- private var depth = 0;
+ private var depth = 0
override def startDocument() {
level = 0
@@ -240,7 +241,7 @@ class XIncludeFilter extends XMLFilterImpl {
}
locationString = (" in document included from " + publicID
+ " at " + systemID
- + " at line " + line + ", column " + column);
+ + " at line " + line + ", column " + column)
locationString
}
@@ -258,7 +259,7 @@ class XIncludeFilter extends XMLFilterImpl {
*/
private def includeTextDocument(url: String, encoding1: String) {
var encoding = encoding1
- if (encoding == null || encoding.trim().equals("")) encoding = "UTF-8";
+ if (encoding == null || encoding.trim().equals("")) encoding = "UTF-8"
var source: URL = null
try {
val base = bases.peek().asInstanceOf[URL]
@@ -275,7 +276,7 @@ class XIncludeFilter extends XMLFilterImpl {
try {
val uc = source.openConnection()
val in = new BufferedInputStream(uc.getInputStream())
- var encodingFromHeader = uc.getContentEncoding()
+ val encodingFromHeader = uc.getContentEncoding()
var contentType = uc.getContentType()
if (encodingFromHeader != null)
encoding = encodingFromHeader
@@ -284,13 +285,13 @@ class XIncludeFilter extends XMLFilterImpl {
// MIME types are case-insensitive
// Java may be picking this up from file URL
if (contentType != null) {
- contentType = contentType.toLowerCase();
+ contentType = contentType.toLowerCase()
if (contentType.equals("text/xml")
|| contentType.equals("application/xml")
|| (contentType.startsWith("text/") && contentType.endsWith("+xml") )
|| (contentType.startsWith("application/") && contentType.endsWith("+xml"))) {
- encoding = EncodingHeuristics.readEncodingFromStream(in);
- }
+ encoding = EncodingHeuristics.readEncodingFromStream(in)
+ }
}
}
val reader = new InputStreamReader(in, encoding)
diff --git a/src/library/scala/xml/include/sax/XIncluder.scala b/src/library/scala/xml/include/sax/XIncluder.scala
index 5064d6b3d8..1939fa1875 100644
--- a/src/library/scala/xml/include/sax/XIncluder.scala
+++ b/src/library/scala/xml/include/sax/XIncluder.scala
@@ -6,11 +6,10 @@
** |/ **
\* */
-
-package scala.xml
+package scala
+package xml
package include.sax
-import scala.xml.include._
import scala.collection.mutable
import org.xml.sax.{ ContentHandler, XMLReader, Locator, Attributes }
import org.xml.sax.ext.LexicalHandler
@@ -30,7 +29,7 @@ class XIncluder(outs: OutputStream, encoding: String) extends ContentHandler wit
def startDocument() {
try {
out.write("<?xml version='1.0' encoding='"
- + encoding + "'?>\r\n");
+ + encoding + "'?>\r\n")
}
catch {
case e:IOException =>
@@ -54,16 +53,16 @@ class XIncluder(outs: OutputStream, encoding: String) extends ContentHandler wit
def startElement(namespaceURI: String, localName: String, qualifiedName: String, atts: Attributes) = {
try {
- out.write("<" + qualifiedName);
+ out.write("<" + qualifiedName)
var i = 0; while (i < atts.getLength()) {
- out.write(" ");
- out.write(atts.getQName(i));
- out.write("='");
- val value = atts.getValue(i);
+ out.write(" ")
+ out.write(atts.getQName(i))
+ out.write("='")
+ val value = atts.getValue(i)
// @todo Need to use character references if the encoding
// can't support the character
out.write(scala.xml.Utility.escape(value))
- out.write("'");
+ out.write("'")
i += 1
}
out.write(">")
@@ -89,20 +88,20 @@ class XIncluder(outs: OutputStream, encoding: String) extends ContentHandler wit
def characters(ch: Array[Char], start: Int, length: Int) {
try {
var i = 0; while (i < length) {
- val c = ch(start+i);
- if (c == '&') out.write("&amp;");
- else if (c == '<') out.write("&lt;");
+ val c = ch(start+i)
+ if (c == '&') out.write("&amp;")
+ else if (c == '<') out.write("&lt;")
// This next fix is normally not necessary.
// However, it is required if text contains ]]>
// (The end CDATA section delimiter)
- else if (c == '>') out.write("&gt;");
- else out.write(c);
+ else if (c == '>') out.write("&gt;")
+ else out.write(c.toInt)
i += 1
}
}
catch {
case e: IOException =>
- throw new SAXException("Write failed", e);
+ throw new SAXException("Write failed", e)
}
}
@@ -140,8 +139,8 @@ class XIncluder(outs: OutputStream, encoding: String) extends ContentHandler wit
// if this is the source document, output a DOCTYPE declaration
if (entities.isEmpty) {
var id = ""
- if (publicID != null) id = " PUBLIC \"" + publicID + "\" \"" + systemID + '"';
- else if (systemID != null) id = " SYSTEM \"" + systemID + '"';
+ if (publicID != null) id = " PUBLIC \"" + publicID + "\" \"" + systemID + '"'
+ else if (systemID != null) id = " SYSTEM \"" + systemID + '"'
try {
out.write("<!DOCTYPE " + name + id + ">\r\n")
}
diff --git a/src/library/scala/xml/parsing/ConstructingHandler.scala b/src/library/scala/xml/parsing/ConstructingHandler.scala
index 6fda4dabfb..ba416e4301 100755
--- a/src/library/scala/xml/parsing/ConstructingHandler.scala
+++ b/src/library/scala/xml/parsing/ConstructingHandler.scala
@@ -8,7 +8,8 @@
-package scala.xml
+package scala
+package xml
package parsing
/** Implementation of MarkupHandler that constructs nodes.
diff --git a/src/library/scala/xml/parsing/ConstructingParser.scala b/src/library/scala/xml/parsing/ConstructingParser.scala
index 404411812e..3caeddabf4 100644
--- a/src/library/scala/xml/parsing/ConstructingParser.scala
+++ b/src/library/scala/xml/parsing/ConstructingParser.scala
@@ -8,7 +8,8 @@
-package scala.xml
+package scala
+package xml
package parsing
import java.io.File
diff --git a/src/library/scala/xml/parsing/DefaultMarkupHandler.scala b/src/library/scala/xml/parsing/DefaultMarkupHandler.scala
index 0152e44cda..6ec7474843 100755
--- a/src/library/scala/xml/parsing/DefaultMarkupHandler.scala
+++ b/src/library/scala/xml/parsing/DefaultMarkupHandler.scala
@@ -8,7 +8,8 @@
-package scala.xml
+package scala
+package xml
package parsing
diff --git a/src/library/scala/xml/parsing/ExternalSources.scala b/src/library/scala/xml/parsing/ExternalSources.scala
index aaac588092..bb939bca95 100644
--- a/src/library/scala/xml/parsing/ExternalSources.scala
+++ b/src/library/scala/xml/parsing/ExternalSources.scala
@@ -8,7 +8,8 @@
-package scala.xml
+package scala
+package xml
package parsing
import java.net.URL
diff --git a/src/library/scala/xml/parsing/FactoryAdapter.scala b/src/library/scala/xml/parsing/FactoryAdapter.scala
index 5f776f5299..2154bdf5ba 100644
--- a/src/library/scala/xml/parsing/FactoryAdapter.scala
+++ b/src/library/scala/xml/parsing/FactoryAdapter.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.xml
+package scala
+package xml
package parsing
import java.io.{ InputStream, Reader, File, FileDescriptor, FileInputStream }
@@ -26,7 +27,7 @@ trait ConsoleErrorHandler extends DefaultHandler {
val s = "[%s]:%d:%d: %s".format(
errtype, ex.getLineNumber, ex.getColumnNumber, ex.getMessage)
Console.println(s)
- Console.flush
+ Console.flush()
}
}
@@ -91,7 +92,7 @@ abstract class FactoryAdapter extends DefaultHandler with factory.XMLLoader[Node
else {
var it = ch.slice(offset, offset + length).iterator
while (it.hasNext) {
- val c = it.next
+ val c = it.next()
val isSpace = c.isWhitespace
buffer append (if (isSpace) ' ' else c)
if (isSpace)
@@ -164,17 +165,17 @@ abstract class FactoryAdapter extends DefaultHandler with factory.XMLLoader[Node
*/
override def endElement(uri: String , _localName: String, qname: String): Unit = {
captureText()
- val metaData = attribStack.pop
+ val metaData = attribStack.pop()
// reverse order to get it right
val v = (Iterator continually hStack.pop takeWhile (_ != null)).toList.reverse
val (pre, localName) = splitName(qname)
- val scp = scopeStack.pop
+ val scp = scopeStack.pop()
// create element
rootElem = createNode(pre, localName, metaData, scp, v)
hStack push rootElem
- curTag = tagStack.pop
+ curTag = tagStack.pop()
capture = curTag != null && nodeContainsText(curTag) // root level
}
diff --git a/src/library/scala/xml/parsing/FatalError.scala b/src/library/scala/xml/parsing/FatalError.scala
index a8b4f8f8cf..ab3cb2a74d 100644
--- a/src/library/scala/xml/parsing/FatalError.scala
+++ b/src/library/scala/xml/parsing/FatalError.scala
@@ -8,7 +8,8 @@
-package scala.xml
+package scala
+package xml
package parsing
/** !!! This is poorly named, but I guess it's in the API.
diff --git a/src/library/scala/xml/parsing/MarkupHandler.scala b/src/library/scala/xml/parsing/MarkupHandler.scala
index 7028161821..1ebffb9c90 100755
--- a/src/library/scala/xml/parsing/MarkupHandler.scala
+++ b/src/library/scala/xml/parsing/MarkupHandler.scala
@@ -8,12 +8,12 @@
-package scala.xml
+package scala
+package xml
package parsing
import scala.collection.mutable
import scala.io.Source
-import scala.util.logging.Logged
import scala.xml.dtd._
/** class that handles markup - provides callback methods to MarkupParser.
@@ -25,8 +25,8 @@ import scala.xml.dtd._
* @todo can we ignore more entity declarations (i.e. those with extIDs)?
* @todo expanding entity references
*/
-abstract class MarkupHandler extends Logged
-{
+abstract class MarkupHandler {
+
/** returns true is this markup handler is validating */
val isValidating: Boolean = false
@@ -121,4 +121,7 @@ abstract class MarkupHandler extends Logged
def unparsedEntityDecl(name: String, extID: ExternalID, notat: String): Unit = ()
def notationDecl(notat: String, extID: ExternalID): Unit = ()
def reportSyntaxError(pos: Int, str: String): Unit
+
+ @deprecated("This method and its usages will be removed. Use a debugger to debug code.", "2.11")
+ def log(msg: String): Unit = {}
}
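
MarkupHandler no longer mixes in scala.util.logging.Logged; tracing now goes through the deprecated no-op log(msg) stub added above. A minimal sketch, assuming a hypothetical handler subclass that still wants the old behaviour:

    import scala.xml.parsing.ConstructingHandler

    // Hypothetical subclass: preserveWS is the one abstract member of
    // ConstructingHandler; log overrides the deprecated no-op stub.
    class TracingHandler extends ConstructingHandler {
      val preserveWS = true
      override def log(msg: String): Unit = Console.err.println("[xml] " + msg)
    }
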
diff --git a/src/library/scala/xml/parsing/MarkupParser.scala b/src/library/scala/xml/parsing/MarkupParser.scala
index f9ff54d054..3bbd136b67 100755
--- a/src/library/scala/xml/parsing/MarkupParser.scala
+++ b/src/library/scala/xml/parsing/MarkupParser.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.xml
+package scala
+package xml
package parsing
import scala.io.Source
@@ -102,13 +103,13 @@ trait MarkupParser extends MarkupParserCommon with TokenTests
def ch: Char = {
if (nextChNeeded) {
if (curInput.hasNext) {
- lastChRead = curInput.next
+ lastChRead = curInput.next()
pos = curInput.pos
} else {
- val ilen = inpStack.length;
+ val ilen = inpStack.length
//Console.println(" ilen = "+ilen+ " extIndex = "+extIndex);
if ((ilen != extIndex) && (ilen > 0)) {
- /** for external source, inpStack == Nil ! need notify of eof! */
+ /* for external source, inpStack == Nil ! need notify of eof! */
pop()
} else {
reachedEof = true
@@ -138,10 +139,10 @@ trait MarkupParser extends MarkupParserCommon with TokenTests
* }}} */
def xmlProcInstr(): MetaData = {
xToken("xml")
- xSpace
+ xSpace()
val (md,scp) = xAttributes(TopScope)
if (scp != TopScope)
- reportSyntaxError("no xmlns definitions here, please.");
+ reportSyntaxError("no xmlns definitions here, please.")
xToken('?')
xToken('>')
md
@@ -154,11 +155,11 @@ trait MarkupParser extends MarkupParserCommon with TokenTests
var info_enc: Option[String] = None
var info_stdl: Option[Boolean] = None
- var m = xmlProcInstr()
+ val m = xmlProcInstr()
var n = 0
if (isProlog)
- xSpaceOpt
+ xSpaceOpt()
m("version") match {
case null =>
@@ -199,11 +200,11 @@ trait MarkupParser extends MarkupParserCommon with TokenTests
* // this is a bit more lenient than necessary...
* }}} */
def prolog(): (Option[String], Option[String], Option[Boolean]) =
- prologOrTextDecl(true)
+ prologOrTextDecl(isProlog = true)
/** prolog, but without standalone */
def textDecl(): (Option[String], Option[String]) =
- prologOrTextDecl(false) match { case (x1, x2, _) => (x1, x2) }
+ prologOrTextDecl(isProlog = false) match { case (x1, x2, _) => (x1, x2) }
/** {{{
* [22] prolog ::= XMLDecl? Misc* (doctypedecl Misc*)?
@@ -223,10 +224,10 @@ trait MarkupParser extends MarkupParserCommon with TokenTests
return null
}
- nextch // is prolog ?
+ nextch() // is prolog ?
var children: NodeSeq = null
if ('?' == ch) {
- nextch
+ nextch()
info_prolog = prolog()
doc.version = info_prolog._1
doc.encoding = info_prolog._2
@@ -247,7 +248,7 @@ trait MarkupParser extends MarkupParserCommon with TokenTests
case _:ProcInstr =>
case _:Comment =>
case _:EntityRef => // todo: fix entities, shouldn't be "special"
- reportSyntaxError("no entity references allowed here");
+ reportSyntaxError("no entity references allowed here")
case s:SpecialNode =>
if (s.toString.trim().length > 0) //non-empty text nodes not allowed
elemCount += 2
@@ -272,7 +273,7 @@ trait MarkupParser extends MarkupParserCommon with TokenTests
* after construction, this method formalizes that suboptimal reality.
*/
def initialize: this.type = {
- nextch
+ nextch()
this
}
@@ -303,10 +304,8 @@ trait MarkupParser extends MarkupParserCommon with TokenTests
var scope: NamespaceBinding = pscope
var aMap: MetaData = Null
while (isNameStart(ch)) {
- val pos = this.pos
-
val qname = xName
- val _ = xEQ
+ xEQ() // side effect
val value = xAttributeValue()
Utility.prefix(qname) match {
@@ -326,11 +325,11 @@ trait MarkupParser extends MarkupParserCommon with TokenTests
}
if ((ch != '/') && (ch != '>') && ('?' != ch))
- xSpace
+ xSpace()
}
if(!aMap.wellformed(scope))
- reportSyntaxError( "double attribute");
+ reportSyntaxError( "double attribute")
(aMap,scope)
}
@@ -343,12 +342,12 @@ trait MarkupParser extends MarkupParserCommon with TokenTests
*/
def xEntityValue(): String = {
val endch = ch
- nextch
+ nextch()
while (ch != endch && !eof) {
putChar(ch)
- nextch
+ nextch()
}
- nextch
+ nextch()
val str = cbuf.toString()
cbuf.length = 0
str
@@ -377,13 +376,13 @@ trait MarkupParser extends MarkupParserCommon with TokenTests
val sb: StringBuilder = new StringBuilder()
xToken("--")
while (true) {
- if (ch == '-' && { sb.append(ch); nextch; ch == '-' }) {
+ if (ch == '-' && { sb.append(ch); nextch(); ch == '-' }) {
sb.length = sb.length - 1
- nextch
+ nextch()
xToken('>')
return handle.comment(pos, sb.toString())
} else sb.append(ch)
- nextch
+ nextch()
}
throw FatalError("this cannot happen")
}
@@ -391,10 +390,10 @@ trait MarkupParser extends MarkupParserCommon with TokenTests
/* todo: move this into the NodeBuilder class */
def appendText(pos: Int, ts: NodeBuffer, txt: String): Unit = {
if (preserveWS)
- ts &+ handle.text(pos, txt);
+ ts &+ handle.text(pos, txt)
else
for (t <- TextBuffer.fromString(txt).toText) {
- ts &+ handle.text(pos, t.text);
+ ts &+ handle.text(pos, t.text)
}
}
@@ -404,7 +403,7 @@ trait MarkupParser extends MarkupParserCommon with TokenTests
def content1(pscope: NamespaceBinding, ts: NodeBuffer) {
ch match {
case '!' =>
- nextch
+ nextch()
if ('[' == ch) // CDATA
ts &+ xCharData
else if ('D' == ch) // doctypedecl, parse DTD // @todo REMOVE HACK
@@ -412,7 +411,7 @@ trait MarkupParser extends MarkupParserCommon with TokenTests
else // comment
ts &+ xComment
case '?' => // PI
- nextch
+ nextch()
ts &+ xProcInstr
case _ =>
ts &+ element1(pscope) // child
@@ -423,7 +422,7 @@ trait MarkupParser extends MarkupParserCommon with TokenTests
* content1 ::= '<' content1 | '&' charref ...
* }}} */
def content(pscope: NamespaceBinding): NodeSeq = {
- var ts = new NodeBuffer
+ val ts = new NodeBuffer
var exit = eof
// todo: optimize seq repr.
def done = new NodeSeq { val theSeq = ts.toList }
@@ -437,18 +436,18 @@ trait MarkupParser extends MarkupParserCommon with TokenTests
ch match {
case '<' => // another tag
- nextch; ch match {
+ nextch(); ch match {
case '/' => exit = true // end tag
case _ => content1(pscope, ts)
}
// postcond: xEmbeddedBlock == false!
case '&' => // EntityRef or CharRef
- nextch; ch match {
+ nextch(); ch match {
case '#' => // CharacterRef
- nextch
- val theChar = handle.text(tmppos, xCharRef(() => ch, () => nextch))
- xToken(';');
+ nextch()
+ val theChar = handle.text(tmppos, xCharRef(() => ch, () => nextch()))
+ xToken(';')
ts &+ theChar
case _ => // EntityRef
val n = xName
@@ -472,16 +471,16 @@ trait MarkupParser extends MarkupParserCommon with TokenTests
* }}} */
def externalID(): ExternalID = ch match {
case 'S' =>
- nextch
+ nextch()
xToken("YSTEM")
- xSpace
+ xSpace()
val sysID = systemLiteral()
new SystemID(sysID)
case 'P' =>
- nextch; xToken("UBLIC")
- xSpace
+ nextch(); xToken("UBLIC")
+ xSpace()
val pubID = pubidLiteral()
- xSpace
+ xSpace()
val sysID = systemLiteral()
new PublicID(pubID, sysID)
}
@@ -497,13 +496,13 @@ trait MarkupParser extends MarkupParserCommon with TokenTests
if (this.dtd ne null)
reportSyntaxError("unexpected character (DOCTYPE already defined")
xToken("DOCTYPE")
- xSpace
+ xSpace()
val n = xName
- xSpace
+ xSpace()
//external ID
if ('S' == ch || 'P' == ch) {
extID = externalID()
- xSpaceOpt
+ xSpaceOpt()
}
/* parse external subset of DTD
@@ -520,12 +519,12 @@ trait MarkupParser extends MarkupParserCommon with TokenTests
}
if ('[' == ch) { // internal subset
- nextch
+ nextch()
/* TODO */
intSubset()
// TODO: do the DTD parsing?? ?!?!?!?!!
xToken(']')
- xSpaceOpt
+ xSpaceOpt()
}
xToken('>')
this.dtd = new DTD {
@@ -582,8 +581,7 @@ trait MarkupParser extends MarkupParserCommon with TokenTests
var exit = false
while (! exit) {
putChar(ch)
- val opos = pos
- nextch
+ nextch()
exit = eof || ( ch == '<' ) || ( ch == '&' )
}
@@ -600,13 +598,13 @@ trait MarkupParser extends MarkupParserCommon with TokenTests
def systemLiteral(): String = {
val endch = ch
if (ch != '\'' && ch != '"')
- reportSyntaxError("quote ' or \" expected");
- nextch
+ reportSyntaxError("quote ' or \" expected")
+ nextch()
while (ch != endch && !eof) {
putChar(ch)
- nextch
+ nextch()
}
- nextch
+ nextch()
val str = cbuf.toString()
cbuf.length = 0
str
@@ -618,16 +616,16 @@ trait MarkupParser extends MarkupParserCommon with TokenTests
def pubidLiteral(): String = {
val endch = ch
if (ch!='\'' && ch != '"')
- reportSyntaxError("quote ' or \" expected");
- nextch
+ reportSyntaxError("quote ' or \" expected")
+ nextch()
while (ch != endch && !eof) {
putChar(ch)
//println("hello '"+ch+"'"+isPubIDChar(ch))
if (!isPubIDChar(ch))
reportSyntaxError("char '"+ch+"' is not allowed in public id")
- nextch
+ nextch()
}
- nextch
+ nextch()
val str = cbuf.toString
cbuf.length = 0
str
@@ -640,9 +638,9 @@ trait MarkupParser extends MarkupParserCommon with TokenTests
def extSubset(): Unit = {
var textdecl: (Option[String],Option[String]) = null
if (ch == '<') {
- nextch
+ nextch()
if (ch == '?') {
- nextch
+ nextch()
textdecl = textDecl()
} else
markupDecl1()
@@ -653,13 +651,13 @@ trait MarkupParser extends MarkupParserCommon with TokenTests
def markupDecl1() = {
def doInclude() = {
- xToken('['); while(']' != ch) markupDecl(); nextch // ']'
+ xToken('['); while(']' != ch) markupDecl(); nextch() // ']'
}
def doIgnore() = {
- xToken('['); while(']' != ch) nextch; nextch // ']'
+ xToken('['); while(']' != ch) nextch(); nextch() // ']'
}
if ('?' == ch) {
- nextch
+ nextch()
xProcInstr // simply ignore processing instructions!
} else {
xToken('!')
@@ -668,35 +666,35 @@ trait MarkupParser extends MarkupParserCommon with TokenTests
xComment // ignore comments
case 'E' =>
- nextch
+ nextch()
if ('L' == ch) {
- nextch
+ nextch()
elementDecl()
} else
entityDecl()
case 'A' =>
- nextch
+ nextch()
attrDecl()
case 'N' =>
- nextch
+ nextch()
notationDecl()
case '[' if inpStack.length >= extIndex =>
- nextch
- xSpaceOpt
+ nextch()
+ xSpaceOpt()
ch match {
case '%' =>
- nextch
+ nextch()
val ent = xName
xToken(';')
- xSpaceOpt
+ xSpaceOpt()
push(ent)
- xSpaceOpt
+ xSpaceOpt()
val stmt = xName
- xSpaceOpt
+ xSpaceOpt()
stmt match {
// parameter entity
@@ -704,15 +702,15 @@ trait MarkupParser extends MarkupParserCommon with TokenTests
case "IGNORE" => doIgnore()
}
case 'I' =>
- nextch
+ nextch()
ch match {
case 'G' =>
- nextch
+ nextch()
xToken("NORE")
- xSpaceOpt
+ xSpaceOpt()
doIgnore()
case 'N' =>
- nextch
+ nextch()
xToken("NCLUDE")
doInclude()
}
@@ -723,14 +721,14 @@ trait MarkupParser extends MarkupParserCommon with TokenTests
case _ =>
curInput.reportError(pos, "unexpected character '"+ch+"', expected some markupdecl")
while (ch!='>')
- nextch
+ nextch()
}
}
}
def markupDecl(): Unit = ch match {
case '%' => // parameter entity reference
- nextch
+ nextch()
val ent = xName
xToken(';')
if (!isValidating)
@@ -740,20 +738,20 @@ trait MarkupParser extends MarkupParserCommon with TokenTests
//peReference
case '<' =>
- nextch
+ nextch()
markupDecl1()
case _ if isSpace(ch) =>
- xSpace
+ xSpace()
case _ =>
reportSyntaxError("markupdecl: unexpected character '"+ch+"' #" + ch.toInt)
- nextch
+ nextch()
}
/** "rec-xml/#ExtSubset" pe references may not occur within markup declarations
*/
def intSubset() {
//Console.println("(DEBUG) intSubset()")
- xSpace
+ xSpace()
while (']' != ch)
markupDecl()
}
@@ -762,16 +760,16 @@ trait MarkupParser extends MarkupParserCommon with TokenTests
*/
def elementDecl() {
xToken("EMENT")
- xSpace
+ xSpace()
val n = xName
- xSpace
+ xSpace()
while ('>' != ch) {
//Console.println("["+ch+"]")
putChar(ch)
- nextch
+ nextch()
}
//Console.println("END["+ch+"]")
- nextch
+ nextch()
val cmstr = cbuf.toString()
cbuf.length = 0
handle.elemDecl(n, cmstr)
@@ -782,44 +780,44 @@ trait MarkupParser extends MarkupParserCommon with TokenTests
* }}} */
def attrDecl() = {
xToken("TTLIST")
- xSpace
+ xSpace()
val n = xName
- xSpace
+ xSpace()
var attList: List[AttrDecl] = Nil
// later: find the elemDecl for n
while ('>' != ch) {
val aname = xName
- xSpace
+ xSpace()
// could be enumeration (foo,bar) parse this later :-/
while ('"' != ch && '\'' != ch && '#' != ch && '<' != ch) {
if (!isSpace(ch))
cbuf.append(ch)
- nextch
+ nextch()
}
val atpe = cbuf.toString
cbuf.length = 0
val defdecl: DefaultDecl = ch match {
case '\'' | '"' =>
- DEFAULT(false, xAttributeValue())
+ DEFAULT(fixed = false, xAttributeValue())
case '#' =>
- nextch
+ nextch()
xName match {
- case "FIXED" => xSpace ; DEFAULT(true, xAttributeValue())
+ case "FIXED" => xSpace() ; DEFAULT(fixed = true, xAttributeValue())
case "IMPLIED" => IMPLIED
case "REQUIRED" => REQUIRED
}
case _ =>
null
}
- xSpaceOpt
+ xSpaceOpt()
attList ::= AttrDecl(aname, atpe, defdecl)
cbuf.length = 0
}
- nextch
+ nextch()
handle.attListDecl(n, attList.reverse)
}
@@ -828,41 +826,40 @@ trait MarkupParser extends MarkupParserCommon with TokenTests
* }}} */
def entityDecl() = {
var isParameterEntity = false
- var entdef: EntityDef = null
xToken("NTITY")
- xSpace
+ xSpace()
if ('%' == ch) {
- nextch
+ nextch()
isParameterEntity = true
- xSpace
+ xSpace()
}
val n = xName
- xSpace
+ xSpace()
ch match {
case 'S' | 'P' => //sy
val extID = externalID()
if (isParameterEntity) {
- xSpaceOpt
+ xSpaceOpt()
xToken('>')
handle.parameterEntityDecl(n, ExtDef(extID))
} else { // notation?
- xSpace
+ xSpace()
if ('>' != ch) {
xToken("NDATA")
- xSpace
+ xSpace()
val notat = xName
- xSpaceOpt
+ xSpaceOpt()
xToken('>')
handle.unparsedEntityDecl(n, extID, notat)
} else {
- nextch
+ nextch()
handle.parsedEntityDecl(n, ExtDef(extID))
}
}
case '"' | '\'' =>
val av = xEntityValue()
- xSpaceOpt
+ xSpaceOpt()
xToken('>')
if (isParameterEntity)
handle.parameterEntityDecl(n, IntDef(av))
@@ -877,29 +874,29 @@ trait MarkupParser extends MarkupParserCommon with TokenTests
* }}} */
def notationDecl() {
xToken("OTATION")
- xSpace
+ xSpace()
val notat = xName
- xSpace
+ xSpace()
val extID = if (ch == 'S') {
externalID()
}
else if (ch == 'P') {
- /** PublicID (without system, only used in NOTATION) */
- nextch
+ /* PublicID (without system, only used in NOTATION) */
+ nextch()
xToken("UBLIC")
- xSpace
+ xSpace()
val pubID = pubidLiteral()
- xSpaceOpt
+ xSpaceOpt()
val sysID = if (ch != '>')
systemLiteral()
else
- null;
+ null
new PublicID(pubID, sysID)
} else {
- reportSyntaxError("PUBLIC or SYSTEM expected");
+ reportSyntaxError("PUBLIC or SYSTEM expected")
scala.sys.error("died parsing notationdecl")
}
- xSpaceOpt
+ xSpaceOpt()
xToken('>')
handle.notationDecl(notat, extID)
}
@@ -916,7 +913,7 @@ trait MarkupParser extends MarkupParserCommon with TokenTests
ch
curInput = replacementText(entityName)
- nextch
+ nextch()
}
def pushExternal(systemId: String) {
@@ -927,7 +924,7 @@ trait MarkupParser extends MarkupParserCommon with TokenTests
ch
curInput = externalSource(systemId)
- nextch
+ nextch()
}
def pop() {
diff --git a/src/library/scala/xml/parsing/MarkupParserCommon.scala b/src/library/scala/xml/parsing/MarkupParserCommon.scala
index da640484e0..57c1651558 100644
--- a/src/library/scala/xml/parsing/MarkupParserCommon.scala
+++ b/src/library/scala/xml/parsing/MarkupParserCommon.scala
@@ -6,11 +6,11 @@
** |/ **
\* */
-package scala.xml
+package scala
+package xml
package parsing
import scala.io.Source
-import scala.xml.dtd._
import scala.annotation.switch
import Utility.Escapes.{ pairs => unescape }
@@ -39,7 +39,7 @@ private[scala] trait MarkupParserCommon extends TokenTests {
*/
protected def xTag(pscope: NamespaceType): (String, AttributesType) = {
val name = xName
- xSpaceOpt
+ xSpaceOpt()
(name, mkAttributes(name, pscope))
}
@@ -50,7 +50,7 @@ private[scala] trait MarkupParserCommon extends TokenTests {
*/
def xProcInstr: ElementType = {
val n = xName
- xSpaceOpt
+ xSpaceOpt()
xTakeUntil(mkProcInstr(_, n, _), () => tmppos, "?>")
}
@@ -78,7 +78,7 @@ private[scala] trait MarkupParserCommon extends TokenTests {
private def takeUntilChar(it: Iterator[Char], end: Char): String = {
val buf = new StringBuilder
- while (it.hasNext) it.next match {
+ while (it.hasNext) it.next() match {
case `end` => return buf.toString
case ch => buf append ch
}
@@ -92,7 +92,7 @@ private[scala] trait MarkupParserCommon extends TokenTests {
if (xName != startName)
errorNoEnd(startName)
- xSpaceOpt
+ xSpaceOpt()
xToken('>')
}
@@ -139,9 +139,9 @@ private[scala] trait MarkupParserCommon extends TokenTests {
val buf = new StringBuilder
val it = attval.iterator.buffered
- while (it.hasNext) buf append (it.next match {
+ while (it.hasNext) buf append (it.next() match {
case ' ' | '\t' | '\n' | '\r' => " "
- case '&' if it.head == '#' => it.next ; xCharRef(it)
+ case '&' if it.head == '#' => it.next() ; xCharRef(it)
case '&' => attr_unescape(takeUntilChar(it, ';'))
case c => c
})
@@ -158,11 +158,11 @@ private[scala] trait MarkupParserCommon extends TokenTests {
Utility.parseCharRef(ch, nextch, reportSyntaxError _, truncatedError _)
def xCharRef(it: Iterator[Char]): String = {
- var c = it.next
- Utility.parseCharRef(() => c, () => { c = it.next }, reportSyntaxError _, truncatedError _)
+ var c = it.next()
+ Utility.parseCharRef(() => c, () => { c = it.next() }, reportSyntaxError _, truncatedError _)
}
- def xCharRef: String = xCharRef(() => ch, () => nextch)
+ def xCharRef: String = xCharRef(() => ch, () => nextch())
/** Create a lookahead reader which does not influence the input */
def lookahead(): BufferedIterator[Char]
@@ -195,20 +195,20 @@ private[scala] trait MarkupParserCommon extends TokenTests {
}
def xToken(that: Char) {
- if (ch == that) nextch
+ if (ch == that) nextch()
else xHandleError(that, "'%s' expected instead of '%s'".format(that, ch))
}
def xToken(that: Seq[Char]) { that foreach xToken }
/** scan [S] '=' [S]*/
- def xEQ() = { xSpaceOpt; xToken('='); xSpaceOpt }
+ def xEQ() = { xSpaceOpt(); xToken('='); xSpaceOpt() }
/** skip optional space S? */
- def xSpaceOpt() = while (isSpace(ch) && !eof) nextch
+ def xSpaceOpt() = while (isSpace(ch) && !eof) nextch()
/** scan [3] S ::= (#x20 | #x9 | #xD | #xA)+ */
def xSpace() =
- if (isSpace(ch)) { nextch; xSpaceOpt }
+ if (isSpace(ch)) { nextch(); xSpaceOpt() }
else xHandleError(ch, "whitespace expected")
/** Apply a function and return the passed value */
@@ -241,7 +241,7 @@ private[scala] trait MarkupParserCommon extends TokenTests {
truncatedError("") // throws TruncatedXMLControl in compiler
sb append ch
- nextch
+ nextch()
}
unreachable
}
@@ -254,7 +254,7 @@ private[scala] trait MarkupParserCommon extends TokenTests {
private def peek(lookingFor: String): Boolean =
(lookahead() take lookingFor.length sameElements lookingFor.iterator) && {
// drop the chars from the real reader (all lookahead + orig)
- (0 to lookingFor.length) foreach (_ => nextch)
+ (0 to lookingFor.length) foreach (_ => nextch())
true
}
}
diff --git a/src/library/scala/xml/parsing/NoBindingFactoryAdapter.scala b/src/library/scala/xml/parsing/NoBindingFactoryAdapter.scala
index 22dd450072..56ac185f47 100644
--- a/src/library/scala/xml/parsing/NoBindingFactoryAdapter.scala
+++ b/src/library/scala/xml/parsing/NoBindingFactoryAdapter.scala
@@ -7,7 +7,8 @@
\* */
-package scala.xml
+package scala
+package xml
package parsing
import factory.NodeFactory
diff --git a/src/library/scala/xml/parsing/TokenTests.scala b/src/library/scala/xml/parsing/TokenTests.scala
index c9cafaeea1..8dd9cdfaa3 100644
--- a/src/library/scala/xml/parsing/TokenTests.scala
+++ b/src/library/scala/xml/parsing/TokenTests.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.xml
+package scala
+package xml
package parsing
/**
diff --git a/src/library/scala/xml/parsing/ValidatingMarkupHandler.scala b/src/library/scala/xml/parsing/ValidatingMarkupHandler.scala
index 0edea043a5..1b20901249 100644
--- a/src/library/scala/xml/parsing/ValidatingMarkupHandler.scala
+++ b/src/library/scala/xml/parsing/ValidatingMarkupHandler.scala
@@ -8,13 +8,13 @@
-package scala.xml
+package scala
+package xml
package parsing
import scala.xml.dtd._
-import scala.util.logging.Logged
-abstract class ValidatingMarkupHandler extends MarkupHandler with Logged {
+abstract class ValidatingMarkupHandler extends MarkupHandler {
var rootLabel:String = _
var qStack: List[Int] = Nil
@@ -25,20 +25,6 @@ abstract class ValidatingMarkupHandler extends MarkupHandler with Logged {
final override val isValidating = true
- override def log(msg: String) {}
-
- /*
- override def checkChildren(pos: Int, pre: String, label:String,ns:NodeSeq): Unit = {
- Console.println("checkChildren()");
- val decl = lookupElemDecl(label);
- // @todo: nice error message
- val res = decl.contentModel.validate(ns);
- Console.println("res = "+res);
- if(!res)
- //sys.error("invalid!");
- }
- */
-
override def endDTD(n:String) = {
rootLabel = n
}
@@ -50,8 +36,8 @@ abstract class ValidatingMarkupHandler extends MarkupHandler with Logged {
log("advanceDFA(trans): " + trans)
trans.get(ContentModel.ElemName(label)) match {
case Some(qNew) => qCurrent = qNew
- case _ => reportValidationError(pos, "DTD says, wrong element, expected one of "+trans.keys);
- }
+ case _ => reportValidationError(pos, "DTD says, wrong element, expected one of "+trans.keys)
+ }
}
// advance in current automaton
log("[qCurrent = "+qCurrent+" visiting "+label+"]")
@@ -106,7 +92,7 @@ abstract class ValidatingMarkupHandler extends MarkupHandler with Logged {
}
final override def notationDecl(notat: String, extID: ExternalID) {
- decls = NotationDecl(notat, extID) :: decls;
+ decls = NotationDecl(notat, extID) :: decls
}
final override def peReference(name: String) {
@@ -115,5 +101,4 @@ abstract class ValidatingMarkupHandler extends MarkupHandler with Logged {
/** report a syntax error */
def reportValidationError(pos: Int, str: String): Unit
-
}
diff --git a/src/library/scala/xml/parsing/XhtmlEntities.scala b/src/library/scala/xml/parsing/XhtmlEntities.scala
index 1bb843818a..3683af202c 100644
--- a/src/library/scala/xml/parsing/XhtmlEntities.scala
+++ b/src/library/scala/xml/parsing/XhtmlEntities.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.xml
+package scala
+package xml
package parsing
import scala.xml.dtd.{ IntDef, ParsedEntityDecl }
diff --git a/src/library/scala/xml/parsing/XhtmlParser.scala b/src/library/scala/xml/parsing/XhtmlParser.scala
index d08cb1fa9c..6ce5bec8d0 100644
--- a/src/library/scala/xml/parsing/XhtmlParser.scala
+++ b/src/library/scala/xml/parsing/XhtmlParser.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.xml
+package scala
+package xml
package parsing
import scala.io.Source
@@ -26,5 +27,5 @@ class XhtmlParser(val input: Source) extends ConstructingHandler with MarkupPars
* @author Burak Emir
*/
object XhtmlParser {
- def apply(source: Source): NodeSeq = new XhtmlParser(source).initialize.document
+ def apply(source: Source): NodeSeq = new XhtmlParser(source).initialize.document()
}
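
The companion object now spells the parse out as initialize.document(). A short usage sketch with a hypothetical inline source (the markup string is an assumption, not from the patch):

    import scala.io.Source
    import scala.xml.parsing.XhtmlParser

    // XhtmlParser.apply drives initialize.document() as in the companion
    // object and yields the parsed NodeSeq.
    val page = XhtmlParser(Source.fromString("<html><body><p>hello</p></body></html>"))
    println((page \\ "p").text)
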
diff --git a/src/library/scala/xml/persistent/CachedFileStorage.scala b/src/library/scala/xml/persistent/CachedFileStorage.scala
index 916a1a0cf7..a1489ef3f4 100644
--- a/src/library/scala/xml/persistent/CachedFileStorage.scala
+++ b/src/library/scala/xml/persistent/CachedFileStorage.scala
@@ -6,14 +6,15 @@
** |/ **
\* */
-package scala.xml
+package scala
+package xml
package persistent
import java.io.{ File, FileOutputStream }
import java.nio.ByteBuffer
import java.nio.channels.Channels
import java.lang.Thread
-import scala.util.logging.Logged
+
import scala.collection.Iterator
/** Mutable storage of immutable xml trees. Everything is kept in memory,
@@ -25,7 +26,7 @@ import scala.collection.Iterator
*
* @author Burak Emir
*/
-abstract class CachedFileStorage(private val file1: File) extends Thread with Logged {
+abstract class CachedFileStorage(private val file1: File) extends Thread {
private val file2 = new File(file1.getParent, file1.getName+"$")
@@ -76,8 +77,8 @@ abstract class CachedFileStorage(private val file1: File) extends Thread with Lo
log("[load]\nloading "+theFile)
val src = Source.fromFile(theFile)
log("parsing "+theFile)
- val res = ConstructingParser.fromSource(src,false).document.docElem(0)
- switch
+ val res = ConstructingParser.fromSource(src,preserveWS = false).document.docElem(0)
+ switch()
log("[load done]")
res.child.iterator
}
@@ -94,7 +95,7 @@ abstract class CachedFileStorage(private val file1: File) extends Thread with Lo
// @todo: optimize
val storageNode = <nodes>{ nodes.toList }</nodes>
val w = Channels.newWriter(c, "utf-8")
- XML.write(w, storageNode, "utf-8", true, null)
+ XML.write(w, storageNode, "utf-8", xmlDecl = true, doctype = null)
log("writing to "+theFile)
@@ -102,7 +103,7 @@ abstract class CachedFileStorage(private val file1: File) extends Thread with Lo
c.close
fos.close
dirty = false
- switch
+ switch()
log("[save done]")
}
@@ -111,8 +112,8 @@ abstract class CachedFileStorage(private val file1: File) extends Thread with Lo
override def run = {
log("[run]\nstarting storage thread, checking every "+interval+" ms")
while (true) {
- Thread.sleep( this.interval )
- save
+ Thread.sleep( this.interval.toLong )
+ save()
}
}
@@ -120,6 +121,9 @@ abstract class CachedFileStorage(private val file1: File) extends Thread with Lo
* update. */
def flush() = {
this.dirty = true
- save
+ save()
}
+
+ @deprecated("This method and its usages will be removed. Use a debugger to debug code.", "2.11")
+ def log(msg: String): Unit = {}
}
diff --git a/src/library/scala/xml/persistent/Index.scala b/src/library/scala/xml/persistent/Index.scala
index defaf67d52..9ee45e7086 100644
--- a/src/library/scala/xml/persistent/Index.scala
+++ b/src/library/scala/xml/persistent/Index.scala
@@ -8,7 +8,8 @@
-package scala.xml
+package scala
+package xml
package persistent
/** an Index returns some unique key that is part of a node
diff --git a/src/library/scala/xml/persistent/SetStorage.scala b/src/library/scala/xml/persistent/SetStorage.scala
index 20a5bb6767..8db56a2e71 100644
--- a/src/library/scala/xml/persistent/SetStorage.scala
+++ b/src/library/scala/xml/persistent/SetStorage.scala
@@ -7,7 +7,8 @@
\* */
-package scala.xml
+package scala
+package xml
package persistent
import scala.collection.mutable
@@ -20,16 +21,14 @@ import java.io.File
*/
class SetStorage(file: File) extends CachedFileStorage(file) {
- private var theSet: mutable.HashSet[Node] = new mutable.HashSet[Node]
+ private val theSet = mutable.HashSet[Node]()
// initialize
{
val it = super.initialNodes
dirty = it.hasNext
- for(x <- it) {
- theSet += x;
- }
+ theSet ++= it
}
/* forwarding methods to hashset*/
diff --git a/src/library/scala/xml/pull/XMLEvent.scala b/src/library/scala/xml/pull/XMLEvent.scala
index a266380f87..3beb3648e7 100644
--- a/src/library/scala/xml/pull/XMLEvent.scala
+++ b/src/library/scala/xml/pull/XMLEvent.scala
@@ -8,7 +8,8 @@
-package scala.xml
+package scala
+package xml
package pull
/** An XML event for pull parsing. All events received during
diff --git a/src/library/scala/xml/pull/XMLEventReader.scala b/src/library/scala/xml/pull/XMLEventReader.scala
index 428c305055..76e51e17fd 100755
--- a/src/library/scala/xml/pull/XMLEventReader.scala
+++ b/src/library/scala/xml/pull/XMLEventReader.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.xml
+package scala
+package xml
package pull
import scala.io.Source
@@ -139,10 +140,10 @@ trait ProducerConsumerIterator[T >: Null] extends Iterator[T] {
def hasNext = !eos && (buffer != null || fillBuffer)
def next() = {
- if (eos) throw new NoSuchElementException("ProducerConsumerIterator")
- if (buffer == null) fillBuffer
+ if (eos()) throw new NoSuchElementException("ProducerConsumerIterator")
+ if (buffer == null) fillBuffer()
- drainBuffer
+ drainBuffer()
}
def available() = isElement(buffer) || isElement(queue.peek)
diff --git a/src/library/scala/xml/pull/package.scala b/src/library/scala/xml/pull/package.scala
index 3742c55513..0e3019446b 100644
--- a/src/library/scala/xml/pull/package.scala
+++ b/src/library/scala/xml/pull/package.scala
@@ -1,4 +1,5 @@
-package scala.xml
+package scala
+package xml
/**
* Classes needed to view an XML document as a series of events. The document
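
The package comment above describes viewing a document as a stream of events; a short pull-parsing sketch over a hypothetical inline source:

    import scala.io.Source
    import scala.xml.pull._

    // XMLEventReader is an Iterator[XMLEvent] producing EvElemStart, EvText
    // and EvElemEnd events as the document is consumed.
    val reader = new XMLEventReader(Source.fromString("<a><b>text</b></a>"))
    reader foreach {
      case EvElemStart(_, label, _, _) => println("start " + label)
      case EvText(text)                => println("text  " + text)
      case EvElemEnd(_, label)         => println("end   " + label)
      case _                           =>
    }
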
diff --git a/src/library/scala/xml/transform/BasicTransformer.scala b/src/library/scala/xml/transform/BasicTransformer.scala
index 1402ccd6aa..c98339fd67 100644
--- a/src/library/scala/xml/transform/BasicTransformer.scala
+++ b/src/library/scala/xml/transform/BasicTransformer.scala
@@ -8,7 +8,8 @@
-package scala.xml
+package scala
+package xml
package transform
/** A class for XML transformations.
@@ -53,7 +54,7 @@ abstract class BasicTransformer extends Function1[Node,Node]
def apply(n: Node): Node = {
val seq = transform(n)
if (seq.length > 1)
- throw new UnsupportedOperationException("transform must return single node for root");
+ throw new UnsupportedOperationException("transform must return single node for root")
else seq.head
}
}
diff --git a/src/library/scala/xml/transform/RewriteRule.scala b/src/library/scala/xml/transform/RewriteRule.scala
index 1dca495a10..1399ee538d 100644
--- a/src/library/scala/xml/transform/RewriteRule.scala
+++ b/src/library/scala/xml/transform/RewriteRule.scala
@@ -8,11 +8,12 @@
-package scala.xml
+package scala
+package xml
package transform
-/** a RewriteRule, when applied to a term, yields either
- * the resulting of rewriting or the term itself it the rule
+/** A RewriteRule, when applied to a term, yields either
+ * the result of rewriting the term or the term itself if the rule
* is not applied.
*
* @author Burak Emir
diff --git a/src/library/scala/xml/transform/RuleTransformer.scala b/src/library/scala/xml/transform/RuleTransformer.scala
index 85e92e5773..3a222ba759 100644
--- a/src/library/scala/xml/transform/RuleTransformer.scala
+++ b/src/library/scala/xml/transform/RuleTransformer.scala
@@ -6,9 +6,8 @@
** |/ **
\* */
-
-
-package scala.xml
+package scala
+package xml
package transform
class RuleTransformer(rules: RewriteRule*) extends BasicTransformer {
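
RewriteRule and RuleTransformer together implement the contract restated in the RewriteRule comment above: a rule either rewrites a node or returns it unchanged. A minimal sketch with a hypothetical rule and document:

    import scala.xml._
    import scala.xml.transform._

    // Hypothetical rule that drops <secret> elements and leaves the rest alone.
    object DropSecrets extends RewriteRule {
      override def transform(n: Node): Seq[Node] = n match {
        case e: Elem if e.label == "secret" => NodeSeq.Empty
        case other                          => other
      }
    }

    val doc = <conf><host>example.org</host><secret>hunter2</secret></conf>
    println(new RuleTransformer(DropSecrets) transform doc)
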
diff --git a/src/manual/scala/man1/scalac.scala b/src/manual/scala/man1/scalac.scala
index 13b1fd58e0..1c0c7c4a96 100644
--- a/src/manual/scala/man1/scalac.scala
+++ b/src/manual/scala/man1/scalac.scala
@@ -146,13 +146,6 @@ object scalac extends Command {
CmdOption("sourcepath", Argument("path")),
"Specify location(s) of source files."),
Definition(
- CmdOptionBound("target:", Argument("target")),
- SeqPara(
- "Specify which backend to use (" & Mono("jvm-1.5," &
- "msil") & ").",
- "The default value is " & Mono("\"jvm-1.5\"") & " (was " &
- Mono("\"jvm-1.4\"") & " up to Scala version 2.6.1).")),
- Definition(
CmdOption("toolcp", Argument("path")),
"Add to the runner classpath."),
Definition(
@@ -182,19 +175,6 @@ object scalac extends Command {
Section("Advanced Options",
DefinitionList(
Definition(
- CmdOption("Xassem-extdirs", Argument("dirs")),
- "(Requires " & Mono("-target:msil") &
- ") List of directories containing assemblies." &
- " default:" & Mono("lib") & "."),
- Definition(
- CmdOption("Xassem-name", Argument("file")),
- "(Requires " & Mono("-target:msil") &
- ") Name of the output assembly."),
- Definition(
- CmdOption("Xassem-path", Argument("path")),
- "(Requires " & Mono("-target:msil") &
- ") List of assemblies referenced by the program."),
- Definition(
CmdOption("Xcheck-null"),
"Warn upon selection of nullable reference"),
Definition(
@@ -290,10 +270,6 @@ object scalac extends Command {
CmdOption("Xsource-reader", Argument("classname")),
"Specify a custom method for reading source files."),
Definition(
- CmdOption("Xsourcedir", Argument("path")),
- "(Requires " & Mono("-target:msil") &
- ") Mirror source folder structure in output directory.."),
- Definition(
CmdOption("Xverify"),
"Verify generic signatures in generated bytecode."),
Definition(
diff --git a/src/manual/scala/tools/docutil/resources/index.html b/src/manual/scala/tools/docutil/resources/index.html
index aaef94d400..18e2343930 100644
--- a/src/manual/scala/tools/docutil/resources/index.html
+++ b/src/manual/scala/tools/docutil/resources/index.html
@@ -8,7 +8,7 @@
<meta http-equiv="Content-Style-Type" content="text/css"/>
<meta http-equiv="Content-Language" content="en"/>
<meta http-equiv="Content-Type" content="text/html; charset=iso-8859-1"/>
- <meta name="Copyright" content="(C) 2002-2012 LAMP/EPFL"/>
+ <meta name="Copyright" content="(C) 2002-2013 LAMP/EPFL"/>
<meta name="Language" content="en"/>
<meta name="Description" content="The Scala Programming Language"/>
<meta name="Author" content="Stephane Micheloud"/>
@@ -180,7 +180,7 @@
<hr/>
<div style="font-size:x-small;">
- Copyright (c) 2002-2012 <a href="http://www.epfl.ch/">EPFL</a>,
+ Copyright (c) 2002-2013 <a href="http://www.epfl.ch/">EPFL</a>,
Lausanne, unless specified otherwise.<br/>
All rights reserved.
</div>
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/Assembly.java b/src/msil/ch/epfl/lamp/compiler/msil/Assembly.java
deleted file mode 100644
index 59bbeee3a4..0000000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/Assembly.java
+++ /dev/null
@@ -1,253 +0,0 @@
-/*
- * System.Reflection-like API for access to .NET assemblies (DLL & EXE)
- */
-
-
-package ch.epfl.lamp.compiler.msil;
-
-import ch.epfl.lamp.compiler.msil.util.Table;
-import ch.epfl.lamp.compiler.msil.util.Table.AssemblyDef;
-import ch.epfl.lamp.compiler.msil.util.Table.ModuleDef;
-
-import java.util.HashMap;
-import java.util.Iterator;
-import java.io.File;
-import java.io.FileNotFoundException;
-
-/**
- * Defines an Assembly, which is a reusable, versionable, and self-describing
- * building block of a common language runtime application.
- *
- * @author Nikolay Mihaylov
- * @version 1.0
- */
-public abstract class Assembly extends CustomAttributeProvider {
-
- //##########################################################################
- // static members
-
- // all the assemblies
- public static final HashMap assemblies = new HashMap();
-
- /** Loads an assembly from the specified path. */
- public static Assembly LoadFrom(String assemblyFileName) {
- File afile = new File(assemblyFileName);
- return LoadFrom(afile.getParentFile(), afile.getName());
- }
-
- /** Loads an assembly with the given name from the given directory. */
- public static Assembly LoadFrom(File dir, String name) {
- File file = null;
- PEFile pefile = null;
-// try {
-// if (dir == null)
-// dir = new File(".");
-// dir = dir.getCanonicalFile();
-// } catch (java.io.IOException e) {}
-
- if (name.toUpperCase().endsWith(".EXE") || name.toUpperCase().endsWith(".DLL")) {
- file = new File(dir, name);
- pefile = getPEFile(file);
- name = name.substring(0, name.length() - 4);
- }
-
- File adir = pefile == null ? new File(dir, name) : null;
-
- if (pefile == null) {
- file = new File(dir, name + ".dll");
- pefile = getPEFile(file);
- }
- if (pefile == null) {
- file = new File(dir, name + ".DLL");
- pefile = getPEFile(file);
- }
- if (pefile == null && adir.exists()) {
- file = new File(adir, name + ".dll");
- pefile = getPEFile(file);
- }
- if (pefile == null && adir.exists()) {
- file = new File(adir, name + ".DLL");
- pefile = getPEFile(file);
- }
-
- if (pefile == null) {
- file = new File(dir, name + ".exe");
- pefile = getPEFile(file);
- }
- if (pefile == null) {
- file = new File(dir, name + ".EXE");
- pefile = getPEFile(file);
- }
- if (pefile == null && adir.exists()) {
- file = new File(adir, name + ".exe");
- pefile = getPEFile(file);
- }
- if (pefile == null && adir.exists()) {
- file = new File(adir, name + ".EXE");
- pefile = getPEFile(file);
- }
-
- if (pefile == null)
- throw new RuntimeException("Cannot find assembly " + new File(dir, name));
- return getPEAssembly(pefile);
- }
-
- private static Assembly getPEAssembly(PEFile pefile) {
- AssemblyDef assem = pefile.AssemblyDef;
- if (assem == null)
- throw new RuntimeException("File " + pefile
- + " does not contain a manifest");
- assem.readRow(1);
- String name = pefile.getString(assem.Name);
- Assembly a = (Assembly) assemblies.get(name);
- if (a != null) {
- return a;
- }
-
- AssemblyName an = new AssemblyName();
- an.Name = pefile.getString(assem.Name);
- an.Version = new Version(assem.MajorVersion, assem.MinorVersion,
- assem.BuildNumber, assem.RevisionNumber);
- an.SetPublicKey(pefile.getBlob(assem.PublicKey));
- return new PEAssembly(pefile, an);
- }
-
- protected static PEFile getPEFile(File f) {
- PEFile pefile = null;
- try { pefile = new PEFile(f.getAbsolutePath()); }
- catch (FileNotFoundException e) {}
- catch (RuntimeException e) {
- java.lang.System.out.println("swallowed RuntimeException at getPEFile");
- }
- return pefile;
- }
-
- //##########################################################################
- // public fields
-
- /** The entry point of this assembly. */
- public MethodInfo EntryPoint;
-
- /** the display name of the assembly. */
- public final String FullName;
-
- //##########################################################################
- // constructor
-
- protected Assembly(AssemblyName an, boolean external) {
- assemblyName = an;
- FullName = an.toString();
- if(external) {
- assemblies.put(an.Name, this);
- }
- //System.out.println("assemblies after adding the current one: " + assemblies);
- }
-
- protected Assembly(AssemblyName an) {
- this(an, false);
- }
-
- protected static Assembly getAssembly(String name) {
- return (Assembly) assemblies.get(name);
- }
-
- //##########################################################################
- // instrumental methods
-
- /** @return the file from which this assembly was loaded. */
- public File getFile() {
- throw new RuntimeException("Not supported");
- }
-
- /** Gets the specified module in this assembly. Works on filenames. */
- public Module GetModule(String name) {
- initModules();
- return (Module)modulesMap.get(name);
- }
-
- /** Get all the modules of the assembly. */
- public Module[] GetModules() {
- initModules();
- return (Module[])modulesMap.values().
- toArray(new Module[modulesMap.size()]);
- }
-
- /** Get the corresponding type. */
- public Type GetType(String name) {
- initModules();
- Iterator modules = modulesMap.values().iterator();
- Type t = null;
- while (t == null && modules.hasNext()) {
- t = ((Module)modules.next()).GetType(name);
- }
- return t;
- }
-
- /** @return an array of all types defined in the assembly. */
- public synchronized Type[] GetTypes() {
- if (types != null)
- return (Type[])types.clone();
- initModules();
-
- Iterator modules = modulesMap.values().iterator();
- Type[] newTypes = ((Module)modules.next()).GetTypes();
- while (modules.hasNext()) {
- Module module = (Module)modules.next();
- Type[] mtypes = module.GetTypes();
- Type[] oldTypes = newTypes;
- newTypes = new Type[oldTypes.length + mtypes.length];
- System.arraycopy(oldTypes, 0, newTypes, 0, oldTypes.length);
- System.arraycopy(mtypes, 0, newTypes, oldTypes.length, mtypes.length);
- }
- types = newTypes;
- return (Type[]) types.clone();
- }
-
- public AssemblyName GetName() {
- return assemblyName;
- }
-
- public String toString() {
- return FullName;
- }
-
- //##########################################################################
- // protected members
-
- // the assembly name
- protected final AssemblyName assemblyName;
-
- // all the types exported by the assembly
- protected Type[] types = null;
-
- // the module defined in this assembly (only one right now)
- private final HashMap/*<String, Module>*/ modulesMap = new HashMap();
-
- protected void addType(Type type) {
- Type.addType(type);
- }
-
- protected void addModule(String name, Module module) {
- modulesMap.put(name, module);
- }
-
- private boolean initModules = true;
- protected final void initModules() {
- if (initModules) {
- loadModules();
- initModules = false;
- }
- }
-
- /** used for lazy construction of the Assembly. */
- protected abstract void loadModules();
-
- void dumpTypes() {
- Type[] types = GetTypes();
- for (int i = 0; i < types.length; i++)
- System.out.println(types[i]);
- }
-
- //##########################################################################
-
-} // class Assembly
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/AssemblyName.java b/src/msil/ch/epfl/lamp/compiler/msil/AssemblyName.java
deleted file mode 100644
index acdcb32e33..0000000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/AssemblyName.java
+++ /dev/null
@@ -1,96 +0,0 @@
-/*
- * System.Reflection-like API for access to .NET assemblies (DLL & EXE)
- */
-
-
-package ch.epfl.lamp.compiler.msil;
-
-import javax.crypto.Mac;
-
-import java.security.MessageDigest;
-
-import ch.epfl.lamp.compiler.msil.util.Table;
-
-/**
- * Fully describes an assembly's unique identity.
- * Right now it's only the name
- *
- * @author Nikolay Mihaylov
- * @version 1.0
- */
-public class AssemblyName {
-
- //##########################################################################
- // public interface
-
- /** The simple, unencrypted name of the assembly. */
- public String Name;
-
- /**
- * Gets or sets the major, minor, revision, and build numbers
- * of the assembly.
- */
- public Version Version;
-
- /**
- * Gets a strong name consisting of a public key, a given name,
- * and version parts.
- */
- public byte[] GetPublicKeyToken() {
- return publicKeyToken == null ? null : (byte[]) publicKeyToken.clone();
- }
-
- /**
- * Sets a strong name consisting of a public key, a given name,
- * and version parts.
- */
- public void SetPublicKeyToken(byte[] key) {
- this.publicKeyToken = key.length == 0 ? null : (byte[]) key.clone();
- }
-
- /**
- * Returns the public key identifying the originator of the assembly.
- */
- public byte[] GetPublicKey() {
- return publicKey == null ? null : (byte[]) publicKey.clone();
- }
-
- /**
- * Sets the public key identifying the originator of the assembly.
- */
- public void SetPublicKey(byte[] key) {
- if (key.length > 0) {
- this.publicKey = (byte[]) key.clone();
- byte[] hash = sha.digest(key);
- byte[] keyToken = new byte[8];
- for (int i = 0; i < keyToken.length; i++)
- keyToken[i] = hash[hash.length - 1 - i];
- this.publicKeyToken = keyToken;
- //System.out.println("Pubic key and key token of assembly " + this + ":");
- //System.out.println("\tPublic key = " + Table.bytes2hex(key));
- //System.out.println("\tKey token = " + Table.bytes2hex(keyToken));
- }
- }
-
- public String toString() {
- return Name + ", Version=" + Version;
- }
-
- //##########################################################################
-
- private byte[] publicKeyToken;
-
- private byte[] publicKey;
-
- private static final MessageDigest sha;
- static {
- MessageDigest md = null;
- try {
- md = MessageDigest.getInstance("SHA");
- } catch (java.security.NoSuchAlgorithmException e) {}
- sha = md;
- }
-
- //##########################################################################
-
-} // class AssemblyName
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/Attribute.java b/src/msil/ch/epfl/lamp/compiler/msil/Attribute.java
deleted file mode 100644
index 0f2c4e6764..0000000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/Attribute.java
+++ /dev/null
@@ -1,654 +0,0 @@
-/*
- * System.Reflection-like API for access to .NET assemblies (DLL & EXE)
- */
-
-
-package ch.epfl.lamp.compiler.msil;
-
-import ch.epfl.lamp.compiler.msil.util.Signature;
-
-import java.util.Map;
-import java.util.HashMap;
-import java.util.LinkedHashMap;
-import java.util.Iterator;
-import java.nio.ByteBuffer;
-import java.nio.ByteOrder;
-import java.io.UnsupportedEncodingException;
-
-/**
- * Describes custom attribute instances.
- *
- * @author Nikolay Mihaylov
- * @version 1.0
- */
-public class Attribute {
-
- //##########################################################################
-
- private final ConstructorInfo constr;
-
- private final byte[] value;
-
- Attribute(ConstructorInfo constr, byte[] value) {
- assert constr != null;
- this.constr = constr;
- assert value != null : constr.toString();
- this.value = value;
- }
-
- //##########################################################################
- // public interface
-
- /** @return the type (class) of the attribute. */
- public Type GetType() { return constr.DeclaringType; }
-
- /** @return the constructor of this attribute. */
- public ConstructorInfo getConstructor() {
- return constr;
- }
-
- /** @return the Blob with serialized constructor & named arguments. */
- public byte[] getValue() {
- byte[] value = new byte[this.value.length];
- System.arraycopy(this.value, 0, value, 0, value.length);
- return value;
- }
-
- /**@return an array with the arguments to the attribute's constructor. */
- public Object[] getConstructorArguments() {
- parseBlob();
- Object[] cas = new Object[constrArgs.length];
- System.arraycopy(constrArgs, 0, cas, 0, cas.length);
- return cas;
- }
-
- /** @return the named argument with the given name. */
- public NamedArgument getNamedArgument(String name) {
- return (NamedArgument)namedArgs.get(name);
- }
-
- /** @return an array of all named arguments for this attribute. */
- public NamedArgument[] getNamedArguments() {
- NamedArgument[] nargs =
- (NamedArgument[])namedArgs.values().toArray(NamedArgument.EMPTY);
- return nargs;
- }
-
- /** @return a string representation of this attribute. */
- public String toString() {
- parseBlob();
- ParameterInfo[] params = constr.GetParameters();
- assert params.length == constrArgs.length : this.constr;
- StringBuffer str = new StringBuffer();
- str.append('[');
- str.append(constr.DeclaringType.FullName);
- str.append('(');
- for (int i = 0; i < constrArgs.length; i++) {
- if (i > 0)
- str.append(", ");
- Type t = params[i].ParameterType;
- if (t.IsEnum()) {
- str.append('(');
- str.append(t.FullName);
- str.append(')');
- }
- formatValue(str, constrArgs[i]);
- }
- NamedArgument[] nargs = getNamedArguments();
- for (int i = 0; i < nargs.length; i++) {
- str.append(", ").append(nargs[i]);
- }
- str.append(")]");
- return str.toString();
- }
-
- //#########################################################################
-
- private static final Map type2id = new HashMap();
- private static final Map id2type = new HashMap();
- static {
- map("Boolean", Signature.ELEMENT_TYPE_BOOLEAN);
- map("Char", Signature.ELEMENT_TYPE_CHAR);
- map("SByte", Signature.ELEMENT_TYPE_I1);
- map("Byte", Signature.ELEMENT_TYPE_U1);
- map("Int16", Signature.ELEMENT_TYPE_I2);
- map("UInt16", Signature.ELEMENT_TYPE_U2);
- map("Int32", Signature.ELEMENT_TYPE_I4);
- map("UInt32", Signature.ELEMENT_TYPE_U4);
- map("Int64", Signature.ELEMENT_TYPE_I8);
- map("UInt64", Signature.ELEMENT_TYPE_U8);
- map("Single", Signature.ELEMENT_TYPE_R4);
- map("Double", Signature.ELEMENT_TYPE_R8);
- map("String", Signature.ELEMENT_TYPE_STRING);
- map("Type", Signature.X_ELEMENT_TYPE_TYPE);
- map("Object", Signature.ELEMENT_TYPE_OBJECT);
- }
- private static void map(String type, int id) {
- Type t = Type.GetType("System." + type);
- assert type != null : type + " -> " + id;
- Integer i = new Integer(id);
- type2id.put(t, i);
- id2type.put(i, t);
- }
- private static int getTypeId(Type type) {
- Integer id = (Integer)type2id.get(type);
- assert id != null : type;
- return id.intValue();
- }
-
- private Object[] constrArgs;
- private Map namedArgs;
- private ByteBuffer buf;
-
- private void parseBlob() {
- try { parseBlob0(); }
- catch (RuntimeException e) {
- throw new RuntimeException(PEFile.bytes2hex(value), e);
- }
- }
-
- private void parseBlob0() {
- if (buf != null)
- return;
- buf = ByteBuffer.wrap(value); // Sec. 23.3 in Partition II of CLR Spec.
- buf.order(ByteOrder.LITTLE_ENDIAN);
-
- short sig = buf.getShort(); // Prolog
- assert sig == 1 : PEFile.bytes2hex(value);
- ParameterInfo[] params = constr.GetParameters();
- constrArgs = new Object[params.length];
- for (int i = 0; i < params.length; i++) {
- constrArgs[i] = parseFixedArg(params[i].ParameterType); // FixedArg
- }
-
- int ncount = buf.getShort(); // NumNamed
- namedArgs = new LinkedHashMap();
- for (int i = 0; i < ncount; i++) {
- int designator = buf.get(); // designator one of 0x53 (FIELD) or 0x54 (PROPERTY)
- assert designator == Signature.X_ELEMENT_KIND_FIELD
- || designator == Signature.X_ELEMENT_KIND_PROPERTY
- : "0x" + PEFile.byte2hex(designator);
- Type type = parseFieldOrPropTypeInNamedArg(); // FieldOrPropType
- String name = parseString(); // FieldOrPropName
- Object value = parseFixedArg(type); // FixedArg
- NamedArgument narg =
- new NamedArgument(designator, name, type, value);
- namedArgs.put(name, narg);
- }
- }
-
- private Object parseFixedArg(Type type) {
- if (type.IsArray())
- return parseArray(type.GetElementType());
- else
- return parseElem(type);
- }
-
- /* indicates whether the "simple" case (the other is "enum") of the first row
- in the Elem production should be taken. */
- private boolean isSimpleElem(Type type) {
- if(!type2id.containsKey(type)) return false;
- int id = getTypeId(type);
- switch(id){
- case Signature.ELEMENT_TYPE_STRING:
- case Signature.X_ELEMENT_TYPE_TYPE:
- case Signature.ELEMENT_TYPE_OBJECT:
- return false;
- default:
- return true;
- }
- }
-
- /* indicates whether the second row in the Elem production
- should be taken (and more specifically, "string" case within that row). */
- private boolean isStringElem(Type type) {
- if(!type2id.containsKey(type)) return false;
- int id = getTypeId(type);
- return id == Signature.ELEMENT_TYPE_STRING;
- }
-
- /* indicates whether the second row in the Elem production
- should be taken (and more specifically, "type" case within that row). */
- private boolean isTypeElem(Type type) {
- if(!type2id.containsKey(type)) return false;
- int id = getTypeId(type);
- return id == Signature.X_ELEMENT_TYPE_TYPE;
- }
-
- /* indicates whether the third row in the Elem production
- should be taken (and more specifically, "boxed" case within that row). */
- private boolean isSystemObject(Type type) {
- if(!type2id.containsKey(type)) return false;
- int id = getTypeId(type);
- return id == Signature.ELEMENT_TYPE_OBJECT;
- }
-
- private Object parseElem(Type type) {
- // simple or enum
- if (isSimpleElem(type)) return parseVal(getTypeId(type));
- if (type.IsEnum()) return parseVal(getTypeId(type.getUnderlyingType()));
- // string or type
- if (isStringElem(type)) return parseString();
- if (isTypeElem(type)) return getTypeFromSerString();
- // boxed valuetype, please notice that a "simple" boxed valuetype is preceded by 0x51
- if (isSystemObject(type)) {
- Type boxedT = parse0x51();
- if(boxedT.IsEnum()) {
- return new BoxedArgument(boxedT, parseVal(getTypeId(boxedT.getUnderlyingType())));
- } else {
- return new BoxedArgument(boxedT, parseVal(getTypeId(boxedT))); // TODO dead code?
- }
- } else {
- Type boxedT = parseType();
- return parseVal(getTypeId(boxedT));
- }
- }
-
- /* this does not parse an Elem, but a made-up production (Element). Don't read too much into this method name! */
- private Object parseVal(int id) {
- switch (id) {
- case Signature.ELEMENT_TYPE_BOOLEAN:
- return new Boolean(buf.get() == 0 ? false : true);
- case Signature.ELEMENT_TYPE_CHAR:
- return new Character(buf.getChar());
- case Signature.ELEMENT_TYPE_I1:
- case Signature.ELEMENT_TYPE_U1:
- return new Byte(buf.get()); // TODO U1 not the same as I1
- case Signature.ELEMENT_TYPE_I2:
- case Signature.ELEMENT_TYPE_U2:
- return new Short(buf.getShort()); // TODO U2 not the same as I2
- case Signature.ELEMENT_TYPE_I4:
- case Signature.ELEMENT_TYPE_U4:
- return new Integer(buf.getInt()); // TODO U4 not the same as I4
- case Signature.ELEMENT_TYPE_I8:
- case Signature.ELEMENT_TYPE_U8:
- return new Long(buf.getLong()); // TODO U8 not the same as I8
- case Signature.ELEMENT_TYPE_R4:
- return new Float(buf.getFloat());
- case Signature.ELEMENT_TYPE_R8:
- return new Double(buf.getDouble());
- case Signature.X_ELEMENT_TYPE_TYPE:
- return getTypeFromSerString();
- case Signature.ELEMENT_TYPE_STRING:
- return parseString();
- default:
- throw new RuntimeException("Shouldn't have called parseVal with: " + id);
- }
- }
-
- private Object parseArray(Type type) {
- if (type.IsEnum())
- return parseArray(type.getUnderlyingType());
- return parseArray(getTypeId(type));
- }
-
- private Object parseArray(int id) {
- switch (id) {
- case Signature.ELEMENT_TYPE_BOOLEAN:
- return parseBooleanArray();
- case Signature.ELEMENT_TYPE_CHAR:
- return parseCharArray();
- case Signature.ELEMENT_TYPE_I1:
- case Signature.ELEMENT_TYPE_U1: // TODO U1 not the same as I1
- return parseByteArray();
- case Signature.ELEMENT_TYPE_I2:
- case Signature.ELEMENT_TYPE_U2:
- return parseShortArray();
- case Signature.ELEMENT_TYPE_I4:
- case Signature.ELEMENT_TYPE_U4:
- return parseIntArray();
- case Signature.ELEMENT_TYPE_I8:
- case Signature.ELEMENT_TYPE_U8:
- return parseLongArray();
- case Signature.ELEMENT_TYPE_R4:
- return parseFloatArray();
- case Signature.ELEMENT_TYPE_R8:
- return parseDoubleArray();
- case Signature.ELEMENT_TYPE_STRING:
- return parseStringArray();
- case Signature.X_ELEMENT_TYPE_ENUM:
- return parseArray(getTypeFromSerString());
- default:
- throw new RuntimeException("Unknown type id: " + id);
- }
- }
-
- private Type parseType() { // FieldOrPropType, Sec. 23.3 in Partition II of CLR Spec.
- int id = buf.get();
- switch (id) {
- case Signature.ELEMENT_TYPE_SZARRAY:
- Type arrT = Type.mkArray(parseType(), 1);
- return arrT;
- case Signature.X_ELEMENT_TYPE_ENUM:
- String enumName = parseString();
- Type enumT = Type.getType(enumName);
- return enumT;
- default:
- Type t = (Type)id2type.get(new Integer(id));
- assert t != null : PEFile.byte2hex(id);
- return t;
- }
- }
-
- private Type parse0x51() {
- int id = buf.get();
- switch (id) {
- case 0x51:
- return parse0x51();
- case Signature.ELEMENT_TYPE_SZARRAY:
- Type arrT = Type.mkArray(parseType(), 1);
- return arrT;
- case Signature.X_ELEMENT_TYPE_ENUM:
- String enumName = parseString();
- Type enumT = Type.getType(enumName);
- return enumT;
- default:
- Type t = (Type)id2type.get(new Integer(id));
- assert t != null : PEFile.byte2hex(id);
- return t;
- }
- }
-
-
- private Type parseFieldOrPropTypeInNamedArg() { // FieldOrPropType, Sec. 23.3 in Partition II of CLR Spec.
- int id = buf.get();
- switch (id) {
- case 0x51:
- return (Type)(id2type.get(new Integer(Signature.ELEMENT_TYPE_OBJECT)));
- // TODO remove case Signature.ELEMENT_TYPE_SZARRAY:
- // Type arrT = Type.mkArray(parseType(), 1);
- // return arrT;
- case Signature.X_ELEMENT_TYPE_ENUM:
- String enumName = parseString();
- Type enumT = Type.getType(enumName); // TODO this "lookup" only covers already-loaded assemblies.
- return enumT; // TODO null as return value (due to the above) spells trouble later.
- default:
- Type t = (Type)id2type.get(new Integer(id));
- assert t != null : PEFile.byte2hex(id);
- return t;
- }
- }
-
- private Type getTypeFromSerString() {
- String typename = parseString();
- int i = typename.indexOf(',');
- /* fully qualified assembly name follows. Just strip it on the assumption that
- the assembly is referenced in the externs and the type will be found. */
- String name = (i < 0) ? typename : typename.substring(0, i);
- Type t = Type.GetType(name);
- if (t == null && i > 0) {
- int j = typename.indexOf(',', i + 1);
- if (j > 0) {
- String assemName = typename.substring(i + 1, j);
- try {
- Assembly.LoadFrom(assemName);
- } catch (Throwable e) {
- throw new RuntimeException(typename, e);
- }
- t = Type.GetType(name);
- }
- }
- assert t != null : typename;
- return t;
- }
-
- private boolean[] parseBooleanArray() {
- boolean[] arr = new boolean[buf.getInt()];
- for (int i = 0; i < arr.length; i++)
- arr[i] = buf.get() == 0 ? false : true;
- return arr;
- }
-
- private char[] parseCharArray() {
- char[] arr = new char[buf.getInt()];
- for (int i = 0; i < arr.length; i++)
- arr[i] = buf.getChar();
- return arr;
- }
-
- private byte[] parseByteArray() {
- byte[] arr = new byte[buf.getInt()];
- for (int i = 0; i < arr.length; i++)
- arr[i] = buf.get();
- return arr;
- }
-
- private short[] parseShortArray() {
- short[] arr = new short[buf.getInt()];
- for (int i = 0; i < arr.length; i++)
- arr[i] = buf.getShort();
- return arr;
- }
-
- private int[] parseIntArray() {
- int[] arr = new int[buf.getInt()];
- for (int i = 0; i < arr.length; i++)
- arr[i] = buf.getInt();
- return arr;
- }
-
- private long[] parseLongArray() {
- long[] arr = new long[buf.getInt()];
- for (int i = 0; i < arr.length; i++)
- arr[i] = buf.getLong();
- return arr;
- }
-
- private float[] parseFloatArray() {
- float[] arr = new float[buf.getInt()];
- for (int i = 0; i < arr.length; i++)
- arr[i] = buf.getFloat();
- return arr;
- }
-
- private double[] parseDoubleArray() {
- double[] arr = new double[buf.getInt()];
- for (int i = 0; i < arr.length; i++)
- arr[i] = buf.getDouble();
- return arr;
- }
-
- private String[] parseStringArray() {
- String[] arr = new String[buf.getInt()];
- for (int i = 0; i < arr.length; i++)
- arr[i] = parseString();
- return arr;
- }
-
- private String parseString() { // SerString convention
- String str = null;
- int length = parseLength();
- if (length < 0)
- return null;
- try { str = new String(value, buf.position(), length, "UTF-8" ); }
- catch (UnsupportedEncodingException e) { throw new Error(e); }
- buf.position(buf.position() + length);
- return str;
- }
-
- private int getByte() {
- return (buf.get() + 0x0100) & 0xff;
- }
-
- public int parseLength() {
- int length = getByte();
-        // check for an invalid length encoding: at least one of the three most
-        // significant bits must be 0; if all three are 1 (mask 0xe0) the length is invalid.
- if ((length & 0xe0) == 0xe0)
- return -1;
- if ((length & 0x80) != 0) {
- length = ((length & 0x7f) << 8) | getByte();
- if ((length & 0x4000) != 0)
- length = ((length & 0x3fff) << 16) | (getByte()<<8) | getByte();
- }
- return length;
- }
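For orientation, the length prefix decoded above is the ECMA-335 (Partition II, 23.2) compressed unsigned integer: one byte for values below 0x80, two bytes starting with bits 10 for values below 0x4000, four bytes starting with bits 110 otherwise; the 111xxxxx form is reserved and treated as invalid here. A minimal standalone sketch of the same decoding (class and method names are illustrative, not part of this API):

    import java.nio.ByteBuffer;

    final class CompressedLengthSketch {
        /** Mirrors parseLength above; returns -1 for the reserved 111xxxxx form. */
        static int decode(ByteBuffer buf) {
            int b0 = buf.get() & 0xff;
            if ((b0 & 0xe0) == 0xe0) return -1;                    // invalid encoding
            if ((b0 & 0x80) == 0) return b0;                       // 1-byte form: 0..0x7f
            int b1 = buf.get() & 0xff;
            if ((b0 & 0x40) == 0) return ((b0 & 0x3f) << 8) | b1;  // 2-byte form: 0..0x3fff
            int b2 = buf.get() & 0xff;
            int b3 = buf.get() & 0xff;
            return ((b0 & 0x3f) << 24) | (b1 << 16) | (b2 << 8) | b3; // 4-byte form
        }
    }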
-
- //##########################################################################
- private static void formatValue(StringBuffer str, Object o) {
- Class c = (o == null) ? null : o.getClass();
- if (c == null) {
- str.append("<null>");
- } else if (c == String.class) {
- str.append('"');
- str.append(o);
- str.append('"');
- } else if (c == Character.class) {
- str.append('\'');
- str.append(o);
- str.append('\'');
- } else if (c == boolean[].class) {
- str.append("new boolean[] {");
- boolean[] arr = (boolean[])o;
- for (int i = 0; i < arr.length; i++) {
- if (i > 0) str.append(", ");
- str.append(arr[i]);
- }
- str.append('}');
- } else if (c == char[].class) {
-            str.append("new char[] {");
-            char[] arr = (char[])o;
- for (int i = 0; i < arr.length; i++) {
- if (i > 0) str.append(", ");
- str.append(arr[i]);
- }
- str.append('}');
- } else if (c == byte[].class) {
- str.append("new byte[] {");
- byte[] arr = (byte[])o;
- for (int i = 0; i < arr.length; i++) {
- if (i > 0) str.append(", ");
- str.append(arr[i]);
- }
- str.append('}');
- } else if (c == short[].class) {
- str.append("new short[] {");
- short[] arr = (short[])o;
- for (int i = 0; i < arr.length; i++) {
- if (i > 0) str.append(", ");
- str.append(arr[i]);
- }
- str.append('}');
- } else if (c == int[].class) {
- str.append("new int[] {");
- int[] arr = (int[])o;
- for (int i = 0; i < arr.length; i++) {
- if (i > 0) str.append(", ");
- str.append(arr[i]);
- }
- str.append('}');
- } else if (c == long[].class) {
- str.append("new long[] {");
- long[] arr = (long[])o;
- for (int i = 0; i < arr.length; i++) {
- if (i > 0) str.append(", ");
- str.append(arr[i]);
- }
- str.append('}');
- } else if (c == float[].class) {
- str.append("new float[] {");
- float[] arr = (float[])o;
- for (int i = 0; i < arr.length; i++) {
- if (i > 0) str.append(", ");
- str.append(arr[i]);
- }
- str.append('}');
- } else if (c == double[].class) {
- str.append("new double[] {");
- double[] arr = (double[])o;
- for (int i = 0; i < arr.length; i++) {
- if (i > 0) str.append(", ");
- str.append(arr[i]);
- }
- str.append('}');
- } else if (c == String[].class) {
- str.append("new String[] {");
- String[] arr = (String[])o;
- for (int i = 0; i < arr.length; i++) {
- if (i > 0) str.append(", ");
- formatValue(str, arr[i]);
- }
- str.append('}');
- } else if (o instanceof Type) {
- str.append("typeof(");
- str.append(o);
- str.append(")");
- } else
- str.append(o);
- }
-
- //##########################################################################
-
- /** Represents named arguments (assigned outside of the constructor)
- * of a custom attribute
- */
- public static class NamedArgument {
-
- /** Designates if the named argument corresponds to a field or property.
- * Possible values:
- * Signature.X_ELEMENT_KIND_FIELD = 0x53
- * Signature.X_ELEMENT_KIND_PROPERTY = 0x54
- */
- public final int designator;
-
- /** The name of the field/property. */
- public final String name;
-
- /** Type of the field/property. */
- public final Type type;
-
- /** The value for the field/property. */
- public final Object value;
-
- /** An empty array NamedArgument. */
- public static final NamedArgument[] EMPTY = new NamedArgument[0];
-
-        public NamedArgument(int designator, String name, Type type, Object value)
- {
- this.designator = designator;
- this.name = name;
- this.type = type;
- this.value = value;
- }
-
- /** @return <b>true</b> if the named argument specifies a field;
-         *  <b>false</b> otherwise.
- */
- public boolean isField() {
- return designator == Signature.X_ELEMENT_KIND_FIELD;
- }
-
- /** @return <b>true</b> if the named argument specifies a property;
-         *  <b>false</b> otherwise.
- */
- public boolean isProperty() {
- return designator == Signature.X_ELEMENT_KIND_PROPERTY;
- }
-
- /** @return a string representation of the named argument. */
- public String toString() {
- StringBuffer str = new StringBuffer(name);
- str.append(" = ");
- if (type.IsEnum())
- str.append('(').append(type.FullName).append(')');
- formatValue(str, value);
- return str.toString();
- }
- }
-
- //##########################################################################
-
- public static class BoxedArgument {
- public final Type type;
- public final Object value;
- public BoxedArgument(Type type, Object value) {
- this.type = type; this.value = value;
- }
- public String toString() {
- return "(" + type.FullName + ")" + value;
- }
- }
-
- //##########################################################################
-
-} // class Attribute
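For orientation, the blobs consumed by this class follow the custom-attribute value layout of ECMA-335 II.23.3: a 0x0001 prolog, the fixed constructor arguments, a named-argument count, then the named arguments. A hand-written example blob for a constructor taking the single string "abc" (illustrative only, not taken from any particular assembly):

    // Illustrative only: byte values laid out as parsed by the class above.
    static final byte[] SAMPLE_BLOB = {
        0x01, 0x00,             // Prolog 0x0001 (little-endian)
        0x03, 0x61, 0x62, 0x63, // FixedArg: SerString, compressed length 3, then "abc" in UTF-8
        0x00, 0x00              // NumNamed = 0 (no named arguments follow)
    };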
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/BindingFlags.java b/src/msil/ch/epfl/lamp/compiler/msil/BindingFlags.java
deleted file mode 100644
index cac2319b50..0000000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/BindingFlags.java
+++ /dev/null
@@ -1,169 +0,0 @@
-/*
- * System.Reflection-like API for access to .NET assemblies (DLL & EXE)
- */
-
-
-package ch.epfl.lamp.compiler.msil;
-
-/**
- * Specifies flags that control binding and the way in which
- * the search for members and types is conducted by reflection.
- *
- * Note: You must specify Instance or Static along with Public or NonPublic,
- * or no members will be returned.
- *
- * @author Nikolay Mihaylov
- * @version 1.0
- */
-public abstract class BindingFlags {
-
- //##########################################################################
-
- // disallows extending the class;
- private BindingFlags() {}
-
- /**
- * Specifies no binding flag.
- */
- public static final int Default = 0x0000;
-
- /**
- * Specifies that the case of the member name should not be considered
- * when binding.
- */
- public static final int IgnoreCase = 0x0001;
-
- /**
- * Specifies that only members declared at the level of the supplied type's
- * hierarchy should be considered. Inherited members are not considered.
- */
- public static final int DeclaredOnly = 0x0002;
-
- /**
- * Specifies that instance members are to be included in the search.
- */
- public static final int Instance = 0x0004;
-
- /**
- * Specifies that static members are to be included in the search.
- */
- public static final int Static = 0x0008;
-
- /**
- * Specifies that public members are to be included in the search.
- */
- public static final int Public = 0x0010;
-
- /**
- * Specifies that non-public members are to be included in the search.
- */
- public static final int NonPublic = 0x0020;
-
- /**
- * Specifies that static members up the hierarchy should be returned.
- * Static members include fields, methods, events, and properties.
- * Nested types are not returned.
- */
- public static final int FlattenHierarchy = 0x0040;
-
- /**
- * Specifies that a method is to be invoked. This may not be a constructor
- * or a type initializer.
- */
- public static final int InvokeMethod = 0x0100;
-
- /**
- * Specifies that Reflection should create an instance of
- * the specified type. Calls the constructor that matches
- * the given arguments. The supplied member name is ignored.
- * If the type of lookup is not specified, (Instance | Public)
- * will apply. It is not possible to call a type initializer.
- */
- public static final int CreateInstance = 0x0200;
-
- /**
- * Specifies that the value of the specified field should be returned.
- */
- public static final int GetField = 0x0400;
-
- /**
- * Specifies that the value of the specified field should be set.
- */
- public static final int SetField = 0x0800;
-
- /**
- * Specifies that the value of the specified property should be returned.
- */
- public static final int GetProperty = 0x1000;
-
- /**
- * Specifies that the value of the specified property should be set.
- * For COM properties, specifying this binding flag is equivalent to
- * specifying PutDispProperty and PutRefDispProperty.
- */
- public static final int SetProperty = 0x2000;
-
- /**
- * Specifies that the PROPPUT member on a COM object should be invoked.
- * PROPPUT specifies a property-setting function that uses a value.
- * Use PutDispProperty if a property has both PROPPUT and PROPPUTREF
- * and you need to distinguish which one is called.
- */
- public static final int PutDispProperty = 0x4000;
-
-
- /**
- * Specifies that the PROPPUTREF member on a COM object should be invoked.
- * PROPPUTREF specifies a property-setting function that uses a reference
- * instead of a value. Use PutRefDispProperty if a property has both
- * PROPPUT and PROPPUTREF and you need to distinguish which one is called.
- */
- public static final int PutRefDispProperty = 0x8000;
-
- /**
- * Specifies that types of the supplied arguments must exactly match
- * the types of the corresponding formal parameters. Reflection
- * throws an exception if the caller supplies a non-null Binder object,
- * since that implies that the caller is supplying BindToXXX
- * implementations that will pick the appropriate method.
- * Reflection models the accessibility rules of the common type system.
- * For example, if the caller is in the same assembly, the caller
- * does not need special permissions for internal members. Otherwise,
- * the caller needs ReflectionPermission. This is consistent with
- * lookup of members that are protected, private, and so on.
- * The general principle is that ChangeType should perform only
- * widening coercions, which never lose data. An example of a
- * widening coercion is coercing a value that is a 32-bit signed integer
- * to a value that is a 64-bit signed integer. This is distinguished
- * from a narrowing coercion, which may lose data. An example of
- * a narrowing coercion is coercing a 64-bit signed integer to
- * a 32-bit signed integer.
- * The default binder ignores this flag, while custom binders can
- * implement the semantics of this flag.
- */
- public static final int ExactBinding = 0x10000;
-
- /**
- * Used in COM interop to specify that the return value of the member
- * can be ignored.
- */
- public static final int IgnoreReturn = 0x100000 ;
-
- /**
- * Returns the set of members whose parameter count matches the number
- * of supplied arguments. This binding flag is used for methods with
- * parameters that have default values and methods with variable arguments
- * (varargs). This flag should only be used with Type.InvokeMember.
- * Parameters with default values are used only in calls where trailing
- * arguments are omitted. They must be the last arguments.
- */
- public static final int OptionalParamBinding = 0x40000;
-
- /**
- * Not implemented.
- */
- public static final int SuppressChangeType = 0x20000;
-
- //##########################################################################
-
-} // class BindingFlags
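As the class comment notes, a lookup mask matches members only when an instance/static bit is combined with a visibility bit. A minimal sketch of typical combinations (the wrapper class name is made up; only constants from the class above are used, and no lookup call is shown because the corresponding Type method lives outside this file):

    final class BindingFlagsSketch {
        // Declared, public instance members only.
        static final int DECLARED_PUBLIC_INSTANCE =
            BindingFlags.Instance | BindingFlags.Public | BindingFlags.DeclaredOnly;
        // All declared members, public or not, instance or static.
        static final int ALL_DECLARED =
            DECLARED_PUBLIC_INSTANCE | BindingFlags.Static | BindingFlags.NonPublic;
        // Omitting Instance/Static or Public/NonPublic yields a mask that matches no members.
    }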
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/CallingConventions.java b/src/msil/ch/epfl/lamp/compiler/msil/CallingConventions.java
deleted file mode 100644
index 50bf9fb5d5..0000000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/CallingConventions.java
+++ /dev/null
@@ -1,75 +0,0 @@
-/*
- * System.Reflection-like API for access to .NET assemblies (DLL & EXE)
- */
-
-
-package ch.epfl.lamp.compiler.msil;
-
-
-/**
- * Calling conventions
- *
- * @author Nikolay Mihaylov
- * @version 1.0
- */
-public abstract class CallingConventions {
-
- //########################################################################
-
- /**
- * Specifies the default calling convention as determined by the
- * common language runtime.
- */
- public static final short Standard = (short) 0x0001;
-
- /**
- * Specifies the calling convention for methods with variable arguments.
- */
- public static final short VarArgs = (short) 0x0002;
-
- /**
- * Specifies that either the Standard or the VarArgs calling
- * convention may be used.
- */
- public static final short Any = Standard | VarArgs;
-
- /**
- * Specifies an instance or virtual method (not a static method).
- * At run-time, the called method is passed a pointer to the target
- * object as its first argument (the this pointer). The signature
- * stored in metadata does not include the type of this first argument,
- * because the method is known and its owner class can be discovered
- * from metadata.
- */
- public static final short HasThis = (short) 0x0020;
-
- /**
- * Specifies that the signature is a function-pointer signature,
- * representing a call to an instance or virtual method (not a static
- * method). If ExplicitThis is set, HasThis must also be set. The first
- * argument passed to the called method is still a this pointer, but the
- * type of the first argument is now unknown. Therefore, a token that
- * describes the type (or class) of the this pointer is explicitly stored
- * into its metadata signature.
- */
- public static final short ExplicitThis = (short) 0x0040;
-
- //########################################################################
-
- private CallingConventions() {}
-
- public static String toString(int callConv) {
- StringBuffer s = new StringBuffer();
-
- if ((callConv & HasThis) != 0) {
- s.append("instance");
- if ((callConv & ExplicitThis) != 0)
- s.append(" explicit");
- }
-
- return s.toString();
- }
-
- //##########################################################################
-
-} // class CallingConventions
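Note that toString above renders only the HasThis/ExplicitThis bits, so Standard and VarArgs print as an empty string. A small usage sketch (wrapper class and method names are illustrative):

    final class CallingConventionsSketch {
        static void demo() {
            String s0 = CallingConventions.toString(CallingConventions.Standard);   // ""
            String s1 = CallingConventions.toString(CallingConventions.HasThis);    // "instance"
            String s2 = CallingConventions.toString(
                CallingConventions.HasThis | CallingConventions.ExplicitThis);      // "instance explicit"
        }
    }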
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/ConstructedType.java b/src/msil/ch/epfl/lamp/compiler/msil/ConstructedType.java
deleted file mode 100644
index 8c82cb4876..0000000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/ConstructedType.java
+++ /dev/null
@@ -1,48 +0,0 @@
-package ch.epfl.lamp.compiler.msil;
-
-import java.util.Arrays;
-
-/* The only reason for ConstructedType to extend Type is complying with existing code
- (e.g., caseFieldBuilder in ILPrinterVisitor) expecting a Type.
- */
-public class ConstructedType extends Type {
-
- public final Type instantiatedType;
- public final Type[] typeArgs;
-
- public ConstructedType(Type instantiatedType, Type[] typeArgs) {
- super(instantiatedType.Module, instantiatedType.Attributes, "", null, null, null, instantiatedType.auxAttr /*AuxAttr.None*/ , null);
- this.instantiatedType = instantiatedType;
- this.typeArgs = typeArgs;
- }
-
- public String toString() {
- String res = instantiatedType.toString() + "[";
- for (int i = 0; i < typeArgs.length; i++) {
- res = res + typeArgs[i].toString();
- if(i + 1 < typeArgs.length) {
- res = res + ", ";
- }
- }
- return res + "]";
- }
-
-
- public boolean equals(Object o) {
- if (this == o) return true;
- if (o == null || getClass() != o.getClass()) return false;
-
- ConstructedType that = (ConstructedType) o;
-
- if (!instantiatedType.equals(that.instantiatedType)) return false;
- if (!Arrays.equals(typeArgs, that.typeArgs)) return false;
-
- return true;
- }
-
- public int hashCode() {
- int result = instantiatedType.hashCode();
- result = 31 * result + Arrays.hashCode(typeArgs);
- return result;
- }
-}
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/ConstructorInfo.java b/src/msil/ch/epfl/lamp/compiler/msil/ConstructorInfo.java
deleted file mode 100644
index 69f5d6d32a..0000000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/ConstructorInfo.java
+++ /dev/null
@@ -1,54 +0,0 @@
-/*
- * System.Reflection-like API for access to .NET assemblies (DLL & EXE)
- */
-
-
-package ch.epfl.lamp.compiler.msil;
-
-/**
- * Discovers the attributes of a class constructor and provides
- * access to constructor metadata.
- * ConstructorInfo is used to discover the attributes of a constructor
- * as well as to invoke a constructor. Objects are created by invoking
- * either the GetConstructors or GetConstructor method of a Type object.
- *
- * @author Nikolay Mihaylov
- * @version 1.0
- */
-public class ConstructorInfo extends MethodBase {
- //##########################################################################
-
- public final int MemberType() { return MemberTypes.Constructor; }
-
- public final boolean IsConstructor() { return true; }
-
- protected static final String CTOR = ".ctor";
- protected static final String CCTOR = ".cctor";
- protected static final ConstructorInfo[] EMPTY_ARRAY = new ConstructorInfo[0];
-
- protected static String getName(int attrs) {
- return (attrs & MethodAttributes.Static) == 0 ? CTOR : CCTOR;
- }
-
- /** Public constructors */
-
- public ConstructorInfo(Type declType, int attrs, Type[] paramTypes) {
- super(getName(attrs), declType, attrs, paramTypes);
- assert declType != null : "Owner can't be 'null' for a constructor!";
- }
-
- public ConstructorInfo(Type declType, int attrs, ParameterInfo[] params)
- {
- super(getName(attrs), declType, attrs, params);
- assert declType != null : "Owner can't be 'null' for a constructor!";
- }
-
-
- public String toString() {
- return MethodAttributes.toString(Attributes) + " " + Type.VOID() +
- " " + DeclaringType.FullName + "::" + Name + params2String();
- }
-
- //##########################################################################
-
-} // class ConstructorInfo
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/CustomAttributeProvider.java b/src/msil/ch/epfl/lamp/compiler/msil/CustomAttributeProvider.java
deleted file mode 100644
index 0e58c18114..0000000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/CustomAttributeProvider.java
+++ /dev/null
@@ -1,82 +0,0 @@
-/*
- * System.Reflection-like API for access to .NET assemblies (DLL & EXE)
- */
-
-
-package ch.epfl.lamp.compiler.msil;
-
-import java.util.List;
-import java.util.LinkedList;
-import java.util.Iterator;
-
-/**
- * @author Nikolay Mihaylov
- * @version 1.0
- */
-public abstract class CustomAttributeProvider implements ICustomAttributeProvider {
-
- //##########################################################################
-
- protected List/*<Attribute>*/ custAttrs;
- private static final Object[] EMPTY = new Object[0];
-
- //TODO: take inherit into account
- public Object[] GetCustomAttributes(boolean inherit) {
- initAttributes(null);
- return custAttrs.size() == 0 ? EMPTY
- : custAttrs.toArray(new Attribute[custAttrs.size()]);
- }
-
- //TODO: take inherit into account
- public Object[] GetCustomAttributes(Type attributeType, boolean inherit) {
- initAttributes(attributeType);
- List tAttrs = null;
- if (constrType == attributeType)
- tAttrs = custAttrs;
- else {
- tAttrs = new LinkedList();
- for (Iterator attrs = custAttrs.iterator(); attrs.hasNext(); ) {
- Attribute a = (Attribute) attrs.next();
- if (a.GetType() == attributeType) tAttrs.add(a);
- }
- }
- return tAttrs.size() == 0 ? EMPTY
- : tAttrs.toArray(new Attribute[tAttrs.size()]);
- }
-
- //TODO: take inherit into account
- public boolean IsDefined(Type attributeType, boolean inherit) {
- initAttributes(attributeType);
- if (constrType == attributeType)
- return custAttrs.size() > 0;
- Iterator attrs = custAttrs.iterator();
- while (attrs.hasNext()) {
- if (((Attribute)attrs.next()).GetType() == attributeType)
- return true;
- }
- return false;
-// return inherit && (DeclaringClass.BaseType != null)
-// && DeclaringClass.BaseType.IsDefined(inherit);
- }
-
- protected void addCustomAttribute(ConstructorInfo constr, byte[] value) {
- Attribute attr = new Attribute(constr, value);
- assert constrType == null || constrType == attr.GetType();
- if (custAttrs == null)
- custAttrs = new LinkedList();
- custAttrs.add(attr);
- }
-
- private void initAttributes(Type atype) {
- if (custAttrs != null
- && (constrType == null || constrType == atype))
- return;
- custAttrs = new LinkedList();
- constrType = atype;
- loadCustomAttributes(atype);
- }
-
- protected void loadCustomAttributes(Type atype) {}
-
- private Type constrType;
-}
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/CustomModifier.java b/src/msil/ch/epfl/lamp/compiler/msil/CustomModifier.java
deleted file mode 100644
index cf30008c60..0000000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/CustomModifier.java
+++ /dev/null
@@ -1,45 +0,0 @@
-package ch.epfl.lamp.compiler.msil;
-
-/**
- * Quoting from the CIL spec, Partition II, Sec. 7.1.1:
- *
- * Custom modifiers, defined using `modreq` (required modifier) and `modopt` (optional modifier), are
- * similar to custom attributes (Sec. 21) except that modifiers are part of a signature rather than being attached to a
- * declaration. Each modifer associates a type reference with an item in the signature.
- * declaration. Each modifier associates a type reference with an item in the signature.
- */
-public class CustomModifier {
-
- public boolean isReqd;
- public Type marker;
-
- public CustomModifier(boolean isReqd, Type marker) {
- this.isReqd = isReqd;
- this.marker = marker;
- }
-
- public String toString() {
- String res = (isReqd ? "modreq( " : "modopt( ") + marker.toString() + " )";
- return res;
- }
-
- public static Type[] helperCustomMods(boolean isReqd, CustomModifier[] cmods) {
- if(cmods == null) return null;
- int count = 0;
- for (int idx = 0; idx < cmods.length; idx++) {
- if(cmods[idx].isReqd == isReqd) count++;
- }
- Type[] res = new Type[count];
- int residx = 0;
-        for (int idx = 0; idx < cmods.length; idx++) {
-            if(cmods[idx].isReqd == isReqd) {
-                res[residx] = cmods[idx].marker;
-                residx++;
-            }
-        }
- return res;
- }
-
- public static Type VolatileMarker() {
- return Type.GetType("System.Runtime.CompilerServices.IsVolatile");
- }
-
-}
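helperCustomMods keeps only the markers whose modreq/modopt kind matches; it is what GetRequiredCustomModifiers and GetOptionalCustomModifiers delegate to. A usage sketch (wrapper class name is made up; the IsConst type name is just an example and may resolve to null unless its assembly has been loaded):

    final class CustomModifierSketch {
        static void demo() {
            CustomModifier[] cmods = {
                new CustomModifier(true,  CustomModifier.VolatileMarker()),                        // modreq(IsVolatile)
                new CustomModifier(false, Type.GetType("System.Runtime.CompilerServices.IsConst")) // modopt, illustrative
            };
            Type[] required = CustomModifier.helperCustomMods(true,  cmods); // { IsVolatile marker }
            Type[] optional = CustomModifier.helperCustomMods(false, cmods); // { IsConst marker }
        }
    }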
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/EventAttributes.java b/src/msil/ch/epfl/lamp/compiler/msil/EventAttributes.java
deleted file mode 100644
index a183993cb9..0000000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/EventAttributes.java
+++ /dev/null
@@ -1,32 +0,0 @@
-/*
- * System.Reflection-like API for access to .NET assemblies (DLL & EXE)
- */
-
-
-package ch.epfl.lamp.compiler.msil;
-
-/**
- * Specifies flags that describe the attributes of an event.
- *
- * @author Nikolay Mihaylov
- * @version 1.0
- */
-public final class EventAttributes {
-
- //##########################################################################
-
- /** Specifies that the event has no attributes. */
- public static final short None = 0x000;
-
- /** Specifies a reserved flag for CLR use only. */
- public static final short ReservedMask = 0x0400;
-
- /** Specifies that the event is special in a way described by the name. */
- public static final short SpecialName = 0x0200;
-
-    /** Specifies that the CLR should check name encoding. */
- public static final short RTSpecialName = 0x0400;
-
- //##########################################################################
-
-} // class EventAttributes
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/EventInfo.java b/src/msil/ch/epfl/lamp/compiler/msil/EventInfo.java
deleted file mode 100644
index 3ccba7900b..0000000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/EventInfo.java
+++ /dev/null
@@ -1,58 +0,0 @@
-/*
- * System.Reflection-like API for access to .NET assemblies (DLL & EXE)
- */
-
-
-package ch.epfl.lamp.compiler.msil;
-
-
-/**
- * Discovers the attributes of an event
- * and provides access to event metadata.
- *
- * @author Nikolay Mihaylov
- * @version 1.0
- */
-public class EventInfo extends MemberInfo {
-
- //##########################################################################
-
- public final int MemberType() { return MemberTypes.Event; }
-
- /** Attributes associated with the event. */
- public final short Attributes;
-
- /** The Type object for the underlying event-handler delegate
- * associated with this event.
- */
- public final Type EventHandlerType;
-
- public MethodInfo GetAddMethod() { return addMethod; }
-
- public MethodInfo GetRemoveMethod() { return removeMethod; }
-
- public String toString() {
- return "" + EventHandlerType + " " + Name;
- }
-
- //##########################################################################
-
- protected static final EventInfo[] EMPTY_ARRAY = new EventInfo[0];
-
- protected MethodInfo addMethod;
-
- protected MethodInfo removeMethod;
-
- protected EventInfo(String name, Type declType, short attr,
- Type handlerType, MethodInfo add, MethodInfo remove)
- {
- super(name, declType);
- Attributes = attr;
- EventHandlerType = handlerType;
- this.addMethod = add;
- this.removeMethod = remove;
- }
-
- //##########################################################################
-
-} // class EventInfo
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/FieldAttributes.java b/src/msil/ch/epfl/lamp/compiler/msil/FieldAttributes.java
deleted file mode 100644
index d7d1bb3d54..0000000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/FieldAttributes.java
+++ /dev/null
@@ -1,119 +0,0 @@
-/*
- * System.Reflection-like API for access to .NET assemblies (DLL & EXE)
- */
-
-
-package ch.epfl.lamp.compiler.msil;
-
-/**
- * Specifies flags that describe the attributes of a field.
- *
- * @author Nikolay Mihaylov
- * @version 1.0
- */
-public final class FieldAttributes {
-
- //##########################################################################
-
- /** Specifies the access level of a given field. */
- public static final short FieldAccessMask = 0x0007;
-
-    /** Member not referenceable. */
- public static final short CompilerControlled = 0x0000;
-
- /** Field is accessible only by the parent type. */
- public static final short Private = 0x0001;
-
- /** Field is accessible only by subtypes in this assembly. */
- public static final short FamANDAssem = 0x0002;
-
- /** Field is accessible throughout the assembly. */
- public static final short Assembly = 0x0003;
-
- /** Field is accessible only by type and subtypes. */
- public static final short Family = 0x0004;
-
- /** Field is accessible by subtypes anywhere,
- * as well as throughout this assembly. */
- public static final short FamORAssem = 0x0005;
-
- /** Specifies that the field is accessible by any member
- * for whom this scope is visible. */
- public static final short Public = 0x0006;
-
- //##########################################################################
- //
-
- /** Field represents the defined type, or else it is per-instance. */
- public static final short Static = 0x0010;
-
- /** Field is initialized only and cannot be written after initialization. */
- public static final short InitOnly = 0x0020;
-
- /** Value is compile-time constant. */
- public static final short Literal = 0x0040;
-
- /** Field does not have to be serialized when the type is remoted. */
- public static final short NotSerialized = 0x0080;
-
- /** Field is special. */
- public static final short SpecialName = 0x0200;
-
- //##########################################################################
- // Interop attributes
-
- /** Implementation is forwarded through PInvoke */
- public static final short PinvokeImpl = 0x2000;
-
-
- //##########################################################################
- // Additional flags
-
- /** CLI provides 'special' behavior depending upon the name of the field */
- public static final short RTSpecialName = 0x0400;
-
- /** Field has marshalling information. */
- public static final short HasFieldMarshal = 0x1000;
-
- /** Field has a default value. */
- public static final short HasDefault = (short)0x8000;
-
- /** Field has a Relative Virtual Address (RVA). The RVA is the location
- * of the method body in the current image, as an address relative
- * to the start of the image file in which it is located. */
- public static final short HasFieldRVA = 0x0100;
-
- //##########################################################################
- //
-
- public static String toString(short attrs) {
- StringBuffer str = new StringBuffer();
- switch (attrs & FieldAccessMask) {
- case CompilerControlled: str.append("compilercontrolled"); break;
- case Private: str.append("private"); break;
- case FamANDAssem: str.append("famandassem"); break;
- case Assembly: str.append("assembly"); break;
- case Family: str.append("family"); break;
- case FamORAssem: str.append("famorassem"); break;
- case Public: str.append("public"); break;
- }
- if ((attrs & Static) != 0) str.append(" static");
- if ((attrs & InitOnly) != 0) str.append(" initonly");
- if ((attrs & Literal) != 0) str.append(" literal");
- if ((attrs & NotSerialized) != 0) str.append(" notserialized");
- if ((attrs & SpecialName) != 0) str.append(" specialname");
- if ((attrs & PinvokeImpl) != 0) str.append("");
- if ((attrs & RTSpecialName) != 0) str.append(" rtspecialname");
- if ((attrs & HasFieldMarshal) != 0) str.append(" marshal(<native type>)");
- //if ((attrs & HasDefault) != 0) str.append(" default(???)");
- return str.toString();
- }
-
- //##########################################################################
-
- // makes the class uninstantiable
- private FieldAttributes() {}
-
- //##########################################################################
-
-} // class FieldAttributes
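toString above renders an ILAsm-style flag string. A small sketch for a public static initonly field (wrapper class name is made up):

    final class FieldAttributesSketch {
        static String demo() {
            short attrs = (short) (FieldAttributes.Public | FieldAttributes.Static | FieldAttributes.InitOnly);
            return FieldAttributes.toString(attrs);   // "public static initonly"
        }
    }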
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/FieldInfo.java b/src/msil/ch/epfl/lamp/compiler/msil/FieldInfo.java
deleted file mode 100644
index 536a67e9a8..0000000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/FieldInfo.java
+++ /dev/null
@@ -1,141 +0,0 @@
-/*
- * System.Reflection-like API for access to .NET assemblies (DLL & EXE)
- */
-
-
-package ch.epfl.lamp.compiler.msil;
-
-import ch.epfl.lamp.compiler.msil.util.PECustomMod;
-
-/**
- * Discovers the attributes of a field and provides access to field metadata.
- *
- * @author Nikolay Mihaylov
- * @version 1.0
- */
-public class FieldInfo extends MemberInfo implements HasCustomModifiers {
-
- //##########################################################################
- // public interface
-
- public final int MemberType() { return MemberTypes.Field; }
-
- /** Attributes associated with this field. */
- public final short Attributes;
-
- /** Type of the field represented by this FieldInfo object. */
- public final Type FieldType;
-
- /** can be null */
- public final CustomModifier[] cmods;
-
- protected final Object value;
-
- public final boolean IsStatic() {
- return (Attributes & FieldAttributes.Static) != 0;
- }
-
- public final boolean IsInitOnly() {
- return (Attributes & FieldAttributes.InitOnly) != 0;
- }
-
- public final boolean IsLiteral() {
- return (Attributes & FieldAttributes.Literal) != 0;
-    }
-
- public final boolean IsPublic() {
- return (Attributes & FieldAttributes.FieldAccessMask)
- == FieldAttributes.Public;
- }
-
- public final boolean IsPrivate() {
- return (Attributes & FieldAttributes.FieldAccessMask)
- == FieldAttributes.Private;
- }
-
- public final boolean IsFamily() {
- return (Attributes & FieldAttributes.FieldAccessMask)
- == FieldAttributes.Family;
- }
-
- public final boolean IsAssembly() {
- return (Attributes & FieldAttributes.FieldAccessMask)
- == FieldAttributes.Assembly;
- }
-
- public final boolean IsFamilyOrAssembly() {
- return (Attributes & FieldAttributes.FieldAccessMask)
- == FieldAttributes.FamORAssem;
- }
-
- public final boolean IsFamilyAndAssembly() {
- return (Attributes & FieldAttributes.FieldAccessMask)
- == FieldAttributes.FamANDAssem;
- }
- public final boolean IsSpecialName() {
- return (Attributes & FieldAttributes.SpecialName) != 0;
- }
-
- public final boolean IsPinvokeImpl() {
- return (Attributes & FieldAttributes.PinvokeImpl) != 0;
- }
-
- public final boolean IsNotSerialized() {
- return (Attributes & FieldAttributes.NotSerialized) != 0;
- }
-
- private boolean knownVolatile = false;
- private boolean cachedVolatile = false;
- public final boolean IsVolatile() {
- if(knownVolatile) return cachedVolatile;
- knownVolatile = true;
- if(cmods == null) {
- cachedVolatile = false;
- return cachedVolatile;
- }
- for (int idx = 0; idx < cmods.length; idx++) {
- if(cmods[idx].marker == CustomModifier.VolatileMarker()) {
- cachedVolatile = true;
- return cachedVolatile;
- }
- }
- cachedVolatile = false;
- return cachedVolatile;
- }
-
- public final Type[] GetOptionalCustomModifiers () {
- return CustomModifier.helperCustomMods(false, cmods);
- }
-
- public final Type[] GetRequiredCustomModifiers() {
- return CustomModifier.helperCustomMods(true, cmods);
- }
-
- public String toString() {
- return FieldAttributes.toString(Attributes) + " " +
- FieldType + " " + DeclaringType.FullName + "::" + Name;
- }
-
- //##########################################################################
-
- protected static final FieldInfo[] EMPTY_ARRAY = new FieldInfo[0];
-
- /** Initializes a new instance of the FieldInfo class. */
- protected FieldInfo(String name, Type declType,
- int attrs, PECustomMod fieldTypeWithMods, Object value)
- {
- super(name, declType);
- FieldType = fieldTypeWithMods.marked;
- cmods = fieldTypeWithMods.cmods;
- Attributes = (short) attrs;
- this.value = value;
- }
-
-    /** Returns the value of the field (e.g., the compile-time constant of a literal field); may be null. */
- public Object getValue() { return value; }
-
- //##########################################################################
-
-} // class FieldInfo
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/GenericParamAndConstraints.java b/src/msil/ch/epfl/lamp/compiler/msil/GenericParamAndConstraints.java
deleted file mode 100644
index 6237fbafee..0000000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/GenericParamAndConstraints.java
+++ /dev/null
@@ -1,40 +0,0 @@
-package ch.epfl.lamp.compiler.msil;
-
-/**
- * @author Miguel Garcia
- */
-public class GenericParamAndConstraints {
-
- public GenericParamAndConstraints(int Number, String Name, Type[] Constraints,
- boolean isInvariant, boolean isCovariant, boolean isContravariant,
- boolean isReferenceType, boolean isValueType, boolean hasDefaultConstructor) {
- this.Number = Number;
- this.Name = Name;
- this.Constraints = Constraints; // TODO representation for the class and new() constraints missing
- this.isInvariant = isInvariant;
- this.isCovariant = isCovariant;
- this.isContravariant = isContravariant;
- this.isReferenceType = isReferenceType;
- this.isValueType = isValueType;
- this.hasDefaultConstructor = hasDefaultConstructor;
-
- }
-
- public final int Number;
- public final String Name; // can be null
- public final Type[] Constraints; // can be empty array
- public final boolean isInvariant; // only relevant for TVars, not for an MVar
- public final boolean isCovariant; // only relevant for TVars, not for an MVar
- public final boolean isContravariant; // only relevant for TVars, not for an MVar
- public final boolean isReferenceType;
- public final boolean isValueType;
- public final boolean hasDefaultConstructor;
-
- public String toString() {
- String res = Name == null ? "<NoName>" : (Name.equals("") ? "<NoName>" : Name);
-        res = res + " <: " + java.util.Arrays.toString(Constraints);
- return res;
- }
-
-}
-
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/HasCustomModifiers.java b/src/msil/ch/epfl/lamp/compiler/msil/HasCustomModifiers.java
deleted file mode 100644
index 5ead087350..0000000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/HasCustomModifiers.java
+++ /dev/null
@@ -1,9 +0,0 @@
-package ch.epfl.lamp.compiler.msil;
-
-public interface HasCustomModifiers {
-
- public Type[] GetOptionalCustomModifiers();
-
- public Type[] GetRequiredCustomModifiers();
-
-}
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/ICustomAttributeProvider.java b/src/msil/ch/epfl/lamp/compiler/msil/ICustomAttributeProvider.java
deleted file mode 100644
index 927185962c..0000000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/ICustomAttributeProvider.java
+++ /dev/null
@@ -1,57 +0,0 @@
-/*
- * System.Reflection-like API for access to .NET assemblies (DLL & EXE)
- */
-
-
-package ch.epfl.lamp.compiler.msil;
-
-/**
- * Provides custom attributes for reflection objects that support them.
- *
- * @author Nikolay Mihaylov
- * @version 1.0
- */
-public interface ICustomAttributeProvider {
-
- //##########################################################################
- // interface method definitions
-
- /** Returns an array of all of the custom attributes
- * defined on this member, excluding named attributes,
- * or an empty array if there are no custom attributes.
- *
- * @param inherit - When true, look up the hierarchy chain
- * for the inherited custom attribute.
- * @return - An array of Objects representing custom attributes,
- * or an empty array.
- */
- public Object[] GetCustomAttributes(boolean inherit);
-
-
- /** Returns an array of custom attributes defined on this member,
- * identified by type, or an empty array
- * if there are no custom attributes of that type.
- *
- * @param attributeType - The type of the custom attributes.
- * @param inherit - When true, look up the hierarchy chain
- * for the inherited custom attribute.
- * @return - An array of Objects representing custom attributes,
- * or an empty array.
- */
- public Object[] GetCustomAttributes(Type attributeType, boolean inherit);
-
-
- /** Indicates whether one or more instance of attributeType
- * is defined on this member
- *
- * @param attributeType - The type of the custom attributes
- * @param inherit - When true, look up the hierarchy chain
- * for the inherited custom attribute.
- * @return - true if the attributeType is defined on this member;
- * false otherwise.
- */
- public boolean IsDefined(Type attributeType, boolean inherit);
-
- //##########################################################################
-
-} // interface ICustomAttributeProvider
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/MemberInfo.java b/src/msil/ch/epfl/lamp/compiler/msil/MemberInfo.java
deleted file mode 100644
index 65ff1b290b..0000000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/MemberInfo.java
+++ /dev/null
@@ -1,47 +0,0 @@
-/*
- * System.Reflection-like API for access to .NET assemblies (DLL & EXE)
- */
-
-
-package ch.epfl.lamp.compiler.msil;
-
-/**
- * The root class of the Reflection hierarchy.
- *
- * @author Nikolay Mihaylov
- * @version 1.0
- */
-public abstract class MemberInfo extends CustomAttributeProvider {
-
- //##########################################################################
-
- /** The name of this member. */
- public final String Name;
-
- /**
- * The class that declares this member.
- * Note: if the MemberInfo object is a global member,
- * (that is, it was obtained from Module.GetMethods,
- * which returns global methods on a module), then DeclaringType
- * will be a null reference.
- */
- public final Type DeclaringType;
-
- /** An enumerated value from the MemberTypes class,
- * specifying a constructor, event, field, method,
- * property, type information, all, or custom. */
- public abstract int MemberType();
-
- //##########################################################################
- // protected members
-
- protected static final MemberInfo[] EMPTY_ARRAY = new MemberInfo[0];
-
- protected MemberInfo(String name, Type declType) {
- Name = name;
- DeclaringType = declType;
- }
-
- //########################################################################
-
-} // class MemberInfo
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/MemberTypes.java b/src/msil/ch/epfl/lamp/compiler/msil/MemberTypes.java
deleted file mode 100644
index 5f49ad3323..0000000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/MemberTypes.java
+++ /dev/null
@@ -1,81 +0,0 @@
-/*
- * System.Reflection-like API for access to .NET assemblies (DLL & EXE)
- */
-
-
-package ch.epfl.lamp.compiler.msil;
-
-/**
- * Marks each type of member that is defined as a derived class of MemberInfo.
- *
- * @author Nikolay Mihaylov
- * @version 1.0
- */
-public final class MemberTypes {
-
- //##########################################################################
-
- /** Specifies that the member is a constructor,
- * representing a ConstructorInfo member. */
- public static final int Constructor = 0x01;
-
-
- /** Specifies that the member is an event,
- * representing an EventInfo member. */
- public static final int Event = 0x02;
-
-
- /** Specifies that the member is a field,
- * representing a FieldInfo member. */
- public static final int Field = 0x04;
-
-
- /** Specifies that the member is a method,
- * representing a MethodInfo member. */
- public static final int Method = 0x08;
-
-
- /** Specifies that the member is a property,
- * representing a PropertyInfo member.
- */
- public static final int Property = 0x10;
-
- /** Specifies that the member is a type,
- * representing a TypeInfo member. */
- public static final int TypeInfo = 0x20;
-
-
- /** Specifies that the member is a custom member type. */
- public static final int Custom = 0x40;
-
-
- /** Specifies that the member is a nested type,
- * extending MemberInfo. */
- public static final int NestedType = 0x80;
-
-
- /** Specifies all member types. */
- public static final int All =
- Constructor | Event | Field | Method | Property | TypeInfo | NestedType;
-
-
- public static String toString(int memberType) {
- if ((memberType & Constructor) != 0) return "Constructor";
- if ((memberType & Event) != 0) return "Event";
- if ((memberType & Field) != 0) return "Field";
- if ((memberType & Method) != 0) return "Method";
- if ((memberType & Property) != 0) return "Property";
- if ((memberType & TypeInfo) != 0) return "TypeInfo";
- if ((memberType & Custom) != 0) return "Custom";
- if ((memberType & NestedType) != 0) return "NestedType";
- return "Unknown MemberType: " + memberType;
- }
-
- //##########################################################################
-
- // makes the class uninstantiable
- private MemberTypes() {}
-
- //##########################################################################
-
-} // class MemberTypes
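toString above reports only the first matching kind, and the All mask does not include Custom. A small sketch (wrapper class name is made up):

    final class MemberTypesSketch {
        static void demo() {
            String k1 = MemberTypes.toString(MemberTypes.Field);                      // "Field"
            String k2 = MemberTypes.toString(MemberTypes.Field | MemberTypes.Method); // "Field" (first match wins)
            boolean customInAll = (MemberTypes.All & MemberTypes.Custom) != 0;        // false
        }
    }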
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/MethodAttributes.java b/src/msil/ch/epfl/lamp/compiler/msil/MethodAttributes.java
deleted file mode 100644
index a703c38fb8..0000000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/MethodAttributes.java
+++ /dev/null
@@ -1,158 +0,0 @@
-/*
- * System.Reflection-like API for access to .NET assemblies (DLL & EXE)
- */
-
-
-package ch.epfl.lamp.compiler.msil;
-
-/** Specifies flags for method attributes.
- *
- * @author Nikolay Mihaylov
- * @version 1.0
- */
-public final class MethodAttributes {
-
- //##########################################################################
- // Method access attributes
-
- /** Bitmask used to retrieve accessibility information. */
- public static final short MemberAccessMask = 0x0007;
-
- ///** Member not referenceable*/
-    //public static final short CompilerControlled = 0x0000;
-
- /** Indicates that the member cannot be referenced. */
- public static final short PrivateScope = 0x0000;
-
- /** Method is accessible only by the current class. */
- public static final short Private = 0x0001;
-
- /** Method is accessible to members of this type
- * and its derived types that are in this assembly only. */
- public static final short FamANDAssem = 0x0002;
-
- /** Method is accessible to any class of this assembly. */
- public static final short Assembly = 0x0003;
-
- /** Method is accessible only to members of this class
- * and its derived classes. */
- public static final short Family = 0x0004;
-
- /** Method is accessible to derived classes anywhere,
- * as well as to any class in the assembly. */
- public static final short FamORAssem = 0x0005;
-
- /** Method is accessible to any object for which this object is in scope. */
- public static final short Public = 0x0006;
-
-
- //##########################################################################
- // Flags
-
- /** Method is defined on the type; otherwise, it is defined per instance. */
- public static final short Static = 0x0010;
-
- /** Method cannot be overridden. */
- public static final short Final = 0x0020;
-
- /** Method is virtual. */
- public static final short Virtual = 0x0040;
-
- /** Method hides by name and signature; otherwise, by name only. */
- public static final short HideBySig = 0x0080;
-
-
- //##########################################################################
- // vtable attributes
-
- /** Bitmask used to retrieve vtable attributes. */
- public static final short VtableLayoutMask = 0x0100;
-
- /** Method reuses existing slot in the vtable. */
- public static final short ReuseSlot = 0x0000;
-
-
- /** Method always gets a new slot in the vtable. */
- public static final short NewSlot = 0x0100;
-
-
- //##########################################################################
- // Flags
-
- /** Method does not provide implementation. */
- public static final short Abstract = 0x0400;
-
- /** Method is special. */
- public static final short SpecialName = 0x0800;
-
-
- //##########################################################################
- // Interop attributes
-
- /** Method implementation is forwarded through PInvoke. */
- public static final short PInvokeImpl = 0x2000;
-
- /** Reserved: shall be zero for conforming implementations.
- * Managed method is exported by thunk to unmanaged code. */
- public static final short UnmanagedExport = 0x0008;
-
-
- //##########################################################################
- // Additional flags
-
- /** CLI provides special behavior, depending on the name of the method. */
- public static final short RTSpecialName = 0x1000;
-
- /** Method has security associated with it.
- * Reserved flag for runtime use only.
- */
- public static final short HasSecurity = 0x00000040;
-
- /**
- * Indicates that the method calls another method containing security code.
- * Reserved flag for runtime use only.
- */
- public static final short RequireSecObject = 0x00004000;
-
- /** Indicates a reserved flag for runtime use only. */
- public static final short ReservedMask = 0x0000;
-
-
- //##########################################################################
-
- public static String toString(short attrs) {
- StringBuffer str = new StringBuffer(accessFlagsToString(attrs));
- if ((attrs & Static) != 0) str.append(" static");
- if ((attrs & Final) != 0) str.append(" final");
- if ((attrs & Virtual) != 0) str.append(" virtual");
- if ((attrs & Abstract) != 0) str.append(" abstract");
- if ((attrs & HideBySig) != 0) str.append(" hidebysig");
- if ((attrs & NewSlot) != 0) str.append(" newslot");
- if ((attrs & SpecialName) != 0) str.append(" specialname");
- if ((attrs & PInvokeImpl) != 0) str.append(" pinvokeimpl(?!?)");
- if ((attrs & RTSpecialName) != 0) str.append(" rtspecialname");
- return str.toString();
-
- }
-
- public static String accessFlagsToString(short attrs) {
- switch (attrs & MemberAccessMask) {
- case PrivateScope: return "compilercontrolled";
- case Private: return "private";
- case FamANDAssem: return "famandassem";
- case Assembly: return "assembly";
- case Family: return "family";
- case FamORAssem: return "famorassem";
- case Public: return "public";
- default: return "xxx";
- }
- }
-
- //##########################################################################
-
- // makes the class uninstantiable
- private MethodAttributes() {}
-
- //##########################################################################
-
-} // class MethodAttributes
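A usage sketch for the flag rendering above, e.g. a public virtual method introduced in a new vtable slot (wrapper class name is made up):

    final class MethodAttributesSketch {
        static String demo() {
            short attrs = (short) (MethodAttributes.Public | MethodAttributes.Virtual
                                 | MethodAttributes.HideBySig | MethodAttributes.NewSlot);
            return MethodAttributes.toString(attrs);  // "public virtual hidebysig newslot"
        }
    }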
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/MethodBase.java b/src/msil/ch/epfl/lamp/compiler/msil/MethodBase.java
deleted file mode 100644
index fe6404346e..0000000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/MethodBase.java
+++ /dev/null
@@ -1,198 +0,0 @@
-/*
- * System.Reflection-like API for access to .NET assemblies (DLL & EXE)
- */
-
-
-package ch.epfl.lamp.compiler.msil;
-
-import java.util.Iterator;
-
-/**
- * The common superclass of MemberInfo and ConstructorInfo
- *
- * @author Nikolay Mihaylov
- * @version 1.0
- */
-public abstract class MethodBase extends MemberInfo {
-
- //##########################################################################
- // public interface
-
- private java.util.List /* GenericParamAndConstraints */ mVars = new java.util.LinkedList();
- private GenericParamAndConstraints[] sortedMVars = null;
-
- public void addMVar(GenericParamAndConstraints tvarAndConstraints) {
- sortedMVars = null;
- mVars.add(tvarAndConstraints);
- }
-
- public GenericParamAndConstraints[] getSortedMVars() {
- if(sortedMVars == null) {
- sortedMVars = new GenericParamAndConstraints[mVars.size()];
- for (int i = 0; i < sortedMVars.length; i ++){
- Iterator iter = mVars.iterator();
- while(iter.hasNext()) {
- GenericParamAndConstraints tvC = (GenericParamAndConstraints)iter.next();
- if(tvC.Number == i) {
- sortedMVars[i] = tvC;
- }
- }
- }
- }
- return sortedMVars;
- }
-
- public final boolean IsGeneric() {
- return mVars.size() > 0;
- }
-
- /** The attributes associated with this method/constructor. */
- public final short Attributes;
-
-    /** The calling convention of this method or constructor. */
- public final short CallingConvention;
-
- public abstract boolean IsConstructor();
-
- public final boolean IsAbstract() {
- return (Attributes & MethodAttributes.Abstract) != 0;
- }
-
- public final boolean IsFinal() {
-        return (Attributes & MethodAttributes.Final) != 0;
- }
-
- public final boolean IsVirtual() {
-        return (Attributes & MethodAttributes.Virtual) != 0;
- }
-
- public final boolean IsInstance() {
- return !IsStatic() && !IsVirtual();
- }
-
- public final boolean IsStatic() {
- return (Attributes & MethodAttributes.Static) != 0;
- }
-
- public final boolean IsHideBySig() {
- return (Attributes & MethodAttributes.HideBySig) != 0;
- }
-
- public final boolean IsSpecialName() {
- return (Attributes & MethodAttributes.SpecialName) != 0;
- }
-
-
- public final boolean IsPublic() {
- return (Attributes & MethodAttributes.MemberAccessMask)
- == MethodAttributes.Public;
- }
-
- public final boolean IsPrivate() {
- return (Attributes & MethodAttributes.MemberAccessMask)
- == MethodAttributes.Private;
- }
-
- public final boolean IsFamily() {
- return (Attributes & MethodAttributes.MemberAccessMask)
- == MethodAttributes.Family;
- }
-
- public final boolean IsAssembly() {
- return (Attributes & MethodAttributes.MemberAccessMask)
- == MethodAttributes.Assembly;
- }
-
- public final boolean IsFamilyOrAssembly() {
- return (Attributes & MethodAttributes.MemberAccessMask)
- == MethodAttributes.FamORAssem;
- }
-
- public final boolean IsFamilyAndAssembly() {
- return (Attributes & MethodAttributes.MemberAccessMask)
- == MethodAttributes.FamANDAssem;
- }
-
- public boolean HasPtrParamOrRetType() {
- // the override in MethodInfo checks the return type
- ParameterInfo[] ps = GetParameters();
- for (int i = 0; i < ps.length; i++) {
- Type pT = ps[i].ParameterType;
- if(pT.IsPointer()) {
- // Type.mkPtr creates a msil.Type for a pointer type
- return true;
- }
- if(pT.IsByRef() && !pT.GetElementType().CanBeTakenAddressOf()) {
-                /* TODO Cases where GenMSIL (so far) can't emit good bytecode:
- the type being taken address of IsArray(), IsGeneric(), or IsTMVarUsage.
- For example, System.Enum declares
- public static bool TryParse<TEnum>(string value, out TEnum result) where TEnum : struct, new();
- */
- return true;
- }
- }
- return false;
- }
-
- /** Returns the parameters of the method/constructor. */
- public ParameterInfo[] GetParameters() {
- return (ParameterInfo[]) params.clone();
- }
-
- public int GetMethodImplementationFlags() { return implAttributes; }
-
- //##########################################################################
-
- /** Method parameters. */
- protected ParameterInfo[] params;
-
- protected short implAttributes;
-
- protected MethodBase(String name, Type declType, int attrs, Type[] paramTypes)
- {
- this(name, declType, attrs);
- assert paramTypes != null;
- params = new ParameterInfo[paramTypes.length];
- for (int i = 0; i < params.length; i++)
- params[i] = new ParameterInfo(null, paramTypes[i], 0, i);
- }
-
- protected MethodBase(String name, Type declType, int attrs,
- ParameterInfo[] params)
- {
- this(name, declType, attrs);
- this.params = params;
- }
-
- /**
- */
- private MethodBase(String name, Type declType, int attrs) {
- super(name, declType);
-
- Attributes = (short) attrs;
-
- if (IsConstructor()) {
- attrs |= MethodAttributes.SpecialName;
- attrs |= MethodAttributes.RTSpecialName;
- }
-
- CallingConvention = (short) (CallingConventions.Standard
- | (IsStatic() ? (short)0 : CallingConventions.HasThis));
- }
-
- //##########################################################################
- // internal methods
-
- protected String params2String() {
- StringBuffer s = new StringBuffer("(");
- for (int i = 0; i < params.length; i++) {
- if (i > 0) s.append(", ");
- s.append(params[i].ParameterType);
- }
- s.append(")");
- return s.toString();
- }
-
- //##########################################################################
-
-} // class MethodBase
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/MethodImplAttributes.java b/src/msil/ch/epfl/lamp/compiler/msil/MethodImplAttributes.java
deleted file mode 100644
index 8e8d879593..0000000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/MethodImplAttributes.java
+++ /dev/null
@@ -1,116 +0,0 @@
-/*
- * System.Reflection-like API for access to .NET assemblies (DLL & EXE)
- */
-
-
-package ch.epfl.lamp.compiler.msil;
-
-/**
- * Method implementation attributes
- * @author Nikolay Mihaylov
- * @version 1.0
- */
-public abstract class MethodImplAttributes {
-
- //##########################################################################
-
- /**
- * Specifies flags about code type. 3
- */
- public static final short CodeTypeMask = (short) 0x0003;
-
- /**
- * Specifies that the method implementation is in MSIL. 0
- */
- public static final short IL = (short) 0x0000;
-
- /**
- * Specifies that the method implementation is native. 1
- */
- public static final short Native = (short) 0x0001;
-
- /**
- * This member supports the .NET Framework infrastructure and
- * is not intended to be used directly from your code. 2
- */
- public static final short OPTIL = (short) 0x0002;
-
- /**
- * Specifies that the method implementation is provided by the runtime. 3
- */
- public static final short Runtime = (short) 0x0003;
-
-
-
- /**
- * Specifies whether the code is managed or unmanaged. 4
- */
- public static final short ManagedMask = (short) 0x0004;
-
- /**
- * Specifies that the method implementation is managed, otherwise unmanaged.
- */
- public static final short Managed = (short) 0x0000;
-
- /**
- * Specifies that the method implementation is unmanaged, otherwise managed.
- */
- public static final short Unmanaged = (short) 0x0004;
-
-
-
- /**
- * Specifies that the method cannot be inlined. 8
- */
- public static final short NoInlining = (short) 0x0008;
-
- /**
- * Specifies that the method is not defined. 16
- */
- public static final short ForwardRef = (short) 0x0010;
-
- /**
- * Specifies that the method is single-threaded through the body.
- * You can also use the C# lock statement or the Visual Basic
- * Lock function for this purpose. 32
- */
- public static final short Synchronized = (short) 0x0020;
-
- /**
- * Specifies that the method signature is exported exactly as declared. 128
- */
- public static final short PreserveSig = (short) 0x0080;
-
- /**
- * Specifies an internal call. 4096
- */
- public static final short InternalCall = (short) 0x1000;
-
- /**
- * Specifies a range check value. 65535
- */
- public static final short MaxMethodImplVal = (short) 0xffff;
-
- //##########################################################################
-
- public static String toString(int implAttr) {
- StringBuffer s = new StringBuffer();
- switch (implAttr & CodeTypeMask) {
- case IL: s.append("cil"); break;
- case Native: s.append("native"); break;
- case Runtime: s.append("runtime"); break;
- }
- switch (implAttr & ManagedMask) {
- case Managed: s.append(" managed"); break;
- case Unmanaged: s.append(" unmanaged"); break;
- }
- if ((implAttr & NoInlining) != 0) s.append(" noinlining");
- if ((implAttr & ForwardRef) != 0) s.append(" forwardref");
- if ((implAttr & Synchronized) != 0) s.append(" synchronized");
- if ((implAttr & InternalCall) != 0) s.append(" internalcall");
- return s.toString();
- }
-
- //##########################################################################
-
-} // class MethodImplAttributes
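
toString above unpacks the packed implementation-attribute word field by field: the two-bit code-type field, the managed/unmanaged bit, then the independent boolean flags. A short hedged sketch of the same decoding over an example value, with the constants inlined from the class above so it runs stand-alone:

    // Hedged sketch: decoding a MethodImplAttributes word the way toString(int) does.
    public class ImplAttrDemo {
        public static void main(String[] args) {
            int implAttr = 0x0003 /* Runtime */ | 0x1000 /* InternalCall */;   // Managed is 0x0000
            StringBuilder s = new StringBuilder();
            switch (implAttr & 0x0003) {                                       // CodeTypeMask
                case 0x0000: s.append("cil"); break;
                case 0x0001: s.append("native"); break;
                case 0x0003: s.append("runtime"); break;
            }
            s.append((implAttr & 0x0004) == 0 ? " managed" : " unmanaged");    // ManagedMask
            if ((implAttr & 0x1000) != 0) s.append(" internalcall");
            System.out.println(s);                                             // "runtime managed internalcall"
        }
    }
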
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/MethodInfo.java b/src/msil/ch/epfl/lamp/compiler/msil/MethodInfo.java
deleted file mode 100644
index a415e7551f..0000000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/MethodInfo.java
+++ /dev/null
@@ -1,69 +0,0 @@
-/*
- * System.Reflection-like API for access to .NET assemblies (DLL & EXE)
- */
-
-
-package ch.epfl.lamp.compiler.msil;
-
-import java.util.Iterator;
-
-/**
- * Discovers the attributes of a method and provides access to method metadata.
- *
- * @author Nikolay Mihaylov
- * @version 1.0
- */
-public class MethodInfo extends MethodBase {
-
- public boolean HasPtrParamOrRetType() {
- if(ReturnType.IsByRef() && !(ReturnType.GetElementType().IsValueType())) {
- /* A method returning ByRef won't pass peverify, so I guess this is dead code. */
- return true;
- }
- if(ReturnType.IsPointer()) {
- return true;
- }
- return super.HasPtrParamOrRetType();
- }
-
- //##########################################################################
- // public members
-
- public final int MemberType() { return MemberTypes.Method; }
-
- public final boolean IsConstructor() { return false; }
-
- /** The return type of this method.
- */
- public final Type ReturnType;
-
- //##########################################################################
- // protected members
-
- protected static final MethodInfo[] EMPTY_ARRAY = new MethodInfo[0];
-
- /**
- * Initializes a new instance of the MethodInfo class.
- */
- protected MethodInfo(String name, Type declType,
- int attrs, Type returnType, Type[] paramTypes )
- {
- super(name, declType, attrs, paramTypes);
- ReturnType = returnType;
- }
-
- protected MethodInfo(String name, Type declType,
- int attrs, Type returnType, ParameterInfo[] params )
- {
- super(name, declType, attrs, params);
- ReturnType = returnType;
- }
-
- public String toString() {
- return MethodAttributes.toString(Attributes) + " " + ReturnType +
- " " + DeclaringType + "::" + Name + params2String();
- }
-
- //##########################################################################
-
-} // class MethodInfo
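
HasPtrParamOrRetType is the importer's guard against signatures it cannot translate; the MethodInfo override adds the return-type checks on top of the parameter scan inherited from MethodBase. A hedged usage sketch of how a caller might skip such methods while walking a type's members; Type.GetMethods() returning a MethodInfo[] is an assumption modelled on the System.Reflection shape of this API, not something shown in this diff.

    // Hedged sketch: filter out methods whose signatures use pointers or
    // non-addressable by-ref types before trying to import them.
    import ch.epfl.lamp.compiler.msil.MethodInfo;
    import ch.epfl.lamp.compiler.msil.Type;

    public class ImportableMethods {
        static void printImportable(Type t) {
            for (MethodInfo m : t.GetMethods()) {     // assumed reflection-style accessor
                if (m.HasPtrParamOrRetType())
                    continue;                         // cannot be translated, skip it
                System.out.println(m);                // uses MethodInfo.toString() above
            }
        }
    }
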
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/Module.java b/src/msil/ch/epfl/lamp/compiler/msil/Module.java
deleted file mode 100644
index 8dd5e7119f..0000000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/Module.java
+++ /dev/null
@@ -1,155 +0,0 @@
-/*
- * System.Reflection-like API for access to .NET assemblies (DLL & EXE)
- */
-
-
-package ch.epfl.lamp.compiler.msil;
-
-import java.util.Map;
-import java.util.HashMap;
-
-/**
- * Defines and represents a module. Get an instance of ModuleBuilder
- * by calling DefineDynamicModule.
- * A module is a portable executable file of type .dll or .exe consisting
- * of one or more classes and interfaces. There may be multiple namespaces
- * contained in a single module, and a namespace may span multiple modules.
- * One or more modules deployed as a unit compose an assembly.
- *
- * @author Nikolay Mihaylov
- * @version 1.0
- */
-public abstract class Module extends CustomAttributeProvider {
-
- //##########################################################################
- // public fields
-
- /** String representing the name of the module with the path removed. */
- public final String Name;
-
- /** String representing the fully qualified name and path to this module. */
- public final String FullyQualifiedName;
-
- /** String representing the name of the module. */
- public String ScopeName;
-
- /** The Assembly the Module belongs to. */
- public final Assembly Assembly;
-
- //##########################################################################
- // constructor
-
- protected Module(String name, String filename,
- String scopeName, Assembly assembly)
- {
- this.Name = name;
- this.FullyQualifiedName = filename;
- this.ScopeName = scopeName;
- this.Assembly = assembly;
- }
-
- //##########################################################################
- // public methods
-
- /** Returns the specified class, performing a case-sensitive search. */
- public Type GetType(String name) {
- initTypes();
- return (Type) typesMap.get(name);
- }
-
- /**
- * @return all the classes defined within this module.
- */
- public Type[] GetTypes() {
- initTypes();
- return (Type[]) types.clone();
- }
-
- /**
- * @return the global field with the specified name.
- */
- public FieldInfo GetField(String name) {
- for (int i = 0; i < fields.length; i++)
- if (fields[i].Name.equals(name))
- return fields[i];
- return null;
- }
-
- /**
- * @return an array of the global fields of the module
- */
- public FieldInfo[] GetFields() {
- return (FieldInfo[]) fields.clone();
- }
-
- /**
- * @return - the global method with the specified name
- */
- public MethodInfo GetMethod(String name) {
- for (int i = 0; i < methods.length; i++)
- if (methods[i].Name.equals(name))
- return methods[i];
- return null;
- }
-
- /**
- * @return - an array of all the global methods defined in this module.
- */
- public MethodInfo[] GetMethods() {
- return (MethodInfo[]) methods.clone();
- }
-
- /**
- */
- public String toString() { return Name; }
-
- //########################################################################
- // protected members
-
- // maps fully qualified type names to the types defined in this module
- protected final Map typesMap = new HashMap();
-
- // all the types defined in this module
- protected Type[] types;
-
- // the global fields of the module
- protected FieldInfo[] fields = FieldInfo.EMPTY_ARRAY;
-
- // the global methods of the module
- protected MethodInfo[] methods = MethodInfo.EMPTY_ARRAY;
-
- protected Type addType(Type type) {
- addType(type.FullName, type);
- Assembly.addType(type);
- return type;
- }
-
- protected Type addType(String name, Type type) {
- assert type != null;
- typesMap.put(name, type);
- return type;
- }
-
- private boolean initTypes = true;
- protected final void initTypes() {
- if (initTypes) {
- loadTypes();
- initTypes = false;
- }
- }
-
- protected void loadTypes() {}
-
- private boolean initGlobals = true;
- protected final void initGlobals() {
- if (initGlobals) {
- loadGlobals();
- initGlobals = false;
- }
- }
-
- protected void loadGlobals() {}
-
- //##########################################################################
-
-} // class Module
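
Module defers the expensive metadata reads behind the initTypes/initGlobals guards: loadTypes and loadGlobals run at most once, and only on first use. A minimal stand-alone sketch of that once-only guard, independent of the class above and, like the original, not thread-safe:

    // Hedged sketch of the once-only initialization guard used by Module.
    import java.util.HashMap;
    import java.util.Map;

    public class LazyTypesDemo {
        private final Map<String, String> typesMap = new HashMap<>();
        private boolean initTypes = true;             // flips after the first load

        public String getType(String name) {
            initTypes();
            return typesMap.get(name);
        }

        private void initTypes() {
            if (initTypes) {
                loadTypes();                          // runs exactly once, on first access
                initTypes = false;
            }
        }

        private void loadTypes() {
            typesMap.put("System.Object", "stub");    // stands in for reading metadata tables
        }
    }
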
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/PEAssembly.java b/src/msil/ch/epfl/lamp/compiler/msil/PEAssembly.java
deleted file mode 100644
index a31db16c92..0000000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/PEAssembly.java
+++ /dev/null
@@ -1,69 +0,0 @@
-/*
- * System.Reflection-like API for access to .NET assemblies (DLL & EXE)
- */
-
-
-package ch.epfl.lamp.compiler.msil;
-
-import ch.epfl.lamp.compiler.msil.util.Table;
-import ch.epfl.lamp.compiler.msil.util.Table.*;
-
-import java.io.File;
-
-import java.util.Map;
-import java.util.HashMap;
-
-/** Represents an assembly that resides in a real .NET assembly
- *
- * @author Nikolay Mihaylov
- * @version 1.0
- */
-final class PEAssembly extends Assembly {
-
- private final PEFile pefile;
-
- private PEModule mainModule;
-
- public PEAssembly(PEFile pefile, AssemblyName an) {
- super(an, true);
- this.pefile = pefile;
- String name = pefile.ModuleDef(1).getName();
- mainModule = new PEModule(pefile, 1, name, this);
- addModule(name, mainModule);
- //initModules();
- }
-
- protected void loadModules() {
- File parentDir = pefile.getParentFile();
- FileDef fd = pefile.FileDef;
- for (int row = 1; row <= fd.rows; row++) {
- fd.readRow(row);
- String filename = fd.getName();
- File f = new File(parentDir, filename);
- PEFile pe = Assembly.getPEFile(f);
- if (pe == null) {
- f = new File(filename);
- pe = Assembly.getPEFile(f);
- if (pe == null)
- continue;
-// throw new RuntimeException("Cannot find file " + filename +
-// " referenced by assembly " + this);
- }
- String name = pe.ModuleDef(1).getName();
- PEModule module = new PEModule(pe, 1, name, this);
- addModule(name, module);
- }
- }
-
- public File getFile() {
- return pefile.getUnderlyingFile();
- }
-
- protected void loadCustomAttributes(Type attributeType) {
- initModules();
- mainModule.initAttributes(this, 1, Table.AssemblyDef.ID, attributeType);
- }
-
- //##########################################################################
-
-} // class PEAssembly
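
loadModules resolves each FileDef entry first as a sibling of the assembly's own file and then against the bare name (the working directory), silently skipping modules it cannot find. A hedged stand-alone sketch of that two-step lookup; resolveModule is an invented helper name standing in for the Assembly.getPEFile(...) calls above.

    // Hedged sketch of PEAssembly's module-file resolution order.
    import java.io.File;

    public class ModuleLookupDemo {
        static File resolveModule(File assemblyDir, String filename) {
            File f = new File(assemblyDir, filename);
            if (f.exists()) return f;                 // preferred: next to the assembly
            f = new File(filename);
            return f.exists() ? f : null;             // fallback: working directory; null means skip
        }
    }
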
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/PEFile.java b/src/msil/ch/epfl/lamp/compiler/msil/PEFile.java
deleted file mode 100644
index 3eb22b9985..0000000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/PEFile.java
+++ /dev/null
@@ -1,941 +0,0 @@
-/*
- * System.Reflection-like API for access to .NET assemblies (DLL & EXE)
- */
-
-
-package ch.epfl.lamp.compiler.msil;
-
-import ch.epfl.lamp.compiler.msil.util.*;
-import ch.epfl.lamp.compiler.msil.util.Table.*;
-
-import ch.epfl.lamp.compiler.msil.Type;
-import ch.epfl.lamp.compiler.msil.Module;
-
-import java.io.File;
-import java.io.RandomAccessFile;
-import java.io.PrintStream;
-import java.io.IOException;
-import java.io.FileNotFoundException;
-
-import java.nio.ByteBuffer;
-import java.nio.channels.FileChannel;
-import java.nio.MappedByteBuffer;
-
-import java.util.Date;
-
-/**
- * A class that represents a .NET PE/COFF image.
- *
- * @author Nikolay Mihaylov
- * @version 1.0
- * @see <a href="http://www.ecma-international.org/publications/standards/Ecma-335.htm">Standard ECMA-335: Common Language Infrastructure (CLI), 4th edition (June 2006)</a>
- */
-public class PEFile {
-
- //##########################################################################
-
- public static final int INT_SIZE = 4;
-
- protected final int PE_SIGNATURE_OFFSET;
- protected final int COFF_HEADER_OFFSET;
- protected final int PE_HEADER_OFFSET;
-
- protected final int numOfSections;
- protected final int CLI_RVA;
- protected final int CLI_Length;
- public final int rvaMetadata;
- public final int posMetadata;
- protected final int numOfStreams;
- protected final int optHeaderSize;
-
- protected final File underlyingFile;
- protected final RandomAccessFile file;
- protected final MappedByteBuffer buf;
-
- protected final PESection [] sections;
-
- public PEStream Meta, Strings, US, Blob, GUID;
-
- private final Table [] tables = new Table[Table.MAX_NUMBER];
-
- public final boolean isDLL;
-
- protected final int heapSizes;
- public final boolean StringIsShort, BlobIsShort, GUIDIsShort;
-
- protected PEModule pemodule = null;
-
- //##########################################################################
- // PEFile constructor
-
- private static void fileFormatCheck(boolean cond, String s) {
- if (cond)
- throw new RuntimeException(s);
- }
-
- /**
- */
- public PEFile(String filename) throws FileNotFoundException {
- this.underlyingFile = new File(filename);
- this.file = new RandomAccessFile(underlyingFile, "r");
- FileChannel fc = file.getChannel();
- MappedByteBuffer bb = null;
- try {
- bb = fc.map(FileChannel.MapMode.READ_ONLY, 0L, fc.size());
- } catch (IOException e) { throw new RuntimeException(e); }
-
- /** Ecma 335, 25 File format extensions to PE:
- *
- * "Unless stated otherwise, all binary values are stored in little-endian format."
- */
-
- bb.order(java.nio.ByteOrder.LITTLE_ENDIAN);
- this.buf = bb;
-
- /** Ecma 335, 25.2.1 MS-DOS header:
- *
- * "The PE format starts with an MS-DOS stub of exactly the following 128 bytes to
- * be placed at the front of the module."
- *
- * We are only checking for MZ (Mark Zbikowski)
- */
-
- seek(0);
- fileFormatCheck(readByte() != 0x4d, "Invalid PE file format: " + filename); // 'M'
- fileFormatCheck(readByte() != 0x5a, "Invalid PE file format: " + filename); // 'Z'
-
- /** Ecma 335, 25.2.1 MS-DOS header:
- *
- * "At offset 0x3c in the DOS header is a 4-byte unsigned integer offset, lfanew,
- * to the PE signature (shall be "PE\0\0"), immediately followed by the PE file header."
- */
-
- seek(0x3c);
- PE_SIGNATURE_OFFSET = readInt();
- seek(PE_SIGNATURE_OFFSET);
- // start of PE signature (a signature that is just 4 bytes long)
- fileFormatCheck(readByte() != 0x50, "Invalid PE file format: " + filename); // 'P'
- fileFormatCheck(readByte() != 0x45, "Invalid PE file format: " + filename); // 'E'
- fileFormatCheck(readByte() != 0x00, "Invalid PE file format: " + filename); // 0
- fileFormatCheck(readByte() != 0x00, "Invalid PE file format: " + filename); // 0
-
- //trace("PE signature offset = 0x" + Table.int2hex(PE_SIGNATURE_OFFSET));
-
- COFF_HEADER_OFFSET = PE_SIGNATURE_OFFSET + 4;
- PE_HEADER_OFFSET = COFF_HEADER_OFFSET + 20;
-
- seek(COFF_HEADER_OFFSET);
-
- /* start of PE file header, Sec. 25.2.2 in Partition II */
- skip(2); // Machine (always 0x14c)
- numOfSections = readShort(); // Number of sections; indicates size of the Section Table
- Date timeStamp = new Date(readInt() * 1000L);
- skip(2 * INT_SIZE); // skip Pointer to Symbol Table (always 0) and Number of Symbols (always 0)
- optHeaderSize = readShort();
- int characteristics = readShort();
- isDLL = (characteristics & 0x2000) != 0;
-
- seek(PE_HEADER_OFFSET + 208); // p.157, Partition II
-
- CLI_RVA = readInt(); // called "Data Directory Table" in Ch. 4 of Expert IL book
- CLI_Length = readInt();
- //trace("CLI_RVA = 0x" + Table.int2hex(CLI_RVA));
- //trace("CLI_Length = 0x" + Table.int2hex(CLI_Length));
-
- sections = new PESection[numOfSections];
-
- seek(PE_HEADER_OFFSET + optHeaderSize); // go to the sections descriptors
-
- for (int i = 0; i < numOfSections; i++) {
- seek(PE_HEADER_OFFSET + optHeaderSize + i * 40);
- sections[i] = new PESection(this);
- //sections[i].dump(System.out);
- }
-
- seek(fromRVA(CLI_RVA));
- skip(8);
- rvaMetadata = readInt();
- posMetadata = fromRVA(rvaMetadata);
- //trace("rvaMetadata = 0x" + Table.int2hex(rvaMetadata));
- //trace("posMetadata = 0x" + Table.int2hex(posMetadata));
-
- seek(posMetadata);
- int magic = readInt();
- //trace("Magic metadata signature = 0x" + Table.int2hex(magic));
- fileFormatCheck(magic != 0x424a5342, "Invalid metadata signature!");
- skip(8);
-
- int strlength = readInt();
- //trace("version name string length = " + strlength);
- skip(strlength);
- align(INT_SIZE, posMetadata);
- //trace("position of flags = 0x" + Table.int2hex((int)pos()));
- skip(2); // ignore the flags
- numOfStreams = readShort();
- //trace("Number of metadata streams = " + numOfStreams);
-
- for (int i = 0; i < numOfStreams; i++) {
- PEStream strm = new PEStream(this);
- //strm.dump(System.out);
- if (strm.name.equals("#~")
- || strm.name.equals("#-")) Meta = strm;
- if (strm.name.equals("#Strings")) Strings = strm;
- if (strm.name.equals("#US")) US = strm;
- if (strm.name.equals("#Blob")) Blob = strm;
- if (strm.name.equals("#GUID")) GUID = strm;
- }
-
- seek(Meta.offset);
- skip(6);
- heapSizes = readByte();
- StringIsShort = (heapSizes & 0x01) == 0;
- GUIDIsShort = (heapSizes & 0x02) == 0;
- BlobIsShort = (heapSizes & 0x04) == 0;
-
- skip(1);
- long tablesMask = readLong();
- long nonStandardTables = tablesMask & ~Table.VALID_TABLES_MASK;
- skip(8); //go to the list of number of rows
- for (int i = 0; i < tables.length; i++) {
- tables[i] = Table.newTable
- (this, i, ((tablesMask >> i) & 0x01) != 0 ? readInt() : 0);
- }
-
- initIndexSize();
- initTableRefs();
- // populate the tables from the CLI image file
- long start = pos();
- for (int i = 0; i < tables.length; i++)
- start = tables[i].init(start);
-
- } // PEFile()
-
-
- public final int[] indexSize = new int[Table.TABLE_SET_LENGTH];
-
- private void initIndexSize() {
- for (int i = 0; i < Table.TABLE_SET_LENGTH; i++) {
- indexSize[i] = 2;
- int[] tableSet = Table.TableSet[i];
- int threshold = (65536 >> Table.NoBits[i]);
- for (int j = 0; j < tableSet.length; j++) {
- if (tableSet[j] >= 0) {
- Table t = tables[tableSet[j]];
- if (t.rows >= threshold) {
- indexSize[i] = 4;
- break;
- }
- }
- }
- }
- }
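
    /* Hedged illustration (a sketch, not part of the original file): initIndexSize above
       applies the coded-index sizing rule of ECMA-335 24.2.6. A coded index reserves a few
       tag bits to say which table it points into, so it can stay 2 bytes wide only while
       every table in the set has fewer than 2^(16 - tagBits) rows. */
    class CodedIndexWidth {
        static int indexSize(int tagBits, int maxRowsInSet) {
            int threshold = 65536 >> tagBits;          // e.g. 2 tag bits: 65536 >> 2 = 16384
            return maxRowsInSet >= threshold ? 4 : 2;
        }

        public static void main(String[] args) {
            System.out.println(indexSize(2, 10000));   // 2 (short index still fits)
            System.out.println(indexSize(2, 20000));   // 4 (must widen to 4 bytes)
        }
    }
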
-
- protected void initModule(PEModule module) {
- if (pemodule != null)
- throw new RuntimeException("File " + this
- + " has already been assigned module "
- + pemodule + "; new module is " + module);
- this.pemodule = module;
- }
-
- //##########################################################################
-
- public ModuleDef ModuleDef;
- public ModuleDef ModuleDef(int i) {
- ModuleDef.readRow(i);
- return ModuleDef;
- }
-
- public TypeRef TypeRef;
-
- public TypeDef TypeDef;
- public TypeDef TypeDef(int i) {
- TypeDef.readRow(i);
- return TypeDef;
- }
-
- public FieldTrans FieldTrans;
- public FieldTrans FieldTrans(int i) {
- FieldTrans.readRow(i);
- return FieldTrans;
- }
-
- public FieldDef FieldDef;
- public FieldDef FieldDef(int i) {
- FieldDef.readRow(i);
- return FieldDef;
- }
-
- public MethodTrans MethodTrans;
- public MethodTrans MethodTrans(int i) {
- MethodTrans.readRow(i);
- return MethodTrans;
- }
-
- public MethodDef MethodDef;
- public MethodDef MethodDef(int i) { MethodDef.readRow(i); return MethodDef; }
-
-
- public ParamDef ParamDef;
- public ParamDef ParamDef(int i) { ParamDef.readRow(i); return ParamDef; }
-
- public GenericParam GenericParam;
-
- public GenericParam GenericParam(int i) {
- GenericParam.readRow(i);
- return GenericParam;
- }
-
- public MethodSpec MethodSpec;
-
- public MethodSpec MethodSpec(int i) {
- MethodSpec.readRow(i);
- return MethodSpec;
- }
-
- public GenericParamConstraint GenericParamConstraint;
-
- public GenericParamConstraint GenericParamConstraint(int i) {
- GenericParamConstraint.readRow(i);
- return GenericParamConstraint;
- }
-
- public InterfaceImpl InterfaceImpl;
- public MemberRef MemberRef;
- public Constant Constant;
- public CustomAttribute CustomAttribute;
- public FieldMarshal FieldMarshal;
- public DeclSecurity DeclSecurity;
- public ClassLayout ClassLayout;
- public FieldLayout FieldLayout;
- public StandAloneSig StandAloneSig;
- public EventMap EventMap;
- public EventDef EventDef;
- public PropertyMap PropertyMap;
- public PropertyDef PropertyDef;
- public MethodSemantics MethodSemantics;
- public MethodImpl MethodImpl;
- public ModuleRef ModuleRef;
- public TypeSpec TypeSpec;
- public ImplMap ImplMap;
- public FieldRVA FieldRVA;
- public AssemblyDef AssemblyDef;
- public AssemblyRef AssemblyRef;
- public FileDef FileDef;
- public ExportedType ExportedType;
- public ManifestResource ManifestResource;
- public NestedClass NestedClass;
-
-
- private void initTableRefs() {
- ModuleDef = (ModuleDef) getTable(Table.ModuleDef.ID);
- TypeRef = (TypeRef) getTable(Table.TypeRef.ID);
- TypeDef = (TypeDef) getTable(Table.TypeDef.ID);
- FieldTrans = (FieldTrans) getTable(Table.FieldTrans.ID);
- FieldDef = (FieldDef) getTable(Table.FieldDef.ID);
- MethodTrans = (MethodTrans) getTable(Table.MethodTrans.ID);
- MethodDef = (MethodDef) getTable(Table.MethodDef.ID);
- ParamDef = (ParamDef) getTable(Table.ParamDef.ID);
- InterfaceImpl = (InterfaceImpl) getTable(Table.InterfaceImpl.ID);
- MemberRef = (MemberRef) getTable(Table.MemberRef.ID);
- Constant = (Constant) getTable(Table.Constant.ID);
- CustomAttribute = (CustomAttribute) getTable(Table.CustomAttribute.ID);
- FieldMarshal = (FieldMarshal) getTable(Table.FieldMarshal.ID);
- DeclSecurity = (DeclSecurity) getTable(Table.DeclSecurity.ID);
- ClassLayout = (ClassLayout) getTable(Table.ClassLayout.ID);
- FieldLayout = (FieldLayout) getTable(Table.FieldLayout.ID);
- StandAloneSig = (StandAloneSig) getTable(Table.StandAloneSig.ID);
- EventMap = (EventMap) getTable(Table.EventMap.ID);
- EventDef = (EventDef) getTable(Table.EventDef.ID);
- PropertyMap = (PropertyMap) getTable(Table.PropertyMap.ID);
- PropertyDef = (PropertyDef) getTable(Table.PropertyDef.ID);
- MethodSemantics = (MethodSemantics) getTable(Table.MethodSemantics.ID);
- MethodImpl = (MethodImpl) getTable(Table.MethodImpl.ID);
- ModuleRef = (ModuleRef) getTable(Table.ModuleRef.ID);
- TypeSpec = (TypeSpec) getTable(Table.TypeSpec.ID);
- ImplMap = (ImplMap) getTable(Table.ImplMap.ID);
- FieldRVA = (FieldRVA) getTable(Table.FieldRVA.ID);
- AssemblyDef = (AssemblyDef) getTable(Table.AssemblyDef.ID);
- AssemblyRef = (AssemblyRef) getTable(Table.AssemblyRef.ID);
- FileDef = (FileDef) getTable(Table.FileDef.ID);
- ExportedType = (ExportedType) getTable(Table.ExportedType.ID);
- NestedClass = (NestedClass) getTable(Table.NestedClass.ID);
- ManifestResource =
- (ManifestResource) getTable(Table.ManifestResource.ID);
- GenericParam = (GenericParam) getTable(Table.GenericParam.ID);
- MethodSpec = (MethodSpec) getTable(Table.MethodSpec.ID);
- GenericParamConstraint = (GenericParamConstraint) getTable(Table.GenericParamConstraint.ID);
- }
-
- public static String long2hex(long a) {
- StringBuffer str = new StringBuffer("0000000000000000");
- str.append(Long.toHexString(a));
- int l = str.length();
- return str.substring(l - 16, l);
- }
-
- public static String int2hex(int a) {
- StringBuffer str = new StringBuffer("00000000");
- str.append(Integer.toHexString(a));
- int l = str.length();
- return str.substring(l - 8, l);
- }
-
- public static String short2hex(int a) {
- StringBuffer str = new StringBuffer("0000");
- str.append(Integer.toHexString(a));
- int l = str.length();
- return str.substring(l - 4, l);
- }
-
- public static String byte2hex(int a) {
- StringBuffer str = new StringBuffer("00");
- str.append(Integer.toHexString(a));
- int l = str.length();
- return str.substring(l - 2, l);
- }
-
- public static String bytes2hex(byte[] buf) {
- StringBuffer str = new StringBuffer();
- for (int i = 0; i < buf.length; i++) {
- str.append(byte2hex(buf[i]));
- if (i < buf.length - 1)
- str.append(" ");
- }
- return str.toString();
- }
-
- //##########################################################################
- // filename
-
- public File getUnderlyingFile() {
- return underlyingFile;
- }
-
- /**
- * @return the absolute path of the file
- */
- public String getAbsolutePath() {
- return underlyingFile.getAbsolutePath();
- }
-
- /**
- * @return the name of this file
- */
- public String getName() {
- return underlyingFile.getName();
- }
-
- /**
- * @return the pathname string of this file's parent directory, or null if it has none
- */
- public String getParent() {
- return underlyingFile.getParent();
- }
-
- /**
- * @return the file representing the directory the file belongs to
- */
- public File getParentFile() {
- return underlyingFile.getParentFile();
- }
-
- public String toString() {
- return getAbsolutePath();
- }
-
- //##########################################################################
- // file pointer manipulation methods
-
- /** Returns the current position in the file. */
- public int pos() {
- return buf.position();
- }
-
- /** Go to the specified position in the file. */
- public void seek(int pos) {
- buf.position(pos);
- }
-
-
- /** Align the current position in the file. */
- public void align(int base) { align(base, 0); }
-
- /** Align the current position in a section starting at offset. */
- public void align(int base, int offset) {
- int p = pos() - offset;
- seek( offset + ((p % base) == 0 ? p : (p/base + 1) * base));
- }
-
- /** Computes the position in the file that corresponds to the given RVA. */
- public int fromRVA(int rva) {
- int i;
- for(i = 0; i < numOfSections; i++)
- if(sections[i].virtAddr <= rva &&
- rva <= (sections[i].virtAddr + sections[i].virtSize))
- return rva - sections[i].virtAddr + sections[i].realAddr;
- throw new RuntimeException("RVA 0x" + Integer.toHexString(rva) +
- " is not within this file's sections!");
- }
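
    /* Hedged worked example (a sketch, not part of the original file): fromRVA above
       rebases a relative virtual address from a section's virtual address onto its raw
       file offset. With invented section values virtAddr = 0x2000 and realAddr = 0x200,
       RVA 0x2050 lands at file offset 0x250. */
    class RvaDemo {
        public static void main(String[] args) {
            int virtAddr = 0x2000, realAddr = 0x200, rva = 0x2050;
            int filePos = rva - virtAddr + realAddr;
            System.out.println(Integer.toHexString(filePos));   // prints "250"
        }
    }
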
-
- /** Go to the specified RVA (Relative Virtual Address). */
- public void gotoRVA(int rva) {
- seek(fromRVA(rva));
- }
-
- /** Move forward in the file by the specified number of bytes. */
- public void skip(int n) {
- buf.position(buf.position() + n);
- }
-
- /**
- * Returns a memory mapped little-endian buffer
- * for the specified region of the file.
- */
- public MappedByteBuffer mapBuffer(long offset, int size) {
- try {
- MappedByteBuffer b = file.getChannel()
- .map(FileChannel.MapMode.READ_ONLY, offset, size);
- b.order(java.nio.ByteOrder.LITTLE_ENDIAN);
- return b;
- } catch (IOException e) { throw new RuntimeException(e); }
- }
-
- /** Returns a little-endian buffer of the given size starting at the given offset. */
- public ByteBuffer getBuffer(long offset, int size) {
- buf.mark();
- buf.position((int)offset);
- ByteBuffer bb = buf.slice();
- buf.reset();
- bb.limit(size);
- bb.order(java.nio.ByteOrder.LITTLE_ENDIAN);
- return bb;
- }
-
- //##########################################################################
- // file read methods
-
- /**
- * Read bs.length bytes from the current position in the file.
- */
- public void read(byte[] bs) {
- buf.get(bs);
- }
-
- /**
- * Read 1-byte integer from the current position in the file.
- */
- public int readByte() {
- return buf.get();
- }
-
- /**
- * Read 2-byte integer from the current position in the file.
- */
- public int readShort() {
- return buf.getShort();
- }
-
- /**
- * Read 4-byte integer from the current position in the file.
- */
- public int readInt() {
- return buf.getInt();
- }
-
- /**
- * Read 8-byte integer from the current position in the file.
- */
- public long readLong() {
- return buf.getLong();
- }
-
- /**
- * @return the size of string indices for this file.
- */
- public int getStringIndexSize() {
- return StringIsShort ? 2 : 4;
- }
-
- /**
- * @return the size of GUID indices for this file.
- */
- public int getGUIDIndexSize() {
- return GUIDIsShort ? 2 : 4;
- }
-
- /**
- * @return the size of Blob indices for this file.
- */
- public int getBlobIndexSize() {
- return BlobIsShort ? 2 : 4;
- }
-
- /**
- * @return the size of an index into the table with the given ID
- * @param tableID the ID of the table
- */
- public int getTableIndexSize(int tableID) {
- return tables[tableID].isShort ? 2 : 4;
- }
-
- /**
- * @param tableSetID the ID of the table set
- * @return the size of an index into the given table set
- */
- public int getTableSetIndexSize(int tableSetID) {
- return indexSize[tableSetID];
- }
-
- /**
- * Read a String index from the current position in the file.
- * @return an index into the String stream
- */
- public int readStringIndex() {
- return StringIsShort ? readShort() : readInt();
- }
-
- /**
- * Read a GUID index from the current position in the file.
- * @return an index into the GUID stream
- */
- public int readGUIDIndex() {
- return GUIDIsShort ? readShort() : readInt();
- }
-
- /**
- * Read a Blob index from the current position in the file.
- * @return an index into the Blob stream
- */
- public int readBlobIndex() {
- return BlobIsShort ? readShort() : readInt();
- }
-
- /** Read an entry interpreted as an index into the table with the given ID. */
- public int readTableIndex(int tableId) {
- return tables[tableId].isShort ? readShort() : readInt();
- }
-
- /***/
- public int readTableSetIndex(int tableSetId) {
- return indexSize[tableSetId] == 2 ? readShort() : readInt();
- }
-
- /**
- * Read a string from the String stream
- * @return the string at the given position
- * @param pos the position of the string in the String stream
- */
- public String getString(int pos) {
- String s = Strings.getString(pos);
- return s;//.length() == 0 ? null : s;
- }
-
- /**
- * Read a string from the US (User Strings) stream
- * @return the string at the given position
- * @param pos the position of the string in the US stream
- */
- public String getUString(int pos) {
- return US.getString(pos);
- }
-
- /**
- * Read a blob from the Blob Stream
- * @return the blob at the given position
- * @param pos the position of the blob in the Blob stream
- */
- public byte[] getBlob(int pos) {
- return Blob.getBlob(pos);
- }
-
- /***/
- public Sig getSignature(int pos) {
- //return new Sig(getBlob(pos));
- return Blob.getSignature(pos);
- }
-
- /***/
- public byte[] getGUID(int pos) {
- return GUID.getGUID(pos);
- }
-
- /**
- * @return the table with the corresponding ID.
- */
- public final Table getTable(int tableID) {
- return tables[tableID];
- }
-
- //##########################################################################
-
- /***/
- void trace(String msg) {
- System.out.println("[trace] " + msg);
- }
-
- //##########################################################################
-
- public Sig newSignature(ByteBuffer buf) {
- return new Sig(buf);
- }
-
- /**
- */
- public class Sig implements Signature {
-
- //######################################################################
- // instance members
-
- protected final ByteBuffer buf;
- protected final int pos;
- protected final int length;
-
- public Sig(ByteBuffer buf) {
- this.buf = buf;
- //int tmpPos = buf.position();
- length = decodeInt();
- this.pos = buf.position();
- }
-
- public String toString() {
- StringBuffer b = new StringBuffer("(");
- int savedPos = buf.position();
- reset();
- for (int i = 0; i < length; i++) {
- b.append(byte2hex(readByte()));
- if (i < length - 1)
- b.append(" ");
- }
- buf.position(savedPos);
- return b.append(")").toString();
- }
-
- public Sig reset() { buf.position(pos); return this; }
-
- public int pos() { return buf.position() - pos; }
-
- /** @return the byte at the current position in the signature Blob.
- * Stay at the same position
- */
- public int getByte() {
- return (buf.get(buf.position()) + 0x100) & 0xff;
- }
-
- /** @return the byte at the current position in the signature Blob.
- * Move to the next byte.
- */
- public int readByte() { return (buf.get() + 0x100) & 0xff; }
-
- /** Skip the current byte if equal to the given value. */
- public void skipByte(int b) { if (b == getByte()) buf.get(); }
-
- /** Decodes an integer from the signature Blob.
- * @return the decoded integer
- */
- public int decodeInt() {
- int res = readByte();
- if ((res & 0x80) != 0) {
- res = ((res & 0x7f) << 8) | readByte();
- if ((res & 0x4000) != 0)
- res = ((res & 0x3fff)<<16) | (readByte()<<8) | readByte();
- }
- return res;
- }
-
- /** @return - the type encoded at the current position in the signature
- * according to 23.2.12
- */
- public Type decodeType() {
- try { return decodeType0(); }
- catch (RuntimeException e) {
- System.out.println("" + pos() + "@" + this);
- throw e;
- }
- }
-
- public Type decodeType0() {
- Type type = null;
- int desc = readByte();
- switch (desc) {
- case ELEMENT_TYPE_BOOLEAN:type = Type.GetType("System.Boolean"); break;
- case ELEMENT_TYPE_CHAR: type = Type.GetType("System.Char"); break;
- case ELEMENT_TYPE_I1: type = Type.GetType("System.SByte"); break;
- case ELEMENT_TYPE_U1: type = Type.GetType("System.Byte"); break;
- case ELEMENT_TYPE_I2: type = Type.GetType("System.Int16"); break;
- case ELEMENT_TYPE_U2: type = Type.GetType("System.UInt16"); break;
- case ELEMENT_TYPE_I4: type = Type.GetType("System.Int32"); break;
- case ELEMENT_TYPE_U4: type = Type.GetType("System.UInt32"); break;
- case ELEMENT_TYPE_I8: type = Type.GetType("System.Int64"); break;
- case ELEMENT_TYPE_U8: type = Type.GetType("System.UInt64"); break;
- case ELEMENT_TYPE_R4: type = Type.GetType("System.Single"); break;
- case ELEMENT_TYPE_R8: type = Type.GetType("System.Double"); break;
- case ELEMENT_TYPE_OBJECT: type = Type.GetType("System.Object"); break;
- case ELEMENT_TYPE_STRING: type = Type.GetType("System.String"); break;
- case ELEMENT_TYPE_I: type = Type.GetType("System.IntPtr"); break;
- case ELEMENT_TYPE_U: type = Type.GetType("System.UIntPtr"); break;
- case ELEMENT_TYPE_PTR: // Followed by <type> token.
- if (getByte() == ELEMENT_TYPE_VOID) {
- readByte();
- type = Type.mkPtr(Type.GetType("System.Void"));
- } else type = Type.mkPtr(decodeType());
- break;
- case ELEMENT_TYPE_BYREF: /* BYREF is not listed in 23.2.12 as a possible alternative, but this method is also called when parsing method parameter and return-type signatures, both of which do allow BYREF */
- type = Type.mkByRef(decodeType());
- break;
- case ELEMENT_TYPE_VALUETYPE: // Followed by TypeDefOrRefEncoded
- assert true; // fall through: VALUETYPE is decoded the same way as CLASS
- case ELEMENT_TYPE_CLASS:
- // Followed by <type> token
- type = pemodule.getTypeDefOrRef(decodeInt());
- if (type == null) throw new RuntimeException();
- break;
-
- case ELEMENT_TYPE_SZARRAY: // Single-dim array with 0 lower bound.
- skipCustomMods();
- type = Type.mkArray(decodeType(), 1);
- break;
- case ELEMENT_TYPE_ARRAY:
- // <type> <rank> <boundsCount> <bound1> ... <loCount> <lo1> ...
- // ArrayShape defined in 23.2.13 ArrayShape
- Type elem = decodeType();
- int rank = decodeInt();
- int numSizes = decodeInt();
- for (int i = 0; i < numSizes; i++)
- decodeInt(); // TODO don't ignore
- int numLoBounds = decodeInt();
- for (int i = 0; i < numLoBounds; i++)
- decodeInt(); // TODO don't ignore
- type = Type.mkArray(elem, rank);
- break;
-
- // a grammar production from 23.2.12 Type
- // GENERICINST (CLASS | VALUETYPE) TypeDefOrRefEncoded GenArgCount Type*
- case ELEMENT_TYPE_GENERICINST:
- int b = readByte();
- /*- TODO don't ignore b as done above. Should .NET valuetypes be represented as Scala case classes? */
- Type instantiatedType = pemodule.getTypeDefOrRef(decodeInt());
- int numberOfTypeArgs = decodeInt();
- Type[] typeArgs = new Type[numberOfTypeArgs];
- for (int iarg = 0; iarg < numberOfTypeArgs; iarg++) {
- typeArgs[iarg] = decodeType();
- }
- type = new ConstructedType(instantiatedType, typeArgs);
- break;
-
- // another grammar production from 23.2.12 Type
- // ELEMENT_TYPE_VAR number The number non-terminal following MVAR
- // or VAR is an unsigned integer value (compressed).
- /* See also duplicate code in PEModule.java */
- case ELEMENT_TYPE_VAR:
- int typeArgAsZeroBased = decodeInt();
- type = new Type.TMVarUsage(typeArgAsZeroBased, true);
- break;
-
- // another grammar production from 23.2.12 Type
- // ELEMENT_TYPE_MVAR number The number non-terminal following MVAR
- // or VAR is an unsigned integer value (compressed).
- /* See also duplicate code in PEModule.java */
- case ELEMENT_TYPE_MVAR:
- typeArgAsZeroBased = decodeInt();
- type = new Type.TMVarUsage(typeArgAsZeroBased, false);
- break;
-
- case ELEMENT_TYPE_FNPTR:
- // Followed by a MethodDefSig or a MethodRefSig.
- case ELEMENT_TYPE_END:
- // Marks end of a list
- case ELEMENT_TYPE_CMOD_REQD:
- // Required modifier : followed by a TypeDef or TypeRef token.
- case ELEMENT_TYPE_CMOD_OPT:
- // Optional modifier : followed by a TypeDef or TypeRef token.
- case ELEMENT_TYPE_INTERNAL:
- // Implemented within the CLI.
- case ELEMENT_TYPE_MODIFIER:
- // Or'd with following element types.
- case ELEMENT_TYPE_SENTINEL:
- // Sentinel for varargs method signature.
- case ELEMENT_TYPE_PINNED:
- // Denotes a local variable that points at a pinned object.
- default:
- throw new RuntimeException(byte2hex(desc) +
- "@" + pos() + " in " + this);
-
- }
- if (type == null) throw new RuntimeException();
- return type;
- } // decodeType0()
-
- public PECustomMod decodeFieldType() {
- skipByte(FIELD); // 0x06
- CustomModifier[] cmods = getCustomMods();
- Type fieldType = decodeType();
- return new PECustomMod(fieldType, cmods);
- }
-
- /** Decodes the return type of a method signature (23.2.11 RetType). */
- public Type decodeRetType() {
- skipCustomMods();
- switch (getByte()) {
- case ELEMENT_TYPE_VOID:
- readByte();
- return Type.GetType("System.Void");
- case ELEMENT_TYPE_TYPEDBYREF:
- return Type.GetType("System.TypedReference");
- case ELEMENT_TYPE_BYREF:
- return decodeType();
- default:
- return decodeType();
- }
- }
-
- public Type decodeParamType() {
- skipCustomMods();
- switch (getByte()) {
- case ELEMENT_TYPE_BYREF:
- return decodeType();
- case ELEMENT_TYPE_TYPEDBYREF:
- return Type.GetType("System.TypedReference");
- default:
- return decodeType();
- }
- }
-
- public void skipCustomMods() {
- while (getByte() == ELEMENT_TYPE_CMOD_OPT /* 0x20 */
- || getByte() == ELEMENT_TYPE_CMOD_REQD /* 0x1f */ )
- {
- boolean isREQD = (getByte() == ELEMENT_TYPE_CMOD_REQD); // 0x1f
- // skip the tag 23.2.7
- readByte();
- // skip the TypeDefOrRefEncoded (23.2.8)
- Type ignored = pemodule.getTypeDefOrRef(decodeInt());
- if(isREQD) {
- // System.err.println("ELEMENT_TYPE_CMOD_REQD: " + ignored);
- // throw new RuntimeException("Reqired CMOD: " + ignored);
- }
- }
- }
-
- /**
- * @see CustomModifier
- */
- public CustomModifier[] getCustomMods() {
- java.util.List/*<CustomModifier>*/ cmods = new java.util.LinkedList();
- while (getByte() == ELEMENT_TYPE_CMOD_OPT || getByte() == ELEMENT_TYPE_CMOD_REQD) {
- boolean isReqd = (getByte() == ELEMENT_TYPE_CMOD_REQD);
- readByte(); // tag 23.2.7
- Type t = pemodule.getTypeDefOrRef(decodeInt()); // TypeDefOrRefEncoded (23.2.8)
- cmods.add(new CustomModifier(isReqd, t));
- }
- CustomModifier[] res = (CustomModifier[])cmods.toArray(new CustomModifier[0]);
- return res;
- }
-
- //######################################################################
-
- } // class Sig
-
- //##########################################################################
-
-} // class PEFile
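
Sig.decodeInt above implements the compressed unsigned integer encoding of ECMA-335 section 23.2: one byte for values up to 0x7F, two bytes (leading bits 10) up to 0x3FFF, four bytes (leading bits 110) beyond that. A hedged stand-alone version of the same decoder over a ByteBuffer, checked against the worked examples given in the standard:

    // Hedged stand-alone counterpart of Sig.decodeInt (ECMA-335 23.2 compressed unsigned ints).
    import java.nio.ByteBuffer;

    public class CompressedUIntDecode {
        static int decode(ByteBuffer buf) {
            int res = buf.get() & 0xff;
            if ((res & 0x80) != 0) {                           // not the 1-byte form
                res = ((res & 0x7f) << 8) | (buf.get() & 0xff);
                if ((res & 0x4000) != 0)                       // first byte was 110xxxxx: 4-byte form
                    res = ((res & 0x3fff) << 16)
                        | ((buf.get() & 0xff) << 8)
                        |  (buf.get() & 0xff);
            }
            return res;
        }

        public static void main(String[] args) {
            System.out.println(decode(ByteBuffer.wrap(new byte[]{ 0x7f })));                            // 127
            System.out.println(decode(ByteBuffer.wrap(new byte[]{ (byte) 0x80, (byte) 0x80 })));        // 128
            System.out.println(decode(ByteBuffer.wrap(new byte[]{ (byte) 0xc0, 0x00, 0x40, 0x00 })));   // 16384
        }
    }
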
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/PEModule.java b/src/msil/ch/epfl/lamp/compiler/msil/PEModule.java
deleted file mode 100644
index cb8cd8f098..0000000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/PEModule.java
+++ /dev/null
@@ -1,456 +0,0 @@
-/*
- * System.Reflection-like API for access to .NET assemblies (DLL & EXE)
- */
-
-
-package ch.epfl.lamp.compiler.msil;
-
-import ch.epfl.lamp.compiler.msil.PEFile;
-import ch.epfl.lamp.compiler.msil.PEFile.Sig;
-import ch.epfl.lamp.compiler.msil.util.Signature;
-import ch.epfl.lamp.compiler.msil.util.Table;
-import ch.epfl.lamp.compiler.msil.util.Table.*;
-
-import java.nio.ByteBuffer;
-
-/** Represents a module corresponding to a PE/COFF file
- *
- * @author Nikolay Mihaylov
- * @version 1.0
- */
-final class PEModule extends Module {
-
- //##########################################################################
-
- protected final PEFile pefile;
-
- private final int definingRow;
-
- private Type[] typeRefs = null;
-
- protected PEModule(PEFile pefile, int definingRow, String scopeName,
- Assembly assem)
- {
- super(pefile.getName(), pefile.getAbsolutePath(), scopeName, assem);
- this.pefile = pefile;
- this.definingRow = definingRow;
- pefile.initModule(this);
- pefile.TypeDef.load(); // load into memory
- //loadTypes();
- //pefile.FieldDef.load();
- //pefile.MethodDef.load();
- loadGlobals();
- }
-
- //##########################################################################
-
- public Type GetType(String typeName) {
- initTypes();
- Object o = typesMap.get(typeName);
- if (o == null) {
- //System.out.println("PEModule.GetType(): Unable to find type "
- // + typeName + " int module " + this);
- return null;
- }
- return o instanceof Type ? (Type)o
- : getTypeDef(((Integer)o).intValue());
- }
-
-
- /** Load information about the types defined in this module.
- */
- protected void loadTypes() {
- typeRefs = new Type[pefile.TypeRef.rows];
- final int nbTypes = pefile.TypeDef.rows;
- for (int row = 2; row <= nbTypes; row++) { // row 1 is the synthetic <Module> type, which is skipped
- String name = pefile.TypeDef(row).getFullName();
- typesMap.put(name, new Integer(row));
- }
- this.types = new Type[nbTypes - 1];
- for (int row = 2; row <= nbTypes; row++) {
- getTypeDef(row);
- }
- }
-
- /** Return the type defined at the given row in the TypeDef table.
- */
- Type getTypeDef(int row) {
- if (this.types[row - 2] != null)
- return this.types[row - 2];
-
- TypeDef type = pefile.TypeDef(row);
- int attrs = type.Flags;
- String name = type.getFullName();
-
- Type declType = null;
- if (TypeAttributes.isNested(attrs)) {
- for (int i = 1; i <= pefile.NestedClass.rows; i++) {
- pefile.NestedClass.readRow(i);
- if (pefile.NestedClass.NestedClass == row)
- declType = getTypeDef
- (pefile.NestedClass.EnclosingClass);
- }
- }
- Type t = new PEType
- (this, attrs, name, declType, Type.AuxAttr.None, pefile, row);
- types[row - 2] = t;
- addType(t);
- int[] tvarIdxes = pefile.GenericParam.getTVarIdxes(row);
- // if(tvarIdxes.length > 0) { System.out.println("Type: " + t); }
- for(int i = 0; i < tvarIdxes.length; i++) {
- GenericParamAndConstraints tvarAndConstraints = getTypeConstraints(tvarIdxes[i]);
- // add tvarAndConstraints as i-th TVar in t
- t.addTVar(tvarAndConstraints);
- }
- return t;
- }
-
- public GenericParamAndConstraints getTypeConstraints(int genParamIdx) {
- int tvarNumber = pefile.GenericParam(genParamIdx).Number;
- // tvarName can be null
- String tvarName = pefile.GenericParam.getName();
- boolean isInvariant = pefile.GenericParam.isInvariant();
- boolean isCovariant = pefile.GenericParam.isCovariant();
- boolean isContravariant = pefile.GenericParam.isContravariant();
- boolean isReferenceType = pefile.GenericParam.isReferenceType();
- boolean isValueType = pefile.GenericParam.isValueType();
- boolean hasDefaultConstructor = pefile.GenericParam.hasDefaultConstructor();
- // grab constraints
- int[] TypeDefOrRefIdxes = pefile.GenericParamConstraint.getTypeDefOrRefIdxes(genParamIdx);
- Type[] tCtrs = new Type[TypeDefOrRefIdxes.length];
- for(int i = 0; i < TypeDefOrRefIdxes.length; i++) {
- Type tConstraint = getTypeDefOrRef(TypeDefOrRefIdxes[i]);
- tCtrs[i] = tConstraint;
- // System.out.println("\t\tConstraint: " + tConstraint);
- }
- GenericParamAndConstraints res = new GenericParamAndConstraints(tvarNumber, tvarName, tCtrs,
- isInvariant, isCovariant, isContravariant,
- isReferenceType, isValueType, hasDefaultConstructor);
- return res;
- }
-
- /**
- * Load the description of the module-global fields and methods
- */
- protected void loadGlobals() {
- //TODO:
- }
-
- protected void loadCustomAttributes(Type attributeType) {
- initAttributes(this, 1, Table.ModuleDef.ID, attributeType);
- }
-
- /** Return the type referenced by the given row in the TypeRef table.
- */
- Type getTypeRef(int row) {
- return getTypeRef(row, null);
- }
-
- /** Return the type referenced by the given row in the TypeRef table
- * only if it resides in the given assembly.
- * <i>Used by initCustomAttributes to avoid unnecessary loading
- * of referenced assemblies.</i>
- */
- Type getTypeRef(int row, Assembly inAssembly) {
- Type type = typeRefs[row - 1];
- if (type != null)
- return type;
-
- Table.TypeRef tr = pefile.TypeRef;
- tr.readRow(row);
- int tableId = Table.getTableId(Table._ResolutionScope,
- tr.ResolutionScope);
- int refRow = tr.ResolutionScope >> Table.NoBits[Table._ResolutionScope];
- final String typeName = tr.getFullName();
- pefile.getTable(tableId).readRow(refRow);
- switch (tableId) {
- case AssemblyRef.ID:
- String name = pefile.AssemblyRef.getName();
- if (inAssembly != null && !inAssembly.GetName().Name.equals(name))
- return null;
- Assembly assem = getAssembly(name);
- type = assem.GetType(typeName);
- if (type == null) {
- // HACK: the IKVM.OpenJDK.Core assembly is compiled against mscorlib.dll v2.0
- // The MSIL library cannot parse the v2.0 mscorlib because of generics, so we
- // use the v1.0
- // However, the java.io.FileDescriptor.FlushFileBuffers method uses a type
- // Microsoft.Win32.SafeHandles.SafeFileHandle, which only exists in mscorlib
- // v2.0
- // For now, just return Object (fine as long as we don't use that method).
- Assembly asmb = getAssembly("mscorlib");
- type = asmb.GetType("System.Object");
- //throw new RuntimeException("Failed to locate type " +
- //typeName + " in assembly " + assem);
- }
- break;
- case ModuleDef.ID:
- assert refRow == 1;
- type = this.GetType(typeName);
- //assert type != null;
- break;
- case TypeRef.ID:
- Type nestingType = getTypeRef(refRow);
- String nestedName = typeName;
- type = nestingType.GetNestedType(nestedName);
- break;
- case ModuleRef.ID:
- type = getAssembly(pefile.ModuleRef.getName()).GetType(typeName);
- break; // without the break, the ModuleRef case falls through to the default throw
- default:
- throw new RuntimeException(refRow + "@" + pefile.getTable(tableId).getTableName()/* PEFile.byte2hex(tableId)*/);
- }
- if (typeRefs[row - 1] != null)
- System.out.println("TypeRef[" + PEFile.short2hex(row) + "] " +
- "changing type " + typeRefs[row - 1] +
- " for type " + type);
- typeRefs[row - 1] = type;
- assert type != null : "Couldn't find type " + typeName;
- return type;
- }
-
- private Assembly getAssembly(String name) {
- Assembly assem = Assembly.getAssembly(name);
- if (assem != null)
- return assem;
- java.io.File dir = pefile.getParentFile();
- assem = Assembly.LoadFrom(dir, name);
- if (assem != null)
- return assem;
- try {
- dir = pefile.getUnderlyingFile().getCanonicalFile().getParentFile();
- } catch (java.io.IOException e) {
- throw new RuntimeException(e);
- }
- assem = Assembly.LoadFrom(dir, name);
- if (assem != null)
- return assem;
- throw new RuntimeException("Cannot find assembly: " + name);
-
- }
-
- /** Return the type corresponding to TypeDefOrRef coded index.
- * @param index - TypeDefOrRef coded index according to 23.2.6.
- */
- public Type getTypeDefOrRef(int index) {
- int tableId = Table.getTableId(Table._TypeDefOrRef, index);
- int row = index >> Table.NoBits[Table._TypeDefOrRef];
- Type type = null;
- switch (tableId) {
- case Table.TypeDef.ID:
- type = getTypeDef(row);
- break;
- case Table.TypeRef.ID:
- return getTypeRef(row);
- case Table.TypeSpec.ID:
- Table.TypeSpec ts = pefile.TypeSpec;
- ts.readRow(row);
- int posInBlobStream = ts.Signature;
- byte[] blobArrWithLengthStripped = pefile.Blob.getBlob(posInBlobStream);
- byte[] compressedUInt = compressUInt(blobArrWithLengthStripped.length);
- byte[] byteArr = new byte[blobArrWithLengthStripped.length + compressedUInt.length];
- System.arraycopy(compressedUInt, 0, byteArr, 0, compressedUInt.length);
- System.arraycopy(blobArrWithLengthStripped, 0, byteArr, compressedUInt.length, blobArrWithLengthStripped.length);
- ByteBuffer buf = ByteBuffer.wrap(byteArr);
- Sig sig = pefile.new Sig(buf);
- int desc = sig.readByte();
-
- switch (desc) {
-
- // GENERICINST (CLASS | VALUETYPE) TypeDefOrRefEncoded GenArgCount Type*
- case Signature.ELEMENT_TYPE_GENERICINST: // i.e. 0x15
- int b = sig.readByte(); // i.e. (0x12 | 0x11)
- /* TODO don't ignore b as done above */
- Type instantiatedType = getTypeDefOrRef(sig.decodeInt()); // TypeDefOrRefEncoded
- int numberOfTypeArgs = sig.decodeInt(); // GenArgCount
- Type[] typeArgs = new Type[numberOfTypeArgs];
- for (int iarg = 0; iarg < numberOfTypeArgs; iarg++) {
- typeArgs[iarg] = sig.decodeType(); // Type*
- }
- type = new ConstructedType(instantiatedType, typeArgs);
- break;
-
- /* Miguel says: Actually the following grammar rule production is not among those for a TypeSpecBlob
- but I've found it in assemblies compiled from C# 3.0.
- See also duplicate code in PEFile.java */
- case Signature.ELEMENT_TYPE_VAR:
- int typeArgAsZeroBased = sig.decodeInt();
- type = new Type.TMVarUsage(typeArgAsZeroBased, true);
- break;
-
- /* Miguel says: Actually the following grammar rule production is not among those for a TypeSpecBlob
- but I've found it in assemblies compiled from C# 3.0.
- See also duplicate code in PEFile.java */
- case Signature.ELEMENT_TYPE_MVAR:
- typeArgAsZeroBased = sig.decodeInt();
- type = new Type.TMVarUsage(typeArgAsZeroBased, false);
- break;
-
- case Signature.ELEMENT_TYPE_SZARRAY: // Single-dim array with 0 lower bound.
- sig.skipCustomMods();
- type = Type.mkArray(sig.decodeType(), 1);
- break;
-
- case Signature.ELEMENT_TYPE_ARRAY:
- // <type> <rank> <boundsCount> <bound1> ... <loCount> <lo1> ...
- // ArrayShape defined in 23.2.13 ArrayShape
- Type elem = sig.decodeType();
- int rank = sig.decodeInt();
- int numSizes = sig.decodeInt();
- for (int i = 0; i < numSizes; i++)
- sig.decodeInt(); // TODO don't ignore
- int numLoBounds = sig.decodeInt();
- for (int i = 0; i < numLoBounds; i++)
- sig.decodeInt(); // TODO don't ignore
- type = Type.mkArray(elem, rank);
- break;
-
- default:
- // TODO remaining grammar productions in 23.2.14 are for PTR and FNPTR only
- throw new RuntimeException("PEModule.getTypeDefOrRef(): TypeSpec");
- }
- break;
- default:
- throw new RuntimeException("PEModule.getTypeDefOrRef(): oops!");
- }
- return type;
- }
-
- private byte[] compressUInt(int u) {
- // 23.2 in Partition II
- // TODO add tests based on the examples in 23.2 in Partition II
- // the CCI implementation is WriteCompressedUInt
-
- /* informal discussion at http://www.cnblogs.com/AndersLiu/archive/2010/02/09/en-compressed-integer-in-metadata.html */
- if (u <= 127 && 0 <= u) {
- return new byte[]{(byte) u};
- } else if (u > 127 && u <= 0x3fff /* 2^14 - 1 */) {
- byte loByte = (byte)(u & 0xff);
- byte hiByte = (byte)((u >> 8) | 0x80);
- byte[] res = new byte[] { hiByte, loByte };
- return res;
- } else {
- byte b0 = (byte)(u & 0xff);
- byte b1 = (byte)((u & 0xff00)>>8);
- byte b2 = (byte)((u & 0xff0000)>>16);
- byte b3 = (byte)((u >> 24)|0xc0);
- byte[] res = new byte[] { b3, b2, b1, b0 };
- return res;
- }
- }
-
- /**
- * Returns the method defined at the given row of the MethodDef table
- * by looking up the type that defines the method.
- */
- MethodBase getMethod(int row) {
- for (int i = 0; i < types.length; i++) {
- PEType type = (PEType)types[i];
- if ((type.methodListBeg <= row) && (row < type.methodListEnd)) {
- type.initMethods();
- return type.methoddefs[row - type.methodListBeg];
- }
- }
- throw new RuntimeException("In module " + this
- + ": cannot find type defining method 0x"
- + PEFile.int2hex(row));
- }
-
- /** Returns the member referenced by the given row of the MemberRef table.
- */
- protected MemberInfo getMemberRef(int row) {
- return getMemberRef(row, null);
- }
-
- /** Returns the member referenced by the given row of the MemberRef table
- * if defined in the given assembly.
- * <i>Used by initCustomAttributes to avoid unnecessary loading of
- * referenced assemblies</i>
- */
- protected MemberInfo getMemberRef(int row, Assembly inAssembly) {
- MemberInfo member = null;
- MemberRef mref = pefile.MemberRef;
- mref.readRow(row);
- int mtbl = Table.getTableId(Table._MemberRefParent, mref.Class);
- int mind = Table.getTableIndex(Table._MemberRefParent, mref.Class);
- switch (mtbl) {
- case TypeRef.ID:
- Type type = getTypeRef(mind, inAssembly);
- if (type == null)
- return null;
- Sig sig = mref.getSignature();
- int callconv = sig.readByte(); // should be 0x20
- int paramCount = sig.decodeInt();
- //sig.skipByte(Signature.ELEMENT_TYPE_BYREF); //from MethodDef
- Type retType = sig.decodeRetType();
- Type[] paramType = new Type[paramCount];
- for (int i = 0; i < paramCount; i++)
- paramType[i] = sig.decodeParamType();
-
- String memberName = mref.getName();
- if (memberName.equals(ConstructorInfo.CTOR) ||
- memberName.equals(ConstructorInfo.CCTOR))
- {
- member = type.GetConstructor(paramType);
- } else {
- member = type.GetMethod(memberName, paramType);
- }
- assert member != null : type + "::" + memberName;
- break;
- case ModuleRef.ID:
- case MethodDef.ID:
- case TypeSpec.ID:
- throw new RuntimeException("initCustomAttributes: "
- + pefile.getTable(mtbl).getTableName());
- }
- return member;
- }
-
- protected void initCustomAttributes(Type attributeType) {
- initAttributes(this, definingRow, Table.ModuleDef.ID, attributeType);
- }
-
- // explicitly only package-visible
- void initAttributes(CustomAttributeProvider cap, int definingRow,
- int sourceTableId, Type attributeType)
- {
- int parentIndex = Table.encodeIndex(definingRow,
- Table._HasCustomAttribute,
- sourceTableId);
- Table.CustomAttribute attrs = pefile.CustomAttribute;
- for (int row = 1; row <= attrs.rows; row++) {
- ConstructorInfo attrConstr = null;
- attrs.readRow(row);
- if (attrs.Parent == parentIndex) {
- int tableId = Table.getTableId(Table._CustomAttributeType,
- attrs.Type);
- int ind = Table.getTableIndex(Table._CustomAttributeType,
- attrs.Type);
- switch (tableId) {
- case MethodDef.ID:
- attrConstr = (ConstructorInfo)this.getMethod(ind);
- break;
- case MemberRef.ID:
- //System.out.println(PEFile.short2hex(ind) + "@MemberRef");
- Assembly attrAssem =
- attributeType == null ? null : attributeType.Assembly();
- MemberInfo mi = this.getMemberRef(ind, attrAssem);
- if (mi != null) {
- assert mi instanceof ConstructorInfo
- : "Expected ConstructorInfo; found " + mi;
- attrConstr = (ConstructorInfo)mi;
- }
- break;
- default:
- throw new RuntimeException();
- }
- if (attrConstr != null
- && (attrConstr.DeclaringType == attributeType
- || attributeType == null))
- cap.addCustomAttribute(attrConstr, attrs.getValue());
- }
- }
- }
-
- //##########################################################################
-
-} // class PEModule
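
compressUInt above is the encoder side of the same ECMA-335 section 23.2 format; PEModule uses it to put the length prefix back in front of a TypeSpec blob before handing it to Sig, which expects to read that length first. A hedged stand-alone sketch of the encoder, checked against the standard's two-byte example (0x2E57 encodes as AE 57):

    // Hedged stand-alone counterpart of PEModule.compressUInt (ECMA-335 23.2).
    public class CompressedUIntEncode {
        static byte[] encode(int u) {
            if (u >= 0 && u <= 0x7f)
                return new byte[]{ (byte) u };
            if (u <= 0x3fff)                                           // 2^14 - 1
                return new byte[]{ (byte) ((u >> 8) | 0x80), (byte) u };
            return new byte[]{ (byte) ((u >> 24) | 0xc0), (byte) (u >> 16), (byte) (u >> 8), (byte) u };
        }

        public static void main(String[] args) {
            for (byte b : encode(0x2E57))
                System.out.printf("%02X ", b & 0xff);                  // prints "AE 57"
            System.out.println();
        }
    }
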
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/PEType.java b/src/msil/ch/epfl/lamp/compiler/msil/PEType.java
deleted file mode 100644
index 418c6603b3..0000000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/PEType.java
+++ /dev/null
@@ -1,419 +0,0 @@
-/*
- * System.Reflection-like API for access to .NET assemblies (DLL & EXE)
- */
-
-
-package ch.epfl.lamp.compiler.msil;
-
-import ch.epfl.lamp.compiler.msil.PEFile.Sig;
-
-import ch.epfl.lamp.compiler.msil.util.Table;
-import ch.epfl.lamp.compiler.msil.util.Table.*;
-import ch.epfl.lamp.compiler.msil.util.Signature;
-import ch.epfl.lamp.compiler.msil.util.PECustomMod;
-
-import java.util.ArrayList;
-
-/**
- * Represents a type from a .NET assembly
- *
- * @author Nikolay Mihaylov
- * @version 1.0
- */
-final class PEType extends Type implements Signature {
-
- //##########################################################################
-
- /** The PEFile that holds the description of the type. */
- final PEFile file;
-
- /** The number of the row in the TypeDef table defining the type. */
- final int definingRow;
-
- /** The row of the first method in the MethodDef table. */
- final int methodListBeg;
-
- /** The row of the last method in the MethodDef table + 1. */
- final int methodListEnd;
-
- /** @param definingRow - the index in the TypeDef table where
- * the type description is.
- */
- PEType(PEModule module,
- int attributes,
- String fullName,
- Type declType,
- int auxAttr,
- PEFile file,
- int definingRow)
- {
- super(module, attributes, fullName, null, null, declType, auxAttr);
- this.file = file;
- this.definingRow = definingRow;
- methodListBeg = file.TypeDef(definingRow).MethodList;
- methodListEnd = definingRow < file.TypeDef.rows
- ? file.TypeDef(definingRow + 1).MethodList
- : file.MethodDef.rows + 1;
- }
-
- //##########################################################################
- // lazy type construction methods
-
- protected void loadBaseType() {
- TypeDef type = file.TypeDef(definingRow);
- baseType = type.Extends == 0 ? null
- : ((PEModule)Module).getTypeDefOrRef(type.Extends);
- }
-
- protected void loadFields() {
- // the list of the declared fields starts from the
- // FieldList index in the TypeDef table up to the smaller of the:
- // - the last row of the FieldDef table
- // - the start of the next list of fields determined by the
- // FieldList index of the next row in the TypeDef table
- final ArrayList fields = new ArrayList();
- int fieldListBeg = file.TypeDef(definingRow).FieldList;
- int fieldListEnd = file.FieldDef.rows + 1;
- if (definingRow < file.TypeDef.rows)
- fieldListEnd = file.TypeDef(definingRow + 1).FieldList;
-
- for (int row = fieldListBeg; row < fieldListEnd; row++) {
- int frow = file.FieldTrans.rows == 0
- ? row : file.FieldTrans(row).Field;
- int attrs = file.FieldDef(frow).Flags;
- String name = file.FieldDef.getName();
- //System.out.println("\t-->Loading field: " + name);
- Sig sig = file.FieldDef.getSignature();
- PECustomMod pecmod = sig.decodeFieldType();
- Object val = null;
- Table.Constant consts = file.Constant;
- for (int i = 1; i <= consts.rows; i++) {
- consts.readRow(i);
- int tableId = Table.getTableId(Table._HasConstant,consts.Parent);
- int refRow = consts.Parent >> Table.NoBits[Table._HasConstant];
- if (tableId == Table.FieldDef.ID && refRow == frow)
- val = consts.getValue();
- }
- FieldInfo field = new PEFieldInfo(row, name, attrs, pecmod, val);
- if (field.Name.equals("value__") && field.IsSpecialName()) {
- assert underlyingType == null : underlyingType.toString();
- underlyingType = field.FieldType;
- }
- fields.add(field);
- }
- this.fields = (FieldInfo[])
- fields.toArray(FieldInfo.EMPTY_ARRAY);
- fields.clear();
- }
-
- protected MethodBase[] methoddefs;
-
- protected MethodInfo getMethod(int n) {
- return (MethodInfo)methoddefs[n - methodListBeg];
- }
-
- protected void loadMethods() {
- methoddefs = new MethodBase[methodListEnd - methodListBeg];
-
- final ArrayList methods = new ArrayList();
- final ArrayList constrs = new ArrayList();
- PEModule pemodule = (PEModule) Module;
- for (int row = methodListBeg; row < methodListEnd; row++) {
- int mrow = file.MethodTrans.rows == 0
- ? row : file.MethodTrans(row).Method;
- int attrs = file.MethodDef(mrow).Flags;
- String name = file.MethodDef.getName();
- Sig sig = file.MethodDef.getSignature();
- /* we're about to parse a MethodDefSig, defined in Sec. 23.2.1 of Partition II () */
-
- int callConv = sig.readByte();
- // TODO decode HASTHIS from high byte of calling convention
- // TODO decode EXPLICITTHIS from high byte of calling convention
- // TODO handle VARARG calling convention (not CLS but may show up )
- if((callConv & 0x1F) == Signature.GENERIC) {
- int genParamCount = sig.decodeInt();
- /* genParamCount is ignored because the method's type params will be obtained below
- (see: file.GenericParam.getMVarIdxes(row) ) */
- }
- int paramCount = sig.decodeInt();
- Type retType = sig.decodeRetType();
- Type[] paramType = new Type[paramCount];
- for (int i = 0; i < paramCount; i++)
- paramType[i] = sig.decodeParamType();
-
- ParameterInfo[] params = new ParameterInfo[paramCount];
- int paramListBeg = file.MethodDef.ParamList;
- int paramListEnd = paramListBeg + paramCount;
- if (paramListEnd > file.ParamDef.rows) {
- /* don't try to read param names past ParamDef's row count
- Some assembly-writers don't bother to give names for all params. */
- paramListEnd = file.ParamDef.rows + 1;
- }
- for (int i = paramListBeg; i < paramListEnd; i++) {
- int pattr = file.ParamDef(i).Flags;
- String paramName = file.ParamDef.getName();
- int seq = file.ParamDef.Sequence;
- if (seq == 0) {
- //System.out.println("Retval attributes 0x" +
- // PEFile.short2hex(pattr));
- } else {
- params[seq - 1] = new ParameterInfo(paramName, paramType[seq - 1], pattr, seq - 1);
- }
- }
- for (int i = 0; i < params.length; i++) {
- if (params[i] == null)
- params[i] = new ParameterInfo(null, paramType[i], 0, 0);
- }
- MethodBase method = null;
- if ((attrs & MethodAttributes.SpecialName) != 0
- && (attrs & MethodAttributes.RTSpecialName) != 0
- && (name.equals(ConstructorInfo.CTOR)
- || name.equals(ConstructorInfo.CCTOR)))
- {
- method = new PEConstructorInfo(row, attrs, params);
- }
- else {
- method = new PEMethodInfo(row, name, attrs, retType, params);
- int[] mvarIdxes = file.GenericParam.getMVarIdxes(row);
- // if(mvarIdxes.length > 0) { System.out.println("Method: " + method); }
- for(int i = 0; i < mvarIdxes.length; i++) {
- GenericParamAndConstraints mvarAndConstraints = pemodule.getTypeConstraints(mvarIdxes[i]);
- // add mvarAndConstraints as i-th MVar in method
- ((PEMethodInfo)method).addMVar(mvarAndConstraints);
- }
- }
- (method.IsConstructor() ? constrs : methods).add(method);
- methoddefs[row - methodListBeg] = method;
- }
-
- this.constructors = (ConstructorInfo[])
- constrs.toArray(ConstructorInfo.EMPTY_ARRAY);
- this.methods = (MethodInfo[])
- methods.toArray(MethodInfo.EMPTY_ARRAY);
- constrs.clear(); methods.clear();
- }
-
- protected void loadProperties() {
- final PropertyMap pmap = file.PropertyMap;
- if (pmap == null) {
- properties = PropertyInfo.EMPTY_ARRAY;
- return;
- }
-
- final PropertyDef pdef = file.PropertyDef;
- int propListBeg = -1;
- int propListEnd = pdef.rows + 1;
- for (int i = 1; i <= pmap.rows; i++) {
- pmap.readRow(i);
- if (pmap.Parent == this.definingRow) {
- propListBeg = pmap.PropertyList;
- if (i < pmap.rows) {
- pmap.readRow(i + 1);
- propListEnd = pmap.PropertyList;
- }
- break;
- }
- }
- if (propListBeg < 0) {
- properties = PropertyInfo.EMPTY_ARRAY;
- return;
- }
-
- final ArrayList properties = new ArrayList();
- for (int i = propListBeg; i < propListEnd; i++) {
- pdef.readRow(i);
- Sig sig = pdef.getSignature();
- int b = sig.readByte();
- b &= ~HASTHIS;
- int paramCount = sig.readByte();
- assert b == PROPERTY;
- Type propType = sig.decodeType();
- int index = Table.encodeIndex(i, Table._HasSemantics,
- Table.PropertyDef.ID);
- MethodSemantics msem = file.MethodSemantics;
- MethodInfo getter = null, setter = null;
- for (int j = 1; j <= msem.rows; j++) {
- msem.readRow(j);
- if (msem.Association != index)
- continue;
- if (msem.isGetter())
- getter = getMethod(msem.Method);
- else if (msem.isSetter())
- setter = getMethod(msem.Method);
- else
- System.err.println("PEType.loadProperties(): !?!");
- }
- properties.add
- (new PEPropertyInfo(i, pdef.getName(), (short)pdef.Flags,
- propType, getter, setter));
- }
- this.properties = (PropertyInfo[]) properties
- .toArray(PropertyInfo.EMPTY_ARRAY);
- }
-
- protected void loadEvents() {
- EventMap emap = file.EventMap;
- if (emap == null) {
- this.events = EventInfo.EMPTY_ARRAY;
- return;
- }
-
- final EventDef edef = file.EventDef;
- int eventListBeg = -1;
- int eventListEnd = edef.rows + 1;
- for (int i = 1; i <= emap.rows; i++) {
- emap.readRow(i);
- if (emap.Parent == this.definingRow) {
- eventListBeg = emap.EventList;
- if (i < emap.rows) {
- emap.readRow(i + 1);
- eventListEnd = emap.EventList;
- }
- break;
- }
- }
- if (eventListBeg < 0) {
- this.events = EventInfo.EMPTY_ARRAY;
- return;
- }
-
- final ArrayList events = new ArrayList();
- final MethodSemantics msem = file.MethodSemantics;
- for (int i = eventListBeg; i < eventListEnd; i++) {
- edef.readRow(i);
- final Type handler =
- ((PEModule)Module).getTypeDefOrRef(edef.EventType);
- int index =
- Table.encodeIndex(i, Table._HasSemantics, Table.EventDef.ID);
- MethodInfo add = null, remove = null;
- for (int j = 1; j <= msem.rows; j++) {
- msem.readRow(j);
- if (msem.Association != index)
- continue;
- if (msem.isAddOn())
- add = getMethod(msem.Method);
- else if (msem.isRemoveOn())
- remove = getMethod(msem.Method);
- else {
- }
- }
- events.add(new PEEventInfo(i, edef.getName(),
- (short)edef.EventFlags,
- handler, add, remove));
- }
- this.events = (EventInfo[]) events
- .toArray(EventInfo.EMPTY_ARRAY);
- }
-
- protected void loadNestedTypes() {
- final ArrayList nested = new ArrayList();
- for (int i = 1; i <= file.NestedClass.rows; i++) {
- file.NestedClass.readRow(i);
- if (file.NestedClass.EnclosingClass == this.definingRow)
- nested.add(((PEModule)Module)
- .getTypeDef(file.NestedClass.NestedClass));
- }
- this.nestedTypes = (Type[]) nested.toArray(Type.EmptyTypes);
- }
-
- protected void loadInterfaces() {
- // get the interfaces implemented by this class
- interfaces = Type.EmptyTypes;
- int index = file.InterfaceImpl.findType(definingRow);
- if (index > 0) {
- ArrayList ifaces = new ArrayList();
- for (int i = index; i <= file.InterfaceImpl.rows; i++) {
- file.InterfaceImpl.readRow(i);
- if (file.InterfaceImpl.Class != definingRow)
- break;
- ifaces.add(((PEModule)Module)
- .getTypeDefOrRef(file.InterfaceImpl.Interface));
- }
- interfaces = (Type[]) ifaces.toArray(new Type[ifaces.size()]);
- }
- }
-
- protected void loadCustomAttributes(Type attributeType) {
- initAttributes(this, definingRow, Table.TypeDef.ID, attributeType);
- }
-
- private void initAttributes(CustomAttributeProvider cap, int definingRow,
- int sourceTableId, Type attributeType)
- {
- ((PEModule)this.Module).initAttributes
- (cap, definingRow, sourceTableId, attributeType);
- }
-
- //##########################################################################
-
- private class PEFieldInfo extends FieldInfo {
- private final int definingRow;
- public PEFieldInfo(int definingRow, String name,
- int attrs, PECustomMod pecmod, Object value)
- {
- super(name, PEType.this, attrs, pecmod, value);
- this.definingRow = definingRow;
- }
- protected void loadCustomAttributes(Type attributeType) {
- PEType.this.initAttributes
- (this, definingRow, Table.FieldDef.ID, attributeType);
- }
- }
-
- private class PEMethodInfo extends MethodInfo {
- private final int definingRow;
- public PEMethodInfo(int row, String name,
- int attrs, Type retType, ParameterInfo[] params)
- {
- super(name, PEType.this, attrs, retType, params);
- this.definingRow = row;
- }
- protected void loadCustomAttributes(Type attributeType) {
- PEType.this.initAttributes
- (this, definingRow, Table.MethodDef.ID, attributeType);
- }
- }
-
- private class PEConstructorInfo extends ConstructorInfo {
- private final int definingRow;
- public PEConstructorInfo(int row, int attrs, ParameterInfo[] params) {
- super(PEType.this, attrs, params);
- this.definingRow = row;
- }
- protected void loadCustomAttributes(Type attributeType) {
- PEType.this.initAttributes
- (this, definingRow, Table.MethodDef.ID, attributeType);
- }
- }
-
- private class PEPropertyInfo extends PropertyInfo {
- private final int definingRow;
- public PEPropertyInfo(int row, String name, short attrs, Type propType,
- MethodInfo getter, MethodInfo setter)
- {
- super(name, PEType.this, attrs, propType, getter, setter);
- this.definingRow = row;
- }
- protected void loadCustomAttributes(Type attributeType) {
- PEType.this.initAttributes
- (this, definingRow, Table.PropertyDef.ID, attributeType);
- }
- }
-
- private class PEEventInfo extends EventInfo {
- private final int definingRow;
- public PEEventInfo(int row, String name, short attrs, Type handler,
- MethodInfo add, MethodInfo remove)
- {
- super(name, PEType.this, attrs, handler, add, remove);
- this.definingRow = row;
- }
- protected void loadCustomAttributes(Type attributeType) {
- PEType.this.initAttributes
- (this, definingRow, Table.EventDef.ID, attributeType);
- }
- }
-
- //##########################################################################
-
-} // class PEType
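The methodListBeg/methodListEnd (and fieldListBeg/fieldListEnd) arithmetic in the deleted PEType code above follows the usual CLI metadata convention: a type's member list runs from its own list pointer up to, but not including, the next TypeDef row's pointer, falling back to the end of the member table for the last row. A stand-alone sketch of that convention, with made-up data and hypothetical helper names:

public class MemberRangeDemo {
    /**
     * Returns {begin, endExclusive} rows in the member table owned by the
     * 1-based typeRow, given each TypeDef row's list pointer and the total
     * number of member rows.
     */
    static int[] memberRange(int[] listPointers, int typeRow, int memberRows) {
        int beg = listPointers[typeRow - 1];            // this type's first member row
        int end = (typeRow < listPointers.length)
                ? listPointers[typeRow]                 // next type's first member row
                : memberRows + 1;                       // last type: run to end of table
        return new int[] { beg, end };
    }

    public static void main(String[] args) {
        int[] methodList = { 1, 4, 4, 9 };              // 4 types, 10 methods (made-up)
        int[] r = memberRange(methodList, 2, 10);
        System.out.println(r[0] + ".." + (r[1] - 1));   // type 2 owns no methods: 4..3
        r = memberRange(methodList, 4, 10);
        System.out.println(r[0] + ".." + (r[1] - 1));   // last type owns 9..10
    }
}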
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/ParameterAttributes.java b/src/msil/ch/epfl/lamp/compiler/msil/ParameterAttributes.java
deleted file mode 100644
index d4360363fc..0000000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/ParameterAttributes.java
+++ /dev/null
@@ -1,72 +0,0 @@
-/*
- * System.Reflection-like API for access to .NET assemblies (DLL & EXE)
- */
-
-
-package ch.epfl.lamp.compiler.msil;
-
-/**
- * Defines the attributes that may be associated with a parameter.
- *
- * @author Nikolay Mihaylov
- * @version 1.0
- */
-public final class ParameterAttributes {
-
- // just to make the class uninstantiable
- private ParameterAttributes() {}
-
- //##########################################################################
-
- /** Specifies that there is no parameter attribute. */
- public static final short None = 0x0000;
-
- /** Specifies that the parameter is an input parameter. */
- public static final short In = 0x0001;
-
- /** Specifies that the parameter is an output parameter. */
- public static final short Out = 0x0002;
-
- /** Specifies that the parameter is a locale identifier. */
- public static final short Lcid = 0x0004;
-
- /** Specifies that the parameter is a return value. */
- public static final short Retval = 0x0008;
-
- /** Specifies that the parameter is optional.
-     * Attention: In the specification the value is 0x0004, but
-     * in mscorlib.dll 0x0004 is Lcid and Optional is 0x0010.

- */
- public static final short Optional = 0x0010;
-
- /** Specifies that the parameter has a default value. */
- public static final short HasDefault = 0x1000;
-
- /** Specifies that the parameter has field marshaling information. */
- public static final short HasFieldMarshal = 0x2000;
-
- /** Reserved. */
- public static final short Reserved3 = 0x4000;
-
- /** Reserved. */
- public static final short Reserved4 = (short)0x8000;
-
- /** Specifies that the parameter is reserved. */
- public static final short ReservedMask = (short)0xf000;
-
- /** Reserved: shall be zero in all conforming implementations. */
- public static final short Unused = (short) 0xcfe0;
-
- public static final String toString(int attrs) {
- StringBuffer s = new StringBuffer();
- if ((attrs & In) != 0) s.append("in ");
- if ((attrs & Out) != 0) s.append("out ");
- if ((attrs & Optional) != 0) s.append("opt ");
- if ((attrs & HasDefault) != 0) s.append("default(???) ");
- if ((attrs & HasFieldMarshal) != 0) s.append("marshal(???) ");
- return s.toString();
- }
-
- //##########################################################################
-
-} // class ParameterAttributes
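A small usage sketch for the flag constants above; it is illustrative only and assumes the ch.epfl.lamp.compiler.msil classes are available on the classpath.

import ch.epfl.lamp.compiler.msil.ParameterAttributes;

public class ParamFlagsDemo {
    public static void main(String[] args) {
        // Combine flags with bitwise OR, test them with bitwise AND.
        short attrs = (short) (ParameterAttributes.In | ParameterAttributes.Optional);
        boolean isIn  = (attrs & ParameterAttributes.In)  != 0;   // true
        boolean isOut = (attrs & ParameterAttributes.Out) != 0;   // false
        System.out.println(isIn + " " + isOut);
        System.out.println(ParameterAttributes.toString(attrs));  // "in opt "
    }
}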
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/ParameterInfo.java b/src/msil/ch/epfl/lamp/compiler/msil/ParameterInfo.java
deleted file mode 100644
index 877d7aa8a5..0000000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/ParameterInfo.java
+++ /dev/null
@@ -1,76 +0,0 @@
-/*
- * System.Reflection-like API for access to .NET assemblies (DLL & EXE)
- */
-
-
-package ch.epfl.lamp.compiler.msil;
-
-/**
- * Discovers the attributes of a parameter and provides access to
- * parameter metadata.
- *
- * @author Nikolay Mihaylov
- * @version 1.0
- */
-public class ParameterInfo extends CustomAttributeProvider {
-
- //##########################################################################
-
- /** Attributes of the parameter. */
- public final short Attributes;
-
- /** Name of the parameter. */
- public final String Name;
-
- /** Type of the parameter. */
- public final Type ParameterType;
-
- /** Position of the parameter in the parameter list. */
- public final int Position;
-
- //##########################################################################
-
- /** Is this an input parameter? */
- public final boolean IsIn() {
- return (Attributes & ParameterAttributes.In) != 0;
- }
-
- /** Is this an output parameter? */
- public final boolean IsOut() {
- return (Attributes & ParameterAttributes.Out) != 0;
- }
-
- /** Is this an Lcid? */
- public final boolean IsLcid() {
- return (Attributes & ParameterAttributes.Lcid) != 0;
- }
-
- /** Is this a return value? */
- public final boolean IsRetval() {
- return (Attributes & ParameterAttributes.Retval) != 0;
- }
-
- /** Is this an optional parameter? */
- public final boolean IsOptional() {
- return (Attributes & ParameterAttributes.Optional) != 0;
- }
-
- //##########################################################################
- // members not part of the public Reflection.ParameterInfo interface
-
- /** Initializes a new instance of the ParameterInfo class. */
- protected ParameterInfo(String name, Type type, int attr, int pos) {
- Name = name;
- ParameterType = type;
- Attributes = (short)attr;
- Position = pos;
- }
-
- public String toString() {
- return ParameterAttributes.toString(Attributes) + ParameterType + " "
- + Name;
- }
-
- //##########################################################################
-
-} // class ParameterInfo
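As an illustration of how the fields and predicates above are typically consumed, the hypothetical helper below dumps the parameter list of a method obtained elsewhere (it assumes MethodInfo.GetParameters() is accessible, as its use in Type.findMethod suggests).

import ch.epfl.lamp.compiler.msil.MethodInfo;
import ch.epfl.lamp.compiler.msil.ParameterInfo;

public class ParamDumpDemo {
    // Print position, type, name and in/out markers for each parameter.
    static void dumpParameters(MethodInfo method) {
        ParameterInfo[] params = method.GetParameters();
        for (ParameterInfo p : params) {
            System.out.println(p.Position + ": " + p.ParameterType + " " + p.Name
                + (p.IsIn() ? " [in]" : "")
                + (p.IsOut() ? " [out]" : ""));
        }
    }
}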
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/PrimitiveType.java b/src/msil/ch/epfl/lamp/compiler/msil/PrimitiveType.java
deleted file mode 100644
index b19fe29869..0000000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/PrimitiveType.java
+++ /dev/null
@@ -1,62 +0,0 @@
-package ch.epfl.lamp.compiler.msil;
-
-import ch.epfl.lamp.compiler.msil.util.PECustomMod;
-
-public final class PrimitiveType extends Type {
- public PrimitiveType(Module module,
- int attributes,
- String fullName,
- Type baseType,
- Type[] interfaces,
- Type declType,
- int auxAttr,
- Type elemType) {
- super(module, attributes, fullName,
- baseType, interfaces, declType, auxAttr, elemType);
- clearMembers();
- }
-
- public void clearMembers() {
- fields = FieldInfo.EMPTY_ARRAY;
- methods = MethodInfo.EMPTY_ARRAY;
- constructors = ConstructorInfo.EMPTY_ARRAY;
- events = EventInfo.EMPTY_ARRAY;
-
- initBaseType();
- initInterfaces();
-
- initFields();
- initMethods();
- initEvents();
- initProperties();
- initNestedTypes();
- }
-
- public FieldInfo addField(String name, int attrs, Type fieldType) {
- PECustomMod fieldTypeWithMods = new PECustomMod(fieldType, null);
- FieldInfo res = new FieldInfo(name, this, attrs, fieldTypeWithMods, null);
- FieldInfo[] ms = new FieldInfo[fields.length + 1];
- System.arraycopy(fields, 0, ms, 0, fields.length);
- ms[ms.length - 1] = res;
- fields = ms;
- return res;
- }
-
-    public MethodInfo addMethod(String name, int attrs, Type returnType, Type[] paramTypes) {
-        MethodInfo res = new MethodInfo(name, this, attrs, returnType, paramTypes);
-        MethodInfo[] ms = new MethodInfo[methods.length + 1];
-        System.arraycopy(methods, 0, ms, 0, methods.length);
-        ms[ms.length - 1] = res;
-        methods = ms; // store the grown array, as addField does
-        return res;
-    }
-
-    public ConstructorInfo addConstructor(int attrs, Type[] paramTypes) {
-        ConstructorInfo res = new ConstructorInfo(this, attrs, paramTypes);
-        ConstructorInfo[] ms = new ConstructorInfo[constructors.length + 1];
-        System.arraycopy(constructors, 0, ms, 0, constructors.length);
-        ms[ms.length - 1] = res;
-        constructors = ms; // store the grown array, as addField does
-        return res;
-    }
-
-}
-
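The addField/addMethod/addConstructor methods above all use the same grow-by-one array append. A stand-alone sketch of that pattern (illustrative names only); note that the grown array only takes effect if it is stored back, which is why addField reassigns the fields field.

import java.util.Arrays;

public class ArrayAppendDemo {
    // Copy the array into one with an extra slot and store the new element last.
    static String[] append(String[] arr, String elem) {
        String[] grown = Arrays.copyOf(arr, arr.length + 1);
        grown[grown.length - 1] = elem;
        return grown;                       // caller must keep this reference
    }

    public static void main(String[] args) {
        String[] fields = {};
        fields = append(fields, "value__"); // the result must be reassigned
        System.out.println(Arrays.toString(fields));
    }
}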
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/PropertyAttributes.java b/src/msil/ch/epfl/lamp/compiler/msil/PropertyAttributes.java
deleted file mode 100644
index b1bec64aff..0000000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/PropertyAttributes.java
+++ /dev/null
@@ -1,45 +0,0 @@
-/*
- * System.Reflection-like API for access to .NET assemblies (DLL & EXE)
- */
-
-
-package ch.epfl.lamp.compiler.msil;
-
-/**
- * Attributes applicable to properties.
- *
- * @author Nikolay Mihaylov
- * @version 1.0
- */
-public final class PropertyAttributes {
-
- // makes the class uninstantiable
- private PropertyAttributes() {}
-
- //##########################################################################
-
- /** Specifies that the property is special, with the name describing
- * how the property is special.
- */
- public static final short SpecialName = 0x0200;
-
- /** Specifies that the metadata internal APIs check the name encoding.
- */
- public static final short RTSpecialName = 0x0400;
-
- /** Specifies that the property has a default value.
- */
- public static final short HasDefault = 0x1000;
-
- //##########################################################################
-
- public static String toString(short attrs) {
- StringBuffer str = new StringBuffer();
- if ((attrs & SpecialName) != 0) str.append("specialname ");
- if ((attrs & RTSpecialName) != 0) str.append("rtspecialname ");
- return str.toString();
- }
-
- //##########################################################################
-
-} // class PropertyAttributes
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/PropertyInfo.java b/src/msil/ch/epfl/lamp/compiler/msil/PropertyInfo.java
deleted file mode 100644
index 4b7cef8bc1..0000000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/PropertyInfo.java
+++ /dev/null
@@ -1,104 +0,0 @@
-/*
- * System.Reflection-like API for access to .NET assemblies (DLL & EXE)
- */
-
-
-package ch.epfl.lamp.compiler.msil;
-
-/**
- * Discovers the attributes of a property
- * and provides access to property metadata.
- *
- * @author Nikolay Mihaylov
- * @version 1.0
- */
-public class PropertyInfo extends MemberInfo {
-
- //##########################################################################
-
- public final int MemberType() { return MemberTypes.Property; }
-
- public final short Attributes;
-
- public final boolean CanRead;
-
- public final boolean CanWrite;
-
- public final Type PropertyType;
-
- /** Returns an array of the public get and set accessors for this property.
- */
- public MethodInfo[] GetAccessors() {
- return GetAccessors(false);
- }
-
- /** Returns an array of the public or non-public <b>get</b>
- * and <b>set</b> accessors for this property.
- */
- public MethodInfo[] GetAccessors(boolean nonPublic) {
- MethodInfo getter = GetGetMethod(nonPublic);
- MethodInfo setter = GetSetMethod(nonPublic);
- if (getter == null)
- if (setter == null) return MethodInfo.EMPTY_ARRAY;
- else return new MethodInfo[]{setter};
- else if (setter == null) return new MethodInfo[] {getter};
- else return new MethodInfo[] {getter, setter};
- }
-
- /** Returns the public <b>get</b> accessor for this property.
- */
- public MethodInfo GetGetMethod() {
- return GetGetMethod(false);
- }
-
- /** Returns the public or non-public <b>get</b> accessor for this property.
- */
- public MethodInfo GetGetMethod(boolean nonPublic) {
- return nonPublic ? getter
- : getter == null || getter.IsPublic() ? getter : null;
- }
-
- /** Returns the public <b>set</b> accessor for this property.
- */
- public MethodInfo GetSetMethod() {
- return GetSetMethod(false);
- }
-
- /** Returns the public or non-public <b>set</b> accessor for this property.
- */
- public MethodInfo GetSetMethod(boolean nonPublic) {
- return nonPublic ? setter
- : setter == null || setter.IsPublic() ? setter : null;
- }
-
-    public String toString() {
-        MethodInfo m = getter != null ? getter : setter;
-        return MethodAttributes.accessFlagsToString(m.Attributes)
-            + " " + PropertyAttributes.toString(Attributes)
-            + DeclaringType + "::" + Name;
-    }
-
- //##########################################################################
- // protected members
-
- protected static final PropertyInfo[] EMPTY_ARRAY = new PropertyInfo[0];
-
- protected MethodInfo getter;
- protected MethodInfo setter;
-
- protected PropertyInfo(String name, Type declType, short attr,
- Type propType, MethodInfo getter, MethodInfo setter)
- {
- super(name, declType);
- Attributes = attr;
- PropertyType = propType;
- this.getter = getter;
- this.setter = setter;
- CanRead = getter != null;
- CanWrite = setter != null;
- }
-
- //##########################################################################
-
-} // class PropertyInfo
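A hypothetical usage sketch for the accessors above: it iterates the properties of a Type obtained elsewhere (e.g. via Type.GetType) and assumes the msil classes are on the classpath.

import ch.epfl.lamp.compiler.msil.PropertyInfo;
import ch.epfl.lamp.compiler.msil.Type;

public class PropertyDumpDemo {
    // Print each property with its type, read/write flags and accessor count.
    static void dumpProperties(Type type) {
        for (PropertyInfo p : type.GetProperties()) {
            System.out.println(p.Name + " : " + p.PropertyType
                + (p.CanRead ? " [get]" : "")
                + (p.CanWrite ? " [set]" : "")
                + " accessors=" + p.GetAccessors(true).length);
        }
    }
}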
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/Type.java b/src/msil/ch/epfl/lamp/compiler/msil/Type.java
deleted file mode 100644
index 830632ce45..0000000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/Type.java
+++ /dev/null
@@ -1,1142 +0,0 @@
-/*
- * System.Reflection-like API for access to .NET assemblies (DLL & EXE)
- */
-
-
-package ch.epfl.lamp.compiler.msil;
-
-import java.util.Map;
-import java.util.HashMap;
-import java.util.List;
-import java.util.ArrayList;
-import java.util.Iterator;
-import java.util.Arrays;
-
-/**
- * Represents type declarations: class types, interface types, array types,
- * value types, and enumeration types.
- *
- * @author Nikolay Mihaylov
- * @version 1.0
- */
-public abstract class Type extends MemberInfo {
-
- private java.util.List /* GenericParamAndConstraints */ tVars = new java.util.LinkedList();
- private GenericParamAndConstraints[] sortedTVars = null;
-
- public void addTVar(GenericParamAndConstraints tvarAndConstraints) {
- sortedTVars = null;
- tVars.add(tvarAndConstraints);
- }
-
- public GenericParamAndConstraints[] getSortedTVars() {
- if(sortedTVars == null) {
- sortedTVars = new GenericParamAndConstraints[tVars.size()];
- for (int i = 0; i < sortedTVars.length; i ++){
- Iterator iter = tVars.iterator();
- while(iter.hasNext()) {
- GenericParamAndConstraints tvC = (GenericParamAndConstraints)iter.next();
- if(tvC.Number == i) {
- sortedTVars[i] = tvC;
- }
- }
- }
- }
- return sortedTVars;
- }
-
-
- //##########################################################################
- // public static members
-
- /** Empty array of type Type. */
- public static final Type[] EmptyTypes = new Type[0];
-
- /** Separates names in the namespace of the Type. */
- public static final char Delimiter = '.';
-
- //##########################################################################
- // public properties
-
- /** The fully qualified name of the Type. */
- public final String FullName;
-
- /** The namespace of the Type. */
- public final String Namespace;
-
- /** The type from which the current Type directly inherits. */
- public final Type BaseType() {
- initBaseType();
- return baseType;
- }
- protected Type baseType;
-
- /** The attributes associated with the Type. */
- public final int Attributes;
-
-    /** The assembly that the type is declared in. */
- public final Assembly Assembly() { return Module.Assembly; }
-
- /** The module (the EXE/DLL) in which the current Type is defined. */
- public final Module Module;
-
- public final int MemberType() {
- return DeclaringType == null
- ? MemberTypes.TypeInfo : MemberTypes.NestedType;
- }
-
- //##########################################################################
- // internal members
-
- // Fields declared by this class
- protected FieldInfo[] fields;
-
- // Methods declared by this class
- protected MethodInfo[] methods;
-
- // Constructors of this class
- protected ConstructorInfo[] constructors;
-
- // Properties of the class
- protected PropertyInfo[] properties;
-
- // Events of the class
- protected EventInfo[] events;
-
- // Interfaces implemented by this class
- protected Type[] interfaces;
-
- // Nested types declared by this class
- protected Type[] nestedTypes;
-
- // holds the element type of array, pointer and byref types
- private final Type elemType;
-
- // the underlying type of an enumeration. null if the type is not enum.
- protected Type underlyingType;
-
- protected int auxAttr;
-
- //##########################################################################
- // Map with all the types known so far and operations on it
-
- private static final Map types = new HashMap();
-
- protected static Type getType(String name) {
- return (Type) types.get(name);
- }
-
- protected static Type addType(Type t) {
- assert(!(t instanceof TMVarUsage));
- assert(!(t instanceof ConstructedType));
- Type oldType = (Type) types.put(t.FullName, t);
-// if (oldType != null)
-// throw new RuntimeException("The type: [" + t.Assembly + "]" + t
-// + " replaces the type: [" +
-// oldType.Assembly + "]" + oldType);
- return t;
- }
-
- //##########################################################################
-
- /** The main constructor. */
- protected Type(Module module,
- int attr,
- String fullName,
- Type baseType,
- Type[] interfaces,
- Type declType,
- int auxAttr,
- Type elemType)
- {
- super(fullName.lastIndexOf(Delimiter) < 0 ? fullName :
- fullName.substring(fullName.lastIndexOf(Delimiter) + 1,
- fullName.length()),
- declType);
-
- Module = module; // null only for TMVarUsage and for PrimitiveType
- Attributes = attr;
- this.baseType = baseType;
- if (DeclaringType == null) {
- FullName = fullName;
- int i = FullName.lastIndexOf(Delimiter);
- Namespace = (i < 0) ? "" : FullName.substring(0,i);
- } else {
- FullName = declType.FullName + "+" + fullName;
- Namespace = DeclaringType.Namespace;
- }
-
- this.interfaces = interfaces;
- this.elemType = elemType;
- this.auxAttr = auxAttr;
- }
-
-    public final boolean IsAbstract() {
-        return (Attributes & TypeAttributes.Abstract) != 0;
-    }
- public final boolean IsPublic() {
- return (Attributes & TypeAttributes.VisibilityMask)
- == TypeAttributes.Public;
- }
-
- public final boolean IsNotPublic() {
- return (Attributes & TypeAttributes.VisibilityMask)
- == TypeAttributes.NotPublic;
- }
-
- public final boolean IsNestedPublic() {
- return (Attributes & TypeAttributes.VisibilityMask)
- == TypeAttributes.NestedPublic;
- }
-
- public final boolean IsNestedPrivate() {
- return (Attributes & TypeAttributes.VisibilityMask)
- == TypeAttributes.NestedPrivate;
- }
-
- public final boolean IsNestedFamily() {
- return (Attributes & TypeAttributes.VisibilityMask)
- == TypeAttributes.NestedFamily;
- }
-
- public final boolean IsNestedAssembly() {
- return (Attributes & TypeAttributes.VisibilityMask)
- == TypeAttributes.NestedAssembly;
- }
-
- public final boolean IsNestedFamORAssem() {
- return (Attributes & TypeAttributes.VisibilityMask)
- == TypeAttributes.NestedFamORAssem;
- }
-
- public final boolean IsNestedFamANDAssem() {
- return (Attributes & TypeAttributes.VisibilityMask)
- == TypeAttributes.NestedFamANDAssem;
- }
-
- public final boolean IsSealed() {
- return (Attributes & TypeAttributes.Sealed) != 0;
- }
-
- public final boolean IsSpecialName() {
- return (Attributes & TypeAttributes.SpecialName) != 0;
- }
-
- public final boolean IsClass() {
- return (Attributes & TypeAttributes.ClassSemanticsMask)
- == TypeAttributes.Class;
- }
-
- public final boolean IsInterface(){
- return (Attributes & TypeAttributes.ClassSemanticsMask)
- == TypeAttributes.Interface;
- }
-
- public final boolean IsAutoLayout() {
- return (Attributes & TypeAttributes.LayoutMask)
- == TypeAttributes.AutoLayout;
- }
- public final boolean IsExplictitLayout() {
- return (Attributes & TypeAttributes.LayoutMask)
- == TypeAttributes.ExplicitLayout;
- }
- public final boolean IsLayoutSequential() {
- return (Attributes & TypeAttributes.LayoutMask)
- == TypeAttributes.SequentialLayout;
- }
-
- public final boolean IsImport() {
- return (Attributes & TypeAttributes.Import) != 0;
- }
- public final boolean IsSerializable() {
- return (Attributes & TypeAttributes.Serializable) != 0;
- }
-
- public final boolean IsAnsiClass() {
- return (Attributes & TypeAttributes.StringFormatMask)
- == TypeAttributes.AnsiClass;
- }
-
- public final boolean IsUnicodeClass() {
- return (Attributes & TypeAttributes.StringFormatMask)
- == TypeAttributes.UnicodeClass;
- }
- public final boolean IsAutoClass() {
- return (Attributes & TypeAttributes.StringFormatMask)
- == TypeAttributes.AutoClass;
- }
-
- public final boolean IsArray() {
- return (auxAttr & AuxAttr.Array) != 0;
- }
- public final boolean IsByRef() {
- return (auxAttr & AuxAttr.ByRef) != 0;
- }
- public final boolean IsPointer() {
- return (auxAttr & AuxAttr.Pointer) != 0;
- }
- public final boolean IsPrimitive() {
- return (auxAttr & AuxAttr.Primitive) != 0;
- }
- public final boolean IsValueType() {
- return BaseType() == VALUE_TYPE() || IsEnum();
- }
- public final boolean IsEnum() {
- return BaseType() == ENUM();
- }
- public boolean CanBeTakenAddressOf() {
- /* TODO should be overridden in TMVarUsage,
- but there's currently no way to bind a TMVarUsage to its GenericParamAndConstraints definition. Why?
- Because of the way the msil library is organized (e.g., mkArray() returns the same !0[] representation
- for all !0[] usages, irrespective of the scope of the !0 type-param)
- This in turn is so because without generics there's no harm in using a type-def instance
- where a type-ref should go (e.g., the ParameterType of a ParameterInfo nowadays may point to a PEType).
- The net effect is that this method (CanBeTakenAddressOf) is conservative, it will answer "no"
- for example for !0 where !0 refers to a type-param with the isValuetype constraint set.
- The whole thing is ok at this point in time, where generics are not supported at the backend. */
- return IsValueType() && (this != ENUM());
- /* ENUM() is a singleton, i.e. System.Enum is not generic */
- }
-
- /** IsGeneric, true for a PEType or TypeBuilder (i.e., a type definition)
- * containing one or more type params. Not to be called on a reference
- * to a constructed type. */
- public final boolean IsGeneric() {
- return tVars.size() > 0;
- }
-
- public final boolean HasElementType() {
- return IsArray() || IsPointer() || IsByRef();
- }
-
- public boolean IsTMVarUsage() {
- // overridden in TMVarUsage
- return false;
- }
-
- public boolean IsNestedType() {
- return DeclaringType != null;
- }
-
- public boolean IsDefinitelyInternal() {
- if(IsNestedType()) {
- return IsNestedPrivate();
- } else {
- return IsNotPublic();
- }
- }
-
- //public final boolean IsCOMObject;
- //public final boolean IsContextful;
- //public final boolean IsMarshalByRef;
-
- protected Type(Module module,
- int attr,
- String fullName,
- Type baseType,
- Type[] interfaces,
- Type declType,
- int auxAttr)
- {
- this(module, attr, fullName, baseType, interfaces,
- declType, auxAttr, null);
- }
-
- //##########################################################################
-
- public static final class TMVarUsage extends Type {
-
- public final int Number;
- public final boolean isTVar;
-
- /** Non-defining reference to either a TVar or an MVar.
- * An instance of GenericParamAndConstraints represents a TVar or an MVar definition. */
- public TMVarUsage(int Number, boolean isTVar) {
- super(null, 0, ((isTVar ? "!" : "!!") + Number), null, null, null, AuxAttr.None, null);
- this.Number = Number;
- this.isTVar = isTVar;
- }
-
- public String toString() {
- return (isTVar ? "!" : "!!") + Number;
- }
-
- public final boolean IsTMVarUsage() {
- return true;
- }
-
- public boolean equals(Object o) {
- if (this == o) return true;
- if (o == null || getClass() != o.getClass()) return false;
-
- TMVarUsage that = (TMVarUsage) o;
-
- if (Number != that.Number) return false;
- if (isTVar != that.isTVar) return false;
-
- return true;
- }
-
- public int hashCode() {
- int result = Number;
- result = 31 * result + (isTVar ? 1 : 0);
- return result;
- }
- }
-
- protected static final class AuxAttr {
- public static final int None = 0x0000;
- public static final int Array = 0x0001;
- public static final int ByRef = 0x0002;
- public static final int Pointer = 0x0008;
- public static final int Primitive = 0x0010;
- }
-
- /***/
- public static Type mkArray(Type elemType, int rank) {
- StringBuffer arrSig = new StringBuffer("[");
- for (int i = 0; i < rank; i++) {
- if (i > 0) arrSig.append(',');
- }
- arrSig.append(']');
- Type array = getType(elemType.FullName + arrSig);
- if (array != null)
- return array;
- array = new PrimitiveType(elemType.Module,
- elemType.Attributes
- | TypeAttributes.Sealed
- | TypeAttributes.Serializable,
- elemType.FullName + arrSig,
- ARRAY(), EmptyTypes, null,
- AuxAttr.Array, elemType);
- return addType(array);
- }
-
- /***/
- public static Type mkPtr(Type elemType) {
- String name = elemType.FullName + "*";
- Type type = getType(name);
- if (type != null) return type;
- type = new PrimitiveType(elemType.Module,
- elemType.Attributes,
- name, null, EmptyTypes, null,
- AuxAttr.Pointer, elemType);
- return addType(type);
- }
-
- /***/
- public static Type mkByRef(Type elemType) {
- String name = elemType.FullName + "&";
- Type type = getType(name);
- if (type != null) return type;
- type = new PrimitiveType(elemType.Module,
- elemType.Attributes,
- name, null, EmptyTypes, null,
- AuxAttr.ByRef, elemType);
- return addType(type);
- }
-
- //##########################################################################
- // public methods
-
- /**
-     * Return the type with the specified fully qualified name.
- * For example, the fully qualified name for a class might look like this:
- * TopNamespace.SubNameSpace.ContainingClass+NestedClass,MyAssembly
- */
- public static Type GetType(String fullName) {
- Type type = getType(fullName);
- if (type != null) return type;
-
- // check if it's an array type; TODO: make array type handling more robust
- int i = fullName.lastIndexOf('[');
- int j = fullName.lastIndexOf(']');
- if (i >= 0)
- if (j > i && j == (fullName.length() - 1)) {
- String elementTypeName = fullName.substring(0, i);
- Type elementType = GetType(elementTypeName);
- if (elementType == null)
- throw new RuntimeException
- ("Unknown element type '" + elementTypeName +
- "' for the array type: " + fullName);
- int rank = j - i;
- for (int k = i + 1; k < j; k++) {
- if (fullName.charAt(k) != ',')
- throw new RuntimeException
- ("Malformed type name: " + fullName);
- }
- return mkArray(elementType, rank);
- } else
- throw new RuntimeException("Malformed type name: " + fullName);
-
- // check if it's a pointer type
- if (fullName.charAt(fullName.length() - 1) == '*')
- return addType
- (mkPtr(GetType(fullName.substring(0, fullName.length()-1))));
-
- // check if it's a nested class
- i = fullName.lastIndexOf('+');
- if (i > 0) {
- if (i == 0 || i == (fullName.length() - 1))
- throw new RuntimeException("malformedTypeName");
- Type enclosing = GetType(fullName.substring(0, i));
- return enclosing == null ? null
- : enclosing.GetNestedType(fullName.substring(i + 1));
- }
-
- //System.out.println("Looking for type: " + fullName + " (" + fullName.length() + ")");
- // try in the assemblies
- Iterator assems = ch.epfl.lamp.compiler.msil.Assembly.
- assemblies.values().iterator();
- while (type == null && assems.hasNext()) {
- Assembly assem = ((Assembly) assems.next());
- type = assem.GetType(fullName);
- //System.out.println("\tin assemby " + assem + " -> " + type);
- }
-
- Type type2 = getType(fullName);
- if (type == type2) return type;
- return type == null ? null : addType(type);
- }
-
- /**
- * @return the type of the object encompassed or referenced to
- * by the current array, pointer or reference type.
- */
- public Type GetElementType() {
- return elemType;
- }
-
- /**
- * @return the type underlying an enumeration type.
- */
- public Type getUnderlyingType() {
- if (!IsEnum()) return null;
-        // this forces the loading of the underlying type from
-        // the type of the value__ field of the enumeration
- initFields();
- return underlyingType;
- }
-
- //##########################################################################
- // GetField/s/
-
- /** Searches for the field with the specified name. */
- public FieldInfo GetField(String name) {
- initFields();
- for (int i = 0; i < fields.length; i++)
- if (fields[i].Name.equals(name) && !fields[i].IsPrivate())
- return fields[i];
- return null;
- }
-
- /**
- */
- public FieldInfo GetField(String name, int bindingFlags) {
- FieldInfo[] fields = this.GetFields(bindingFlags);
- for (int i = 0; i < fields.length; i++)
- if (name.equals(fields[i].Name))
- return fields[i];
- return null;
- }
-
- /** Gets the fields of the current Type. */
- public FieldInfo[] GetFields() {
- return GetFields(BindingFlags.Instance | BindingFlags.Public);
- }
-
- /**
- */
- public FieldInfo[] GetFields(int bindingFlags) {
- initFields();
- final FieldInfo[] fields =
- getAllFields((bindingFlags & BindingFlags.DeclaredOnly) != 0);
- final boolean getInstance = (bindingFlags & BindingFlags.Instance) != 0;
- final boolean getStatic = (bindingFlags & BindingFlags.Static) != 0;
- final boolean getPublic = (bindingFlags & BindingFlags.Public) != 0;
- final boolean getNonPublic =
- (bindingFlags & BindingFlags.NonPublic) != 0;
-
- int cnt = 0;
- for (int i = 0; i < fields.length; i++) {
- FieldInfo field = fields[i];
- boolean accessible = (getPublic && field.IsPublic())
- || (getNonPublic && !field.IsPublic());
- if (accessible
- // strip off the private fields up the hierarchy
- && ((field.DeclaringType == this)
- || ((field.DeclaringType != this) && !field.IsPrivate()))
- && ((getInstance && !field.IsStatic())
- || ((getStatic && field.IsStatic()) &&
- (field.DeclaringType == this
- || (bindingFlags & BindingFlags.FlattenHierarchy) != 0))
- )
- )
- fields[cnt++] = field;
- }
- FieldInfo [] resFields = new FieldInfo[cnt];
- System.arraycopy(fields, 0, resFields, 0, cnt);
- return resFields;
- }
-
- protected FieldInfo[] getAllFields(boolean declaredOnly) {
- initFields();
- FieldInfo [] inherited = BaseType() == null || declaredOnly
- ? FieldInfo.EMPTY_ARRAY
- : BaseType().getAllFields(declaredOnly);
- FieldInfo[] allFields =
- new FieldInfo[inherited.length + this.fields.length];
- System.arraycopy(inherited, 0, allFields, 0, inherited.length);
- System.arraycopy(this.fields, 0,
- allFields, inherited.length, this.fields.length);
- return allFields;
- }
-
- //##########################################################################
- // GetConstructor/s/
-
- /** Searches for a public instance constructor whose parameters
- * match the types in the specified array. */
- public ConstructorInfo GetConstructor(Type[] paramTypes) {
- initMethods();
- for (int i = 0; i < constructors.length; i++) {
- if (equalParameters(constructors[i].GetParameters(), paramTypes))
- return constructors[i];
- }
- return null;
- }
-
- /** Returns all public instance constructors defined for the current Type.*/
- public ConstructorInfo[] GetConstructors() {
- return GetConstructors(BindingFlags.Instance | BindingFlags.Public);
- }
-
- /***/
- public ConstructorInfo[] GetConstructors(int bindingFlags) {
- initMethods();
- final boolean getInstance = (bindingFlags & BindingFlags.Instance) != 0;
- final boolean getStatic = (bindingFlags & BindingFlags.Static) != 0;
- final boolean getPublic = (bindingFlags & BindingFlags.Public) != 0;
- final boolean getNonPublic =
- (bindingFlags & BindingFlags.NonPublic) != 0;
-
- ConstructorInfo[] constrs =
- new ConstructorInfo[this.constructors.length];
- int cnt = 0;
- for (int i = 0; i < this.constructors.length; i++) {
- ConstructorInfo constr = this.constructors[i];
- boolean accessible = (getPublic && constr.IsPublic())
- || (getNonPublic && !constr.IsPublic());
- if (accessible
- && ((getInstance && !constr.IsStatic())
- || (getStatic && constr.IsStatic())))
- constrs[cnt++] = constr;
- }
- ConstructorInfo [] resConstrs = new ConstructorInfo[cnt];
- System.arraycopy(constrs, 0, resConstrs, 0, cnt);
- return resConstrs;
- }
-
- //##########################################################################
- // GetMethod/s/
-
- /** Searches for the specified public method whose parameters
- * match the specified argument types. */
- public MethodInfo GetMethod(String name, Type[] paramTypes) {
- return GetMethod(name, paramTypes, null);
- }
-
- public MethodInfo GetMethod(String name, Type[] paramTypes, Type retType) {
- initMethods();
- MethodInfo method = findMethod(methods, name, paramTypes, retType);
- if (method != null)
- return method;
- if (BaseType() != null) {
- method = BaseType().GetMethod(name, paramTypes, retType);
- if (method != null)
- return method;
- }
-// StringBuffer str = new StringBuffer(name);
-// str.append('(');
-// for (int i = 0; i < paramTypes.length; i++) {
-// if (i > 0) str.append(", ");
-// str.append(paramTypes[i]);
-// }
-// str.append(')');
-// System.out.println("Cannot find method " + str + ":");
-// System.out.println("Methods of class " + this);
-// for (int i = 0; i < methods.length; i++)
-// System.out.println("\t" + methods[i]);
- return null;
- }
-
- /**
- */
- protected static MethodInfo findMethod(MethodInfo[] methods,
- String name,
- Type[] paramTypes,
- Type retType)
- {
- for (int i = 0; i < methods.length; i++)
- if (name.equals(methods[i].Name)
- && equalParameters(methods[i].GetParameters(), paramTypes)
- && (retType == null || methods[i].ReturnType == retType))
- return methods[i];
- return null;
- }
-
- /**
- */
- protected static boolean equalParameters(ParameterInfo[] params,
- Type[] paramTypes)
- {
- if (params.length != paramTypes.length)
- return false;
- for (int i = 0; i < params.length; i++) {
-// System.out.println(params[i].ParameterType + " == " + paramTypes[i]
-// + " = " + (params[i].ParameterType == paramTypes[i]));
- if (params[i].ParameterType != paramTypes[i])
- return false;
- }
- return true;
- }
-
- /**
- */
- public MethodInfo GetMethod(String name, Type[] paramTypes, int bindingFlags) {
- MethodInfo[] methods = GetMethods(bindingFlags);
- MethodInfo method = findMethod(methods, name, paramTypes, null);
- if (method == null) {
- StringBuffer str = new StringBuffer(name);
- str.append('(');
- for (int i = 0; i < paramTypes.length; i++) {
- if (i > 0) str.append(", ");
- str.append(paramTypes[i]);
- }
- str.append(')');
- System.out.println("Cannot find method " + str + ":");
- System.out.println("Methods of class " + this);
- for (int i = 0; i < methods.length; i++)
- System.out.println("\t" + methods[i]);
- }
- return method;
- }
-
- /** Returns all public methods of the current Type. */
- public MethodInfo[] GetMethods() {
- return GetMethods(BindingFlags.Instance | BindingFlags.Public);
- }
-
- /**
- */
- public MethodInfo[] GetMethods(int bindingFlags) {
- initMethods();
- final MethodInfo[] methods =
- getAllMethods((bindingFlags & BindingFlags.DeclaredOnly) != 0);
- //System.out.println("" + this + ".GetMethods(int) -> " + methods.length);
- final boolean getInstance = (bindingFlags & BindingFlags.Instance) != 0;
- final boolean getStatic = (bindingFlags & BindingFlags.Static) != 0;
- final boolean getPublic = (bindingFlags & BindingFlags.Public) != 0;
- final boolean getNonPublic =
- (bindingFlags & BindingFlags.NonPublic) != 0;
-
- int cnt = 0;
- for (int i = 0; i < methods.length; i++) {
- MethodInfo method = methods[i];
- boolean accessible = (getPublic && method.IsPublic())
- || (getNonPublic && !method.IsPublic());
- if (accessible
- // strip off the private methods up the hierarchy
- && ((method.DeclaringType == this)
- || ((method.DeclaringType != this) && !method.IsPrivate()))
- && ((getInstance && !method.IsStatic())
- || ((getStatic && method.IsStatic()) &&
- (method.DeclaringType == this
- || (bindingFlags & BindingFlags.FlattenHierarchy) != 0))
- )
- )
- methods[cnt++] = method;
- }
- MethodInfo [] resMethods = new MethodInfo[cnt];
- System.arraycopy(methods, 0, resMethods, 0, cnt);
- return resMethods;
- }
-
- protected MethodInfo[] getAllMethods(boolean declaredOnly) {
- initMethods();
- MethodInfo[] inherited = BaseType() == null || declaredOnly
- ? MethodInfo.EMPTY_ARRAY
- : BaseType().getAllMethods(declaredOnly);
- MethodInfo[] allMethods =
- new MethodInfo[inherited.length + this.methods.length];
- System.arraycopy(inherited, 0, allMethods, 0, inherited.length);
- System.arraycopy(this.methods, 0,
- allMethods, inherited.length, this.methods.length);
- return allMethods;
- }
-
- //##########################################################################
- // GetProperty/ies/
-
- /** Returns all public properties of the current Type.
- */
- public PropertyInfo[] GetProperties() {
- initProperties();
- return (PropertyInfo[]) properties.clone();
- }
-
- /** Returns the properties of the current class
-     * that satisfy the binding constraints.
- */
- public PropertyInfo[] GetProperties(int bindingFlags) {
- initProperties();
- return (PropertyInfo[]) properties.clone();
- }
-
- /** Returns the public property with the given name.
- */
- public PropertyInfo GetProperty(String name) {
- initProperties();
- for (int i = 0; i < properties.length; i++)
- if (name.equals(properties[i].Name))
- return properties[i];
- return null;
- }
-
- /** Returns the property with the given name
- * that satisfies the binding constraints.
- */
- public PropertyInfo GetProperty(String name, int bindingFlags) {
- throw new RuntimeException("Method not implemented yet");
- }
-
- //##########################################################################
- // GetEvent(s)
-
- public EventInfo[] GetEvents() {
- initEvents();
- return (EventInfo[]) events.clone();
- }
-
- //##########################################################################
- // GetNestedType/s/
-
-    /** Searches for a nested type with the specified name. */
- public Type GetNestedType(String name) {
- initNestedTypes();
- for (int i = 0; i < nestedTypes.length; i++)
- if (nestedTypes[i].Name.equals(name))
- return nestedTypes[i];
- return null;
- }
-
- /** Returns all types nested within the current Type. */
- public Type[] GetNestedTypes() {
- initNestedTypes();
- return (Type[]) nestedTypes.clone();
- }
-
- //##########################################################################
- // GetInterface/s/
-
-    /** Searches for an interface with the given name implemented by this type.
-     */
- public Type GetInterface(String name) {
- return GetInterface(name, false);
- }
-
- /** Searches for the specified interface,
- * specifying whether to do a case-sensitive search.
- * @param name - the name of the interface to get
- * @param ignoreCase <b>true</b> to perform a case-insensitive search for name
- * <b>false</b> to perform a case-sensitive search for name
- * @return A Type object representing the interface with the specified name,
- * implemented or inherited by the current Type, if found;
- * otherwise, a null reference
- */
- public Type GetInterface(String name, boolean ignoreCase) {
- initInterfaces();
- for (int i = 0; i < interfaces.length; i++) {
- Type iface = interfaces[i];
- if (ignoreCase) {
- if (name.equalsIgnoreCase(iface.Name)) return iface;
- if (name.equalsIgnoreCase(iface.FullName)) return iface;
- } else {
- if (name.equals(iface.Name)) return iface;
- if (name.equals(iface.FullName)) return iface;
- }
- }
- return BaseType() == null ? null
- : BaseType().GetInterface(name, ignoreCase);
- }
-
- /** Returns the interfaces implemented or inherited by the current Type. */
- public Type[] GetInterfaces() {
- initInterfaces();
- if (BaseType() == null) return interfaces;
-
- Type[] ifaces = interfaces;
- int count = 0;
- for (int i = 0; i < interfaces.length; i++) {
- if (BaseType().GetInterface(interfaces[i].FullName) == null)
- ifaces[count++] = ifaces[i];
- }
- Type[] baseTypeIfaces = BaseType().GetInterfaces();
-
- Type[] res = new Type[baseTypeIfaces.length + count];
- System.arraycopy(baseTypeIfaces, 0, res, 0, baseTypeIfaces.length);
- System.arraycopy(ifaces, 0, res, baseTypeIfaces.length, count);
-
- return res;
- }
-
-
- public boolean isSubtypeOf(Type that) {
- if (this == that || BaseType() == that || that == OBJECT()) return true;
- initInterfaces();
- for (int i = 0; i < interfaces.length; i++)
- if (interfaces[i].isSubtypeOf(that))
- return true;
- boolean res = BaseType() == null ? false : BaseType().isSubtypeOf(that);
-// if (!res) {
-// System.out.println(dumpType(this) + " not a subtype of " +
-// dumpType(that));
-// }
- return res;
- }
-
- private static String formatType(Type t) {
- if (t == null) return "<null>";
- String cname = t.getClass().getName();
- int k = cname.lastIndexOf(".");
- if (k >= 0)
- cname = cname.substring(k + 1);
- return "[" + t.Assembly().GetName() + "]" + t +
- "(" + cname + "#" + Integer.toHexString(t.hashCode()) + ")";
- }
- private static String dumpType(Type t) {
- StringBuffer str = new StringBuffer();
- str.append(formatType(t) + " : ");
- str.append(formatType(t.BaseType()));
- Type[] ifaces = t.GetInterfaces();
- for (int i = 0; i < ifaces.length; i++)
- str.append(", " + formatType(ifaces[i]));
- return str.toString();
- }
-
- //##########################################################################
- // GetMember/s/
-
- protected MemberInfo[] members;
-
- public MemberInfo[] GetMember(String name) {
- aggregateMembers();
- List l = new ArrayList();
- for (int i = 0; i < members.length; i++) {
- if (name.equals(members[i].Name))
- l.add(members[i]);
- }
- return (MemberInfo[])l.toArray(MemberInfo.EMPTY_ARRAY);
- }
-
- protected void aggregateMembers() {
- if (members != null)
- return;
- initFields();
- initMethods();
- initProperties();
- initNestedTypes();
- // the List returned by Arrays.asList doesn't support the addAll method
- // so we have to wrap it in ArrayList
- List l = new ArrayList(Arrays.asList(fields));
- l.addAll(Arrays.asList(constructors));
- l.addAll(Arrays.asList(methods));
- l.addAll(Arrays.asList(properties));
- l.addAll(Arrays.asList(nestedTypes));
- members = (MemberInfo[]) l.toArray(MemberInfo.EMPTY_ARRAY);
- }
-
- //##########################################################################
- // non-standard methods that return only members declared in this type
-
- /**
- * Return only the fields declared in this type.
- */
- public FieldInfo[] getFields() {
- initFields();
- FieldInfo[] fields = new FieldInfo[this.fields.length];
- System.arraycopy(this.fields, 0, fields, 0, fields.length);
- return fields;
- }
-
- /**
-     * Return only the constructors declared in this type.
- */
- public ConstructorInfo[] getConstructors() {
- initMethods();
- ConstructorInfo[] ctors = new ConstructorInfo[constructors.length];
- System.arraycopy(constructors, 0, ctors, 0, ctors.length);
- return ctors;
- }
-
- /**
- * Return only the methods declared in this type.
- */
- public MethodInfo[] getMethods() {
- initMethods();
- MethodInfo[] methods = new MethodInfo[this.methods.length];
- System.arraycopy(this.methods, 0, methods, 0, methods.length);
- return methods;
- }
-
- /**
- * Return only the properties declared in this type.
- */
- public PropertyInfo[] getProperties() {
- initProperties();
- PropertyInfo[] props = new PropertyInfo[properties.length];
- System.arraycopy(properties, 0, props, 0, props.length);
- return props;
- }
-
- /**
- * Return only the interfaces directly implemented by this type.
- */
- public Type[] getInterfaces() {
- initInterfaces();
- Type[] ifaces = new Type[interfaces.length];
- System.arraycopy(interfaces, 0, ifaces, 0, ifaces.length);
- return ifaces;
- }
-
- /**
-     * Return only the nested types declared in this type.
- */
- public Type[] getNestedTypes() {
- initNestedTypes();
- Type[] nested = new Type[nestedTypes.length];
- System.arraycopy(nestedTypes, 0, nested, 0, nested.length);
- return nested;
- }
-
- //##########################################################################
-
- public String toString() {
- return FullName;
- }
-
- //##########################################################################
- // lazy type construction members
-
- private boolean initBaseType = true;
- protected final void initBaseType() {
- if (initBaseType) {
- loadBaseType();
- initBaseType = false;
- }
- }
- protected void loadBaseType() {}
-
- private boolean initInterfaces = true;
- protected void initInterfaces() {
- if (initInterfaces) {
- loadInterfaces();
- initInterfaces = false;
- }
- assert interfaces != null : "In type " + this;
- }
- protected void loadInterfaces() {}
-
- private boolean initNestedTypes = true;
- protected void initNestedTypes() {
- if (initNestedTypes) {
- loadNestedTypes();
- initNestedTypes = false;
- }
- assert nestedTypes != null : "In type " + this;
- }
- protected void loadNestedTypes() {}
-
- private boolean initFields = true;
- protected void initFields() {
- if (initFields) {
- loadFields();
- initFields = false;
- }
- assert fields != null : "In type " + this;
- }
- protected void loadFields() {}
-
- private boolean initMethods = true;
- protected void initMethods() {
- if (initMethods) {
- loadMethods();
- initMethods = false;
- }
- assert constructors != null : "In type " + this;
- assert methods != null : "In type " + this;
- }
- protected void loadMethods() {}
-
- private boolean initProperties = true;
- protected void initProperties() {
- if (initProperties) {
- initMethods();
- loadProperties();
- initProperties = false;
- }
- assert properties != null : "In type " + this;
- }
- protected void loadProperties() {}
-
- private boolean initEvents = true;
- protected void initEvents() {
- if (initEvents) {
- initMethods();
- loadEvents();
- initEvents = false;
- }
- assert events != null : "In type " + this;
- }
- protected void loadEvents() {}
-
- //##########################################################################
-
- //##########################################################################
- // static members
-
- private static Assembly MSCORLIB;
- private static Module MSCORLIB_DLL;
-
- public static Type OBJECT() { return __OBJECT; }
- public static Type STRING() { return __STRING; }
- public static Type ARRAY() { return __ARRAY; }
- public static Type VOID() { return __VOID; }
- public static Type ENUM() { return __ENUM; }
- public static Type VALUE_TYPE() { return __VALUE_TYPE; }
-
- private static Type __OBJECT;
- private static Type __STRING;
- private static Type __ARRAY;
- private static Type __VOID;
- private static Type __ENUM;
- private static Type __VALUE_TYPE;
-
- public static void initMSCORLIB(Assembly mscorlib) {
- if (MSCORLIB != null)
- throw new RuntimeException("mscorlib already initialized");
- MSCORLIB = mscorlib;
- MSCORLIB_DLL = MSCORLIB.GetModules()[0];
-
- __OBJECT = mscorlib.GetType("System.Object");
- __STRING = mscorlib.GetType("System.String");
- __ARRAY = mscorlib.GetType("System.Array");
- __VOID = mscorlib.GetType("System.Void");
- __ENUM = mscorlib.GetType("System.Enum");
- __VALUE_TYPE = mscorlib.GetType("System.ValueType");
- }
-
- //##########################################################################
-
-} // class Type
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/TypeAttributes.java b/src/msil/ch/epfl/lamp/compiler/msil/TypeAttributes.java
deleted file mode 100644
index 8f489fa46f..0000000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/TypeAttributes.java
+++ /dev/null
@@ -1,190 +0,0 @@
-/*
- * System.Reflection-like API for access to .NET assemblies (DLL & EXE)
- */
-
-
-package ch.epfl.lamp.compiler.msil;
-
-/**
- * Specifies type attributes.
- *
- * @author Nikolay Mihaylov
- * @version 1.0
- */
-public final class TypeAttributes {
-
- //##########################################################################
- // Visibility attributes
-
- /** Bitmask used to retrieve visibility information. */
- public static final int VisibilityMask = 0x00000007;
-
- /** Class has no public scope. */
- public static final int NotPublic = 0x00000000;
-
- /** Class has public scope. */
- public static final int Public = 0x00000001;
-
- /** Class is nested with public visibility. */
- public static final int NestedPublic = 0x00000002;
-
- /** Class is nested with private visibility. */
- public static final int NestedPrivate = 0x00000003;
-
- /** Class is nested with family visibility, and is thus accessible
- * only by methods within its own type and any subtypes. */
- public static final int NestedFamily = 0x00000004;
-
- /** Class is nested with assembly visibility, and is thus accessible
- * only by methods within its assembly. */
- public static final int NestedAssembly = 0x00000005;
-
- /** Class is nested with assembly and family visibility, and is thus accessible
- * only by methods lying in the intersection of its family and assembly. */
- public static final int NestedFamANDAssem = 0x00000006;
-
- /** Class is nested with family or assembly visibility, and is thus accessible
- * only by methods lying in the union of its family and assembly. */
- public static final int NestedFamORAssem = 0x00000007;
-
- //##########################################################################
- // Class layout attributes
-
- /** Bitmask used to retrieve class layout information. */
- public static final int LayoutMask = 0x00000018;
-
- /** Class fields are automatically laid out by the CLR. */
- public static final int AutoLayout = 0x00000000;
-
- /** Class fields are laid out sequentially, in the order that the fields
- * were emitted to the metadata. */
- public static final int SequentialLayout = 0x00000008;
-
- /** Class fields are laid out at the specified offsets. */
- public static final int ExplicitLayout = 0x00000010;
-
- //##########################################################################
- // Class semantics attributes
-
- /** Bitmask used to retrieve class semantics information. */
- public static final int ClassSemanticsMask = 0x00000020;
-
- /** Type is a class. */
- public static final int Class = 0x00000000;
-
- /** Type is an interface. */
- public static final int Interface = 0x00000020;
-
- //##########################################################################
- // Special semantics in addition to class semantics
-
- /** Class is abstract. */
- public static final int Abstract = 0x00000080;
-
- /** Class cannot be extended. */
- public static final int Sealed = 0x00000100;
-
- /** Class is special in a way denoted by the name. */
- public static final int SpecialName = 0x00000400;
-
- //##########################################################################
- // Implementation attributes
-
- /** Class/interface is imported from another module. */
- public static final int Import = 0x00001000;
-
- /** Class can be serialized. */
- public static final int Serializable = 0x00002000;
-
- //##########################################################################
- // String formatting attributes
-
- /** Bitmask used to retrieve string information for native interop. */
- public static final int StringFormatMask = 0x00030000;
-
- /** LPTSTR is interpreted as ANSI. */
- public static final int AnsiClass = 0x00000000;
-
- /** LPTSTR is interpreted as UNICODE. */
- public static final int UnicodeClass = 0x00010000;
-
- /** LPTSTR is interpreted automatically. */
- public static final int AutoClass = 0x00020000;
-
- //##########################################################################
- // Class initialization attributes
-
- /** Initialize the class before first static field access. */
- public static final int BeforeFieldInit = 0x00100000;
-
- //##########################################################################
- // Additional flags
-
- /** CLI provides 'special' behavior, depending upon the name of the type. */
- public static final int RTSpecialName = 0x00000800;
-
- /** Type has security associated with it. */
- public static final int HasSecurity = 0x00040000;
-
- //##########################################################################
-
- public static String accessModsToString(int attrs) {
- switch (attrs & VisibilityMask) {
- case NotPublic: return "private";
- case Public: return "public";
- case NestedPublic: return "nested public";
- case NestedPrivate: return "nested private";
- case NestedFamily: return "nested family";
- case NestedAssembly: return "nested assembly";
- case NestedFamANDAssem: return "nested famandassem";
- case NestedFamORAssem: return "nested famorassem";
- default:
- throw new RuntimeException();
- }
- }
-
- /** Returns a string representation of the given attributes. */
- public static String toString(int attrs) {
- StringBuffer str = new StringBuffer(accessModsToString(attrs));
- switch (attrs & LayoutMask) {
- case AutoLayout: str.append(" auto"); break;
- case SequentialLayout: str.append(" sequential"); break;
- case ExplicitLayout: str.append(" explicit"); break;
- }
- switch (attrs & StringFormatMask) {
- case AnsiClass: str.append(" ansi"); break;
- case UnicodeClass: str.append(" unicode"); break;
- case AutoClass: str.append(" autochar"); break;
- }
- if ((attrs & Interface) != 0) str.append(" interface");
- if ((attrs & Abstract) != 0) str.append(" abstract");
- if ((attrs & Sealed) != 0) str.append(" sealed");
- if ((attrs & BeforeFieldInit) != 0) str.append(" beforefieldinit");
- if ((attrs & Serializable) != 0) str.append(" serializable");
- if ((attrs & SpecialName) != 0) str.append(" specialname");
- if ((attrs & RTSpecialName) != 0) str.append(" rtspecialname");
- return str.toString();
- }
-
- /***/
- public static final boolean isNested(int attrs) {
- switch (attrs & VisibilityMask) {
- case NestedPublic:
- case NestedPrivate:
- case NestedFamily:
- case NestedAssembly:
- case NestedFamANDAssem:
- case NestedFamORAssem:
- return true;
- default: return false;
- }
- }
-
- //##########################################################################
-
- // makes the class uninstantiable
- private TypeAttributes() {}
-
- //##########################################################################
-
-} // class TypeAttributes
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/Version.java b/src/msil/ch/epfl/lamp/compiler/msil/Version.java
deleted file mode 100644
index ad4b09b163..0000000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/Version.java
+++ /dev/null
@@ -1,71 +0,0 @@
-/*
- * System.Reflection-like API for access to .NET assemblies (DLL & EXE)
- */
-
-
-package ch.epfl.lamp.compiler.msil;
-
-
-/**
- * Represents the version number for a common language runtime assembly
- *
- * @author Nikolay Mihaylov
- * @version 1.0
- */
-public final class Version {
-
- //##########################################################################
- // public interface
-
- /**
- * Gets the value of the major component of the version
- * number for this instance.
- */
- public final int Major;
-
- /**
- * Gets the value of the minor component of the version
- * number for this instance.
- */
- public final int Minor;
-
- /**
- * Gets the value of the build component of the version
- * number for this instance.
- */
- public final int Build;
-
- /**
- * Gets the value of the revision component of the version
- * number for this instance.
- */
- public final int Revision;
-
- /**
- * Initializes a new instance of the Version class.
- */
- public Version() {
- this(0,0,0,0);
- }
-
- /**
- * Initializes a new instance of the Version class with
- * the specified major, minor, build, and revision numbers.
- */
- public Version(int major, int minor, int build, int revision) {
- this.Major = major;
- this.Minor = minor;
- this.Build = build;
- this.Revision = revision;
- }
-
- /**
- * Converts the value of this instance to its equivalent String representation
- */
- public String toString() {
- return "" + Major + "." + Minor + "." + Build + "." + Revision;
- }
-
- //##########################################################################
-
-} // class Version
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/emit/AssemblyBuilder.scala b/src/msil/ch/epfl/lamp/compiler/msil/emit/AssemblyBuilder.scala
deleted file mode 100644
index 3110ccd1ce..0000000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/emit/AssemblyBuilder.scala
+++ /dev/null
@@ -1,125 +0,0 @@
-/*
- * System.Reflection.Emit-like API for writing .NET assemblies to MSIL
- */
-
-
-package ch.epfl.lamp.compiler.msil.emit
-
-import ch.epfl.lamp.compiler.msil._
-import java.io.IOException
-
-/**
- * Defines and represents a dynamic assembly.
- * A dynamic assembly is an assembly that is created using the compiler.msil
- * emit APIs. The dynamic modules in the assembly are saved when the dynamic
- * assembly is saved using the Save method. To generate an executable, the
- * SetEntryPoint method must be called to identify the method that is the
- * entry point to the assembly. Assemblies are saved as DLL by default,
- * unless SetEntryPoint requests the generation of a console application
- * or a Windows-based application.
- *
- * @author Nikolay Mihaylov
- * @version 1.0
- */
-class AssemblyBuilder(name: AssemblyName)
- extends Assembly(name)
- with ICustomAttributeSetter
- with Visitable
-{
- //##########################################################################
- // public methods
-
- /**
- * Defines a dynamic module with the given name that will be saved
- * to the specified file. No symbol information is emitted.
- */
- def DefineDynamicModule(name: String, fileName: String): ModuleBuilder = {
- val module = new ModuleBuilder(name, fileName, "" + null, this)
- addModule(name, module)
- return module
- }
-
- /** Returns the dynamic module with the specified name. */
- def GetDynamicModule(name: String): ModuleBuilder = {
- return GetModule(name).asInstanceOf[ModuleBuilder]
- }
-
- /** Saves this dynamic assembly to disk. */
- @throws(classOf[IOException])
- def Save(fileName: String) {
- generatedFiles = scala.collection.mutable.ArrayBuffer.empty[String]
- ILPrinterVisitor.printAssembly(this, fileName)
- }
-
- @throws(classOf[IOException])
- def Save(destPath: String, sourceFilesPath: String) {
- generatedFiles = scala.collection.mutable.ArrayBuffer.empty[String]
- ILPrinterVisitor.printAssembly(this, destPath, sourceFilesPath)
- }
-
- /** Returns the list of generated files from calling Save(). */
- def GetGeneratedFiles(): Array[String] = {
- return generatedFiles.toArray // (new Array[String](generatedFiles.size())).asInstanceOf[Array[String]]
- }
-
- /** Sets the entry point for this dynamic assembly. */
- def SetEntryPoint(entryMethod: MethodInfo) {
- EntryPoint = entryMethod
- }
-
- /** Sets a custom attribute. */
- def SetCustomAttribute(constr: ConstructorInfo, value: Array[Byte]) {
- addCustomAttribute(constr, value)
- }
-
- //##########################################################################
- // protected members
-
- // the access properties - Save, Run, RunAndSave
- private var access : Int = _
-
- // all extern assemblies used in this assembly builder
- protected var externAssemblies = scala.collection.mutable.Set.empty[Assembly]
-
- // register an extern assembly
- protected def registerExternAssembly(assembly: Assembly) {
- externAssemblies += assembly
- }
-
- // get all extern Assemblies used in this Assembly Builder
- def getExternAssemblies(): Array[Assembly] = {
- externAssemblies = scala.collection.mutable.Set[Assembly]()
- val iter = Assembly.assemblies.values().iterator
- while (iter.hasNext) {
- externAssemblies += iter.next.asInstanceOf[Assembly]
- }
- externAssemblies -= this
- return externAssemblies.toArray
- }
-
- def loadModules() {}
-
- // contains list of generated .msil files after calling Save()
- var generatedFiles = scala.collection.mutable.ArrayBuffer.empty[String]
-
- //##########################################################################
- //##########################################################################
-
- /** the apply method for a visitor */
- @throws(classOf[IOException])
- def apply(v: Visitor) {
- v.caseAssemblyBuilder(this)
- }
-
- //##########################################################################
-}
-
-object AssemblyBuilderFactory {
- /**
- * Defines a dynamic assembly with the specified name.
- */
- def DefineDynamicAssembly(name: AssemblyName): AssemblyBuilder = {
- //Assembly.reset()
- return new AssemblyBuilder(name)
- }
-}
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/emit/ConstructorBuilder.scala b/src/msil/ch/epfl/lamp/compiler/msil/emit/ConstructorBuilder.scala
deleted file mode 100644
index ddd4708ecd..0000000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/emit/ConstructorBuilder.scala
+++ /dev/null
@@ -1,64 +0,0 @@
-/*
- * System.Reflection.Emit-like API for writing .NET assemblies to MSIL
- */
-
-
-package ch.epfl.lamp.compiler.msil.emit
-
-import ch.epfl.lamp.compiler.msil.ConstructorInfo
-import ch.epfl.lamp.compiler.msil.Type
-import java.io.IOException
-
-/**
- * Defines and represents a constructor of a dynamic class.
- * ConstructorBuilder is used to fully describe a constructor in
- * Microsoft intermediate language (MSIL), including the name, attributes,
- * signature, and constructor body. It is used in conjunction with the
- * TypeBuilder class to create classes at run time. Call DefineConstructor
- * to get an instance of ConstructorBuilder.
- *
- * @author Nikolay Mihaylov
- * @version 1.0
- */
-class ConstructorBuilder(declType: Type, attrs: Int, paramTypes: Array[Type])
- extends ConstructorInfo(declType, attrs, paramTypes)
- with ICustomAttributeSetter
- with Visitable
-{
-
- //##########################################################################
- // public interface
-
- /** Defines a parameter of this constructor. */
- def DefineParameter(pos: Int, attr: Int, name: String): ParameterBuilder = {
- val param = new ParameterBuilder(name, params(pos).ParameterType, attr, pos)
- params(pos) = param
- return param
- }
-
- /** Returns an ILGenerator for this constructor. */
- def GetILGenerator(): ILGenerator = {
- return ilGenerator
- }
-
- /** Sets a custom attribute. */
- def SetCustomAttribute(constr: ConstructorInfo, value: Array[Byte]) {
- addCustomAttribute(constr, value)
- }
-
- //##########################################################################
-
- /** The apply method for a visitor. */
- @throws(classOf[IOException])
- def apply(v: Visitor) {
- v.caseConstructorBuilder(this)
- }
-
- //##########################################################################
-
- // the Intermediate Language Generator
- // it contains the method's body
- protected var ilGenerator: ILGenerator = new ILGenerator(this)
-
- //##########################################################################
-}
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/emit/FieldBuilder.scala b/src/msil/ch/epfl/lamp/compiler/msil/emit/FieldBuilder.scala
deleted file mode 100644
index 7ef9dc7a5b..0000000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/emit/FieldBuilder.scala
+++ /dev/null
@@ -1,60 +0,0 @@
-/*
- * System.Reflection.Emit-like API for writing .NET assemblies to MSIL
- */
-
-
-package ch.epfl.lamp.compiler.msil.emit
-
-import ch.epfl.lamp.compiler.msil.FieldInfo
-import ch.epfl.lamp.compiler.msil.Type
-import ch.epfl.lamp.compiler.msil.FieldAttributes
-import ch.epfl.lamp.compiler.msil.ConstructorInfo
-
-import ch.epfl.lamp.compiler.msil.util.PECustomMod
-
-import java.io.IOException
-
-/**
- * Discovers the attributes of a field and provides access to field metadata.
- *
- * @author Nikolay Mihaylov
- * @version 1.0
- */
-class FieldBuilder(name: String, declType: Type, attrs: Int, fieldTypeWithMods: PECustomMod)
- extends FieldInfo(name, declType, attrs, fieldTypeWithMods, null)
- with ICustomAttributeSetter
- with Visitable
-{
-
- //##########################################################################
- // public interface
-
- /** Sets a custom attribute. */
- def SetCustomAttribute(constr: ConstructorInfo, value: Array[Byte]) {
- addCustomAttribute(constr, value)
- }
-
- //##########################################################################
-
- /** the apply method for a visitor */
- @throws(classOf[IOException])
- def apply(v: Visitor) {
- v.caseFieldBuilder(this)
- }
-
- //##########################################################################
-
- protected var defaultValue: Object = _
-
- /** Sets the default value of this field. */
- def SetConstant(defaultValue: Object) {
- this.defaultValue = defaultValue
- }
-
- /** Specifies the field layout. */
- def SetOffset(iOffset: Int) {
- //this.fieldOffset = FieldAttributes.Offset.Value(iOffset)
- }
-
- //##########################################################################
-}
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/emit/ICustomAttributeSetter.scala b/src/msil/ch/epfl/lamp/compiler/msil/emit/ICustomAttributeSetter.scala
deleted file mode 100644
index 5d74d3aa95..0000000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/emit/ICustomAttributeSetter.scala
+++ /dev/null
@@ -1,18 +0,0 @@
-/*
- * System.Reflection.Emit-like API for writing .NET assemblies to MSIL
- */
-
-
-package ch.epfl.lamp.compiler.msil.emit
-
-import ch.epfl.lamp.compiler.msil.ConstructorInfo
-
-/**
- * Declares the ability to set a custom attribute on a member
- *
- * @author Nikolay Mihaylov
- * @version 1.0
- */
-trait ICustomAttributeSetter {
- def SetCustomAttribute(constr: ConstructorInfo, value: Array[Byte])
-}
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/emit/ILGenerator.scala b/src/msil/ch/epfl/lamp/compiler/msil/emit/ILGenerator.scala
deleted file mode 100644
index 2aa9a99054..0000000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/emit/ILGenerator.scala
+++ /dev/null
@@ -1,539 +0,0 @@
-/*
- * System.Reflection.Emit-like API for writing .NET assemblies to MSIL
- */
-
-
-package ch.epfl.lamp.compiler.msil.emit
-
-import ch.epfl.lamp.compiler.msil._
-import ch.epfl.lamp.compiler.msil.util.Table
-import java.util.Stack
-import java.io.IOException
-import ILGenerator._
-
-/**
- * Generates Microsoft intermediate language (MSIL) instructions.
- *
- * @author Nikolay Mihaylov
- * @version 1.0
- */
- final class ILGenerator(_owner: MethodBase) extends Visitable {
-
- //##########################################################################
- // public interface
-
- /**
- * Puts the specified instruction onto the stream of instructions.
- */
- def Emit(opcode: OpCode) {
- // switch opcode
- if (opcode == OpCode.Ret) {
- emit(opcode, null, 0)
- } else {
- emit(opcode, null)
- }
- }
-
- /**
- * Puts the specified instruction and character argument onto
- * the Microsoft intermediate language (MSIL) stream of instructions.
- */
- def Emit(opcode: OpCode, arg: Char) {
- emit(opcode,new Character(arg))
- }
-
- /**
- * Puts the specified instruction and metadata token for the
- * specified constructor onto the Microsoft intermediate language
- * (MSIL) stream of instructions.
- */
- def Emit(opcode: OpCode, arg: ConstructorInfo) {
- assert(arg != null)
- // newobj
- // pop size is the number of parameters
- emit(opcode,arg, OpCode.PUSH_size(opcode.CEE_push) -
- arg.GetParameters().length)
- }
-
- /**
- * Puts the specified instruction onto the Microsoft intermediate language (MSIL)
- * stream followed by the index of the given local variable.
- */
- def Emit(opcode: OpCode, arg: LocalBuilder) {
- assert(arg != null)
- // ldarg | ldarg.s | ldarga
- // ldarga.s | ldloc | ldloc.s | ldloca
- // ldloca.s | starg | starg.s | stloc
- // stloc.s
-
- // <instr_var> <localname>
- emit(opcode, arg)
- }
-
-
- /**
- * Puts the specified instruction and numerical argument onto
- * the Microsoft intermediate language (MSIL) stream of instructions.
- */
- def Emit(opcode: OpCode, arg: Double) {
- // ldc.r4 | ldc.r8
- emit(opcode, new java.lang.Double(arg))
- }
-
- /**
- * Puts the specified instruction and metadata token for the
- * specified field onto the Microsoft intermediate language (MSIL)
- * stream of instructions.
- */
- def Emit(opcode: OpCode,arg: FieldInfo) {
- assert(arg != null)
- // ldfld | ldflda | ldsfld | ldsflda | stfld | stsfld
- emit(opcode,arg)
- }
-
- /**
- * Puts the specified instruction and numerical argument onto
- * the Microsoft intermediate language (MSIL) stream of instructions.
- */
- def Emit(opcode: OpCode, arg: Short ) {
- emit(opcode, new java.lang.Short(arg))
- }
-
- /**
- * Puts the specified instruction and numerical argument onto
- * the Microsoft intermediate language (MSIL) stream of instructions.
- */
- def Emit(opcode: OpCode, arg: Int) {
- // ldc.i4 | ldc.i4.s | unaligned
- emit(opcode, new java.lang.Integer(arg))
- }
-
- /**
- * Puts the specified instruction and numerical argument onto
- * the Microsoft intermediate language (MSIL) stream of instructions.
- */
- def Emit(opcode: OpCode, arg: Long) {
- // ldc.i8
- emit(opcode, new java.lang.Long(arg))
- }
-
- /**
- * Puts the specified instruction onto the Microsoft intermediate
- * language (MSIL) stream and leaves space to include a label when
- * fixes are done.
- */
- def Emit(opcode: OpCode,label: Label) {
- assert(label != null)
- // beq | beq.s | bge | bge.s |
- // bge.un | bge.un.s | bgt | bgt.s | bgt.un | bgt.un.s |
- // ble | ble.s | ble.un | ble.un.s | blt | blt.s |
- // blt.un | blt.un.s | bne.un | bne.un.s | br | br.s |
- // brfalse | brfalse.s | brtrue | brtrue.s | leave | leave.s
-
- emit(opcode, label)
- // is the label initialized? if so, this is a backward jump; otherwise it is a forward jump
- if (label.isInitialized()) {
-// if (arg.stacksize != lastLabel.stacksize) {
-// System.err.println("ILGenerator.Emit: Stack depth differs depending on path:");
-// System.err.println("\tmethod = " + owner);
-// System.err.println("\tPC = 0x" + Table.short2hex(lastLabel.address));
-// }
- //assert arg.stacksize == lastLabel.stacksize;
- }
- else {
- label.setStacksize(lastLabel.getStacksize())
- }
- }
-
- /**
- * Puts the specified instruction onto the Microsoft intermediate
- * language (MSIL) stream and leaves space to include a label when
- * fixes are done.
- */
- def Emit(opcode: OpCode, arg: Array[Label] ) {
- assert(arg != null)
- // switch
-
- // <instr> ::= <instr_switch> ( <labels> )
- // Examples:
- // switch (0x3, -14, Label1)
- // switch (5, Label2)
- emit(opcode, arg, arg.length)
- }
-
- /**
- * Puts the specified instruction onto the Microsoft intermediate
- * language (MSIL) stream followed by the metadata token for the
- * given method.
- */
- def Emit(opcode: OpCode,arg: MethodInfo) {
- assert(arg != null)
- // call | callvirt | jmp | ldftn | ldvirtftn
- // pop size is the number of parameters
- // pop 1 more if the method is not static!
- // push size is either 0 (void method) or 1
- assert(arg.ReturnType != null, "No ReturnType: " + arg.DeclaringType + "::" + arg.Name)
-
- val popush: Int = if (opcode == OpCode.Ldftn ||
- opcode == OpCode.Ldvirtftn ||
- opcode == OpCode.Jmp)
- {
- OpCode.PUSH_size(opcode.CEE_push) - OpCode.POP_size(opcode.CEE_pop)
- } else if (opcode == OpCode.Calli || opcode == OpCode.Callvirt) {
- (if(arg.ReturnType == VOID) 0 else 1) - arg.GetParameters().length - 1
- } else {
- (if(arg.ReturnType == VOID) 0 else 1) - arg.GetParameters().length
- }
- emit(opcode, arg, popush)
- }
-
- /**
- * Puts the specified instruction and numerical argument onto
- * the Microsoft intermediate language (MSIL) stream of instructions.
- */
- def Emit(opcode: OpCode, arg: Float ) {
- emit(opcode, new java.lang.Float(arg))
- }
-
- /**
- * Puts the specified instruction onto the Microsoft intermediate
- * language (MSIL) stream followed by the metadata token for the
- * given string.
- */
- def Emit(opcode: OpCode, arg: String ) {
- assert(arg != null)
- // ldstr
- emit(opcode, arg)
- }
-
- /**
- * Puts the specified instruction onto the Microsoft intermediate
- * language (MSIL) stream followed by the metadata token for the
- * given type.
- */
- def Emit(opcode: OpCode, arg: Type) {
- assert(arg != null)
- // box | castclass | cpobj | initobj | isinst |
- // ldelema | ldobj | mkrefany | newarr | refanyval |
- // sizeof | stobj | unbox
-
- emit(opcode, arg)
- }
-
- /**
- * Puts a call or callvirt instruction onto the Microsoft intermediate
- * language (MSIL) stream.
- */
- def EmitCall(opcode: OpCode, arg: MethodInfo,
- optionalParameterTypes: Array[Type]) {
- assert(arg != null)
- // pop size is the number of parameters
- // push size is either 0 (void method) or 1
- //System.out.println(arg.ReturnType.Size + " " + arg.GetParameters().length);
- emit(opcode, arg, (if(arg.ReturnType == VOID) 0 else 1) -
- arg.GetParameters().length)
- }
-
- /**
- * Emits the Microsoft intermediate language (MSIL) necessary to
- * call WriteLine with the given field.
- */
- def EmitWriteLine(arg: FieldInfo) {
- // first load field info
- // if static use OpCode.Ldsfld
- if (arg.IsStatic())
- Emit(OpCodes.Ldsfld, arg)
- else
- Emit(OpCodes.Ldfld, arg)
- // then call System.Console.WriteLine(arg.Type)
- val t: Type = Type.GetType("System.Console")
- val argsType: Array[Type] = new Array[Type](1)
- argsType(0) = arg.FieldType
- val m: MethodInfo = t.GetMethod("WriteLine", argsType)
- EmitCall(OpCode.Call, m, null)
- }
-
- /**
- * Emits the Microsoft intermediate language (MSIL) necessary
- * to call WriteLine with the given local variable.
- */
- def EmitWriteLine(arg: LocalBuilder) {
- // first load local variable
- Emit(OpCodes.Ldloc, arg)
- // then call System.Console.WriteLine(arg.Type)
- val t: Type = Type.GetType("System.Console")
- val argsType: Array[Type] = new Array[Type](1)
- argsType(0) = arg.LocalType
- val m: MethodInfo = t.GetMethod("WriteLine", argsType)
- EmitCall(OpCode.Call, m, null)
- }
-
- /**
- * Emits the Microsoft intermediate language (MSIL) to call
- * WriteLine with a string.
- */
- def EmitWriteLine(arg: String) {
- // first load string
- Emit(OpCode.Ldstr, arg)
- // then call System.Console.WriteLine(string)
- val t: Type = Type.GetType("System.Console")
- val argsType: Array[Type] = new Array[Type](1)
- argsType(0) = Type.GetType("System.String")
- val m: MethodInfo = t.GetMethod("WriteLine", argsType)
- EmitCall(OpCode.Call, m, null)
- }
-
- /**
- * Declares a local variable.
- */
- def DeclareLocal(localType: Type): LocalBuilder = {
- val l: LocalBuilder = new LocalBuilder(locals, localType)
- locals = locals + 1
- localList += l
- return l
- }
-
- /**
- * Returns a new label that can be used as a token for branching.
- * In order to set the position of the label within the stream, you
- * must call MarkLabel. This is just a token and does not yet represent
- * any particular location within the stream.
- */
- def DefineLabel():Label = {
- new Label.NormalLabel()
- }
-
- /**
- * Marks the Microsoft intermediate language (MSIL) stream's
- * current position with the given label.
- */
- def MarkLabel(label: Label) {
- label.mergeWith(lastLabel)
- /*
- label.address = lastLabel.address;
- //label.stacksize = lastLabel.stacksize;
- if (label.stacksize >= 0)
- lastLabel.stacksize = label.stacksize;
- */
- }
-
- /** Begins a lexical scope. */
- def BeginScope() {
- emitSpecialLabel(Label.NewScope)
- }
-
- /** Ends a lexical scope. */
- def EndScope() {
- emitSpecialLabel(Label.EndScope)
- }
-
- /**
- * Begins an exception block for a non-filtered exception.
- * A label marking the end of the block is pushed internally; this leaves you in the correct
- * place to emit finally blocks or to finish the try.
- */
- def BeginExceptionBlock() {
- emitSpecialLabel(Label.Try)
- val endExc: Label = new Label.NormalLabel() // new Label(lastLabel) ???
- excStack.push(Label.Try, endExc)
- }
-
- /** Begins a catch block. */
- def BeginCatchBlock(exceptionType: Type) {
- val kind = excStack.peekKind()
- if (kind == Label.Kind.Try ||
- kind == Label.Kind.Catch) {
- /* ok */
- } else {
- throw new RuntimeException("Catch should follow either a try or catch")
- }
- val endExc: Label = excStack.popLabel()
- Emit(OpCodes.Leave, endExc)
- // the CLI automatically provides the exception object on the evaluation stack
- // we adjust the stacksize
- lastLabel.incStacksize()
- excStack.push(Label.Catch, endExc)
- emitSpecialLabel(Label.Catch, exceptionType)
- }
-
- /** Ends an exception block. */
- def EndExceptionBlock() {
- val kind = excStack.peekKind()
- if (kind == Label.Kind.Try) {
- throw new RuntimeException("Try block with neither catch nor finally")
- } else if (kind == Label.Kind.Catch) {
- Emit(OpCodes.Leave, excStack.peekLabel())
- } else if (kind == Label.Kind.Finally) {
- Emit(OpCodes.Endfinally)
- }
- MarkLabel(excStack.popLabel())
- emitSpecialLabel(Label.EndTry)
- }
-
- /**
- * Begins a finally block in the Microsoft intermediate language
- * (MSIL) instruction stream.
- */
- def BeginFinallyBlock() {
- val endExc: Label = excStack.popLabel()
- Emit(OpCodes.Leave, endExc)
- excStack.push(Label.Finally, endExc)
- emitSpecialLabel(Label.Finally)
- }
-
- /**
- * Emits an instruction to throw an exception.
- */
- def ThrowException(exceptionType: Type) {
- assert(exceptionType != null)
- if (!exceptionType.isSubtypeOf(Type.GetType("System.Exception")))
- throw new RuntimeException
- (exceptionType + " doesn't extend System.Exception" )
- val ctor: ConstructorInfo = exceptionType.GetConstructor(Type.EmptyTypes)
- if (ctor == null)
- throw new RuntimeException("Type " + exceptionType
- + "doesn't have a default constructor")
- Emit(OpCodes.Newobj, ctor)
- Emit(OpCodes.Throw)
- }
-
- /**
- * sets the line of the source file corresponding to the next instruction
- */
- def setPosition(line: Int) {
- if (line != 0) lineNums.put(lastLabel, Integer.toString(line))
- }
-
- def setPosition(line: Int, filename: String) {
- if (line != 0) lineNums.put(lastLabel, line + " '" + filename + "'")
- }
-
- def setPosition(startLine: Int, endLine: Int, startCol: Int, endCol: Int, filename: String) {
- val lineRange = startLine + "," + endLine
- val colRange = startCol + "," + endCol
- lineNums.put(lastLabel, lineRange + ":" + colRange + " '" + filename + "'")
- }
-
- def getLocals(): Array[LocalBuilder] = localList.toArray
-
- def getLabelIterator() = labelList.iterator
-
- def getOpcodeIterator() = opcodeList.iterator
-
- def getArgumentIterator() = argumentList.iterator
-
- //##########################################################################
- // private implementation details
-
-
-
- // the local variable list
- private final val localList = scala.collection.mutable.ArrayBuffer.empty[LocalBuilder]
-
- // the label list, the opcode list and the opcode argument list
- // labelList is an array of Label
- // opcodeList is an array of OpCode
- // argumentList is an array of Object (null if no argument)
- private final val labelList = scala.collection.mutable.ArrayBuffer.empty[Label]
- private final val opcodeList = scala.collection.mutable.ArrayBuffer.empty[OpCode]
- private final val argumentList = scala.collection.mutable.ArrayBuffer.empty[Object]
-
- // the program counter (pc)
- // also called the stream's current position
- private var pc: Int = 0
-
- // last label
- private var lastLabel: Label = new Label.NormalLabel(pc,0)
-
- // the maximum size of stack
- private var maxstack: Int = 0
-
- // the number of the locals
- private var locals: Int = 0
-
- // stack of label for exception mechanism
- private var excStack: ExceptionStack = new ExceptionStack()
-
- // the method info owner of this ILGenerator
- var owner: MethodBase = _owner
-
- val lineNums = scala.collection.mutable.Map.empty[Label, String]
-
-
- def getMaxStacksize(): Int = { this.maxstack }
-
- // private emit with Object Argument
- private def emit(opcode: OpCode, arg: Object) {
- emit(opcode, arg, opcode.CEE_popush)
- }
-
- // private emit with Object Argument and override POPUSH
- private def emit(opcode: OpCode, arg: Object, overridePOPUSH: Int) {
- // add label, opcode and argument
- labelList += lastLabel
- opcodeList += opcode
- argumentList += arg
- // compute new lastLabel (next label)
- val stackSize: Int = lastLabel.getStacksize() + overridePOPUSH
- if (stackSize < 0) {
- val msg = "ILGenerator.emit(): Stack underflow in method: " + owner
- scala.Console.println(msg)
- // throw new RuntimeException(msg)
- }
- if (stackSize > maxstack)
- maxstack = stackSize
- var address: Int = lastLabel.getAddress() + opcode.CEE_length
- if (opcode.CEE_opcode == OpCode.CEE_SWITCH) {
- address = address + 4*arg.asInstanceOf[Array[Label]].length
- }
- lastLabel = new Label.NormalLabel(address, stackSize)
- pc = pc + 1
- }
-
- def Ldarg0WasJustEmitted() : Boolean = {
- if(opcodeList.isEmpty)
- return false
- val lastEmitted = opcodeList(opcodeList.size - 1)
- lastEmitted eq OpCode.Ldarg_0
- }
-
- private def emitSpecialLabel(l: Label) {
- emitSpecialLabel(l, null)
- }
- private def emitSpecialLabel(l: Label, catchType: Type) {
- labelList += l
- opcodeList += null
- argumentList += catchType
- }
-
- //##########################################################################
- //
- @throws(classOf[IOException])
- def apply(v: Visitor) {
- v.caseILGenerator(this)
- }
-
- //##########################################################################
-} // class ILGenerator
-
-
-object ILGenerator {
-
- val VOID: Type = Type.GetType("System.Void")
- val NO_LABEL: String = ""
-
- private final class ExceptionStack {
- private val labels = new scala.collection.mutable.Stack[Label]()
- private val kinds = new scala.collection.mutable.Stack[Label]()
- def ExceptionStack() {}
- def pop() { labels.pop; kinds.pop }
- def push(kind: Label, label: Label) {
- kinds.push(kind); labels.push(label)
- }
- def peekKind(): Label.Kind = kinds.top.getKind
- def peekLabel(): Label = labels.top
- def popLabel(): Label = { kinds.pop(); labels.pop() }
- }
-
-}
-
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/emit/ILPrinterVisitor.scala b/src/msil/ch/epfl/lamp/compiler/msil/emit/ILPrinterVisitor.scala
deleted file mode 100644
index 0ed5e3f3bb..0000000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/emit/ILPrinterVisitor.scala
+++ /dev/null
@@ -1,861 +0,0 @@
-/*
- * System.Reflection.Emit-like API for writing .NET assemblies in MSIL
- */
-
-
-package ch.epfl.lamp.compiler.msil.emit
-
-import java.io.File
-import java.io.FileWriter
-import java.io.BufferedWriter
-import java.io.PrintWriter
-import java.io.IOException
-import java.util.Comparator
-
-import ch.epfl.lamp.compiler.msil._
-import ch.epfl.lamp.compiler.msil.util.Table
-
-/**
- * The MSIL printer Visitor. It prints a complete
- * assembly into a single file or multiple files, which can then be compiled by ilasm.
- *
- * @author Nikolay Mihaylov
- * @version 1.0
- */
-abstract class ILPrinterVisitor extends Visitor {
-
- import ILPrinterVisitor._
- import OpCode._
-
- //##########################################################################
-
- protected final val assemblyNameComparator =
- new scala.math.Ordering[Assembly]() {
- override def compare(o1: Assembly, o2: Assembly): Int = {
- val a1 = o1.asInstanceOf[Assembly]
- val a2 = o2.asInstanceOf[Assembly]
- return a1.GetName().Name.compareTo(a2.GetName().Name)
- }
- }
-
- // the output file writer
- protected var out: PrintWriter = null
-
- // the left margin
- private var lmargin = 0
-
- // indicate a newline
- private var newline = true
-
- // print types without or with members?
- protected var nomembers: Boolean = false
-
- // external assemblies
- protected var as: Array[Assembly] = null
-
- private def align() {
- if (newline)
- padding = lmargin
- printPadding()
- newline = false
- }
- private def indent() {
- lmargin += TAB
- }
- private def undent() {
- lmargin -= TAB
- assert(lmargin >= 0)
- }
-
- private var padding = 0
- private def pad(n: Int) {
- assert(n >= 0, "negative padding: " + n)
- padding += n
- }
- private def printPadding() {
- if (padding <= 0)
- return
- while (padding > SPACES_LEN) {
- out.print(SPACES)
- padding -= SPACES_LEN
- }
- out.print(SPACES.substring(0, padding))
- padding = 0
- }
-
- // methods to print code
- protected def print(s: String) { align(); out.print(s)}
- protected def print(o: Object) { align(); out.print(o) }
- protected def print(c: Char) { align(); out.print(c) }
- protected def print(`val`: Int) { align(); out.print(`val`)}
- protected def print(`val`: Long){ align(); out.print(`val`)}
- protected def println() { out.println(); newline = true; padding = 0 }
- protected def println(c: Char) { print(c); println() }
- protected def println(i: Int) { print(i); println() }
- protected def println(l: Long) { print(l); println() }
- protected def println(s: String){ print(s); println() }
- protected def println(o: Object){ print(o); println() }
- protected def printName(name: String) {
- var ch = name.charAt(0)
- //if (Character.isLetter(ch) && Character.isLowerCase(ch)) {
- if ((ch != '.') && (ch != '!')) {
- print('\''); print(name); print('\'')
- } else
- print(name)
- }
-
- protected def printAssemblyBoilerplate() {
- // print all the external assemblies
- for (j <- 0 until as.length) {
- printAssemblySignature(as(j), true)
- }
- // print assembly declaration
- printAssemblySignature(currAssembly, false)
- }
-
- // the entrypoint method
- protected var entryPoint: MethodInfo = null
-
- // current opcode argument
- protected var argument: Object = null
-
- /***/
- @throws(classOf[IOException])
- protected def print(vAble: Visitable) {
- if (vAble != null)
- vAble.apply(this)
- }
-
- /**
- * Visit an AssemblyBuilder
- */
- @throws(classOf[IOException])
- def caseAssemblyBuilder(assemblyBuilder: AssemblyBuilder)
-
- protected var currentModule: Module = null
- /**
- * Visit a ModuleBuilder
- */
- @throws(classOf[IOException])
- def caseModuleBuilder(module: ModuleBuilder)
-
- protected var currentType: Type = null
-
- def printTypeParams(sortedTVars : Array[GenericParamAndConstraints]) {
-
- def constraintFlags(tVar : GenericParamAndConstraints) = {
- val varianceDirective = (if (tVar.isCovariant) "+ " else (if (tVar.isContravariant) "- " else ""))
- val typeKindDirective = (if (tVar.isReferenceType) "class " else (if (tVar.isValueType) "valuetype " else ""))
- val dfltConstrDirective = (if (tVar.hasDefaultConstructor) ".ctor " else "")
- varianceDirective + typeKindDirective + dfltConstrDirective
- }
-
- def tparamName(tVar : GenericParamAndConstraints) = {
- /* TODO Type-params in referenced assemblies may lack a name (those in a TypeBuilder or MethodBuilder shouldn't).
- Given that we need not list (in ilasm syntax) the original type-params' names when
- providing type arguments to it, the only type-param-names we'll serialize into a .msil file
- are those for type-params in a TypeBuilder or MethodBuilder. Still, more details on this
- appear in Sec. 4.5 "Faulty metadata in XMLReaderFactory" of
- http://lamp.epfl.ch/~magarcia/ScalaCompilerCornerReloaded/Libs4Lib.pdf
-
- To avoid name clashes when choosing a param name,
- first collect all existing tparam-names from a type (and its nested types).
- Not that those names are needed (ordinal positions could be used instead),
- but they will look better when disassembling with ildasm. */
- assert(tVar.Name != null)
- tVar.Name
- }
-
- if(sortedTVars.length == 0) { return }
- print('<')
- val lastIdx = sortedTVars.length - 1
- for (it <- 0 until sortedTVars.length) {
- val tVar = sortedTVars(it)
- print(constraintFlags(tVar))
- if(tVar.Constraints.length > 0) {
- print('(')
- val lastCnstrtIdx = tVar.Constraints.length - 1
- for (ic <- 0 until tVar.Constraints.length) {
- val cnstrt = tVar.Constraints(ic)
- printReference(cnstrt)
- if (ic < lastIdx) { print(", ") }
- }
- print(')')
- }
- print(" " + tparamName(tVar))
- if (it < lastIdx) { print(", ") }
- }
- print('>')
- }
-
- /**
- * Visit a TypeBuilder
- */
- @throws(classOf[IOException])
- def caseTypeBuilder(`type`: TypeBuilder) {
- currentType = `type`
- if (!`type`.Namespace.equals("") && `type`.DeclaringType == null) {
- print(".namespace \'" ); print(`type`.Namespace); println("\'")
- println("{"); indent()
- }
- print(".class ")
- // <classHead> ::=
- // <classAttr>* <id>
- // [extends <typeReference>]
- // [implements <typeReference> [, <typeReference>]*]
- print(TypeAttributes.toString(`type`.Attributes))
- print(" \'"); print(`type`.Name); print("\'")
- printTypeParams(`type`.getSortedTVars())
- if (`type`.BaseType() != null) {
- println()
- print(" extends ")
- printReference(`type`.BaseType())
- }
- var ifaces: Array[Type] = `type`.getInterfaces()
- if (ifaces.length > 0) {
- println()
- print(" implements ")
- for (i <- 0 until ifaces.length) {
- if (i > 0) {
- println(",")
- print(" ")
- }
- printReference(ifaces(i))
- }
- }
- println()
- println("{")
- indent()
- if (!nomembers && `type`.sourceFilename != null)
- println(".line " + `type`.sourceLine
- + " '" + `type`.sourceFilename + "'")
- if (!nomembers) {
- printAttributes(`type`)
- }
- // print nested classes
- val nested = `type`.nestedTypeBuilders.iterator
- while(nested.hasNext)
- print(nested.next().asInstanceOf[TypeBuilder])
-
- // print each field
- val fields = `type`.fieldBuilders.iterator
- while(fields.hasNext)
- print(fields.next().asInstanceOf[FieldBuilder])
-
- // print each constructor
- val constrs = `type`.constructorBuilders.iterator
- while (constrs.hasNext)
- print(constrs.next().asInstanceOf[ConstructorBuilder])
-
- // print each method
- val methods = `type`.methodBuilders.iterator
- while (methods.hasNext) {
- val method = methods.next().asInstanceOf[MethodBuilder]
- assert(method.DeclaringType == `type`)
- print(method)
- }
-
- undent(); println("}")
- if (!`type`.Namespace.equals("") && `type`.DeclaringType == null) {
- undent(); println("}")
- }
- currentType = null
- }
-
- /**
- * Visit a FieldBuilder
- */
- @throws(classOf[IOException])
- def caseFieldBuilder(field: FieldBuilder) {
- if (nomembers) return
- // [[int32]] <fieldAttr>* <type> <id> [= <fieldInit> | at <dataLabel>]
- print(".field ")
- print(FieldAttributes.toString(field.Attributes))
- print(" "); printSignature(field.FieldType, field.cmods)
- print(" \'"); print(field.Name); print("\'")
- if (field.IsLiteral()) {
- print(" = ")
- val value = field.getValue()
- if (value == null) {
- print("nullref")
- } else if (value.isInstanceOf[String]) {
- print(msilString(value.asInstanceOf[String]))
- } else if (value.isInstanceOf[Boolean]) {
- print("bool (")
- print(if((value.asInstanceOf[Boolean]).booleanValue()) { "true" } else { "false" })
- print(")")
- } else if (value.isInstanceOf[Byte]) {
- print("int8 (")
- print(value)
- print(")")
- } else if (value.isInstanceOf[java.lang.Short]) {
- print("int16 (")
- print(value)
- print(")")
- } else if (value.isInstanceOf[Character]) {
- print("char (")
- print((value.asInstanceOf[Character]).charValue())
- print(")")
- } else if (value.isInstanceOf[Integer]) {
- print("int32 (")
- print((value.asInstanceOf[Integer]).intValue())
- print(")")
- } else if (value.isInstanceOf[Long]) {
- print("int64 (")
- print((value.asInstanceOf[Long]).longValue())
- print(")")
- } else if (value.isInstanceOf[Float]) {
- print(msilSyntaxFloat(value.asInstanceOf[Float]))
- } else if (value.isInstanceOf[Double]) {
- print(msilSyntaxDouble(value.asInstanceOf[Double]))
- } else {
- throw new Error("ILPrinterVisitor: Illegal default value: "
- + value.getClass())
- }
- }
- println()
- printAttributes(field)
- }
-
- def msilSyntaxFloat(valFlo: java.lang.Float) : String = {
- // !!! check if encoding is correct
- val bits = java.lang.Float.floatToRawIntBits(valFlo.floatValue())
- /* see p. 170 in Lidin's book Expert .NET 2.0 IL Assembler */
- /* Note: no value compares equal to NaN, not even NaN itself. Thus, x == Float.NaN always evaluates to false. */
- val res = if (valFlo.isNaN) "0xFFC00000 /* NaN */ " /* TODO this is 'quiet NaN, http://www.savrola.com/resources/NaN.html , what's the difference with a 'signaling NaN'?? */
- else if (java.lang.Float.NEGATIVE_INFINITY == valFlo.floatValue) "0xFF800000 /* NEGATIVE_INFINITY */ "
- else if (java.lang.Float.POSITIVE_INFINITY == valFlo.floatValue) "0x7F800000 /* POSITIVE_INFINITY */ "
- else bits
- "float32 (" + res + ")"
- }
-
- def msilSyntaxDouble(valDou: java.lang.Double) : String = {
- // !!! check if encoding is correct
- var bits = java.lang.Double.doubleToRawLongBits(valDou.doubleValue())
- /* see p. 170 in Lidin's book Expert .NET 2.0 IL Assembler */
- /* Note: no value compares equal to NaN, not even NaN itself. Thus, x == Double.NaN always evaluates to false. */
- val res = if (valDou.isNaN) "0xffffffffffffffff /* NaN */ " /* TODO this is 'quiet NaN, http://www.savrola.com/resources/NaN.html , what's the difference with a 'signaling NaN'?? */
- else if (java.lang.Double.NEGATIVE_INFINITY == valDou.doubleValue) "0xfff0000000000000 /* NEGATIVE_INFINITY */ "
- else if (java.lang.Double.POSITIVE_INFINITY == valDou.doubleValue) "0x7ff0000000000000 /* POSITIVE_INFINITY */ "
- else bits
- // float64(float64(...)) != float64(...)
- "float64 (" + res + ")"
- }
-
- /**
- * Visit a ConstructorBuilder
- */
- @throws(classOf[IOException])
- def caseConstructorBuilder(constr: ConstructorBuilder) {
- if (nomembers) return
- print(".method "); printHeader(constr, VOID)
- println(); println("{"); indent()
- printAttributes(constr)
- try {
- print(constr.GetILGenerator())
- } catch {
- case e : RuntimeException => {
- System.err.println("In method " + constr)
- e.printStackTrace()
- }
- }
- undent(); println("}")
- }
-
- /**
- * Visit a MethodBuilder
- */
- @throws(classOf[IOException])
- def caseMethodBuilder(method: MethodBuilder) {
- if (nomembers) return
- print(".method "); printHeader(method, method.ReturnType)
- if (method.IsAbstract()
- || (method.DeclaringType != null
- && method.DeclaringType.IsInterface()
- && !method.IsStatic()))
- {
- println(" {"); indent()
- printAttributes(method)
- undent(); println("}")
- } else {
- println(); println("{"); indent()
- printAttributes(method)
- if (method == entryPoint)
- println(".entrypoint")
- try {
- print(method.GetILGenerator())
- } catch {
- case e: RuntimeException =>
- System.err.println("In method " + method)
- e.printStackTrace()
- }
- undent(); println("}")
- }
- }
-
- /**
- * Visit a ParameterBuilder
- */
- @throws(classOf[IOException])
- def caseParameterBuilder(param: ParameterBuilder) {
- print(ParameterAttributes.toString(param.Attributes))
- printSignature(param.ParameterType)
- //print(' ') print(marshal)
- print(' '); printName(param.Name)
- }
-
- var locals: Array[LocalBuilder] = null
- /**
- * Visit an ILGenerator
- */
- @throws(classOf[IOException])
- def caseILGenerator(code: ILGenerator) {
- // print maxstack
- println(".maxstack " + code.getMaxStacksize())
- // get the local variables
- locals = code.getLocals()
- if (locals.length > 0) {
- println(".locals init (")
- indent()
- for (i <- 0 until locals.length) {
- if (i > 0) println(",")
- print(locals(i))
- } // end while
- undent()
- println(")")
- }
- // get 3 iterators for the 3 lists
- val itL = code.getLabelIterator()
- val itO = code.getOpcodeIterator()
- val itA = code.getArgumentIterator()
- // iterate over each opcode
- while (itO.hasNext) {
- // first print label
- val label = itL.next
- val oOpt = code.lineNums.get(label)
- if (oOpt.isDefined) {
- println(".line " + oOpt.get)
- }
- argument = itA.next.asInstanceOf[Object]
- printLabel(label)
- val o2 = itO.next
- if (o2 != null) {
- print(" ")
- print(o2.asInstanceOf[OpCode])
- }
- println()
- } // end while
- }
-
- /**
- * visit an OpCode
- */
- @throws(classOf[IOException])
- def caseOpCode(opCode: OpCode) {
- var opString = opCode.toString()
- print(opString)
- pad(14 - opString.length())
-
- // switch opcode
- if (opCode == OpCode.Ldstr) {
- print(msilString(argument.toString()))
- } else if(opCode == OpCode.Switch) {
- // switch ( <labels> )
- print("(")
- val targets = argument.asInstanceOf[Array[Label]]
- val m = targets.length
- for (i <- 0 until m) {
- if (i != 0) print(", ")
- print(targets(i))
- } // end for
- print(")")
- } else if(opCode == OpCode.Call || opCode == OpCode.Callvirt || opCode == OpCode.Jmp || opCode == OpCode.Ldftn || opCode == OpCode.Ldvirtftn) {
- // call | callvirt | jmp | ldftn | ldvirtftn
- // <instr_method> <callConv> <type> [ <typeSpec> :: ] <methodName>
- printSignature(argument.asInstanceOf[MethodBase])
- } else if (opCode == OpCode.Newobj) {
- printSignature(argument.asInstanceOf[ConstructorInfo])
- // ldfld | ldflda | ldsfld | ldsflda | stfld | stsfld
- } else if (opCode == OpCode.Ldfld || opCode == OpCode.Ldflda || opCode == OpCode.Ldsfld || opCode == OpCode.Ldsflda || opCode == OpCode.Stfld || opCode == OpCode.Stsfld) {
- printSignature(argument.asInstanceOf[FieldInfo])
- } else if (opCode == OpCode.Castclass || opCode == OpCode.Isinst || opCode == OpCode.Ldobj || opCode == OpCode.Newarr) {
- printSignature(argument.asInstanceOf[Type])
- } else if (opCode == OpCode.Box || opCode == OpCode.Unbox || opCode == OpCode.Ldtoken || opCode == OpCode.Initobj) {
- printReference(argument.asInstanceOf[Type])
- } else if (opCode == OpCode.Ldloc || opCode == OpCode.Ldloc_S || opCode == OpCode.Ldloca || opCode == OpCode.Ldloca_S || opCode == OpCode.Stloc || opCode == OpCode.Stloc_S) {
- val loc = argument.asInstanceOf[LocalBuilder]
- print(loc.slot); print("\t// "); printSignature(loc.LocalType)
- print(" \'"); print(loc.name); print("\'")
- //print("'") print(((LocalBuilder)argument).name) print("'")
- } else if (opCode == OpCode.Ldloc_0 || opCode == OpCode.Ldloc_1 || opCode == OpCode.Ldloc_2 || opCode == OpCode.Ldloc_3 ) {
- val loc = locals(opCode.CEE_opcode - OpCode.CEE_LDLOC_0)
- print("\t// "); printSignature(loc.LocalType)
- print(" \'"); print(loc.name); print("\'")
- } else if (opCode == OpCode.Stloc_0 || opCode == OpCode.Stloc_1 || opCode == OpCode.Stloc_2 || opCode == OpCode.Stloc_3 ) {
- val loc = locals(opCode.CEE_opcode - OpCode.CEE_STLOC_0)
- print("\t// "); printSignature(loc.LocalType)
- print(" \'"); print(loc.name); print("\'")
- } else if (opCode == OpCode.Readonly) {
- // nothing to do
- } else if (opCode == OpCode.Constrained) {
- printReference(argument.asInstanceOf[Type])
- } else if (opCode == OpCode.Ldelema) {
- printReference(argument.asInstanceOf[Type])
- } else {
- // by default print toString argument if any
- if (argument != null) {
- val strArgument = java.lang.String.valueOf(argument)
- if ( argument.isInstanceOf[java.lang.Float]
- && ( strArgument.equals("NaN")
- || strArgument.equals("-Infinity")
- || strArgument.equals("Infinity")))
- print(msilSyntaxFloat(argument.asInstanceOf[java.lang.Float]))
- else if ( argument.isInstanceOf[java.lang.Double]
- && ( strArgument.equals("NaN")
- || strArgument.equals("-Infinity")
- || strArgument.equals("Infinity")))
- print(msilSyntaxDouble(argument.asInstanceOf[java.lang.Double]))
- else print(strArgument)
- }
-
- } // end switch
- }
-
- /**
- * Visit a Label
- */
- def printLabel(label: Label) {
- val kind = label.getKind()
- if (kind == Label.Kind.Normal) {
- print(label+ ": ")
- } else if (kind == Label.Kind.NewScope) {
- print("{"); indent()
- } else if (kind == Label.Kind.EndScope) {
- undent(); print("}")
- } else if (kind == Label.Kind.Try) {
- print(".try {"); indent()
- } else if (kind == Label.Kind.Catch) {
- undent()
- println("}")
- print("catch ")
- printReference(argument.asInstanceOf[Type])
- print(" {")
- indent()
- } else if (kind == Label.Kind.Filter) {
- undent()
- println("}")
- print("filter {")
- indent()
- } else if (kind == Label.Kind.EndFilter) {
- print("endfilter")
- undent()
- println("}")
- } else if (kind == Label.Kind.Finally) {
- undent()
- println("}")
- print("finally {")
- indent()
- } else if (kind == Label.Kind.EndTry) {
- undent()
- print("}")
- }
- }
-
- /**
- * Visit a LocalBuilder
- */
- @throws(classOf[IOException])
- def caseLocalBuilder(localBuilder: LocalBuilder) {
- // print type
- printSignature(localBuilder.LocalType)
- // space
- print(" \'")
- // print name
- print(localBuilder.name)
- print("\'")
- }
-
-
- //##########################################################################
-
- def printAssemblySignature(assem: Assembly, extern: Boolean) {
- print(".assembly ")
- if (extern)
- print("extern ")
- val an = assem.GetName()
- printName(an.Name); println()
- println("{")
- if (!extern)
- printAttributes(assem)
- val v = an.Version
- if (v != null) {
- print(" .ver "); print(v.Major); print(':'); print(v.Minor)
- print(':'); print(v.Build); print(':')
- print(v.Revision); println()
- }
- var key = an.GetPublicKeyToken()
- if (key != null) {
- print(" .publickeytoken = ("); print(PEFile.bytes2hex(key))
- println(")")
- } else {
- key = an.GetPublicKey()
- if (key != null) {
- print(" .publickey = ("); print(PEFile.bytes2hex(key))
- println(")")
- }
- }
- println("}")
- }
-
-
- def printSignature(field: FieldInfo) {
- printSignature(field.FieldType, field.cmods)
- //print(' ') print(owner)
- print(' ')
- //if (field.IsStatic && field.DeclaringType != currentType) {
- printReference(field.DeclaringType)
- print("::")
- //}
- printName(field.Name)
- }
-
- // print method head
- @throws(classOf[IOException])
- def printHeader(method: MethodBase, returnType: Type) {
- print(MethodAttributes.toString(method.Attributes))
- print(' '); print(CallingConventions.toString(method.CallingConvention))
- print(' '); printSignature(returnType)
- //print(' ') print(marshal)
- print(' '); printName(method.Name)
- if(method.isInstanceOf[MethodInfo]) {
- val mthdInfo = method.asInstanceOf[MethodInfo]
- printTypeParams(mthdInfo.getSortedMVars())
- }
- val params = method.GetParameters()
- print('(')
- for (i <- 0 until params.length) {
- if (i > 0) print(", ")
- print(params(i).asInstanceOf[ParameterBuilder])
- }
- print(") ")
-
- print(MethodImplAttributes
- .toString(method.GetMethodImplementationFlags()))
- }
-
-
- def printSignature(method: MethodBase) {
- var returnType: Type = null
- if (method.isInstanceOf[MethodInfo])
- returnType = (method.asInstanceOf[MethodInfo]).ReturnType
- else if (method.isInstanceOf[ConstructorInfo])
- returnType = VOID
- else
- throw new RuntimeException()
-
- val s = CallingConventions.toString(method.CallingConvention)
- print(s)
- if (s.length() > 0) print(' ')
- printSignature(returnType)
- //print(' ') print(owner)
- print(' '); printReference(method.DeclaringType)
- print("::"); printName(method.Name)
-
- var params = method.GetParameters()
- print("(")
- for (i <- 0 until params.length) {
- if (i > 0) print(", ")
- printSignature(params(i).ParameterType)
- }
- print(")")
- }
-
- def printSignature(marked: Type, cmods: Array[CustomModifier]) {
- printSignature(marked)
- if( (cmods != null) && !cmods.isEmpty ) {
- print(" ")
- for(cm <- cmods) {
- print(if (cm.isReqd) "modreq( " else "modopt( ")
- printReference(cm.marker)
- print(" ) ")
- }
- }
- }
-
- def printSignature(`type`: Type) {
- val sigOpt = primitive.get(`type`)
- if (sigOpt.isDefined) {
- print(sigOpt.get)
- return
- }
- if (`type`.HasElementType()) {
- printSignature(`type`.GetElementType())
- if (`type`.IsArray())
- print("[]")
- else if (`type`.IsPointer())
- print('*')
- else if (`type`.IsByRef())
- print('&')
- } else {
- val preref = if (`type`.isInstanceOf[Type.TMVarUsage]) ""
- else if(`type`.IsValueType()) "valuetype "
- else "class "
- print(preref)
- printReference(`type`)
- }
- }
-
- def printReference(`type`: Type) {
- if (`type`.Module != null) { // i.e. not PrimitiveType and not TMVarUsage
- if (`type`.Assembly() != currentModule.Assembly) {
- print('['); print(`type`.Assembly().GetName().Name); print("]")
- } else if (`type`.Module != currentModule) {
- print("[.module "); print(`type`.Module.Name); print("]")
- }
- }
- printTypeName(`type`)
- }
-
- def printTypeName(`type`: Type) {
- if (`type`.isInstanceOf[ConstructedType]) {
- val ct = `type`.asInstanceOf[ConstructedType]
- printTypeName(ct.instantiatedType)
- print("<")
- var i = 0
- while (i < ct.typeArgs.length) {
- val ta = ct.typeArgs(i)
- val sigOpt = primitive.get(ta)
- if (sigOpt.isDefined) print(sigOpt.get)
- else printTypeName(ta); /* should be printSignature, but don't want `class` or `valuetype`
- appearing before a type param usage. */
- i = i + 1;
- if (i < ct.typeArgs.length) {
- print(", ")
- }
- }
- print(">")
- } else if (`type`.DeclaringType != null) {
- printTypeName(`type`.DeclaringType)
- print('/')
- printName(`type`.Name)
- } else {
- printName(`type`.FullName)
- }
- }
-
- def printAttributes(icap: ICustomAttributeProvider) {
- var attrs = icap.GetCustomAttributes(false)
- for (i <- 0 until attrs.length) {
- print(".custom ")
- printSignature((attrs(i).asInstanceOf[Attribute]).getConstructor())
- print(" = (")
- print(PEFile.bytes2hex((attrs(i).asInstanceOf[Attribute]).getValue()))
- println(")")
- }
- }
-
- //##########################################################################
-
-} // class ILPrinterVisitor
-
-object ILPrinterVisitor {
- final val VOID: Type = Type.GetType("System.Void")
- protected final val TAB = 4
-
- protected final val SPACES = " "
- protected final val SPACES_LEN = SPACES.length()
-
- def hasControlChars(str: String): Boolean = {
- for(i <- 0 until str.length()) {
- var ch = str.charAt(i)
- ch match {
- case '\b' =>
- case '\t' =>
- case '\n' =>
- case '\f' =>
- case '\r' =>
- case _ => if(Character.isISOControl(ch)) return true
- }
- }
- return false
- }
-
- final val EMPTY: String = ""
- def msilString(s: String): String = {
- if (hasControlChars(s)) {
- try {
- return "bytearray (" + PEFile.bytes2hex(s.getBytes("UTF-16LE")) + ")"
- } catch {
- case e : java.io.UnsupportedEncodingException => throw new RuntimeException(e)
- }
- }
- var str = new StringBuffer(s)
- var ss = EMPTY
- var i = 0
- while(i < str.length()) {
- ss = EMPTY
- val c = str.charAt(i)
- c match {
- case '\b' => ss = "\\b"
- case '\t' => ss = "\\t"
- case '\n' => ss = "\\n"
- case '\f' => ss = "\\f"
- case '\r' => ss = "\\r"
- case '\"' => ss = "\\\""
- case '\'' => ss = "\\\'"
- case '\\' => ss = "\\\\"
- case _ => if (Character.isISOControl(c))
- ss = "\\u" + PEFile.int2hex(Character.getNumericValue(c))
- }
- if (ss != EMPTY) {
- str.replace(i, i + 1, ss)
- i = i + ss.length() - 1
- }
- i = i + 1
- }
- return "\"" + str.toString() + "\""
- }
-
- /**
- * the main printer method
- */
- @throws(classOf[IOException])
- def printAssembly(assemblyBuilder: AssemblyBuilder, fileName: String) {
- assemblyBuilder.apply(new SingleFileILPrinterVisitor(fileName))
- }
-
- @throws(classOf[IOException])
- def printAssembly(assemblyBuilder: AssemblyBuilder, destPath: String, sourceFilesPath: String) {
- assemblyBuilder.apply(new MultipleFilesILPrinterVisitor(destPath, sourceFilesPath))
- }
-
- /** The current assembly */
- var currAssembly: Assembly = _
-
- final var primitive = scala.collection.mutable.Map.empty[Type, String]
- def addPrimitive(name: String, sig: String) {
- var `type` =
- Type.GetType(name)
-    assert(`type` != null, "Cannot lookup primitive type " + name)
- primitive.put(`type`, sig)
- }
-
- addPrimitive("System.Object", "object")
- addPrimitive("System.String", "string")
- addPrimitive("System.Void", "void")
- addPrimitive("System.Boolean", "bool")
- addPrimitive("System.Char", "char")
- addPrimitive("System.SByte", "int8")
- addPrimitive("System.Byte", "unsigned int8")
- addPrimitive("System.Int16", "int16")
- addPrimitive("System.UInt16", "unsigned int16")
- addPrimitive("System.Int32", "int32")
- addPrimitive("System.UInt32", "unsigned int32")
- addPrimitive("System.Int64", "int64")
- addPrimitive("System.UInt64", "unsigned int64")
- addPrimitive("System.IntPtr", "native int")
- addPrimitive("System.UIntPtr", "unsigned native int")
- addPrimitive("System.Single", "float32")
- addPrimitive("System.Double", "float64")
- addPrimitive("System.TypedReference", "typedref")
-}
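For reference, a minimal self-contained sketch of the lookup pattern printSignature uses above: CLR primitive types resolve to an IL keyword through the primitive table, and everything else falls back to a class/valuetype reference. The object name and the tiny map below are illustrative only, not part of the compiler.

    object SignatureSketch {
      // Illustrative subset of the table built via addPrimitive above.
      private val primitive = Map(
        "System.Int32"   -> "int32",
        "System.String"  -> "string",
        "System.Boolean" -> "bool")

      // Mirrors the fallback in printSignature: primitives print their IL keyword,
      // other types are printed as a "valuetype "/"class " reference.
      def ilSignature(fullName: String, isValueType: Boolean): String =
        primitive.getOrElse(fullName,
          (if (isValueType) "valuetype " else "class ") + fullName)

      def main(args: Array[String]): Unit = {
        println(ilSignature("System.Int32", isValueType = true))         // int32
        println(ilSignature("Some.Namespace.Point", isValueType = true)) // valuetype Some.Namespace.Point
      }
    }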
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/emit/Label.scala b/src/msil/ch/epfl/lamp/compiler/msil/emit/Label.scala
deleted file mode 100644
index 22c1b1150b..0000000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/emit/Label.scala
+++ /dev/null
@@ -1,148 +0,0 @@
-/*
- * System.Reflection.Emit-like API for writing .NET assemblies to MSIL
- */
-
-
-package ch.epfl.lamp.compiler.msil.emit
-
-import ch.epfl.lamp.compiler.msil.Type
-
-/**
- * Represents a label in the instruction stream. Label is used in conjunction
- * with the ILGenerator class.
- *
- * @author Nikolay Mihaylov
- * @version 1.0
- */
-abstract class Label protected {
- import Label._
- def isInitialized(): Boolean
- def getKind(): Kind
- def getAddress(): Int
- def getStacksize(): Int
- def setStacksize(stacksize: Int): Unit
- def incStacksize(): Unit
- def mergeWith(that: Label): Unit
-}
-
-object Label {
- final val DUMMY: Int = -((1<<31)-1)
-
- //##########################################################################
-
- final class NormalLabel(_address: Int, _stacksize: Int) extends Label {
-
- //##########################################################################
- // protected constructors
-
- //the position of the label
- private var address: Int = _address
-
- //the stacksize at the label
- private var stacksize: Int = _stacksize
-
- def this() {
- this(-1, DUMMY)
- }
-
- def this(that: NormalLabel) {
- this(that.getAddress(), that.getStacksize())
- }
-
- //##########################################################################
- // instrumental methods only used by ILGenerator
-
- def isInitialized() = (getAddress() != -1) || (stacksize != DUMMY)
-
- def getAddress() = address
-
- def getStacksize() = stacksize
-
- def setStacksize(stacksize: Int) {
- assert(stacksize >= 0)
- this.stacksize = stacksize
- }
-
- def incStacksize() {
- stacksize = stacksize + 1
- }
-
- def getKind(): Kind = Kind.Normal
-
- def mergeWith(that: Label) {
- //assert address < 0 : "this.address = " + address + " that.address = " + that.address
- address = that.getAddress()
-
- // assert stacksize == that.stacksize
- // : "this.stacksize = " + stacksize + " that.stacksize = "
- // + that.stacksize
- // stacksize = that.stacksize
- val ss: Int = math.max(stacksize, that.getStacksize())
- stacksize = ss
- that.setStacksize(ss)
- }
-
- //##########################################################################
- //
-
- /**
-     * toString returns the label name,
-     * i.e. "IL_" followed by the zero-padded hex address
- */
- override def toString(): String = {
- var pad: String = ""
- if (address < 16) pad = "000"
- else if (address < 256) pad = "00"
- else if (address < 4096) pad = "0"
- return "IL_" + pad + Integer.toHexString(address)
- }
-
- def getString(): String = {
- val name = super.toString()
- val i: Int = name.lastIndexOf('.')
- return name.substring(i+1, name.length())
- }
- }
-
- //########################################################################
- // Special Labels
-
- final class SpecialLabel(_kind: Label.Kind) extends Label {
- private final var kind: Label.Kind = _kind
- def isInitialized() = true
- def getAddress(): Int = { throw new RuntimeException("" + kind.toString()) }
- def getStacksize(): Int = { throw new RuntimeException("" + kind.toString()) }
- def setStacksize(stacksize: Int) { throw new RuntimeException(kind.toString()) }
- def incStacksize() { throw new RuntimeException(kind.toString()) }
- def getKind(): Kind = kind
- def mergeWith(that: Label) { throw new RuntimeException(kind.toString()) }
- override def toString(): String = "Label(" + kind.toString() + ")"
- }
-
- final val NewScope: Label = new SpecialLabel(Kind.NewScope)
- final val EndScope: Label = new SpecialLabel(Kind.EndScope)
- final val Try: Label = new SpecialLabel(Kind.Try)
- final val Catch: Label = new SpecialLabel(Kind.Catch)
- final val Filter: Label = new SpecialLabel(Kind.Filter)
- final val EndFilter: Label = new SpecialLabel(Kind.EndFilter)
- final val Finally: Label = new SpecialLabel(Kind.Finally)
- final val EndTry: Label = new SpecialLabel(Kind.EndTry)
-
- final class Kind() {}
-
- final object Kind {
- final val Normal: Kind = new Kind()
-
- final val NewScope: Kind = new Kind()
- final val EndScope: Kind = new Kind()
-
- final val Try: Kind = new Kind()
- final val Catch: Kind = new Kind()
- final val Filter: Kind = new Kind()
- final val EndFilter: Kind = new Kind()
- final val Finally: Kind = new Kind()
- final val EndTry: Kind = new Kind()
- }
-
- //##########################################################################
-}
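A standalone sketch of the naming convention NormalLabel.toString implements above: the label is "IL_" plus the hexadecimal address, left-padded so addresses below 0x1000 still print four digits. LabelNameSketch is a hypothetical name used only for illustration.

    object LabelNameSketch {
      def ilLabel(address: Int): String = {
        // Same padding thresholds as NormalLabel.toString above.
        val pad =
          if (address < 16) "000"
          else if (address < 256) "00"
          else if (address < 4096) "0"
          else ""
        "IL_" + pad + Integer.toHexString(address)
      }

      def main(args: Array[String]): Unit = {
        println(ilLabel(10))   // IL_000a
        println(ilLabel(4096)) // IL_1000
      }
    }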
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/emit/LocalBuilder.scala b/src/msil/ch/epfl/lamp/compiler/msil/emit/LocalBuilder.scala
deleted file mode 100644
index 73bca4639f..0000000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/emit/LocalBuilder.scala
+++ /dev/null
@@ -1,44 +0,0 @@
-/**
- * System.Reflection.Emit-like API for writing .NET assemblies to MSIL
- */
-
-
-package ch.epfl.lamp.compiler.msil.emit
-
-import ch.epfl.lamp.compiler.msil.Type
-
-/**
- * Represents a local variable within a method or constructor.
- *
- * @author Nikolay Mihaylov
- * @version 1.0
- */
-class LocalBuilder(_slot : Int, localType : Type) extends Visitable {
-
- /**
- * the type of the local variable.
- */
- var LocalType : Type = localType
-
- // the name of the local variable
-  var name : String = "L_" + _slot
-
- // the slot occupied by this local in the corresponding ILGenerator
- var slot : Int = _slot
-
- /**
- * Sets the name of this local variable.
- */
- def SetLocalSymInfo(name : String) {
- this.name = name
- }
-
- override def toString() : String = name
-
- /**
- * the apply method for a visitor
- */
- def apply(v : Visitor) {
- v.caseLocalBuilder(this)
- }
-}
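A hedged usage sketch of the LocalBuilder API above, assuming the ch.epfl.lamp.compiler.msil classes from this tree are on the classpath: a local starts out with the generated name "L_<slot>" and SetLocalSymInfo gives it a symbolic name. namedLocal is a hypothetical helper name.

    import ch.epfl.lamp.compiler.msil.Type
    import ch.epfl.lamp.compiler.msil.emit.LocalBuilder

    // Sketch only: create a local in the given slot and rename it.
    def namedLocal(slot: Int, localType: Type, symName: String): LocalBuilder = {
      val local = new LocalBuilder(slot, localType)
      local.SetLocalSymInfo(symName) // toString now yields symName instead of "L_<slot>"
      local
    }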
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/emit/MethodBuilder.scala b/src/msil/ch/epfl/lamp/compiler/msil/emit/MethodBuilder.scala
deleted file mode 100644
index 237d8fd728..0000000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/emit/MethodBuilder.scala
+++ /dev/null
@@ -1,70 +0,0 @@
-/*
- * System.Reflection.Emit-like API for writing .NET assemblies to MSIL
- */
-
-
-package ch.epfl.lamp.compiler.msil.emit
-
-import ch.epfl.lamp.compiler.msil.MethodInfo
-import ch.epfl.lamp.compiler.msil.ParameterInfo
-import ch.epfl.lamp.compiler.msil.Type
-import ch.epfl.lamp.compiler.msil.ConstructorInfo
-import java.io.IOException
-
-/**
- * Defines and represents a method of a dynamic class.
- *
- * @author Nikolay Mihaylov
- * @version 1.0
- */
-class MethodBuilder(name: String, declType: Type, attrs: Int, returnType: Type, paramTypes: Array[Type])
- extends MethodInfo(name, declType, attrs, returnType, paramTypes)
- with ICustomAttributeSetter
- with Visitable
-{
-
- //##########################################################################
- // public interface
-
-  /** Defines a parameter of this method. TODO: Parameters are indexed starting
- * from number 1 for the first parameter
- */
- def DefineParameter(pos: Int, attr: Int, name: String): ParameterBuilder = {
- val param = new ParameterBuilder(name, params(pos).ParameterType, attr, pos)
- params(pos) = param
- return param
- }
-
- /** Returns an ILGenerator for this method. */
- def GetILGenerator(): ILGenerator = {
- if (ilGenerator == null)
-      throw new RuntimeException(
-        "No code generator available for this method: " + this)
- return ilGenerator
- }
-
- /** Sets a custom attribute. */
- def SetCustomAttribute(constr: ConstructorInfo, value: Array[Byte]) {
- addCustomAttribute(constr, value)
- }
-
- //##########################################################################
-
- /** The apply method for a visitor. */
- @throws(classOf[IOException])
- def apply(v: Visitor) {
- v.caseMethodBuilder(this)
- }
-
- //##########################################################################
-
- // the Intermediate Language Generator
- // it contains the method's body
- protected final val ilGenerator : ILGenerator =
- if (DeclaringType == null // global method
- || !DeclaringType.IsInterface())
- new ILGenerator(this)
- else null
-
- //##########################################################################
-}
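A hedged usage sketch of the MethodBuilder API above (same assumption: the msil emit classes from this tree are available). It mirrors the ilGenerator initializer: only global methods and methods of non-interface types carry a body, so GetILGenerator is guarded accordingly; bodyOf is a hypothetical helper name.

    import ch.epfl.lamp.compiler.msil.emit.{ILGenerator, MethodBuilder}

    // Sketch only: return a code generator when the method can have a body.
    def bodyOf(m: MethodBuilder): Option[ILGenerator] =
      if (m.DeclaringType != null && m.DeclaringType.IsInterface()) None
      else Some(m.GetILGenerator())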
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/emit/ModuleBuilder.scala b/src/msil/ch/epfl/lamp/compiler/msil/emit/ModuleBuilder.scala
deleted file mode 100644
index 981e855e0e..0000000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/emit/ModuleBuilder.scala
+++ /dev/null
@@ -1,136 +0,0 @@
-/*
- * System.Reflection.Emit-like API for writing .NET assemblies to MSIL
- */
-
-
-package ch.epfl.lamp.compiler.msil.emit
-
-import ch.epfl.lamp.compiler.msil._
-import java.io.IOException
-
-/**
- * Defines and represents a module. Get an instance of ModuleBuilder
- * by calling DefineDynamicModule
- *
- * @author Nikolay Mihaylov
- * @version 1.0
- */
-class ModuleBuilder(name: String, fullname: String, scopeName: String, assembly: Assembly)
- extends Module(name, fullname, scopeName, assembly)
- with ICustomAttributeSetter
- with Visitable
-{
-
- //##########################################################################
- // public interface
-
- /**
- * Complete the global function definitions for this dynamic module.
- * This method should be called when the user is done with defining
- * all of the global functions within this dynamic module. After calling
- * this function, no more new global functions or new global data are
- * allowed.
- */
- def CreateGlobalFunctions() {
- if (globalsCreated)
- throw new RuntimeException("Global functions are already created")
- this.fields = fieldBuilders.toArray // (fields).asInstanceOf[Array[FieldInfo]]
- this.methods = methodBuilders.toArray // (methods).asInstanceOf[Array[MethodInfo]]
- globalsCreated = true
- }
-
- /**
- * Constructs a TypeBuilder for a type with the specified name
- */
- def DefineType(typeName: String): TypeBuilder = {
- return DefineType(typeName, 0, null, Type.EmptyTypes)
- }
-
- /**
- * Constructs a TypeBuilder for a type with the specified name
- * and specified attributes
- */
- def DefineType(typeName: String, attributes: Int): TypeBuilder = {
- return DefineType(typeName, attributes, null, Type.EmptyTypes)
- }
-
- /**
- * Constructs a TypeBuilder given type name, its attributes,
- * and the type that the defined type extends.
- */
- def DefineType(typeName: String, attributes: Int,
- baseType: Type): TypeBuilder = {
- return DefineType(typeName, attributes, baseType, Type.EmptyTypes)
- }
-
- /**
-   * Constructs a TypeBuilder given the full specification of a type:
-   * the type name, attributes, the type that the defined type
- * extends, and the interfaces that the defined type implements.
- */
- def DefineType(typeName: String,
- attributes: Int,
- baseType: Type,
- interfaces: Array[Type]): TypeBuilder =
- {
- var t: Type = GetType(typeName) // Module.GetType(String)
- if (t != null)
-      throw new RuntimeException(
-        "Type '[" + Assembly + "]" + typeName + "' already exists!")
- val `type` =
- new TypeBuilder(this, attributes, typeName, baseType, interfaces, null)
- addType(`type`)
- return `type`
- }
-
- /**
- * Defines a global method given its name, attributes, return type, and
- * parameter types.
- */
- def DefineGlobalMethod(name: String, attributes: Int,
- returnType: Type, paramTypes: Array[Type]): MethodBuilder =
- {
- val method =
- new MethodBuilder(name, null, attributes, returnType, paramTypes)
- methodBuilders += method
- return method
- }
-
-
- override def GetTypes(): Array[Type] = {
- val res = scala.collection.mutable.ArrayBuffer.empty[Type]
- val iter = typesMap.values().iterator
- while (iter.hasNext) {
- res += iter.next.asInstanceOf[Type]
- }
- return res.toArray
- }
-
- /** Sets a custom attribute. */
- def SetCustomAttribute(constr: ConstructorInfo, value: Array[Byte]) {
- addCustomAttribute(constr, value)
- }
-
- //##########################################################################
- // internal members
-
- var globalsCreated = false
- protected var fieldBuilders = scala.collection.mutable.ArrayBuffer.empty[FieldInfo]
- protected var methodBuilders = scala.collection.mutable.ArrayBuffer.empty[MethodInfo]
-
- override def addType(t: Type): Type = {
- return super.addType(t)
- }
-
- //##########################################################################
-
- /**
- * the apply method for a visitor
- */
- @throws(classOf[IOException])
- def apply(v: Visitor) {
- v.caseModuleBuilder(this)
- }
-
- //##########################################################################
-}
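A hedged usage sketch of the ModuleBuilder API above (again assuming the msil emit classes from this tree; the attribute value 0 is a placeholder, not a meaningful flag set). The convenience DefineType overloads all delegate to the four-argument form shown here.

    import ch.epfl.lamp.compiler.msil.Type
    import ch.epfl.lamp.compiler.msil.emit.{ModuleBuilder, TypeBuilder}

    // Sketch only: a type with no base class and no interfaces, as produced by
    // the one-argument DefineType overload above.
    def defineEmptyType(module: ModuleBuilder, typeName: String): TypeBuilder =
      module.DefineType(typeName, 0, null, Type.EmptyTypes)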
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/emit/MultipleFilesILPrinterVisitor.scala b/src/msil/ch/epfl/lamp/compiler/msil/emit/MultipleFilesILPrinterVisitor.scala
deleted file mode 100644
index 55c52109b6..0000000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/emit/MultipleFilesILPrinterVisitor.scala
+++ /dev/null
@@ -1,137 +0,0 @@
-/*
- * System.Reflection.Emit-like API for writing .NET assemblies in MSIL
- */
-
-
-package ch.epfl.lamp.compiler.msil.emit
-
-import java.io.File
-import java.io.FileWriter
-import java.io.BufferedWriter
-import java.io.PrintWriter
-import java.io.IOException
-import java.util.Iterator
-import java.util.Arrays
-
-import ch.epfl.lamp.compiler.msil._
-import ch.epfl.lamp.compiler.msil.emit
-import ch.epfl.lamp.compiler.msil.util.Table
-
-/**
- * The MSIL printer Visitor. It prints a complete
- * assembly into separate files, which can then be compiled by ilasm.
- *
- * @author Nikolay Mihaylov
- * @author Daniel Lorch
- * @version 1.0
- */
-final class MultipleFilesILPrinterVisitor(destPath: String, sourceFilesPath: String) extends ILPrinterVisitor {
- /**
- * Visit an AssemblyBuilder
- */
- @throws(classOf[IOException])
- def caseAssemblyBuilder(assemblyBuilder: AssemblyBuilder) {
- ILPrinterVisitor.currAssembly = assemblyBuilder
-
- // first get the entryPoint
- this.entryPoint = assemblyBuilder.EntryPoint
-
- // all external assemblies
- as = assemblyBuilder.getExternAssemblies()
- scala.util.Sorting.quickSort(as)(assemblyNameComparator) // Arrays.sort(as, assemblyNameComparator)
-
- // print each module
- var m: Array[Module] = assemblyBuilder.GetModules()
- nomembers = true
- for(i <- 0 until m.length) {
- print(m(i).asInstanceOf[ModuleBuilder])
- }
-
- nomembers = false
- for(i <- 0 until m.length) {
- print(m(i).asInstanceOf[ModuleBuilder])
- }
- ILPrinterVisitor.currAssembly = null
- }
-
- /**
- * Visit a ModuleBuilder
- */
- @throws(classOf[IOException])
- def caseModuleBuilder(module: ModuleBuilder) {
- val assemblyBuilder = ILPrinterVisitor.currAssembly.asInstanceOf[AssemblyBuilder]
-
- // print module declaration
- currentModule = module
-
- // global methods typically contain the main method
- if (!module.globalsCreated)
- module.CreateGlobalFunctions()
-
- var m: Array[MethodInfo] = module.GetMethods()
-
- // "Types" contain all the classes
- var t: Array[Type] = module.GetTypes()
- for(i <- 0 until t.length) {
- val tBuilder = t(i).asInstanceOf[TypeBuilder]
- val sourceFilename = tBuilder.sourceFilename
- val sourceFilepath = new File(tBuilder.sourceFilepath).getCanonicalPath
- val sourcePath = new File(sourceFilesPath).getCanonicalPath
- var append = false
-
- if(!sourceFilepath.startsWith(sourcePath)) {
- throw new IOException("Source file " + sourceFilename + " must lie inside sourcepath " + sourcePath)
- }
-
- assert(sourceFilepath.endsWith(".scala"), "Source file doesn't end with .scala")
- val relativeFilename = sourceFilepath.substring(sourcePath.length, sourceFilepath.length() - 6) + ".msil"
- val fileName = new File(destPath, relativeFilename)
- if(assemblyBuilder.generatedFiles.contains(fileName.getPath)) {
- append = true
- } else {
- fileName.getParentFile().mkdirs()
- assemblyBuilder.generatedFiles += (fileName.getPath)
- }
-
- out = new PrintWriter(new BufferedWriter(new FileWriter(fileName, append)))
- // only write assembly boilerplate and class prototypes
- if (!append && nomembers) {
- printAssemblyBoilerplate()
-
- print(".module \'"); print(module.Name); println("\'")
- printAttributes(module)
- }
-
- print(t(i).asInstanceOf[TypeBuilder])
- out.close()
- }
-
-    // now write the global methods (typically containing the "main" method)
- if(!nomembers) {
- var globalMethods: File = new File(destPath, ILPrinterVisitor.currAssembly.GetName().Name + ".msil")
- val append = assemblyBuilder.generatedFiles.contains(globalMethods.getPath)
-
- out = new PrintWriter(new BufferedWriter(new FileWriter(globalMethods, append)))
-
- // make sure we're the first in the list (ilasm uses the first file name to guess the output file name)
- assemblyBuilder.generatedFiles.insert(0, globalMethods.getPath)
-
- // if this file hasn't been created by one of the classes, write boilerplate
- if(!append) {
- printAssemblyBoilerplate()
-
- print(".module \'"); print(module.Name); println("\'")
- printAttributes(module)
- }
-
- for(i <- 0 until m.length) {
- print(m(i).asInstanceOf[MethodBuilder])
- }
-
- out.close()
- }
-
- currentModule = null
- }
-
-} // class MultipleFilesILPrinterVisitor
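A standalone sketch of the path mapping caseModuleBuilder performs above when it derives the .msil output file for a .scala source file: the source must lie under sourceFilesPath, and its relative path is reused under destPath with the .scala suffix replaced by .msil. msilOutputFile is a hypothetical helper name.

    import java.io.File

    def msilOutputFile(destPath: String, sourceFilesPath: String, sourceFile: String): File = {
      val sourcePath     = new File(sourceFilesPath).getCanonicalPath
      val sourceFilepath = new File(sourceFile).getCanonicalPath
      require(sourceFilepath.startsWith(sourcePath),
        "Source file " + sourceFile + " must lie inside sourcepath " + sourcePath)
      require(sourceFilepath.endsWith(".scala"), "Source file doesn't end with .scala")
      // Drop the ".scala" suffix (6 characters) and append ".msil".
      val relative = sourceFilepath.substring(sourcePath.length, sourceFilepath.length - 6) + ".msil"
      new File(destPath, relative)
    }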
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/emit/OpCode.scala b/src/msil/ch/epfl/lamp/compiler/msil/emit/OpCode.scala
deleted file mode 100644
index b0c26884af..0000000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/emit/OpCode.scala
+++ /dev/null
@@ -1,1948 +0,0 @@
-/*
- * System.Reflection.Emit-like API for writing .NET assemblies to MSIL
- */
-
-
-package ch.epfl.lamp.compiler.msil.emit
-
-import java.io.IOException
-
-/** Describes a Microsoft intermediate language (MSIL) instruction.
- *
- * @author Nikolay Mihaylov
- * @version 1.0
- */
-class OpCode extends Visitable {
- import OpCode._
-
- /** The Operation Code of Microsoft intermediate language (MSIL) instruction. */
- var CEE_opcode : Int = _
-
- /** The name of the Microsoft intermediate language (MSIL) instruction. */
- var CEE_string: String = _
-
- /** The type of Microsoft intermediate language (MSIL) instruction. */
- var CEE_code: Short = _
-
- /** How the Microsoft intermediate language (MSIL) instruction pops the stack. */
- var CEE_pop: Byte = _
-
-  /** How the Microsoft intermediate language (MSIL) instruction pushes its operand onto the stack. */
- var CEE_push: Byte = _
-
- /** Describes the type of flow control. */
- var CEE_flow: Byte = _
-
-  /** The kind of inline argument that follows the Microsoft intermediate language (MSIL) instruction. */
- var CEE_inline: Byte = _
-
- var CEE_length: Byte = _
-
- var CEE_popush: Byte = _
-
- /**
- * the apply method for a visitor
- */
- @throws(classOf[IOException])
- def apply(v: Visitor) {
- v.caseOpCode(this)
- }
-
- protected def length(): Byte = {
- val code = OpCode.length(CEE_code)
- val inline = OpCode.INLINE_length(CEE_inline)
- return if(inline < 0) { -1 } else { (code + inline).toByte }
- }
-
- protected def popush(): Byte = {
- val pop = OpCode.POP_size(CEE_pop)
- val push = OpCode.PUSH_size(CEE_push)
- return if(pop < 0 || push < 0) { OpCode.POPUSH_SPECIAL } else { (push - pop).toByte }
- }
-
- override def toString(): String = {
- return CEE_string
- }
-}
-
-object OpCode {
-
- //########################################################################
- // Common Execution Environment opcodes
-
- final val CEE_NOP : Int = 0x0000
- final val CEE_BREAK : Int = 0x0001
- final val CEE_LDARG_0 : Int = 0x0002
- final val CEE_LDARG_1 : Int = 0x0003
- final val CEE_LDARG_2 : Int = 0x0004
- final val CEE_LDARG_3 : Int = 0x0005
- final val CEE_LDLOC_0 : Int = 0x0006
- final val CEE_LDLOC_1 : Int = 0x0007
- final val CEE_LDLOC_2 : Int = 0x0008
- final val CEE_LDLOC_3 : Int = 0x0009
- final val CEE_STLOC_0 : Int = 0x000A
- final val CEE_STLOC_1 : Int = 0x000B
- final val CEE_STLOC_2 : Int = 0x000C
- final val CEE_STLOC_3 : Int = 0x000D
- final val CEE_LDARG_S : Int = 0x000E
- final val CEE_LDARGA_S : Int = 0x000F
- final val CEE_STARG_S : Int = 0x0010
- final val CEE_LDLOC_S : Int = 0x0011
- final val CEE_LDLOCA_S : Int = 0x0012
- final val CEE_STLOC_S : Int = 0x0013
- final val CEE_LDNULL : Int = 0x0014
- final val CEE_LDC_I4_M1 : Int = 0x0015
- final val CEE_LDC_I4_0 : Int = 0x0016
- final val CEE_LDC_I4_1 : Int = 0x0017
- final val CEE_LDC_I4_2 : Int = 0x0018
- final val CEE_LDC_I4_3 : Int = 0x0019
- final val CEE_LDC_I4_4 : Int = 0x001A
- final val CEE_LDC_I4_5 : Int = 0x001B
- final val CEE_LDC_I4_6 : Int = 0x001C
- final val CEE_LDC_I4_7 : Int = 0x001D
- final val CEE_LDC_I4_8 : Int = 0x001E
- final val CEE_LDC_I4_S : Int = 0x001F
- final val CEE_LDC_I4 : Int = 0x0020
- final val CEE_LDC_I8 : Int = 0x0021
- final val CEE_LDC_R4 : Int = 0x0022
- final val CEE_LDC_R8 : Int = 0x0023
- final val CEE_UNUSED49 : Int = 0x0024
- final val CEE_DUP : Int = 0x0025
- final val CEE_POP : Int = 0x0026
- final val CEE_JMP : Int = 0x0027
- final val CEE_CALL : Int = 0x0028
- final val CEE_CALLI : Int = 0x0029
- final val CEE_RET : Int = 0x002A
- final val CEE_BR_S : Int = 0x002B
- final val CEE_BRFALSE_S : Int = 0x002C
- final val CEE_BRTRUE_S : Int = 0x002D
- final val CEE_BEQ_S : Int = 0x002E
- final val CEE_BGE_S : Int = 0x002F
- final val CEE_BGT_S : Int = 0x0030
- final val CEE_BLE_S : Int = 0x0031
- final val CEE_BLT_S : Int = 0x0032
- final val CEE_BNE_UN_S : Int = 0x0033
- final val CEE_BGE_UN_S : Int = 0x0034
- final val CEE_BGT_UN_S : Int = 0x0035
- final val CEE_BLE_UN_S : Int = 0x0036
- final val CEE_BLT_UN_S : Int = 0x0037
- final val CEE_BR : Int = 0x0038
- final val CEE_BRFALSE : Int = 0x0039
- final val CEE_BRTRUE : Int = 0x003A
- final val CEE_BEQ : Int = 0x003B
- final val CEE_BGE : Int = 0x003C
- final val CEE_BGT : Int = 0x003D
- final val CEE_BLE : Int = 0x003E
- final val CEE_BLT : Int = 0x003F
- final val CEE_BNE_UN : Int = 0x0040
- final val CEE_BGE_UN : Int = 0x0041
- final val CEE_BGT_UN : Int = 0x0042
- final val CEE_BLE_UN : Int = 0x0043
- final val CEE_BLT_UN : Int = 0x0044
- final val CEE_SWITCH : Int = 0x0045
- final val CEE_LDIND_I1 : Int = 0x0046
- final val CEE_LDIND_U1 : Int = 0x0047
- final val CEE_LDIND_I2 : Int = 0x0048
- final val CEE_LDIND_U2 : Int = 0x0049
- final val CEE_LDIND_I4 : Int = 0x004A
- final val CEE_LDIND_U4 : Int = 0x004B
- final val CEE_LDIND_I8 : Int = 0x004C
- final val CEE_LDIND_I : Int = 0x004D
- final val CEE_LDIND_R4 : Int = 0x004E
- final val CEE_LDIND_R8 : Int = 0x004F
- final val CEE_LDIND_REF : Int = 0x0050
- final val CEE_STIND_REF : Int = 0x0051
- final val CEE_STIND_I1 : Int = 0x0052
- final val CEE_STIND_I2 : Int = 0x0053
- final val CEE_STIND_I4 : Int = 0x0054
- final val CEE_STIND_I8 : Int = 0x0055
- final val CEE_STIND_R4 : Int = 0x0056
- final val CEE_STIND_R8 : Int = 0x0057
- final val CEE_ADD : Int = 0x0058
- final val CEE_SUB : Int = 0x0059
- final val CEE_MUL : Int = 0x005A
- final val CEE_DIV : Int = 0x005B
- final val CEE_DIV_UN : Int = 0x005C
- final val CEE_REM : Int = 0x005D
- final val CEE_REM_UN : Int = 0x005E
- final val CEE_AND : Int = 0x005F
- final val CEE_OR : Int = 0x0060
- final val CEE_XOR : Int = 0x0061
- final val CEE_SHL : Int = 0x0062
- final val CEE_SHR : Int = 0x0063
- final val CEE_SHR_UN : Int = 0x0064
- final val CEE_NEG : Int = 0x0065
- final val CEE_NOT : Int = 0x0066
- final val CEE_CONV_I1 : Int = 0x0067
- final val CEE_CONV_I2 : Int = 0x0068
- final val CEE_CONV_I4 : Int = 0x0069
- final val CEE_CONV_I8 : Int = 0x006A
- final val CEE_CONV_R4 : Int = 0x006B
- final val CEE_CONV_R8 : Int = 0x006C
- final val CEE_CONV_U4 : Int = 0x006D
- final val CEE_CONV_U8 : Int = 0x006E
- final val CEE_CALLVIRT : Int = 0x006F
- final val CEE_CPOBJ : Int = 0x0070
- final val CEE_LDOBJ : Int = 0x0071
- final val CEE_LDSTR : Int = 0x0072
- final val CEE_NEWOBJ : Int = 0x0073
- final val CEE_CASTCLASS : Int = 0x0074
- final val CEE_ISINST : Int = 0x0075
- final val CEE_CONV_R_UN : Int = 0x0076
- final val CEE_UNUSED58 : Int = 0x0077
- final val CEE_UNUSED1 : Int = 0x0078
- final val CEE_UNBOX : Int = 0x0079
- final val CEE_THROW : Int = 0x007A
- final val CEE_LDFLD : Int = 0x007B
- final val CEE_LDFLDA : Int = 0x007C
- final val CEE_STFLD : Int = 0x007D
- final val CEE_LDSFLD : Int = 0x007E
- final val CEE_LDSFLDA : Int = 0x007F
- final val CEE_STSFLD : Int = 0x0080
- final val CEE_STOBJ : Int = 0x0081
- final val CEE_CONV_OVF_I1_UN : Int = 0x0082
- final val CEE_CONV_OVF_I2_UN : Int = 0x0083
- final val CEE_CONV_OVF_I4_UN : Int = 0x0084
- final val CEE_CONV_OVF_I8_UN : Int = 0x0085
- final val CEE_CONV_OVF_U1_UN : Int = 0x0086
- final val CEE_CONV_OVF_U2_UN : Int = 0x0087
- final val CEE_CONV_OVF_U4_UN : Int = 0x0088
- final val CEE_CONV_OVF_U8_UN : Int = 0x0089
- final val CEE_CONV_OVF_I_UN : Int = 0x008A
- final val CEE_CONV_OVF_U_UN : Int = 0x008B
- final val CEE_BOX : Int = 0x008C
- final val CEE_NEWARR : Int = 0x008D
- final val CEE_LDLEN : Int = 0x008E
- final val CEE_LDELEMA : Int = 0x008F
- final val CEE_LDELEM_I1 : Int = 0x0090
- final val CEE_LDELEM_U1 : Int = 0x0091
- final val CEE_LDELEM_I2 : Int = 0x0092
- final val CEE_LDELEM_U2 : Int = 0x0093
- final val CEE_LDELEM_I4 : Int = 0x0094
- final val CEE_LDELEM_U4 : Int = 0x0095
- final val CEE_LDELEM_I8 : Int = 0x0096
- final val CEE_LDELEM_I : Int = 0x0097
- final val CEE_LDELEM_R4 : Int = 0x0098
- final val CEE_LDELEM_R8 : Int = 0x0099
- final val CEE_LDELEM_REF : Int = 0x009A
- final val CEE_STELEM_I : Int = 0x009B
- final val CEE_STELEM_I1 : Int = 0x009C
- final val CEE_STELEM_I2 : Int = 0x009D
- final val CEE_STELEM_I4 : Int = 0x009E
- final val CEE_STELEM_I8 : Int = 0x009F
- final val CEE_STELEM_R4 : Int = 0x00A0
- final val CEE_STELEM_R8 : Int = 0x00A1
- final val CEE_STELEM_REF : Int = 0x00A2
- final val CEE_UNUSED2 : Int = 0x00A3
- final val CEE_UNUSED3 : Int = 0x00A4
- final val CEE_UNUSED4 : Int = 0x00A5
- final val CEE_UNUSED5 : Int = 0x00A6
- final val CEE_UNUSED6 : Int = 0x00A7
- final val CEE_UNUSED7 : Int = 0x00A8
- final val CEE_UNUSED8 : Int = 0x00A9
- final val CEE_UNUSED9 : Int = 0x00AA
- final val CEE_UNUSED10 : Int = 0x00AB
- final val CEE_UNUSED11 : Int = 0x00AC
- final val CEE_UNUSED12 : Int = 0x00AD
- final val CEE_UNUSED13 : Int = 0x00AE
- final val CEE_UNUSED14 : Int = 0x00AF
- final val CEE_UNUSED15 : Int = 0x00B0
- final val CEE_UNUSED16 : Int = 0x00B1
- final val CEE_UNUSED17 : Int = 0x00B2
- final val CEE_CONV_OVF_I1 : Int = 0x00B3
- final val CEE_CONV_OVF_U1 : Int = 0x00B4
- final val CEE_CONV_OVF_I2 : Int = 0x00B5
- final val CEE_CONV_OVF_U2 : Int = 0x00B6
- final val CEE_CONV_OVF_I4 : Int = 0x00B7
- final val CEE_CONV_OVF_U4 : Int = 0x00B8
- final val CEE_CONV_OVF_I8 : Int = 0x00B9
- final val CEE_CONV_OVF_U8 : Int = 0x00BA
- final val CEE_UNUSED50 : Int = 0x00BB
- final val CEE_UNUSED18 : Int = 0x00BC
- final val CEE_UNUSED19 : Int = 0x00BD
- final val CEE_UNUSED20 : Int = 0x00BE
- final val CEE_UNUSED21 : Int = 0x00BF
- final val CEE_UNUSED22 : Int = 0x00C0
- final val CEE_UNUSED23 : Int = 0x00C1
- final val CEE_REFANYVAL : Int = 0x00C2
- final val CEE_CKFINITE : Int = 0x00C3
- final val CEE_UNUSED24 : Int = 0x00C4
- final val CEE_UNUSED25 : Int = 0x00C5
- final val CEE_MKREFANY : Int = 0x00C6
- final val CEE_UNUSED59 : Int = 0x00C7
- final val CEE_UNUSED60 : Int = 0x00C8
- final val CEE_UNUSED61 : Int = 0x00C9
- final val CEE_UNUSED62 : Int = 0x00CA
- final val CEE_UNUSED63 : Int = 0x00CB
- final val CEE_UNUSED64 : Int = 0x00CC
- final val CEE_UNUSED65 : Int = 0x00CD
- final val CEE_UNUSED66 : Int = 0x00CE
- final val CEE_UNUSED67 : Int = 0x00CF
- final val CEE_LDTOKEN : Int = 0x00D0
- final val CEE_CONV_U2 : Int = 0x00D1
- final val CEE_CONV_U1 : Int = 0x00D2
- final val CEE_CONV_I : Int = 0x00D3
- final val CEE_CONV_OVF_I : Int = 0x00D4
- final val CEE_CONV_OVF_U : Int = 0x00D5
- final val CEE_ADD_OVF : Int = 0x00D6
- final val CEE_ADD_OVF_UN : Int = 0x00D7
- final val CEE_MUL_OVF : Int = 0x00D8
- final val CEE_MUL_OVF_UN : Int = 0x00D9
- final val CEE_SUB_OVF : Int = 0x00DA
- final val CEE_SUB_OVF_UN : Int = 0x00DB
- final val CEE_ENDFINALLY : Int = 0x00DC
- final val CEE_LEAVE : Int = 0x00DD
- final val CEE_LEAVE_S : Int = 0x00DE
- final val CEE_STIND_I : Int = 0x00DF
- final val CEE_CONV_U : Int = 0x00E0
- final val CEE_UNUSED26 : Int = 0x00E1
- final val CEE_UNUSED27 : Int = 0x00E2
- final val CEE_UNUSED28 : Int = 0x00E3
- final val CEE_UNUSED29 : Int = 0x00E4
- final val CEE_UNUSED30 : Int = 0x00E5
- final val CEE_UNUSED31 : Int = 0x00E6
- final val CEE_UNUSED32 : Int = 0x00E7
- final val CEE_UNUSED33 : Int = 0x00E8
- final val CEE_UNUSED34 : Int = 0x00E9
- final val CEE_UNUSED35 : Int = 0x00EA
- final val CEE_UNUSED36 : Int = 0x00EB
- final val CEE_UNUSED37 : Int = 0x00EC
- final val CEE_UNUSED38 : Int = 0x00ED
- final val CEE_UNUSED39 : Int = 0x00EE
- final val CEE_UNUSED40 : Int = 0x00EF
- final val CEE_UNUSED41 : Int = 0x00F0
- final val CEE_UNUSED42 : Int = 0x00F1
- final val CEE_UNUSED43 : Int = 0x00F2
- final val CEE_UNUSED44 : Int = 0x00F3
- final val CEE_UNUSED45 : Int = 0x00F4
- final val CEE_UNUSED46 : Int = 0x00F5
- final val CEE_UNUSED47 : Int = 0x00F6
- final val CEE_UNUSED48 : Int = 0x00F7
- final val CEE_PREFIX7 : Int = 0x00F8
- final val CEE_PREFIX6 : Int = 0x00F9
- final val CEE_PREFIX5 : Int = 0x00FA
- final val CEE_PREFIX4 : Int = 0x00FB
- final val CEE_PREFIX3 : Int = 0x00FC
- final val CEE_PREFIX2 : Int = 0x00FD
- final val CEE_PREFIX1 : Int = 0x00FE
- final val CEE_PREFIXREF : Int = 0x00FF
-
- final val CEE_ARGLIST : Int = 0x0100
- final val CEE_CEQ : Int = 0x0101
- final val CEE_CGT : Int = 0x0102
- final val CEE_CGT_UN : Int = 0x0103
- final val CEE_CLT : Int = 0x0104
- final val CEE_CLT_UN : Int = 0x0105
- final val CEE_LDFTN : Int = 0x0106
- final val CEE_LDVIRTFTN : Int = 0x0107
- final val CEE_UNUSED56 : Int = 0x0108
- final val CEE_LDARG : Int = 0x0109
- final val CEE_LDARGA : Int = 0x010A
- final val CEE_STARG : Int = 0x010B
- final val CEE_LDLOC : Int = 0x010C
- final val CEE_LDLOCA : Int = 0x010D
- final val CEE_STLOC : Int = 0x010E
- final val CEE_LOCALLOC : Int = 0x010F
- final val CEE_UNUSED57 : Int = 0x0110
- final val CEE_ENDFILTER : Int = 0x0111
- final val CEE_UNALIGNED : Int = 0x0112
- final val CEE_VOLATILE : Int = 0x0113
- final val CEE_TAILCALL : Int = 0x0114
- final val CEE_INITOBJ : Int = 0x0115
- final val CEE_CONSTRAINED : Int = 0xFE16
- final val CEE_READONLY : Int = 0xFE1E
- final val CEE_UNUSED68 : Int = 0x0116
- final val CEE_CPBLK : Int = 0x0117
- final val CEE_INITBLK : Int = 0x0118
- final val CEE_UNUSED69 : Int = 0x0119
- final val CEE_RETHROW : Int = 0x011A
- final val CEE_UNUSED51 : Int = 0x011B
- final val CEE_SIZEOF : Int = 0x011C
- final val CEE_REFANYTYPE : Int = 0x011D
- final val CEE_UNUSED52 : Int = 0x011E
- final val CEE_UNUSED53 : Int = 0x011F
- final val CEE_UNUSED54 : Int = 0x0120
- final val CEE_UNUSED55 : Int = 0x0121
- final val CEE_UNUSED70 : Int = 0x0122
-
- final val CEE_ILLEGAL : Int = 0x0140
- final val CEE_MACRO_END : Int = 0x0141
-
- final val CEE_BRNULL : Int = 0x0180 // CEE_BRFALSE
- final val CEE_BRNULL_S : Int = 0x0181 // CEE_BRFALSE_S
- final val CEE_BRZERO : Int = 0x0182 // CEE_BRFALSE
- final val CEE_BRZERO_S : Int = 0x0183 // CEE_BRFALSE_S
- final val CEE_BRINST : Int = 0x0184 // CEE_BRTRUE
- final val CEE_BRINST_S : Int = 0x0185 // CEE_BRTRUE_S
- final val CEE_LDIND_U8 : Int = 0x0186 // CEE_LDIND_I8
- final val CEE_LDELEM_U8 : Int = 0x0187 // CEE_LDELEM_I8
- final val CEE_LDC_I4_M1x : Int = 0x0188 // CEE_LDC_I4_M1
- final val CEE_ENDFAULT : Int = 0x0189 // CEE_ENDFINALLY
-
- final val CEE_BRNONZERO : Int = 0x01C0 // CEE_BRTRUE
- final val CEE_BRNONZERO_S : Int = 0x01C1 // CEE_BRTRUE_S
-
- final val CEE_BRNOT : Int = 0x01C2
- final val CEE_BRNOT_S : Int = 0x01C3
- final val CEE_NOCODE : Int = 0x01C4
-
- final val CEE_count : Int = 0x0200
-
-
- //########################################################################
-  // Opcode's amount and type of popped data
-
- final val POP_NONE : Byte = 0x00
- final val POP_1 : Byte = 0x01
- final val POP_1_1 : Byte = 0x02
- final val POP_I : Byte = 0x03
- final val POP_I_1 : Byte = 0x04
- final val POP_I_I : Byte = 0x05
- final val POP_I_I8 : Byte = 0x06
- final val POP_I_R4 : Byte = 0x07
- final val POP_I_R8 : Byte = 0x08
- final val POP_I_I_I : Byte = 0x09
- final val POP_REF : Byte = 0x0A
- final val POP_REF_1 : Byte = 0x0B
- final val POP_REF_I : Byte = 0x0C
- final val POP_REF_I_I : Byte = 0x0D
- final val POP_REF_I_I8 : Byte = 0x0E
- final val POP_REF_I_R4 : Byte = 0x0F
- final val POP_REF_I_R8 : Byte = 0x10
- final val POP_REF_I_REF : Byte = 0x11
- final val POP_SPECIAL : Byte = 0x12
- final val POP_count : Int = 0x13
- final val POP_size : Array[Byte] = new Array[Byte](POP_count)
-
- POP_size(POP_NONE) = 0
- POP_size(POP_1) = 1
- POP_size(POP_1_1) = 2
- POP_size(POP_I) = 1
- POP_size(POP_I_1) = 2
- POP_size(POP_I_I) = 2
- POP_size(POP_I_I8) = 2
- POP_size(POP_I_R4) = 2
- POP_size(POP_I_R8) = 2
- POP_size(POP_I_I_I) = 3
- POP_size(POP_REF) = 1
- POP_size(POP_REF_1) = 2
- POP_size(POP_REF_I) = 2
- POP_size(POP_REF_I_I) = 3
- POP_size(POP_REF_I_I8) = 3
- POP_size(POP_REF_I_R4) = 3
- POP_size(POP_REF_I_R8) = 3
- POP_size(POP_REF_I_REF) = 3
- POP_size(POP_SPECIAL) = -1
-
- //########################################################################
- // Opcode's amount and type of pushed data
-
- final val PUSH_NONE : Byte = 0x00
- final val PUSH_1 : Byte = 0x01
- final val PUSH_1_1 : Byte = 0x02
- final val PUSH_I : Byte = 0x03
- final val PUSH_I8 : Byte = 0x04
- final val PUSH_R4 : Byte = 0x05
- final val PUSH_R8 : Byte = 0x06
- final val PUSH_REF : Byte = 0x07
- final val PUSH_SPECIAL : Byte = 0x08
- final val PUSH_count : Int = 0x09
- final val PUSH_size : Array[Byte] = new Array[Byte](PUSH_count)
-
- PUSH_size(PUSH_NONE) = 0
- PUSH_size(PUSH_1) = 1
- PUSH_size(PUSH_1_1) = 2
- PUSH_size(PUSH_I) = 1
- PUSH_size(PUSH_I8) = 1
- PUSH_size(PUSH_R4) = 1
- PUSH_size(PUSH_R8) = 1
- PUSH_size(PUSH_REF) = 1
- PUSH_size(PUSH_SPECIAL) = -1
-
- //########################################################################
- // Opcode's amount of moved data
-
- final val POPUSH_SPECIAL : Byte = -128
-
- //########################################################################
- // Opcode's inline argument types
-
- final val INLINE_NONE : Byte = 0x00
- final val INLINE_VARIABLE_S : Byte = 0x01
- final val INLINE_TARGET_S : Byte = 0x02
- final val INLINE_I_S : Byte = 0x03
- final val INLINE_VARIABLE : Byte = 0x04
- final val INLINE_TARGET : Byte = 0x05
- final val INLINE_I : Byte = 0x06
- final val INLINE_I8 : Byte = 0x07
- final val INLINE_R : Byte = 0x08
- final val INLINE_R8 : Byte = 0x09
- final val INLINE_STRING : Byte = 0x0A
- final val INLINE_TYPE : Byte = 0x0B
- final val INLINE_FIELD : Byte = 0x0C
- final val INLINE_METHOD : Byte = 0x0D
- final val INLINE_SIGNATURE : Byte = 0x0E
- final val INLINE_TOKEN : Byte = 0x0F
- final val INLINE_SWITCH : Byte = 0x10
- final val INLINE_count : Int = 0x11
- final val INLINE_length : Array[Byte] = new Array[Byte](INLINE_count)
-
- INLINE_length(INLINE_NONE) = 0
- INLINE_length(INLINE_VARIABLE_S) = 1
- INLINE_length(INLINE_TARGET_S) = 1
- INLINE_length(INLINE_I_S) = 1
- INLINE_length(INLINE_VARIABLE) = 2
- INLINE_length(INLINE_TARGET) = 4
- INLINE_length(INLINE_I) = 4
- INLINE_length(INLINE_I8) = 8
- INLINE_length(INLINE_R) = 4
- INLINE_length(INLINE_R8) = 8
- INLINE_length(INLINE_STRING) = 4
- INLINE_length(INLINE_TYPE) = 4
- INLINE_length(INLINE_FIELD) = 4
- INLINE_length(INLINE_METHOD) = 4
- INLINE_length(INLINE_SIGNATURE) = 4
- INLINE_length(INLINE_SWITCH) = 4
- INLINE_length(INLINE_TOKEN) = 4
-
- //########################################################################
- // Opcode's control flow implications
-
- final val FLOW_META : Byte = 0x00
- final val FLOW_NEXT : Byte = 0x01
- final val FLOW_BRANCH : Byte = 0x02
- final val FLOW_COND_BRANCH : Byte = 0x03
- final val FLOW_BREAK : Byte = 0x04
- final val FLOW_CALL : Byte = 0x05
- final val FLOW_RETURN : Byte = 0x06
- final val FLOW_THROW : Byte = 0x07
- final val FLOW_count : Int = 0x08
-
- //########################################################################
- // Init methods for Opcode
-
- def opcode(that: OpCode, opcode: Int, string: String, code: Int,
- pop: Byte, push: Byte, inline: Byte, flow: Byte) {
- that.CEE_opcode = opcode
- that.CEE_string = string
- that.CEE_code = code.toShort
- that.CEE_pop = pop
- that.CEE_push = push
- that.CEE_inline = inline
- that.CEE_flow = flow
- that.CEE_length = that.length()
- that.CEE_popush = that.popush()
- }
-
- def length(code: Int): Byte = {
- if ((code & 0xFFFFFF00) == 0xFFFFFF00) return 1
- if ((code & 0xFFFFFF00) == 0xFFFFFE00) return 2
- return 0
- }
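  // Illustrative sketch (hypothetical helper, mirroring the instance method
  // `length()` in class OpCode above): the total encoded size of an instruction
  // is the opcode length computed by `length(code)` plus the size of its inline
  // operand from INLINE_length; a negative operand size yields -1 (not statically
  // known). For example, a plain call is 1 opcode byte + a 4-byte method token.
  def encodedSize(code: Int, inline: Byte): Int = {
    val opcodeBytes  = length(code)
    val operandBytes = INLINE_length(inline)
    if (operandBytes < 0) -1 else opcodeBytes + operandBytes
  }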
-
- //########################################################################
- // case OpCode
-
- /**
- * Adds two values and pushes the result onto the evaluation stack.
- */
- final val Add = new OpCode()
- opcode(Add, CEE_ADD, "add", 0xFFFFFF58, POP_1_1, PUSH_1, INLINE_NONE, FLOW_NEXT)
-
- /**
- * Fills space if bytecodes are patched. No meaningful operation is performed
- * although a processing cycle can be consumed.
- */
- final val Nop = new OpCode()
- opcode(Nop, CEE_NOP, "nop", 0xFFFFFF00, POP_NONE, PUSH_NONE, INLINE_NONE , FLOW_NEXT)
-
- /**
- * Signals the Common Language Infrastructure (CLI) to inform the debugger that
- * a break point has been tripped.
- */
- final val Break = new OpCode()
- opcode(Break, CEE_BREAK, "break" , 0xFFFFFF01, POP_NONE, PUSH_NONE , INLINE_NONE , FLOW_BREAK)
-
- /**
- * Loads the argument at index 0 onto the evaluation stack.
- */
- final val Ldarg_0 = new OpCode()
- opcode(Ldarg_0, CEE_LDARG_0 , "ldarg.0" , 0xFFFFFF02, POP_NONE, PUSH_1 , INLINE_NONE , FLOW_NEXT)
-
- /**
- * Loads the argument at index 1 onto the evaluation stack.
- */
- final val Ldarg_1 = new OpCode()
- opcode(Ldarg_1, CEE_LDARG_1 , "ldarg.1" , 0xFFFFFF03, POP_NONE, PUSH_1 , INLINE_NONE , FLOW_NEXT)
-
- /**
- * Loads the argument at index 2 onto the evaluation stack.
- */
- final val Ldarg_2 = new OpCode()
- opcode(Ldarg_2, CEE_LDARG_2 , "ldarg.2" , 0xFFFFFF04, POP_NONE, PUSH_1 , INLINE_NONE , FLOW_NEXT)
-
- /**
- * Loads the argument at index 3 onto the evaluation stack.
- */
- final val Ldarg_3 = new OpCode()
- opcode(Ldarg_3, CEE_LDARG_3 , "ldarg.3" , 0xFFFFFF05, POP_NONE, PUSH_1 , INLINE_NONE , FLOW_NEXT)
-
- /**
- * Loads the local variable at index 0 onto the evaluation stack.
- */
- final val Ldloc_0 = new OpCode()
- opcode(Ldloc_0, CEE_LDLOC_0 , "ldloc.0" , 0xFFFFFF06, POP_NONE, PUSH_1 , INLINE_NONE , FLOW_NEXT)
-
- /**
- * Loads the local variable at index 1 onto the evaluation stack.
- */
- final val Ldloc_1 = new OpCode()
- opcode(Ldloc_1, CEE_LDLOC_1 , "ldloc.1" , 0xFFFFFF07, POP_NONE, PUSH_1 , INLINE_NONE , FLOW_NEXT)
-
- /**
- * Loads the local variable at index 2 onto the evaluation stack.
- */
- final val Ldloc_2 = new OpCode()
- opcode(Ldloc_2, CEE_LDLOC_2 , "ldloc.2" , 0xFFFFFF08, POP_NONE, PUSH_1 , INLINE_NONE , FLOW_NEXT)
-
- /**
- * Loads the local variable at index 3 onto the evaluation stack.
- */
- final val Ldloc_3 = new OpCode()
- opcode(Ldloc_3, CEE_LDLOC_3 , "ldloc.3" , 0xFFFFFF09, POP_NONE, PUSH_1 , INLINE_NONE , FLOW_NEXT)
-
- /**
- * Pops the current value from the top of the evaluation stack and
-   * stores it in the local variable list at index 0.
- */
- final val Stloc_0 = new OpCode()
- opcode(Stloc_0, CEE_STLOC_0 , "stloc.0" , 0xFFFFFF0A, POP_1 , PUSH_NONE, INLINE_NONE , FLOW_NEXT)
-
- /**
- * Pops the current value from the top of the evaluation stack and
-   * stores it in the local variable list at index 1.
- */
- final val Stloc_1 = new OpCode()
- opcode(Stloc_1, CEE_STLOC_1 , "stloc.1" , 0xFFFFFF0B, POP_1 , PUSH_NONE, INLINE_NONE , FLOW_NEXT)
-
- /**
- * Pops the current value from the top of the evaluation stack and
-   * stores it in the local variable list at index 2.
- */
- final val Stloc_2 = new OpCode()
- opcode(Stloc_2, CEE_STLOC_2 , "stloc.2" , 0xFFFFFF0C, POP_1 , PUSH_NONE, INLINE_NONE , FLOW_NEXT)
-
- /**
- * Pops the current value from the top of the evaluation stack and
-   * stores it in the local variable list at index 3.
- */
- final val Stloc_3 = new OpCode()
- opcode(Stloc_3, CEE_STLOC_3 , "stloc.3" , 0xFFFFFF0D, POP_1 , PUSH_NONE, INLINE_NONE , FLOW_NEXT)
-
- /**
- * Loads the argument (referenced by a specified short form index)
- * onto the evaluation stack.
- */
- final val Ldarg_S = new OpCode()
- opcode(Ldarg_S, CEE_LDARG_S , "ldarg.s" , 0xFFFFFF0E, POP_NONE, PUSH_1 , INLINE_VARIABLE_S, FLOW_NEXT)
-
- /**
- * Load an argument address, in short form, onto the evaluation stack.
- */
- final val Ldarga_S = new OpCode()
- opcode(Ldarga_S, CEE_LDARGA_S , "ldarga.s" , 0xFFFFFF0F, POP_NONE, PUSH_I , INLINE_VARIABLE_S, FLOW_NEXT)
-
- /**
- * Loads the local variable at a specific index onto the evaluation stack,
- * short form.
- */
- final val Ldloc_S = new OpCode()
- opcode(Ldloc_S, CEE_LDLOC_S , "ldloc.s" , 0xFFFFFF11, POP_NONE, PUSH_1 , INLINE_VARIABLE_S, FLOW_NEXT)
-
- /**
- * Loads the address of the local variable at a specific index onto
- * the evaluation stack, short form.
- */
- final val Ldloca_S = new OpCode()
- opcode(Ldloca_S, CEE_LDLOCA_S , "ldloca.s" , 0xFFFFFF12, POP_NONE, PUSH_I , INLINE_VARIABLE_S, FLOW_NEXT)
-
- /**
- * Stores the value on top of the evaluation stack in the argument slot
- * at a specified index, short form.
- */
- final val Starg_S = new OpCode()
- opcode(Starg_S, CEE_STARG_S , "starg.s" , 0xFFFFFF10, POP_1 , PUSH_NONE , INLINE_VARIABLE_S, FLOW_NEXT)
-
- /**
- * Pops the current value from the top of the evaluation stack and stores it
-   * in the local variable list at the given index (short form).
- */
- final val Stloc_S = new OpCode()
- opcode(Stloc_S, CEE_STLOC_S , "stloc.s" , 0xFFFFFF13, POP_1 , PUSH_NONE, INLINE_VARIABLE_S, FLOW_NEXT)
-
- /**
- * Pushes a null reference (type O) onto the evaluation stack.
- */
- final val Ldnull = new OpCode()
- opcode(Ldnull, CEE_LDNULL , "ldnull" , 0xFFFFFF14, POP_NONE, PUSH_REF , INLINE_NONE, FLOW_NEXT)
-
- /**
- * Pushes the integer value of -1 onto the evaluation stack as an int32.
- */
- final val Ldc_I4_M1 = new OpCode()
- opcode(Ldc_I4_M1, CEE_LDC_I4_M1, "ldc.i4.m1", 0xFFFFFF15, POP_NONE, PUSH_I, INLINE_NONE, FLOW_NEXT)
-
- /**
- * Pushes the integer value of 0 onto the evaluation stack as an int32.
- */
- final val Ldc_I4_0 = new OpCode()
- opcode(Ldc_I4_0, CEE_LDC_I4_0 , "ldc.i4.0" , 0xFFFFFF16, POP_NONE, PUSH_I, INLINE_NONE, FLOW_NEXT)
-
- /**
- * Pushes the integer value of 1 onto the evaluation stack as an int32.
- */
- final val Ldc_I4_1 = new OpCode()
- opcode(Ldc_I4_1, CEE_LDC_I4_1 , "ldc.i4.1" , 0xFFFFFF17, POP_NONE, PUSH_I, INLINE_NONE, FLOW_NEXT)
-
- /**
- * Pushes the integer value of 2 onto the evaluation stack as an int32.
- */
- final val Ldc_I4_2 = new OpCode()
- opcode(Ldc_I4_2, CEE_LDC_I4_2 , "ldc.i4.2" , 0xFFFFFF18, POP_NONE, PUSH_I, INLINE_NONE, FLOW_NEXT)
-
- /**
- * Pushes the integer value of 3 onto the evaluation stack as an int32.
- */
- final val Ldc_I4_3 = new OpCode()
- opcode(Ldc_I4_3, CEE_LDC_I4_3 , "ldc.i4.3" , 0xFFFFFF19, POP_NONE, PUSH_I, INLINE_NONE, FLOW_NEXT)
-
- /**
- * Pushes the integer value of 4 onto the evaluation stack as an int32.
- */
- final val Ldc_I4_4 = new OpCode()
- opcode(Ldc_I4_4, CEE_LDC_I4_4 , "ldc.i4.4" , 0xFFFFFF1A, POP_NONE, PUSH_I, INLINE_NONE, FLOW_NEXT)
-
- /**
- * Pushes the integer value of 5 onto the evaluation stack as an int32.
- */
- final val Ldc_I4_5 = new OpCode()
- opcode(Ldc_I4_5, CEE_LDC_I4_5 , "ldc.i4.5" , 0xFFFFFF1B, POP_NONE, PUSH_I, INLINE_NONE, FLOW_NEXT)
-
- /**
- * Pushes the integer value of 6 onto the evaluation stack as an int32.
- */
- final val Ldc_I4_6 = new OpCode()
- opcode(Ldc_I4_6, CEE_LDC_I4_6 , "ldc.i4.6", 0xFFFFFF1C, POP_NONE, PUSH_I, INLINE_NONE, FLOW_NEXT)
-
- /**
- * Pushes the integer value of 7 onto the evaluation stack as an int32.
- */
- final val Ldc_I4_7 = new OpCode()
- opcode(Ldc_I4_7, CEE_LDC_I4_7 , "ldc.i4.7", 0xFFFFFF1D, POP_NONE , PUSH_I, INLINE_NONE, FLOW_NEXT)
-
- /**
- * Pushes the integer value of 8 onto the evaluation stack as an int32.
- */
- final val Ldc_I4_8 = new OpCode()
- opcode(Ldc_I4_8, CEE_LDC_I4_8 , "ldc.i4.8", 0xFFFFFF1E, POP_NONE , PUSH_I, INLINE_NONE, FLOW_NEXT)
-
- /**
- * Pushes the supplied int8 value onto the evaluation stack as an int32, short form.
- */
- final val Ldc_I4_S = new OpCode()
- opcode(Ldc_I4_S, CEE_LDC_I4_S , "ldc.i4.s", 0xFFFFFF1F, POP_NONE , PUSH_I, INLINE_I_S, FLOW_NEXT)
-
- /**
- * Pushes a supplied value of type int32 onto the evaluation stack as an int32.
- */
- final val Ldc_I4 = new OpCode()
- opcode(Ldc_I4, CEE_LDC_I4, "ldc.i4" , 0xFFFFFF20, POP_NONE , PUSH_I, INLINE_I , FLOW_NEXT)
-
- /**
- * Pushes a supplied value of type int64 onto the evaluation stack as an int64.
- */
- final val Ldc_I8 = new OpCode()
- opcode(Ldc_I8, CEE_LDC_I8, "ldc.i8" , 0xFFFFFF21, POP_NONE , PUSH_I8, INLINE_I8 , FLOW_NEXT)
-
- /**
- * Pushes a supplied value of type float32 onto the evaluation stack as type F (float).
- */
- final val Ldc_R4 = new OpCode()
- opcode(Ldc_R4, CEE_LDC_R4, "ldc.r4" , 0xFFFFFF22, POP_NONE , PUSH_R4, INLINE_R , FLOW_NEXT)
-
- /**
- * Pushes a supplied value of type float64 onto the evaluation stack as type F (float).
- */
- final val Ldc_R8 = new OpCode()
- opcode(Ldc_R8, CEE_LDC_R8, "ldc.r8" , 0xFFFFFF23, POP_NONE , PUSH_R8, INLINE_R8 , FLOW_NEXT)
-
- /**
- * Copies the current topmost value on the evaluation stack, and then pushes the copy
- * onto the evaluation stack.
- */
- final val Dup = new OpCode()
- opcode(Dup, CEE_DUP , "dup" , 0xFFFFFF25, POP_1 , PUSH_1_1 , INLINE_NONE , FLOW_NEXT)
-
- /**
- * Removes the value currently on top of the evaluation stack.
- */
- final val Pop = new OpCode()
- opcode(Pop, CEE_POP , "pop" , 0xFFFFFF26, POP_1 , PUSH_NONE , INLINE_NONE , FLOW_NEXT)
-
- /**
- * Exits current method and jumps to specified method.
- */
- final val Jmp = new OpCode()
- opcode(Jmp, CEE_JMP , "jmp" , 0xFFFFFF27, POP_NONE , PUSH_NONE , INLINE_METHOD, FLOW_CALL)
-
- /**
- * Calls the method indicated by the passed method descriptor.
- */
- final val Call = new OpCode()
- opcode(Call, CEE_CALL , "call" , 0xFFFFFF28, POP_SPECIAL, PUSH_SPECIAL, INLINE_METHOD , FLOW_CALL)
-
- /**
- * constrained prefix
- */
- final val Constrained = new OpCode()
-  opcode(Constrained, CEE_CONSTRAINED , "constrained." , 0xFFFFFE16, POP_NONE, PUSH_NONE, INLINE_NONE , FLOW_NEXT)
-
- /**
- * readonly prefix
- */
- final val Readonly = new OpCode()
-  opcode(Readonly, CEE_READONLY , "readonly." , 0xFFFFFE1E, POP_NONE, PUSH_NONE, INLINE_NONE , FLOW_NEXT)
-
- /**
- * Calls the method indicated on the evaluation stack (as a pointer to an entry point)
- * with arguments described by a calling convention.
- */
- final val Calli = new OpCode()
- opcode(Calli, CEE_CALLI, "calli" , 0xFFFFFF29, POP_SPECIAL, PUSH_SPECIAL, INLINE_SIGNATURE , FLOW_CALL)
-
- /**
-   * Returns from the current method, pushing a return value (if present) from the callee's
-   * evaluation stack onto the caller's evaluation stack.
- */
- final val Ret = new OpCode()
- opcode(Ret, CEE_RET , "ret" , 0xFFFFFF2A, POP_SPECIAL, PUSH_NONE, INLINE_NONE , FLOW_RETURN)
-
- /**
- * Unconditionally transfers control to a target instruction (short form).
- */
- final val Br_S = new OpCode()
- opcode(Br_S, CEE_BR_S , "br.s" , 0xFFFFFF2B, POP_NONE, PUSH_NONE, INLINE_TARGET_S , FLOW_BRANCH)
-
- /**
- * Transfers control to a target instruction if value is false, a null reference, or zero.
- */
- final val Brfalse_S = new OpCode()
- opcode(Brfalse_S, CEE_BRFALSE_S,"brfalse.s", 0xFFFFFF2C, POP_I, PUSH_NONE, INLINE_TARGET_S, FLOW_COND_BRANCH)
-
- /**
- * Transfers control to a target instruction (short form) if value is true, not null, or non-zero.
- */
- final val Brtrue_S = new OpCode()
- opcode(Brtrue_S, CEE_BRTRUE_S , "brtrue.s", 0xFFFFFF2D, POP_I, PUSH_NONE, INLINE_TARGET_S, FLOW_COND_BRANCH)
-
- /**
- * Transfers control to a target instruction (short form) if two values are equal.
- */
- final val Beq_S = new OpCode()
- opcode(Beq_S, CEE_BEQ_S, "beq.s", 0xFFFFFF2E, POP_1_1 , PUSH_NONE, INLINE_TARGET_S , FLOW_COND_BRANCH)
-
- /**
- * Transfers control to a target instruction (short form) if the first value is greater than
- * or equal to the second value.
- */
- final val Bge_S = new OpCode()
- opcode(Bge_S, CEE_BGE_S, "bge.s", 0xFFFFFF2F, POP_1_1 , PUSH_NONE, INLINE_TARGET_S, FLOW_COND_BRANCH)
-
- /**
- * Transfers control to a target instruction (short form) if the first value is greater than
- * the second value.
- */
- final val Bgt_S = new OpCode()
- opcode(Bgt_S, CEE_BGT_S, "bgt.s" , 0xFFFFFF30, POP_1_1 , PUSH_NONE, INLINE_TARGET_S , FLOW_COND_BRANCH)
-
- /**
- * Transfers control to a target instruction (short form) if the first value is less than
- * or equal to the second value.
- */
- final val Ble_S = new OpCode()
- opcode(Ble_S, CEE_BLE_S, "ble.s" , 0xFFFFFF31, POP_1_1 , PUSH_NONE, INLINE_TARGET_S , FLOW_COND_BRANCH)
-
- /**
- * Transfers control to a target instruction (short form) if the first value is less than
- * the second value.
- */
- final val Blt_S = new OpCode()
- opcode(Blt_S, CEE_BLT_S, "blt.s", 0xFFFFFF32, POP_1_1, PUSH_NONE, INLINE_TARGET_S, FLOW_COND_BRANCH)
-
- /**
- * Transfers control to a target instruction (short form) when two unsigned integer values
- * or unordered float values are not equal.
- */
- final val Bne_Un_S = new OpCode()
- opcode(Bne_Un_S, CEE_BNE_UN_S, "bne.un.s", 0xFFFFFF33, POP_1_1 , PUSH_NONE, INLINE_TARGET_S, FLOW_COND_BRANCH)
-
- /**
-   * Transfers control to a target instruction (short form) if the first value is greater
- * than the second value, when comparing unsigned integer values or unordered float values.
- */
- final val Bge_Un_S = new OpCode()
- opcode(Bge_Un_S, CEE_BGE_UN_S, "bge.un.s", 0xFFFFFF34, POP_1_1, PUSH_NONE, INLINE_TARGET_S, FLOW_COND_BRANCH)
-
- /**
- * Transfers control to a target instruction (short form) if the first value is greater than
- * the second value, when comparing unsigned integer values or unordered float values.
- */
- final val Bgt_Un_S = new OpCode()
- opcode(Bgt_Un_S, CEE_BGT_UN_S, "bgt.un.s", 0xFFFFFF35, POP_1_1, PUSH_NONE, INLINE_TARGET_S, FLOW_COND_BRANCH)
-
- /**
- * Transfers control to a target instruction (short form) if the first value is less than
- * or equal to the second value, when comparing unsigned integer values or unordered float values.
- */
- final val Ble_Un_S = new OpCode()
- opcode(Ble_Un_S, CEE_BLE_UN_S , "ble.un.s", 0xFFFFFF36, POP_1_1, PUSH_NONE, INLINE_TARGET_S, FLOW_COND_BRANCH)
-
- /**
- * Transfers control to a target instruction (short form) if the first value is less than
- * the second value, when comparing unsigned integer values or unordered float values.
- */
- final val Blt_Un_S = new OpCode()
- opcode(Blt_Un_S, CEE_BLT_UN_S, "blt.un.s", 0xFFFFFF37, POP_1_1, PUSH_NONE, INLINE_TARGET_S, FLOW_COND_BRANCH)
-
- /**
- * Unconditionally transfers control to a target instruction.
- */
- final val Br = new OpCode()
- opcode(Br, CEE_BR , "br" , 0xFFFFFF38, POP_NONE, PUSH_NONE, INLINE_TARGET, FLOW_BRANCH)
-
- /**
- * Transfers control to a target instruction if value is false, a null reference
- * (Nothing in Visual Basic), or zero.
- */
- final val Brfalse = new OpCode()
- opcode(Brfalse, CEE_BRFALSE, "brfalse", 0xFFFFFF39, POP_I, PUSH_NONE, INLINE_TARGET, FLOW_COND_BRANCH)
-
- /**
- * Transfers control to a target instruction if value is true, not null, or non-zero.
- */
- final val Brtrue = new OpCode()
- opcode(Brtrue, CEE_BRTRUE , "brtrue", 0xFFFFFF3A, POP_I , PUSH_NONE, INLINE_TARGET, FLOW_COND_BRANCH)
-
- /**
- * Transfers control to a target instruction if two values are equal.
- */
- final val Beq = new OpCode()
- opcode(Beq, CEE_BEQ, "beq", 0xFFFFFF3B, POP_1_1 , PUSH_NONE, INLINE_TARGET, FLOW_COND_BRANCH)
-
- /**
- * Transfers control to a target instruction if the first value is greater than or
- * equal to the second value.
- */
- final val Bge = new OpCode()
- opcode(Bge, CEE_BGE, "bge", 0xFFFFFF3C, POP_1_1 , PUSH_NONE, INLINE_TARGET, FLOW_COND_BRANCH)
-
- /**
- * Transfers control to a target instruction if the first value is greater than the second value.
- */
- final val Bgt = new OpCode()
- opcode(Bgt, CEE_BGT, "bgt", 0xFFFFFF3D, POP_1_1 , PUSH_NONE, INLINE_TARGET, FLOW_COND_BRANCH)
-
- /**
- * Transfers control to a target instruction if the first value is less than or equal
- * to the second value.
- */
- final val Ble = new OpCode()
- opcode(Ble, CEE_BLE, "ble", 0xFFFFFF3E, POP_1_1 , PUSH_NONE, INLINE_TARGET, FLOW_COND_BRANCH)
-
- /**
- * Transfers control to a target instruction if the first value is less than the second value.
- */
- final val Blt = new OpCode()
- opcode(Blt, CEE_BLT, "blt", 0xFFFFFF3F, POP_1_1 , PUSH_NONE, INLINE_TARGET, FLOW_COND_BRANCH)
-
- /**
- * Transfers control to a target instruction when two unsigned integer values or
- * unordered float values are not equal.
- */
- final val Bne_Un = new OpCode()
- opcode(Bne_Un, CEE_BNE_UN , "bne.un", 0xFFFFFF40, POP_1_1 , PUSH_NONE, INLINE_TARGET, FLOW_COND_BRANCH)
-
- /**
- * Transfers control to a target instruction if the first value is greater than
- * the second value, when comparing unsigned integer values or unordered float values.
- */
- final val Bge_Un = new OpCode()
- opcode(Bge_Un, CEE_BGE_UN , "bge.un", 0xFFFFFF41, POP_1_1 , PUSH_NONE, INLINE_TARGET, FLOW_COND_BRANCH)
-
- /**
- * Transfers control to a target instruction if the first value is greater than the
- * second value, when comparing unsigned integer values or unordered float values.
- */
- final val Bgt_Un = new OpCode()
- opcode(Bgt_Un, CEE_BGT_UN , "bgt.un", 0xFFFFFF42, POP_1_1 , PUSH_NONE, INLINE_TARGET, FLOW_COND_BRANCH)
-
- /**
- * Transfers control to a target instruction if the first value is less than or equal to
- * the second value, when comparing unsigned integer values or unordered float values.
- */
- final val Ble_Un = new OpCode()
- opcode(Ble_Un, CEE_BLE_UN , "ble.un" , 0xFFFFFF43, POP_1_1 , PUSH_NONE, INLINE_TARGET, FLOW_COND_BRANCH)
-
- /**
- * Transfers control to a target instruction if the first value is less than the second value,
- * when comparing unsigned integer values or unordered float values.
- */
- final val Blt_Un = new OpCode()
- opcode(Blt_Un, CEE_BLT_UN , "blt.un", 0xFFFFFF44, POP_1_1 , PUSH_NONE, INLINE_TARGET, FLOW_COND_BRANCH)
-
- /**
- * Implements a jump table.
- */
- final val Switch = new OpCode()
- opcode(Switch, CEE_SWITCH , "switch", 0xFFFFFF45, POP_I , PUSH_NONE, INLINE_SWITCH, FLOW_COND_BRANCH)
-
- /**
- * Loads a value of type int8 as an int32 onto the evaluation stack indirectly.
- */
- final val Ldind_I1 = new OpCode()
- opcode(Ldind_I1, CEE_LDIND_I1 , "ldind.i1" , 0xFFFFFF46, POP_I , PUSH_I , INLINE_NONE, FLOW_NEXT)
-
- /**
- * Loads a value of type int16 as an int32 onto the evaluation stack indirectly.
- */
- final val Ldind_I2 = new OpCode()
- opcode(Ldind_I2, CEE_LDIND_I2 , "ldind.i2" , 0xFFFFFF48, POP_I , PUSH_I , INLINE_NONE, FLOW_NEXT)
-
- /**
- * Loads a value of type int32 as an int32 onto the evaluation stack indirectly.
- */
- final val Ldind_I4 = new OpCode()
- opcode(Ldind_I4, CEE_LDIND_I4 , "ldind.i4" , 0xFFFFFF4A, POP_I , PUSH_I , INLINE_NONE, FLOW_NEXT)
-
- /**
- * Loads a value of type int64 as an int64 onto the evaluation stack indirectly.
- */
- final val Ldind_I8 = new OpCode()
- opcode(Ldind_I8, CEE_LDIND_I8 , "ldind.i8" , 0xFFFFFF4C, POP_I , PUSH_I8 , INLINE_NONE, FLOW_NEXT)
-
- /**
- * Loads a value of type natural int as a natural int onto the evaluation stack indirectly.
- */
- final val Ldind_I = new OpCode()
- opcode(Ldind_I, CEE_LDIND_I , "ldind.i" , 0xFFFFFF4D, POP_I , PUSH_I , INLINE_NONE, FLOW_NEXT)
-
- /**
- * Loads a value of type float32 as a type F (float) onto the evaluation stack indirectly.
- */
- final val Ldind_R4 = new OpCode()
- opcode(Ldind_R4, CEE_LDIND_R4 , "ldind.r4" , 0xFFFFFF4E, POP_I , PUSH_R4 , INLINE_NONE, FLOW_NEXT)
-
- /**
- * Loads a value of type float64 as a type F (float) onto the evaluation stack indirectly.
- */
- final val Ldind_R8 = new OpCode()
- opcode(Ldind_R8, CEE_LDIND_R8 , "ldind.r8" , 0xFFFFFF4F, POP_I , PUSH_R8 , INLINE_NONE, FLOW_NEXT)
-
- /**
- * Loads an object reference as a type O (object reference) onto the evaluation stack indirectly.
- */
- final val Ldind_Ref = new OpCode()
- opcode(Ldind_Ref, CEE_LDIND_REF, "ldind.ref", 0xFFFFFF50, POP_I , PUSH_REF, INLINE_NONE, FLOW_NEXT)
-
- /**
- * Loads a value of type unsigned int8 as an int32 onto the evaluation stack indirectly.
- */
- final val Ldind_U1 = new OpCode()
- opcode(Ldind_U1, CEE_LDIND_U1 , "ldind.u1" , 0xFFFFFF47, POP_I , PUSH_I , INLINE_NONE, FLOW_NEXT)
-
- /**
- * Loads a value of type unsigned int16 as an int32 onto the evaluation stack indirectly.
- */
- final val Ldind_U2 = new OpCode()
- opcode(Ldind_U2, CEE_LDIND_U2 , "ldind.u2" , 0xFFFFFF49, POP_I , PUSH_I , INLINE_NONE, FLOW_NEXT)
-
- /**
- * Loads a value of type unsigned int32 as an int32 onto the evaluation stack indirectly.
- */
- final val Ldind_U4 = new OpCode()
- opcode(Ldind_U4, CEE_LDIND_U4 , "ldind.u4" , 0xFFFFFF4B, POP_I , PUSH_I , INLINE_NONE, FLOW_NEXT)
-
- /**
- * Stores an object reference value at a supplied address.
- */
- final val Stind_Ref = new OpCode()
- opcode(Stind_Ref, CEE_STIND_REF, "stind.ref", 0xFFFFFF51, POP_I_I , PUSH_NONE, INLINE_NONE, FLOW_NEXT)
-
- /**
- * Stores a value of type int8 at a supplied address.
- */
- final val Stind_I1 = new OpCode()
- opcode(Stind_I1, CEE_STIND_I1 , "stind.i1", 0xFFFFFF52, POP_I_I , PUSH_NONE, INLINE_NONE, FLOW_NEXT)
-
- /**
- * Stores a value of type int16 at a supplied address.
- */
- final val Stind_I2 = new OpCode()
- opcode(Stind_I2, CEE_STIND_I2 , "stind.i2", 0xFFFFFF53, POP_I_I , PUSH_NONE, INLINE_NONE, FLOW_NEXT)
-
- /**
- * Stores a value of type int32 at a supplied address.
- */
- final val Stind_I4 = new OpCode()
- opcode(Stind_I4, CEE_STIND_I4 , "stind.i4", 0xFFFFFF54, POP_I_I , PUSH_NONE, INLINE_NONE, FLOW_NEXT)
-
- /**
- * Stores a value of type int64 at a supplied address.
- */
- final val Stind_I8 = new OpCode()
- opcode(Stind_I8, CEE_STIND_I8 , "stind.i8", 0xFFFFFF55, POP_I_I8, PUSH_NONE, INLINE_NONE, FLOW_NEXT)
-
- /**
- * Stores a value of type float32 at a supplied address.
- */
- final val Stind_R4 = new OpCode()
- opcode(Stind_R4, CEE_STIND_R4 , "stind.r4", 0xFFFFFF56, POP_I_R4, PUSH_NONE, INLINE_NONE, FLOW_NEXT)
-
- /**
- * Stores a value of type float64 at a supplied address.
- */
- final val Stind_R8 = new OpCode()
- opcode(Stind_R8, CEE_STIND_R8 , "stind.r8", 0xFFFFFF57, POP_I_R8, PUSH_NONE, INLINE_NONE, FLOW_NEXT)
-
- /**
- * Subtracts one value from another and pushes the result onto the evaluation stack.
- */
- final val Sub = new OpCode()
- opcode(Sub, CEE_SUB, "sub" , 0xFFFFFF59, POP_1_1, PUSH_1 , INLINE_NONE, FLOW_NEXT)
-
- /**
- * Multiplies two values and pushes the result on the evaluation stack.
- */
- final val Mul = new OpCode()
- opcode(Mul, CEE_MUL, "mul" , 0xFFFFFF5A, POP_1_1, PUSH_1 , INLINE_NONE, FLOW_NEXT)
-
- /**
- * Divides two values and pushes the result as a floating-point (type F) or
- * quotient (type int32) onto the evaluation stack.
- */
- final val Div = new OpCode()
- opcode(Div, CEE_DIV, "div" , 0xFFFFFF5B, POP_1_1, PUSH_1 , INLINE_NONE, FLOW_NEXT)
-
- /**
- * Divides two unsigned integer values and pushes the result (int32) onto the evaluation stack.
- */
- final val Div_Un = new OpCode()
- opcode(Div_Un, CEE_DIV_UN, "div.un" , 0xFFFFFF5C, POP_1_1, PUSH_1 , INLINE_NONE, FLOW_NEXT)
-
- /**
- * Divides two values and pushes the remainder onto the evaluation stack.
- */
- final val Rem = new OpCode()
- opcode(Rem, CEE_REM , "rem" , 0xFFFFFF5D, POP_1_1, PUSH_1 , INLINE_NONE, FLOW_NEXT)
-
- /**
- * Divides two unsigned values and pushes the remainder onto the evaluation stack.
- */
- final val Rem_Un = new OpCode()
- opcode(Rem_Un, CEE_REM_UN, "rem.un" , 0xFFFFFF5E, POP_1_1, PUSH_1 , INLINE_NONE, FLOW_NEXT)
-
- /**
- * Computes the bitwise AND of two values and pushes the result onto the evaluation stack.
- */
- final val And = new OpCode()
- opcode(And, CEE_AND, "and" , 0xFFFFFF5F, POP_1_1, PUSH_1 , INLINE_NONE, FLOW_NEXT)
-
- /**
- * Computes the bitwise OR of the two integer values on top of the stack and
- * pushes the result onto the evaluation stack.
- */
- final val Or = new OpCode()
- opcode(Or, CEE_OR , "or" , 0xFFFFFF60, POP_1_1, PUSH_1 , INLINE_NONE, FLOW_NEXT)
-
- /**
- * Computes the bitwise XOR of the top two values on the evaluation stack,
- * pushing the result onto the evaluation stack.
- */
- final val Xor = new OpCode()
- opcode(Xor, CEE_XOR, "xor" , 0xFFFFFF61, POP_1_1, PUSH_1 , INLINE_NONE, FLOW_NEXT)
-
- /**
- * Shifts an integer value to the left (in zeroes) by a specified number of bits,
- * pushing the result onto the evaluation stack.
- */
- final val Shl = new OpCode()
- opcode(Shl, CEE_SHL, "shl" , 0xFFFFFF62, POP_1_1, PUSH_1 , INLINE_NONE, FLOW_NEXT)
-
- /**
- * Shifts an integer value (in sign) to the right by a specified number of bits,
- * pushing the result onto the evaluation stack.
- */
- final val Shr = new OpCode()
- opcode(Shr, CEE_SHR, "shr" , 0xFFFFFF63, POP_1_1, PUSH_1 , INLINE_NONE, FLOW_NEXT)
-
- /**
- * Shifts an unsigned integer value (in zeroes) to the right by a specified number of bits,
- * pushing the result onto the evaluation stack.
- */
- final val Shr_Un = new OpCode()
- opcode(Shr_Un, CEE_SHR_UN, "shr.un" , 0xFFFFFF64, POP_1_1, PUSH_1 , INLINE_NONE, FLOW_NEXT)
-
- /**
- * Negates a value and pushes the result onto the evaluation stack.
- */
- final val Neg = new OpCode()
- opcode(Neg, CEE_NEG , "neg" , 0xFFFFFF65, POP_1 , PUSH_1 , INLINE_NONE, FLOW_NEXT)
-
- /**
- * Computes the bitwise complement of the integer value on top of the stack and pushes
- * the result onto the evaluation stack as the same type.
- */
- final val Not = new OpCode()
- opcode(Not, CEE_NOT , "not" , 0xFFFFFF66, POP_1 , PUSH_1 , INLINE_NONE, FLOW_NEXT)
-
- /**
- * Converts the value on top of the evaluation stack to int8, then extends (pads) it to int32.
- */
- final val Conv_I1 = new OpCode()
- opcode(Conv_I1, CEE_CONV_I1, "conv.i1", 0xFFFFFF67, POP_1 , PUSH_I , INLINE_NONE, FLOW_NEXT)
-
- /**
- * Converts the value on top of the evaluation stack to int16, then extends (pads) it to int32.
- */
- final val Conv_I2 = new OpCode()
- opcode(Conv_I2, CEE_CONV_I2, "conv.i2", 0xFFFFFF68, POP_1 , PUSH_I , INLINE_NONE, FLOW_NEXT)
-
- /**
- * Converts the value on top of the evaluation stack to int32.
- */
- final val Conv_I4 = new OpCode()
- opcode(Conv_I4, CEE_CONV_I4, "conv.i4", 0xFFFFFF69, POP_1 , PUSH_I , INLINE_NONE, FLOW_NEXT)
-
- /**
- * Converts the value on top of the evaluation stack to int64.
- */
- final val Conv_I8 = new OpCode()
- opcode(Conv_I8, CEE_CONV_I8, "conv.i8", 0xFFFFFF6A, POP_1 , PUSH_I8, INLINE_NONE, FLOW_NEXT)
-
- /**
- * Converts the value on top of the evaluation stack to float32.
- */
- final val Conv_R4 = new OpCode()
- opcode(Conv_R4, CEE_CONV_R4, "conv.r4", 0xFFFFFF6B, POP_1 , PUSH_R4, INLINE_NONE, FLOW_NEXT)
-
- /**
- * Converts the value on top of the evaluation stack to float64.
- */
- final val Conv_R8 = new OpCode()
- opcode(Conv_R8, CEE_CONV_R8, "conv.r8", 0xFFFFFF6C, POP_1 , PUSH_R8, INLINE_NONE, FLOW_NEXT)
-
- /**
- * Converts the value on top of the evaluation stack to unsigned int32, and extends it to int32.
- */
- final val Conv_U4 = new OpCode()
- opcode(Conv_U4, CEE_CONV_U4, "conv.u4", 0xFFFFFF6D, POP_1 , PUSH_I , INLINE_NONE, FLOW_NEXT)
-
- /**
- * Converts the value on top of the evaluation stack to unsigned int64, and extends it to int64.
- */
- final val Conv_U8 = new OpCode()
- opcode(Conv_U8, CEE_CONV_U8, "conv.u8", 0xFFFFFF6E, POP_1 , PUSH_I8, INLINE_NONE, FLOW_NEXT)
-
- /**
- * Calls a late-bound method on an object, pushing the return value onto the evaluation stack.
- */
- final val Callvirt = new OpCode()
- opcode(Callvirt, CEE_CALLVIRT, "callvirt", 0xFFFFFF6F,POP_SPECIAL,PUSH_SPECIAL,INLINE_METHOD,FLOW_CALL)
-
- /**
- * Copies the value type located at the address of an object (type &, * or natural int)
- * to the address of the destination object (type &, * or natural int).
- */
- final val Cpobj = new OpCode()
- opcode(Cpobj, CEE_CPOBJ , "cpobj" , 0xFFFFFF70, POP_I_I , PUSH_NONE, INLINE_TYPE , FLOW_NEXT)
-
- /**
- * Copies the value type object pointed to by an address to the top of the evaluation stack.
- */
- final val Ldobj = new OpCode()
- opcode(Ldobj, CEE_LDOBJ , "ldobj" , 0xFFFFFF71, POP_I , PUSH_1 , INLINE_TYPE , FLOW_NEXT)
-
- /**
- * Pushes a new object reference to a string literal stored in the metadata.
- */
- final val Ldstr = new OpCode()
- opcode(Ldstr, CEE_LDSTR , "ldstr" , 0xFFFFFF72, POP_NONE , PUSH_REF , INLINE_STRING, FLOW_NEXT)
-
- /**
- * Creates a new object or a new instance of a value type, pushing an object reference
- * (type O) onto the evaluation stack.
- */
- final val Newobj = new OpCode()
- opcode(Newobj, CEE_NEWOBJ, "newobj", 0xFFFFFF73, POP_SPECIAL , PUSH_REF , INLINE_METHOD, FLOW_CALL)
-
- /**
- * Attempts to cast an object passed by reference to the specified class.
- */
- final val Castclass = new OpCode()
- opcode(Castclass, CEE_CASTCLASS, "castclass", 0xFFFFFF74, POP_REF , PUSH_REF , INLINE_TYPE , FLOW_NEXT)
-
- /**
- * Tests whether an object reference (type O) is an instance of a particular class.
- */
- final val Isinst = new OpCode()
- opcode(Isinst, CEE_ISINST , "isinst" , 0xFFFFFF75, POP_REF , PUSH_I , INLINE_TYPE , FLOW_NEXT)
-
- /**
- * Converts the unsigned integer value on top of the evaluation stack to float32.
- */
- final val Conv_R_Un = new OpCode()
- opcode(Conv_R_Un, CEE_CONV_R_UN, "conv.r.un", 0xFFFFFF76, POP_1 , PUSH_R8 , INLINE_NONE , FLOW_NEXT)
-
- /**
- * Converts the boxed representation of a value type to its unboxed form.
- */
- final val Unbox = new OpCode()
- opcode(Unbox, CEE_UNBOX , "unbox" , 0xFFFFFF79, POP_REF , PUSH_I , INLINE_TYPE , FLOW_NEXT)
-
- /**
- * Throws the exception object currently on the evaluation stack.
- */
- final val Throw = new OpCode()
- opcode(Throw, CEE_THROW , "throw" , 0xFFFFFF7A, POP_REF , PUSH_NONE, INLINE_NONE , FLOW_THROW)
-
- /**
- * Finds the value of a field in the object whose reference is currently
- * on the evaluation stack.
- */
- final val Ldfld = new OpCode()
- opcode(Ldfld, CEE_LDFLD , "ldfld" , 0xFFFFFF7B, POP_REF , PUSH_1 , INLINE_FIELD , FLOW_NEXT)
-
- /**
- * Finds the address of a field in the object whose reference is currently
- * on the evaluation stack.
- */
- final val Ldflda = new OpCode()
- opcode(Ldflda, CEE_LDFLDA , "ldflda" , 0xFFFFFF7C, POP_REF , PUSH_I , INLINE_FIELD , FLOW_NEXT)
-
- /**
- * Pushes the value of a static field onto the evaluation stack.
- */
- final val Ldsfld = new OpCode()
- opcode(Ldsfld, CEE_LDSFLD , "ldsfld" , 0xFFFFFF7E, POP_NONE , PUSH_1 , INLINE_FIELD , FLOW_NEXT)
-
- /**
- * Pushes the address of a static field onto the evaluation stack.
- */
- final val Ldsflda = new OpCode()
- opcode(Ldsflda, CEE_LDSFLDA, "ldsflda", 0xFFFFFF7F, POP_NONE , PUSH_I , INLINE_FIELD , FLOW_NEXT)
-
- /**
- * Replaces the value stored in the field of an object reference or pointer with a new value.
- */
- final val Stfld = new OpCode()
- opcode(Stfld, CEE_STFLD , "stfld" , 0xFFFFFF7D, POP_REF_1, PUSH_NONE, INLINE_FIELD , FLOW_NEXT)
-
- /**
- * Replaces the value of a static field with a value from the evaluation stack.
- */
- final val Stsfld = new OpCode()
- opcode(Stsfld, CEE_STSFLD , "stsfld" , 0xFFFFFF80, POP_1 , PUSH_NONE, INLINE_FIELD , FLOW_NEXT)
-
- /**
- * Copies a value of a specified type from the evaluation stack into a supplied memory address.
- */
- final val Stobj = new OpCode()
- opcode(Stobj, CEE_STOBJ , "stobj" , 0xFFFFFF81, POP_I_1, PUSH_NONE, INLINE_TYPE , FLOW_NEXT)
-
- /**
- * Converts the unsigned value on top of the evaluation stack to signed int8 and
- * extends it to int32, throwing OverflowException on overflow.
- */
- final val Conv_Ovf_I1_Un = new OpCode()
- opcode(Conv_Ovf_I1_Un, CEE_CONV_OVF_I1_UN, "conv.ovf.i1.un", 0xFFFFFF82, POP_1,PUSH_I,INLINE_NONE, FLOW_NEXT)
-
- /**
- * Converts the unsigned value on top of the evaluation stack to signed int16 and
- * extends it to int32, throwing OverflowException on overflow.
- */
- final val Conv_Ovf_I2_Un = new OpCode()
- opcode(Conv_Ovf_I2_Un, CEE_CONV_OVF_I2_UN, "conv.ovf.i2.un", 0xFFFFFF83,POP_1,PUSH_I, INLINE_NONE, FLOW_NEXT)
-
- /**
- * Converts the unsigned value on top of the evaluation stack to signed int32,
- * throwing OverflowException on overflow.
- */
- final val Conv_Ovf_I4_Un = new OpCode()
- opcode(Conv_Ovf_I4_Un, CEE_CONV_OVF_I4_UN, "conv.ovf.i4.un", 0xFFFFFF84,POP_1,PUSH_I, INLINE_NONE, FLOW_NEXT)
-
- /**
- * Converts the unsigned value on top of the evaluation stack to signed int64,
- * throwing OverflowException on overflow.
- */
- final val Conv_Ovf_I8_Un = new OpCode()
- opcode(Conv_Ovf_I8_Un, CEE_CONV_OVF_I8_UN, "conv.ovf.i8.un", 0xFFFFFF85,POP_1,PUSH_I8, INLINE_NONE, FLOW_NEXT)
-
- /**
- * Converts the unsigned value on top of the evaluation stack to signed natural int,
- * throwing OverflowException on overflow.
- */
- final val Conv_Ovf_I_Un = new OpCode()
- opcode(Conv_Ovf_I_Un, CEE_CONV_OVF_I_UN , "conv.ovf.i.un" , 0xFFFFFF8A,POP_1,PUSH_I, INLINE_NONE, FLOW_NEXT)
-
- /**
- * Converts the unsigned value on top of the evaluation stack to unsigned int8 and
- * extends it to int32, throwing OverflowException on overflow.
- */
- final val Conv_Ovf_U1_Un = new OpCode()
- opcode(Conv_Ovf_U1_Un, CEE_CONV_OVF_U1_UN, "conv.ovf.u1.un", 0xFFFFFF86,POP_1,PUSH_I, INLINE_NONE, FLOW_NEXT)
-
- /**
- * Converts the unsigned value on top of the evaluation stack to unsigned int16 and
- * extends it to int32, throwing OverflowException on overflow.
- */
- final val Conv_Ovf_U2_Un = new OpCode()
- opcode(Conv_Ovf_U2_Un, CEE_CONV_OVF_U2_UN, "conv.ovf.u2.un", 0xFFFFFF87,POP_1,PUSH_I, INLINE_NONE, FLOW_NEXT)
-
- /**
- * Converts the unsigned value on top of the evaluation stack to unsigned int32,
- * throwing OverflowException on overflow.
- */
- final val Conv_Ovf_U4_Un = new OpCode()
- opcode(Conv_Ovf_U4_Un, CEE_CONV_OVF_U4_UN, "conv.ovf.u4.un", 0xFFFFFF88,POP_1,PUSH_I, INLINE_NONE, FLOW_NEXT)
-
- /**
- * Converts the unsigned value on top of the evaluation stack to unsigned int64,
- * throwing OverflowException on overflow.
- */
- final val Conv_Ovf_U8_Un = new OpCode()
- opcode(Conv_Ovf_U8_Un, CEE_CONV_OVF_U8_UN, "conv.ovf.u8.un", 0xFFFFFF89,POP_1,PUSH_I8, INLINE_NONE, FLOW_NEXT)
-
- /**
- * Converts the unsigned value on top of the evaluation stack to unsigned natural int,
- * throwing OverflowException on overflow.
- */
- final val Conv_Ovf_U_Un = new OpCode()
- opcode(Conv_Ovf_U_Un, CEE_CONV_OVF_U_UN , "conv.ovf.u.un" , 0xFFFFFF8B,POP_1,PUSH_I, INLINE_NONE, FLOW_NEXT)
-
- /**
- * Converts a value type to an object reference (type O).
- */
- final val Box = new OpCode()
- opcode(Box, CEE_BOX , "box" , 0xFFFFFF8C, POP_1 , PUSH_REF , INLINE_TYPE , FLOW_NEXT)
-
- /**
- * Pushes an object reference to a new zero-based, one-dimensional array whose elements
- * are of a specific type onto the evaluation stack.
- */
- final val Newarr = new OpCode()
- opcode(Newarr, CEE_NEWARR, "newarr" , 0xFFFFFF8D, POP_I , PUSH_REF , INLINE_TYPE , FLOW_NEXT)
-
- /**
- * Pushes the number of elements of a zero-based, one-dimensional array
- * onto the evaluation stack.
- */
- final val Ldlen = new OpCode()
- opcode(Ldlen, CEE_LDLEN, "ldlen", 0xFFFFFF8E, POP_REF, PUSH_I,INLINE_NONE , FLOW_NEXT)
-
- /**
- * Loads the address of the array element at a specified array index onto
- * the top of the evaluation stack as type & (managed pointer).
- */
- final val Ldelema = new OpCode()
- opcode(Ldelema, CEE_LDELEMA, "ldelema" , 0xFFFFFF8F, POP_REF_I, PUSH_I, INLINE_TYPE , FLOW_NEXT)
-
- /**
- * Loads the element with type natural int at a specified array index onto the top
- * of the evaluation stack as a natural int.
- */
- final val Ldelem_I = new OpCode()
- opcode(Ldelem_I, CEE_LDELEM_I, "ldelem.i" , 0xFFFFFF97, POP_REF_I, PUSH_I, INLINE_NONE , FLOW_NEXT)
-
- /**
- * Loads the element with type int8 at a specified array index onto the top of the
- * evaluation stack as an int32.
- */
- final val Ldelem_I1 = new OpCode()
- opcode(Ldelem_I1, CEE_LDELEM_I1, "ldelem.i1" , 0xFFFFFF90, POP_REF_I, PUSH_I, INLINE_NONE , FLOW_NEXT)
-
- /**
- * Loads the element with type int16 at a specified array index onto the top of
- * the evaluation stack as an int32.
- */
- final val Ldelem_I2 = new OpCode()
- opcode(Ldelem_I2, CEE_LDELEM_I2, "ldelem.i2" , 0xFFFFFF92, POP_REF_I, PUSH_I, INLINE_NONE , FLOW_NEXT)
-
- /**
- * Loads the element with type int32 at a specified array index onto the top of the
- * evaluation stack as an int32.
- */
- final val Ldelem_I4 = new OpCode()
- opcode(Ldelem_I4, CEE_LDELEM_I4, "ldelem.i4" , 0xFFFFFF94, POP_REF_I, PUSH_I, INLINE_NONE , FLOW_NEXT)
-
- /**
- * Loads the element with type int64 at a specified array index onto the top of the
- * evaluation stack as an int64.
- */
- final val Ldelem_I8 = new OpCode()
- opcode(Ldelem_I8, CEE_LDELEM_I8, "ldelem.i8" , 0xFFFFFF96, POP_REF_I, PUSH_I8, INLINE_NONE , FLOW_NEXT)
-
- /**
- * Loads the element with type float32 at a specified array index onto the top of the
- * evaluation stack as type F (float).
- */
- final val Ldelem_R4 = new OpCode()
- opcode(Ldelem_R4, CEE_LDELEM_R4, "ldelem.r4" , 0xFFFFFF98, POP_REF_I, PUSH_R4, INLINE_NONE , FLOW_NEXT)
-
- /**
- * Loads the element with type float64 at a specified array index onto the top of the
- * evaluation stack as type F (float).
- */
- final val Ldelem_R8 = new OpCode()
- opcode(Ldelem_R8, CEE_LDELEM_R8, "ldelem.r8" , 0xFFFFFF99, POP_REF_I, PUSH_R8, INLINE_NONE , FLOW_NEXT)
-
- /**
- * Loads the element containing an object reference at a specified array index onto
- * the top of the evaluation stack as type O (object reference).
- */
- final val Ldelem_Ref = new OpCode()
- opcode(Ldelem_Ref, CEE_LDELEM_REF, "ldelem.ref", 0xFFFFFF9A, POP_REF_I, PUSH_REF, INLINE_NONE , FLOW_NEXT)
-
- /**
- * Loads the element with type unsigned int8 at a specified array index onto the top
- * of the evaluation stack as an int32.
- */
- final val Ldelem_U1 = new OpCode()
- opcode(Ldelem_U1, CEE_LDELEM_U1, "ldelem.u1" , 0xFFFFFF91, POP_REF_I, PUSH_I, INLINE_NONE , FLOW_NEXT)
-
- /**
- * Loads the element with type unsigned int16 at a specified array index onto the top
- * of the evaluation stack as an int32.
- */
- final val Ldelem_U2 = new OpCode()
- opcode(Ldelem_U2, CEE_LDELEM_U2, "ldelem.u2" , 0xFFFFFF93, POP_REF_I, PUSH_I, INLINE_NONE , FLOW_NEXT)
-
- /**
- * Loads the element with type unsigned int32 at a specified array index onto the top
- * of the evaluation stack as an int32.
- */
- final val Ldelem_U4 = new OpCode()
- opcode(Ldelem_U4, CEE_LDELEM_U4, "ldelem.u4" , 0xFFFFFF95, POP_REF_I, PUSH_I, INLINE_NONE , FLOW_NEXT)
-
- /**
- * Replaces the array element at a given index with the natural int value on
- * the evaluation stack.
- */
- final val Stelem_I = new OpCode()
- opcode(Stelem_I, CEE_STELEM_I, "stelem.i", 0xFFFFFF9B, POP_REF_I_I, PUSH_NONE, INLINE_NONE , FLOW_NEXT)
-
- /**
- * Replaces the array element at a given index with the int8 value on the evaluation stack.
- */
- final val Stelem_I1 = new OpCode()
- opcode(Stelem_I1, CEE_STELEM_I1, "stelem.i1", 0xFFFFFF9C, POP_REF_I_I, PUSH_NONE, INLINE_NONE, FLOW_NEXT)
-
- /**
- * Replaces the array element at a given index with the int16 value on the evaluation stack.
- */
- final val Stelem_I2 = new OpCode()
- opcode(Stelem_I2, CEE_STELEM_I2, "stelem.i2", 0xFFFFFF9D, POP_REF_I_I, PUSH_NONE, INLINE_NONE, FLOW_NEXT)
-
- /**
- * Replaces the array element at a given index with the int32 value on the evaluation stack.
- */
- final val Stelem_I4 = new OpCode()
- opcode(Stelem_I4, CEE_STELEM_I4, "stelem.i4", 0xFFFFFF9E, POP_REF_I_I, PUSH_NONE, INLINE_NONE, FLOW_NEXT)
-
- /**
- * Replaces the array element at a given index with the int64 value on the evaluation stack.
- */
- final val Stelem_I8 = new OpCode()
- opcode(Stelem_I8, CEE_STELEM_I8,"stelem.i8", 0xFFFFFF9F, POP_REF_I_I8, PUSH_NONE, INLINE_NONE, FLOW_NEXT)
-
- /**
- * Replaces the array element at a given index with the float32 value on the evaluation stack.
- */
- final val Stelem_R4 = new OpCode()
- opcode(Stelem_R4, CEE_STELEM_R4,"stelem.r4", 0xFFFFFFA0, POP_REF_I_R4, PUSH_NONE, INLINE_NONE, FLOW_NEXT)
-
- /**
- * Replaces the array element at a given index with the float64 value on the evaluation stack.
- */
- final val Stelem_R8 = new OpCode()
- opcode(Stelem_R8, CEE_STELEM_R8,"stelem.r8", 0xFFFFFFA1, POP_REF_I_R8, PUSH_NONE, INLINE_NONE, FLOW_NEXT)
-
- /**
- * Replaces the array element at a given index with the object ref value (type O)
- * on the evaluation stack.
- */
- final val Stelem_Ref = new OpCode()
- opcode(Stelem_Ref, CEE_STELEM_REF,"stelem.ref",0xFFFFFFA2,POP_REF_I_REF,PUSH_NONE, INLINE_NONE, FLOW_NEXT)
-
- /**
- * Converts the signed value on top of the evaluation stack to signed int8 and
- * extends it to int32, throwing OverflowException on overflow.
- */
- final val Conv_Ovf_I1 = new OpCode()
- opcode(Conv_Ovf_I1, CEE_CONV_OVF_I1, "conv.ovf.i1", 0xFFFFFFB3, POP_1, PUSH_I , INLINE_NONE , FLOW_NEXT)
-
- /**
- * Converts the signed value on top of the evaluation stack to signed int16 and
- * extends it to int32, throwing OverflowException on overflow.
- */
- final val Conv_Ovf_I2 = new OpCode()
- opcode(Conv_Ovf_I2, CEE_CONV_OVF_I2, "conv.ovf.i2", 0xFFFFFFB5, POP_1, PUSH_I , INLINE_NONE , FLOW_NEXT)
-
- /**
- * Converts the signed value on top of the evaluation stack to signed int32,
- * throwing OverflowException on overflow.
- */
- final val Conv_Ovf_I4 = new OpCode()
- opcode(Conv_Ovf_I4, CEE_CONV_OVF_I4, "conv.ovf.i4", 0xFFFFFFB7, POP_1, PUSH_I , INLINE_NONE , FLOW_NEXT)
-
- /**
- * Converts the signed value on top of the evaluation stack to signed int64,
- * throwing OverflowException on overflow.
- */
- final val Conv_Ovf_I8 = new OpCode()
- opcode(Conv_Ovf_I8, CEE_CONV_OVF_I8, "conv.ovf.i8", 0xFFFFFFB9, POP_1, PUSH_I8, INLINE_NONE , FLOW_NEXT)
-
- /**
- * Converts the signed value on top of the evaluation stack to unsigned int8 and
- * extends it to int32, throwing OverflowException on overflow.
- */
- final val Conv_Ovf_U1 = new OpCode()
- opcode(Conv_Ovf_U1, CEE_CONV_OVF_U1, "conv.ovf.u1", 0xFFFFFFB4, POP_1, PUSH_I , INLINE_NONE , FLOW_NEXT)
-
- /**
- * Converts the signed value on top of the evaluation stack to unsigned int16 and
- * extends it to int32, throwing OverflowException on overflow.
- */
- final val Conv_Ovf_U2 = new OpCode()
- opcode(Conv_Ovf_U2, CEE_CONV_OVF_U2, "conv.ovf.u2", 0xFFFFFFB6, POP_1, PUSH_I , INLINE_NONE , FLOW_NEXT)
-
- /**
- * Converts the signed value on top of the evaluation stack to unsigned int32,
- * throwing OverflowException on overflow.
- */
- final val Conv_Ovf_U4 = new OpCode()
- opcode(Conv_Ovf_U4, CEE_CONV_OVF_U4, "conv.ovf.u4", 0xFFFFFFB8, POP_1, PUSH_I , INLINE_NONE , FLOW_NEXT)
-
- /**
- * Converts the signed value on top of the evaluation stack to unsigned int64,
- * throwing OverflowException on overflow.
- */
- final val Conv_Ovf_U8 = new OpCode()
- opcode(Conv_Ovf_U8, CEE_CONV_OVF_U8, "conv.ovf.u8", 0xFFFFFFBA, POP_1, PUSH_I8, INLINE_NONE , FLOW_NEXT)
-
- /**
- * Retrieves the address (type &) embedded in a typed reference.
- */
- final val Refanyval = new OpCode()
- opcode(Refanyval, CEE_REFANYVAL, "refanyval", 0xFFFFFFC2, POP_1, PUSH_I , INLINE_TYPE , FLOW_NEXT)
-
- /**
- * Retrieves the type token embedded in a typed reference.
- */
- final val Refanytype = new OpCode()
- opcode(Refanytype, CEE_REFANYTYPE, "refanytype", 0xFFFFFE1D, POP_1 , PUSH_I , INLINE_NONE, FLOW_NEXT)
-
- /**
- * Throws ArithmeticException if value is not a finite number.
- */
- final val Ckfinite = new OpCode()
- opcode(Ckfinite, CEE_CKFINITE, "ckfinite" , 0xFFFFFFC3, POP_1, PUSH_R8 , INLINE_NONE , FLOW_NEXT)
-
- /**
- * Pushes a typed reference to an instance of a specific type onto the evaluation stack.
- */
- final val Mkrefany = new OpCode()
- opcode(Mkrefany, CEE_MKREFANY, "mkrefany" , 0xFFFFFFC6, POP_I, PUSH_1 , INLINE_TYPE , FLOW_NEXT)
-
- /**
- * Converts a metadata token to its runtime representation, pushing it onto the evaluation stack.
- */
- final val Ldtoken = new OpCode()
- opcode(Ldtoken, CEE_LDTOKEN , "ldtoken" , 0xFFFFFFD0, POP_NONE, PUSH_I, INLINE_TOKEN , FLOW_NEXT)
-
- /**
- * Converts the value on top of the evaluation stack to unsigned int8, and extends it to int32.
- */
- final val Conv_U1 = new OpCode()
- opcode(Conv_U1, CEE_CONV_U1 , "conv.u1" , 0xFFFFFFD2, POP_1, PUSH_I, INLINE_NONE , FLOW_NEXT)
-
- /**
- * Converts the value on top of the evaluation stack to unsigned int16, and extends it to int32.
- */
- final val Conv_U2 = new OpCode()
- opcode(Conv_U2, CEE_CONV_U2 , "conv.u2" , 0xFFFFFFD1, POP_1, PUSH_I, INLINE_NONE , FLOW_NEXT)
-
- /**
- * Converts the value on top of the evaluation stack to natural int.
- */
- final val Conv_I = new OpCode()
- opcode(Conv_I, CEE_CONV_I , "conv.i" , 0xFFFFFFD3, POP_1, PUSH_I, INLINE_NONE , FLOW_NEXT)
-
- /**
- * Converts the signed value on top of the evaluation stack to signed natural int,
- * throwing OverflowException on overflow.
- */
- final val Conv_Ovf_I = new OpCode()
- opcode(Conv_Ovf_I, CEE_CONV_OVF_I , "conv.ovf.i", 0xFFFFFFD4, POP_1, PUSH_I, INLINE_NONE , FLOW_NEXT)
-
- /**
- * Converts the signed value on top of the evaluation stack to unsigned natural int,
- * throwing OverflowException on overflow.
- */
- final val Conv_Ovf_U = new OpCode()
- opcode(Conv_Ovf_U, CEE_CONV_OVF_U , "conv.ovf.u", 0xFFFFFFD5, POP_1, PUSH_I, INLINE_NONE , FLOW_NEXT)
-
- /**
- * Adds two integers, performs an overflow check, and pushes the result
- * onto the evaluation stack.
- */
- final val Add_Ovf = new OpCode()
- opcode(Add_Ovf, CEE_ADD_OVF , "add.ovf" , 0xFFFFFFD6, POP_1_1, PUSH_1, INLINE_NONE , FLOW_NEXT)
-
- /**
- * Adds two unsigned integer values, performs an overflow check, and pushes the result
- * onto the evaluation stack.
- */
- final val Add_Ovf_Un = new OpCode()
- opcode(Add_Ovf_Un, CEE_ADD_OVF_UN , "add.ovf.un", 0xFFFFFFD7, POP_1_1, PUSH_1, INLINE_NONE , FLOW_NEXT)
-
- /**
- * Multiplies two integer values, performs an overflow check, and pushes the result
- * onto the evaluation stack.
- */
- final val Mul_Ovf = new OpCode()
- opcode(Mul_Ovf, CEE_MUL_OVF , "mul.ovf" , 0xFFFFFFD8, POP_1_1, PUSH_1, INLINE_NONE , FLOW_NEXT)
-
- /**
- * Multiplies two unsigned integer values, performs an overflow check,
- * and pushes the result onto the evaluation stack.
- */
- final val Mul_Ovf_Un = new OpCode()
- opcode(Mul_Ovf_Un, CEE_MUL_OVF_UN , "mul.ovf.un", 0xFFFFFFD9, POP_1_1, PUSH_1, INLINE_NONE , FLOW_NEXT)
-
- /**
- * Subtracts one integer value from another, performs an overflow check,
- * and pushes the result onto the evaluation stack.
- */
- final val Sub_Ovf = new OpCode()
- opcode(Sub_Ovf, CEE_SUB_OVF , "sub.ovf" , 0xFFFFFFDA, POP_1_1, PUSH_1, INLINE_NONE , FLOW_NEXT)
-
- /**
- * Subtracts one unsigned integer value from another, performs an overflow check,
- * and pushes the result onto the evaluation stack.
- */
- final val Sub_Ovf_Un = new OpCode()
- opcode(Sub_Ovf_Un, CEE_SUB_OVF_UN, "sub.ovf.un", 0xFFFFFFDB, POP_1_1, PUSH_1, INLINE_NONE , FLOW_NEXT)
-
- /**
- * Transfers control from the fault or finally clause of an exception block back to
- * the Common Language Infrastructure (CLI) exception handler.
- */
- final val Endfinally = new OpCode()
- opcode(Endfinally, CEE_ENDFINALLY, "endfinally", 0xFFFFFFDC, POP_NONE, PUSH_NONE, INLINE_NONE, FLOW_RETURN)
-
- /**
- * Exits a protected region of code, unconditionally transferring control
- * to a specific target instruction.
- */
- final val Leave = new OpCode()
- opcode(Leave, CEE_LEAVE, "leave", 0xFFFFFFDD, POP_NONE, PUSH_NONE, INLINE_TARGET, FLOW_BRANCH)
-
- /**
- * Exits a protected region of code, unconditionally transferring control
- * to a target instruction (short form).
- */
- final val Leave_S = new OpCode()
- opcode(Leave_S, CEE_LEAVE_S, "leave.s", 0xFFFFFFDE, POP_NONE, PUSH_NONE, INLINE_TARGET_S, FLOW_BRANCH)
-
- /**
- * Stores a value of type natural int at a supplied address.
- */
- final val Stind_I = new OpCode()
- opcode(Stind_I, CEE_STIND_I, "stind.i", 0xFFFFFFDF, POP_I_I , PUSH_NONE, INLINE_NONE, FLOW_NEXT)
-
- /**
- * Converts the value on top of the evaluation stack to unsigned natural int,
- * and extends it to natural int.
- */
- final val Conv_U = new OpCode()
- opcode(Conv_U, CEE_CONV_U, "conv.u", 0xFFFFFFE0, POP_1 , PUSH_I , INLINE_NONE, FLOW_NEXT)
-
- /**
- * Returns an unmanaged pointer to the argument list of the current method.
- */
- final val Arglist = new OpCode()
- opcode(Arglist, CEE_ARGLIST, "arglist" , 0xFFFFFE00, POP_NONE, PUSH_I , INLINE_NONE, FLOW_NEXT)
-
- /**
- * Compares two values. If they are equal, the integer value 1 (int32) is pushed
- * onto the evaluation stack; otherwise, 0 (int32) is pushed onto the evaluation stack.
- */
- final val Ceq = new OpCode()
- opcode(Ceq, CEE_CEQ, "ceq", 0xFFFFFE01, POP_1_1 , PUSH_I, INLINE_NONE, FLOW_NEXT)
-
- /**
- * Compares two values. If the first value is greater than the second,
- * the integer value 1 (int32) is pushed onto the evaluation stack;
- * otherwise, 0 (int32) is pushed onto the evaluation stack.
- */
- final val Cgt = new OpCode()
- opcode(Cgt, CEE_CGT, "cgt", 0xFFFFFE02, POP_1_1 , PUSH_I, INLINE_NONE, FLOW_NEXT)
-
- /**
- * Compares two unsigned or unordered values. If the first value is greater than
- * the second, the integer value 1 (int32) is pushed onto the evaluation stack;
- * otherwise, 0 (int32) is pushed onto the evaluation stack.
- */
- final val Cgt_Un = new OpCode()
- opcode(Cgt_Un, CEE_CGT_UN, "cgt.un", 0xFFFFFE03, POP_1_1 , PUSH_I, INLINE_NONE, FLOW_NEXT)
-
- /**
- * Compares two values. If the first value is less than the second,
- * the integer value 1 (int32) is pushed onto the evaluation stack;
- * otherwise, 0 (int32) is pushed onto the evaluation stack.
- */
- final val Clt = new OpCode()
- opcode(Clt, CEE_CLT, "clt" , 0xFFFFFE04, POP_1_1 , PUSH_I , INLINE_NONE, FLOW_NEXT)
-
- /**
- * Compares the unsigned or unordered values value1 and value2. If value1 is
- * less than value2, then the integer value 1 (int32) is pushed onto the
- * evaluation stack; otherwise, 0 (int32) is pushed onto the evaluation stack.
- */
- final val Clt_Un = new OpCode()
- opcode(Clt_Un, CEE_CLT_UN , "clt.un" , 0xFFFFFE05, POP_1_1 , PUSH_I , INLINE_NONE, FLOW_NEXT)
-
- /**
- * Pushes an unmanaged pointer (type natural int) to the native code implementing
- * a specific method onto the evaluation stack.
- */
- final val Ldftn = new OpCode()
- opcode(Ldftn, CEE_LDFTN , "ldftn" , 0xFFFFFE06, POP_NONE, PUSH_I , INLINE_METHOD, FLOW_NEXT)
-
- /**
- * Pushes an unmanaged pointer (type natural int) to the native code implementing
- * a particular virtual method associated with a specified object onto the evaluation stack.
- */
- final val Ldvirtftn = new OpCode()
- opcode(Ldvirtftn, CEE_LDVIRTFTN, "ldvirtftn", 0xFFFFFE07, POP_REF , PUSH_I , INLINE_METHOD, FLOW_NEXT)
-
- /**
- * Loads an argument (referenced by a specified index value) onto the stack.
- */
- final val Ldarg = new OpCode()
- opcode(Ldarg, CEE_LDARG , "ldarg" , 0xFFFFFE09, POP_NONE, PUSH_1 , INLINE_VARIABLE , FLOW_NEXT)
-
- /**
- * Loads an argument address onto the evaluation stack.
- */
- final val Ldarga = new OpCode()
- opcode(Ldarga, CEE_LDARGA , "ldarga", 0xFFFFFE0A, POP_NONE, PUSH_I, INLINE_VARIABLE , FLOW_NEXT)
-
- /**
- * Loads the local variable at a specific index onto the evaluation stack.
- */
- final val Ldloc = new OpCode()
- opcode(Ldloc, CEE_LDLOC, "ldloc", 0xFFFFFE0C, POP_NONE, PUSH_1 , INLINE_VARIABLE , FLOW_NEXT)
-
- /**
- * Loads the address of the local variable at a specific index onto the evaluation stack.
- */
- final val Ldloca = new OpCode()
- opcode(Ldloca, CEE_LDLOCA, "ldloca", 0xFFFFFE0D, POP_NONE, PUSH_I, INLINE_VARIABLE , FLOW_NEXT)
-
- /**
- * Stores the value on top of the evaluation stack in the argument slot at a specified index.
- */
- final val Starg = new OpCode()
- opcode(Starg, CEE_STARG, "starg", 0xFFFFFE0B, POP_1 , PUSH_NONE, INLINE_VARIABLE , FLOW_NEXT)
-
- /**
- * Pops the current value from the top of the evaluation stack and stores it in
- * the local variable list at a specified index.
- */
- final val Stloc = new OpCode()
- opcode(Stloc, CEE_STLOC, "stloc", 0xFFFFFE0E, POP_1 , PUSH_NONE, INLINE_VARIABLE , FLOW_NEXT)
-
- /**
- * Allocates a certain number of bytes from the local dynamic memory pool and pushes the
- * address (a transient pointer, type *) of the first allocated byte onto the evaluation stack.
- */
- final val Localloc = new OpCode()
- opcode(Localloc, CEE_LOCALLOC, "localloc" , 0xFFFFFE0F, POP_I, PUSH_I, INLINE_NONE, FLOW_NEXT)
-
- /**
- * Transfers control from the filter clause of an exception back to the
- * Common Language Infrastructure (CLI) exception handler.
- */
- final val Endfilter = new OpCode()
- opcode(Endfilter, CEE_ENDFILTER, "endfilter" , 0xFFFFFE11, POP_I , PUSH_NONE, INLINE_NONE, FLOW_RETURN)
-
- /**
- * Indicates that an address currently atop the evaluation stack might not be aligned
- * to the natural size of the immediately following ldind, stind, ldfld, stfld, ldobj,
- * stobj, initblk, or cpblk instruction.
- */
- final val Unaligned = new OpCode()
- opcode(Unaligned, CEE_UNALIGNED, "unaligned.", 0xFFFFFE12, POP_NONE, PUSH_NONE, INLINE_I_S , FLOW_META)
-
- /**
- * Specifies that an address currently atop the evaluation stack might be volatile,
- * and the results of reading that location cannot be cached or that multiple stores
- * to that location cannot be suppressed.
- */
- final val Volatile = new OpCode()
- opcode(Volatile, CEE_VOLATILE, "volatile." , 0xFFFFFE13, POP_NONE, PUSH_NONE, INLINE_NONE, FLOW_META)
-
- /**
- * Performs a postfixed method call instruction such that the current method's stack
- * frame is removed before the actual call instruction is executed.
- */
- final val Tailcall = new OpCode()
- opcode(Tailcall, CEE_TAILCALL, "tail." , 0xFFFFFE14, POP_NONE, PUSH_NONE, INLINE_NONE, FLOW_META)
-
- /**
- * Initializes all the fields of the object at a specific address to a null reference
- * or a 0 of the appropriate primitive type.
- */
- final val Initobj = new OpCode()
- opcode(Initobj, CEE_INITOBJ , "initobj" , 0xFFFFFE15, POP_I , PUSH_NONE, INLINE_TYPE, FLOW_NEXT)
-
- /**
- * Copies a specified number of bytes from a source address to a destination address.
- */
- final val Cpblk = new OpCode()
- opcode(Cpblk, CEE_CPBLK , "cpblk" , 0xFFFFFE17, POP_I_I_I, PUSH_NONE, INLINE_NONE, FLOW_NEXT)
-
- /**
- * Initializes a specified block of memory at a specific address to a given size
- * and initial value.
- */
- final val Initblk = new OpCode()
- opcode(Initblk, CEE_INITBLK , "initblk" , 0xFFFFFE18, POP_I_I_I, PUSH_NONE, INLINE_NONE, FLOW_NEXT)
-
- /**
- * Rethrows the current exception.
- */
- final val Rethrow = new OpCode()
- opcode(Rethrow, CEE_RETHROW , "rethrow", 0xFFFFFE1A, POP_NONE , PUSH_NONE, INLINE_NONE, FLOW_THROW)
-
- /**
- * Pushes the size, in bytes, of a supplied value type onto the evaluation stack.
- */
- final val Sizeof = new OpCode()
- opcode(Sizeof, CEE_SIZEOF, "sizeof", 0xFFFFFE1C, POP_NONE , PUSH_I , INLINE_TYPE, FLOW_NEXT)
-
-
-
- //##########################################################################
-}
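The OpCode table removed above pairs each CIL instruction with its mnemonic, encoded value, stack pop/push behaviour, inline-operand kind, and control-flow kind; these constants are the values an ILGenerator-style emitter consumes when writing method bodies. Below is a minimal sketch of that usage, emitting "return arg0 + 1". The Emit(opcode) overload on ILGenerator and the import path are assumptions mirroring the System.Reflection.Emit-style API these files model, not taken verbatim from the deleted sources.

  // Sketch only: the ILGenerator#Emit(OpCode) shape and package layout are assumed.
  import ch.epfl.lamp.compiler.msil.emit.{ILGenerator, OpCode}

  object EmitSketch {
    def emitAddOne(gen: ILGenerator): Unit = {
      gen.Emit(OpCode.Ldarg_0)   // push the first argument onto the evaluation stack
      gen.Emit(OpCode.Ldc_I4_1)  // push the int32 constant 1
      gen.Emit(OpCode.Add)       // pop both operands, push their sum
      gen.Emit(OpCode.Ret)       // return the value left on the stack
    }
  }
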
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/emit/OpCodes.scala b/src/msil/ch/epfl/lamp/compiler/msil/emit/OpCodes.scala
deleted file mode 100644
index 80e4267436..0000000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/emit/OpCodes.scala
+++ /dev/null
@@ -1,1205 +0,0 @@
-/*
- * System.Reflection.Emit-like API for writing .NET assemblies to MSIL
- */
-
-
-package ch.epfl.lamp.compiler.msil.emit
-
-
-/**
- * Provides field representations of the Microsoft Intermediate Language (MSIL)
- * instructions for emission by the ILGenerator class members (such as Emit).
- *
- * @author Nikolay Mihaylov
- * @version 1.0
- */
-object OpCodes {
-
- //##########################################################################
-
- /**
- * Adds two values and pushes the result onto the evaluation stack.
- */
- final val Add = OpCode.Add
-
- /**
- * Fills space if bytecodes are patched. No meaningful operation is performed
- * although a processing cycle can be consumed.
- */
- final val Nop = OpCode.Nop
-
- /**
- * Signals the Common Language Infrastructure (CLI) to inform the debugger that
- * a break point has been tripped.
- */
- final val Break = OpCode.Break
-
- /**
- * Loads the argument at index 0 onto the evaluation stack.
- */
- final val Ldarg_0 = OpCode.Ldarg_0
-
- /**
- * Loads the argument at index 1 onto the evaluation stack.
- */
- final val Ldarg_1 = OpCode.Ldarg_1
-
- /**
- * Loads the argument at index 2 onto the evaluation stack.
- */
- final val Ldarg_2 = OpCode.Ldarg_2
-
- /**
- * Loads the argument at index 3 onto the evaluation stack.
- */
- final val Ldarg_3 = OpCode.Ldarg_3
-
- /**
- * Loads the local variable at index 0 onto the evaluation stack.
- */
- final val Ldloc_0 = OpCode.Ldloc_0
-
- /**
- * Loads the local variable at index 1 onto the evaluation stack.
- */
- final val Ldloc_1 = OpCode.Ldloc_1
-
- /**
- * Loads the local variable at index 2 onto the evaluation stack.
- */
- final val Ldloc_2 = OpCode.Ldloc_2
-
- /**
- * Loads the local variable at index 3 onto the evaluation stack.
- */
- final val Ldloc_3 = OpCode.Ldloc_3
-
- /**
- * Pops the current value from the top of the evaluation stack and
- * stores it in the local variable list at index 0.
- */
- final val Stloc_0 = OpCode.Stloc_0
-
- /**
- * Pops the current value from the top of the evaluation stack and
- * stores it in the local variable list at index 1.
- */
- final val Stloc_1 = OpCode.Stloc_1
-
- /**
- * Pops the current value from the top of the evaluation stack and
- * stores it in the local variable list at index 2.
- */
- final val Stloc_2 = OpCode.Stloc_2
-
- /**
- * Pops the current value from the top of the evaluation stack and
- * stores it in the local variable list at index 3.
- */
- final val Stloc_3 = OpCode.Stloc_3
-
- /**
- * Loads the argument (referenced by a specified short form index)
- * onto the evaluation stack.
- */
- final val Ldarg_S = OpCode.Ldarg_S
-
- /**
- * Loads an argument address, in short form, onto the evaluation stack.
- */
- final val Ldarga_S = OpCode.Ldarga_S
-
- /**
- * Loads the local variable at a specific index onto the evaluation stack,
- * short form.
- */
- final val Ldloc_S = OpCode.Ldloc_S
-
- /**
- * Loads the address of the local variable at a specific index onto
- * the evaluation stack, short form.
- */
- final val Ldloca_S = OpCode.Ldloca_S
-
- /**
- * Stores the value on top of the evaluation stack in the argument slot
- * at a specified index, short form.
- */
- final val Starg_S = OpCode.Starg_S
-
- /**
- * Pops the current value from the top of the evaluation stack and stores it
- * in the local variable list at index (short form).
- */
- final val Stloc_S = OpCode.Stloc_S
-
- /**
- * Pushes a null reference (type O) onto the evaluation stack.
- */
- final val Ldnull = OpCode.Ldnull
-
- /**
- * Pushes the integer value of -1 onto the evaluation stack as an int32.
- */
- final val Ldc_I4_M1 = OpCode.Ldc_I4_M1
-
- /**
- * Pushes the integer value of 0 onto the evaluation stack as an int32.
- */
- final val Ldc_I4_0 = OpCode.Ldc_I4_0
-
- /**
- * Pushes the integer value of 1 onto the evaluation stack as an int32.
- */
- final val Ldc_I4_1 = OpCode.Ldc_I4_1
-
- /**
- * Pushes the integer value of 2 onto the evaluation stack as an int32.
- */
- final val Ldc_I4_2 = OpCode.Ldc_I4_2
-
- /**
- * Pushes the integer value of 3 onto the evaluation stack as an int32.
- */
- final val Ldc_I4_3 = OpCode.Ldc_I4_3
-
- /**
- * Pushes the integer value of 4 onto the evaluation stack as an int32.
- */
- final val Ldc_I4_4 = OpCode.Ldc_I4_4
-
- /**
- * Pushes the integer value of 5 onto the evaluation stack as an int32.
- */
- final val Ldc_I4_5 = OpCode.Ldc_I4_5
-
- /**
- * Pushes the integer value of 6 onto the evaluation stack as an int32.
- */
- final val Ldc_I4_6 = OpCode.Ldc_I4_6
-
- /**
- * Pushes the integer value of 7 onto the evaluation stack as an int32.
- */
- final val Ldc_I4_7 = OpCode.Ldc_I4_7
-
- /**
- * Pushes the integer value of 8 onto the evaluation stack as an int32.
- */
- final val Ldc_I4_8 = OpCode.Ldc_I4_8
-
- /**
- * Pushes the supplied int8 value onto the evaluation stack as an int32, short form.
- */
- final val Ldc_I4_S = OpCode.Ldc_I4_S
-
- /**
- * Pushes a supplied value of type int32 onto the evaluation stack as an int32.
- */
- final val Ldc_I4 = OpCode.Ldc_I4
-
- /**
- * Pushes a supplied value of type int64 onto the evaluation stack as an int64.
- */
- final val Ldc_I8 = OpCode.Ldc_I8
-
- /**
- * Pushes a supplied value of type float32 onto the evaluation stack as type F (float).
- */
- final val Ldc_R4 = OpCode.Ldc_R4
-
- /**
- * Pushes a supplied value of type float64 onto the evaluation stack as type F (float).
- */
- final val Ldc_R8 = OpCode.Ldc_R8
-
- /**
- * Copies the current topmost value on the evaluation stack, and then pushes the copy
- * onto the evaluation stack.
- */
- final val Dup = OpCode.Dup
-
- /**
- * Removes the value currently on top of the evaluation stack.
- */
- final val Pop = OpCode.Pop
-
- /**
- * Exits current method and jumps to specified method.
- */
- final val Jmp = OpCode.Jmp
-
- /**
- * Calls the method indicated by the passed method descriptor.
- */
- final val Call = OpCode.Call
-
- /**
- * constrained. prefix
- */
- final val Constrained = OpCode.Constrained
-
- /**
- * readonly. prefix
- */
- final val Readonly = OpCode.Readonly
-
- /**
- * Calls the method indicated on the evaluation stack (as a pointer to an entry point)
- * with arguments described by a calling convention.
- */
- final val Calli = OpCode.Calli
-
- /**
- * Returns from the current method, pushing a return value (if present) from the callee's
- * evaluation stack onto the caller's evaluation stack.
- */
- final val Ret = OpCode.Ret
-
- /**
- * Unconditionally transfers control to a target instruction (short form).
- */
- final val Br_S = OpCode.Br_S
-
- /**
- * Transfers control to a target instruction (short form) if value is false, a null reference, or zero.
- */
- final val Brfalse_S = OpCode.Brfalse_S
-
- /**
- * Transfers control to a target instruction (short form) if value is true, not null, or non-zero.
- */
- final val Brtrue_S = OpCode.Brtrue_S
-
- /**
- * Transfers control to a target instruction (short form) if two values are equal.
- */
- final val Beq_S = OpCode.Beq_S
-
- /**
- * Transfers control to a target instruction (short form) if the first value is greater than
- * or equal to the second value.
- */
- final val Bge_S = OpCode.Bge_S
-
- /**
- * Transfers control to a target instruction (short form) if the first value is greater than
- * the second value.
- */
- final val Bgt_S = OpCode.Bgt_S
-
- /**
- * Transfers control to a target instruction (short form) if the first value is less than
- * or equal to the second value.
- */
- final val Ble_S = OpCode.Ble_S
-
- /**
- * Transfers control to a target instruction (short form) if the first value is less than
- * the second value.
- */
- final val Blt_S = OpCode.Blt_S
-
- /**
- * Transfers control to a target instruction (short form) when two unsigned integer values
- * or unordered float values are not equal.
- */
- final val Bne_Un_S = OpCode.Bne_Un_S
-
- /**
- * Transfers control to a target instruction (short form) if the first value is greater
- * than the second value, when comparing unsigned integer values or unordered float values.
- */
- final val Bge_Un_S = OpCode.Bge_Un_S
-
- /**
- * Transfers control to a target instruction (short form) if the first value is greater than
- * the second value, when comparing unsigned integer values or unordered float values.
- */
- final val Bgt_Un_S = OpCode.Bgt_Un_S
-
- /**
- * Transfers control to a target instruction (short form) if the first value is less than
- * or equal to the second value, when comparing unsigned integer values or unordered float values.
- */
- final val Ble_Un_S = OpCode.Ble_Un_S
-
- /**
- * Transfers control to a target instruction (short form) if the first value is less than
- * the second value, when comparing unsigned integer values or unordered float values.
- */
- final val Blt_Un_S = OpCode.Blt_Un_S
-
- /**
- * Unconditionally transfers control to a target instruction.
- */
- final val Br = OpCode.Br
-
- /**
- * Transfers control to a target instruction if value is false, a null reference
- * (Nothing in Visual Basic), or zero.
- */
- final val Brfalse = OpCode.Brfalse
-
- /**
- * Transfers control to a target instruction if value is true, not null, or non-zero.
- */
- final val Brtrue = OpCode.Brtrue
-
- /**
- * Transfers control to a target instruction if two values are equal.
- */
- final val Beq = OpCode.Beq
-
- /**
- * Transfers control to a target instruction if the first value is greater than or
- * equal to the second value.
- */
- final val Bge = OpCode.Bge
-
- /**
- * Transfers control to a target instruction if the first value is greater than the second value.
- */
- final val Bgt = OpCode.Bgt
-
- /**
- * Transfers control to a target instruction if the first value is less than or equal
- * to the second value.
- */
- final val Ble = OpCode.Ble
-
- /**
- * Transfers control to a target instruction if the first value is less than the second value.
- */
- final val Blt = OpCode.Blt
-
- /**
- * Transfers control to a target instruction when two unsigned integer values or
- * unordered float values are not equal.
- */
- final val Bne_Un = OpCode.Bne_Un
-
- /**
- * Transfers control to a target instruction if the first value is greater than
- * the second value, when comparing unsigned integer values or unordered float values.
- */
- final val Bge_Un = OpCode.Bge_Un
-
- /**
- * Transfers control to a target instruction if the first value is greater than the
- * second value, when comparing unsigned integer values or unordered float values.
- */
- final val Bgt_Un = OpCode.Bgt_Un
-
- /**
- * Transfers control to a target instruction if the first value is less than or equal to
- * the second value, when comparing unsigned integer values or unordered float values.
- */
- final val Ble_Un = OpCode.Ble_Un
-
- /**
- * Transfers control to a target instruction if the first value is less than the second value,
- * when comparing unsigned integer values or unordered float values.
- */
- final val Blt_Un = OpCode.Blt_Un
-
- /**
- * Implements a jump table.
- */
- final val Switch = OpCode.Switch
-
- /**
- * Loads a value of type int8 as an int32 onto the evaluation stack indirectly.
- */
- final val Ldind_I1 = OpCode.Ldind_I1
-
- /**
- * Loads a value of type int16 as an int32 onto the evaluation stack indirectly.
- */
- final val Ldind_I2 = OpCode.Ldind_I2
-
- /**
- * Loads a value of type int32 as an int32 onto the evaluation stack indirectly.
- */
- final val Ldind_I4 = OpCode.Ldind_I4
-
- /**
- * Loads a value of type int64 as an int64 onto the evaluation stack indirectly.
- */
- final val Ldind_I8 = OpCode.Ldind_I8
-
- /**
- * Loads a value of type natural int as a natural int onto the evaluation stack indirectly.
- */
- final val Ldind_I = OpCode.Ldind_I
-
- /**
- * Loads a value of type float32 as a type F (float) onto the evaluation stack indirectly.
- */
- final val Ldind_R4 = OpCode.Ldind_R4
-
- /**
- * Loads a value of type float64 as a type F (float) onto the evaluation stack indirectly.
- */
- final val Ldind_R8 = OpCode.Ldind_R8
-
- /**
- * Loads an object reference as a type O (object reference) onto the evaluation stack indirectly.
- */
- final val Ldind_Ref = OpCode.Ldind_Ref
-
- /**
- * Loads a value of type unsigned int8 as an int32 onto the evaluation stack indirectly.
- */
- final val Ldind_U1 = OpCode.Ldind_U1
-
- /**
- * Loads a value of type unsigned int16 as an int32 onto the evaluation stack indirectly.
- */
- final val Ldind_U2 = OpCode.Ldind_U2
-
- /**
- * Loads a value of type unsigned int32 as an int32 onto the evaluation stack indirectly.
- */
- final val Ldind_U4 = OpCode.Ldind_U4
-
- /**
- * Stores an object reference value at a supplied address.
- */
- final val Stind_Ref = OpCode.Stind_Ref
-
- /**
- * Stores a value of type int8 at a supplied address.
- */
- final val Stind_I1 = OpCode.Stind_I1
-
- /**
- * Stores a value of type int16 at a supplied address.
- */
- final val Stind_I2 = OpCode.Stind_I2
-
- /**
- * Stores a value of type int32 at a supplied address.
- */
- final val Stind_I4 = OpCode.Stind_I4
-
- /**
- * Stores a value of type int64 at a supplied address.
- */
- final val Stind_I8 = OpCode.Stind_I8
-
- /**
- * Stores a value of type float32 at a supplied address.
- */
- final val Stind_R4 = OpCode.Stind_R4
-
- /**
- * Stores a value of type float64 at a supplied address.
- */
- final val Stind_R8 = OpCode.Stind_R8
-
- /**
- * Subtracts one value from another and pushes the result onto the evaluation stack.
- */
- final val Sub = OpCode.Sub
-
- /**
- * Multiplies two values and pushes the result on the evaluation stack.
- */
- final val Mul = OpCode.Mul
-
- /**
- * Divides two values and pushes the result as a floating-point (type F) or
- * quotient (type int32) onto the evaluation stack.
- */
- final val Div = OpCode.Div
-
- /**
- * Divides two unsigned integer values and pushes the result (int32) onto the evaluation stack.
- */
- final val Div_Un = OpCode.Div_Un
-
- /**
- * Divides two values and pushes the remainder onto the evaluation stack.
- */
- final val Rem = OpCode.Rem
-
- /**
- * Divides two unsigned values and pushes the remainder onto the evaluation stack.
- */
- final val Rem_Un = OpCode.Rem_Un
-
- /**
- * Computes the bitwise AND of two values and pushes the result onto the evaluation stack.
- */
- final val And = OpCode.And
-
- /**
- * Computes the bitwise OR of the two integer values on top of the stack and
- * pushes the result onto the evaluation stack.
- */
- final val Or = OpCode.Or
-
- /**
- * Computes the bitwise XOR of the top two values on the evaluation stack,
- * pushing the result onto the evaluation stack.
- */
- final val Xor = OpCode.Xor
-
- /**
- * Shifts an integer value to the left (in zeroes) by a specified number of bits,
- * pushing the result onto the evaluation stack.
- */
- final val Shl = OpCode.Shl
-
- /**
- * Shifts an integer value (in sign) to the right by a specified number of bits,
- * pushing the result onto the evaluation stack.
- */
- final val Shr = OpCode.Shr
-
- /**
- * Shifts an unsigned integer value (in zeroes) to the right by a specified number of bits,
- * pushing the result onto the evaluation stack.
- */
- final val Shr_Un = OpCode.Shr_Un
-
- /**
- * Negates a value and pushes the result onto the evaluation stack.
- */
- final val Neg = OpCode.Neg
-
- /**
- * Computes the bitwise complement of the integer value on top of the stack and pushes
- * the result onto the evaluation stack as the same type.
- */
- final val Not = OpCode.Not
-
- /**
- * Converts the value on top of the evaluation stack to int8, then extends (pads) it to int32.
- */
- final val Conv_I1 = OpCode.Conv_I1
-
- /**
- * Converts the value on top of the evaluation stack to int16, then extends (pads) it to int32.
- */
- final val Conv_I2 = OpCode.Conv_I2
-
- /**
- * Converts the value on top of the evaluation stack to int32.
- */
- final val Conv_I4 = OpCode.Conv_I4
-
- /**
- * Converts the value on top of the evaluation stack to int64.
- */
- final val Conv_I8 = OpCode.Conv_I8
-
- /**
- * Converts the value on top of the evaluation stack to float32.
- */
- final val Conv_R4 = OpCode.Conv_R4
-
- /**
- * Converts the value on top of the evaluation stack to float64.
- */
- final val Conv_R8 = OpCode.Conv_R8
-
- /**
- * Converts the value on top of the evaluation stack to unsigned int32, and extends it to int32.
- */
- final val Conv_U4 = OpCode.Conv_U4
-
- /**
- * Converts the value on top of the evaluation stack to unsigned int64, and extends it to int64.
- */
- final val Conv_U8 = OpCode.Conv_U8
-
- /**
- * Calls a late-bound method on an object, pushing the return value onto the evaluation stack.
- */
- final val Callvirt = OpCode.Callvirt
-
- /**
- * Copies the value type located at the address of an object (type &, * or natural int)
- * to the address of the destination object (type &, * or natural int).
- */
- final val Cpobj = OpCode.Cpobj
-
- /**
- * Copies the value type object pointed to by an address to the top of the evaluation stack.
- */
- final val Ldobj = OpCode.Ldobj
-
- /**
- * Pushes a new object reference to a string literal stored in the metadata.
- */
- final val Ldstr = OpCode.Ldstr
-
- /**
- * Creates a new object or a new instance of a value type, pushing an object reference
- * (type O) onto the evaluation stack.
- */
- final val Newobj = OpCode.Newobj
-
- /**
- * Attempts to cast an object passed by reference to the specified class.
- */
- final val Castclass = OpCode.Castclass
-
- /**
- * Tests whether an object reference (type O) is an instance of a particular class.
- */
- final val Isinst = OpCode.Isinst
-
- /**
- * Converts the unsigned integer value on top of the evaluation stack to float32.
- */
- final val Conv_R_Un = OpCode.Conv_R_Un
-
- /**
- * Converts the boxed representation of a value type to its unboxed form.
- */
- final val Unbox = OpCode.Unbox
-
- /**
- * Throws the exception object currently on the evaluation stack.
- */
- final val Throw = OpCode.Throw
-
- /**
- * Finds the value of a field in the object whose reference is currently
- * on the evaluation stack.
- */
- final val Ldfld = OpCode.Ldfld
-
- /**
- * Finds the address of a field in the object whose reference is currently
- * on the evaluation stack.
- */
- final val Ldflda = OpCode.Ldflda
-
- /**
- * Pushes the value of a static field onto the evaluation stack.
- */
- final val Ldsfld = OpCode.Ldsfld
-
- /**
- * Pushes the address of a static field onto the evaluation stack.
- */
- final val Ldsflda = OpCode.Ldsflda
-
- /**
- * Replaces the value stored in the field of an object reference or pointer with a new value.
- */
- final val Stfld = OpCode.Stfld
-
- /**
- * Replaces the value of a static field with a value from the evaluation stack.
- */
- final val Stsfld = OpCode.Stsfld
-
- /**
- * Copies a value of a specified type from the evaluation stack into a supplied memory address.
- */
- final val Stobj = OpCode.Stobj
-
- /**
- * Converts the unsigned value on top of the evaluation stack to signed int8 and
- * extends it to int32, throwing OverflowException on overflow.
- */
- final val Conv_Ovf_I1_Un = OpCode.Conv_Ovf_I1_Un
-
- /**
- * Converts the unsigned value on top of the evaluation stack to signed int16 and
- * extends it to int32, throwing OverflowException on overflow.
- */
- final val Conv_Ovf_I2_Un = OpCode.Conv_Ovf_I2_Un
-
- /**
- * Converts the unsigned value on top of the evaluation stack to signed int32,
- * throwing OverflowException on overflow.
- */
- final val Conv_Ovf_I4_Un = OpCode.Conv_Ovf_I4_Un
-
- /**
- * Converts the unsigned value on top of the evaluation stack to signed int64,
- * throwing OverflowException on overflow.
- */
- final val Conv_Ovf_I8_Un = OpCode.Conv_Ovf_I8_Un
-
- /**
- * Converts the unsigned value on top of the evaluation stack to signed natural int,
- * throwing OverflowException on overflow.
- */
- final val Conv_Ovf_I_Un = OpCode.Conv_Ovf_I_Un
-
- /**
- * Converts the unsigned value on top of the evaluation stack to unsigned int8 and
- * extends it to int32, throwing OverflowException on overflow.
- */
- final val Conv_Ovf_U1_Un = OpCode.Conv_Ovf_U1_Un
-
- /**
- * Converts the unsigned value on top of the evaluation stack to unsigned int16 and
- * extends it to int32, throwing OverflowException on overflow.
- */
- final val Conv_Ovf_U2_Un = OpCode.Conv_Ovf_U2_Un
-
- /**
- * Converts the unsigned value on top of the evaluation stack to unsigned int32,
- * throwing OverflowException on overflow.
- */
- final val Conv_Ovf_U4_Un = OpCode.Conv_Ovf_U4_Un
-
- /**
- * Converts the unsigned value on top of the evaluation stack to unsigned int64,
- * throwing OverflowException on overflow.
- */
- final val Conv_Ovf_U8_Un = OpCode.Conv_Ovf_U8_Un
-
- /**
- * Converts the unsigned value on top of the evaluation stack to unsigned natural int,
- * throwing OverflowException on overflow.
- */
- final val Conv_Ovf_U_Un = OpCode.Conv_Ovf_U_Un
-
- /**
- * Converts a value type to an object reference (type O).
- */
- final val Box = OpCode.Box
-
- /**
- * Pushes an object reference to a new zero-based, one-dimensional array whose elements
- * are of a specific type onto the evaluation stack.
- */
- final val Newarr = OpCode.Newarr
-
- /**
- * Pushes the number of elements of a zero-based, one-dimensional array
- * onto the evaluation stack.
- */
- final val Ldlen = OpCode.Ldlen
-
- /**
- * Loads the address of the array element at a specified array index onto
- * the top of the evaluation stack as type & (managed pointer).
- */
- final val Ldelema = OpCode.Ldelema
-
- /**
- * Loads the element with type natural int at a specified array index onto the top
- * of the evaluation stack as a natural int.
- */
- final val Ldelem_I = OpCode.Ldelem_I
-
- /**
- * Loads the element with type int8 at a specified array index onto the top of the
- * evaluation stack as an int32.
- */
- final val Ldelem_I1 = OpCode.Ldelem_I1
-
- /**
- * Loads the element with type int16 at a specified array index onto the top of
- * the evaluation stack as an int32.
- */
- final val Ldelem_I2 = OpCode.Ldelem_I2
-
- /**
- * Loads the element with type int32 at a specified array index onto the top of the
- * evaluation stack as an int32.
- */
- final val Ldelem_I4 = OpCode.Ldelem_I4
-
- /**
- * Loads the element with type int64 at a specified array index onto the top of the
- * evaluation stack as an int64.
- */
- final val Ldelem_I8 = OpCode.Ldelem_I8
-
- /**
- * Loads the element with type float32 at a specified array index onto the top of the
- * evaluation stack as type F (float).
- */
- final val Ldelem_R4 = OpCode.Ldelem_R4
-
- /**
- * Loads the element with type float64 at a specified array index onto the top of the
- * evaluation stack as type F (float).
- */
- final val Ldelem_R8 = OpCode.Ldelem_R8
-
- /**
- * Loads the element containing an object reference at a specified array index onto
- * the top of the evaluation stack as type O (object reference).
- */
- final val Ldelem_Ref = OpCode.Ldelem_Ref
-
- /**
- * Loads the element with type unsigned int8 at a specified array index onto the top
- * of the evaluation stack as an int32.
- */
- final val Ldelem_U1 = OpCode.Ldelem_U1
-
- /**
- * Loads the element with type unsigned int16 at a specified array index onto the top
- * of the evaluation stack as an int32.
- */
- final val Ldelem_U2 = OpCode.Ldelem_U2
-
- /**
- * Loads the element with type unsigned int32 at a specified array index onto the top
- * of the evaluation stack as an int32.
- */
- final val Ldelem_U4 = OpCode.Ldelem_U4
-
- /**
- * Replaces the array element at a given index with the natural int value on
- * the evaluation stack.
- */
- final val Stelem_I = OpCode.Stelem_I
-
- /**
- * Replaces the array element at a given index with the int8 value on the evaluation stack.
- */
- final val Stelem_I1 = OpCode.Stelem_I1
-
- /**
- * Replaces the array element at a given index with the int16 value on the evaluation stack.
- */
- final val Stelem_I2 = OpCode.Stelem_I2
-
- /**
- * Replaces the array element at a given index with the int32 value on the evaluation stack.
- */
- final val Stelem_I4 = OpCode.Stelem_I4
-
- /**
- * Replaces the array element at a given index with the int64 value on the evaluation stack.
- */
- final val Stelem_I8 = OpCode.Stelem_I8
-
- /**
- * Replaces the array element at a given index with the float32 value on the evaluation stack.
- */
- final val Stelem_R4 = OpCode.Stelem_R4
-
- /**
- * Replaces the array element at a given index with the float64 value on the evaluation stack.
- */
- final val Stelem_R8 = OpCode.Stelem_R8
-
- /**
- * Replaces the array element at a given index with the object ref value (type O)
- * on the evaluation stack.
- */
- final val Stelem_Ref = OpCode.Stelem_Ref
-
- /**
- * Converts the signed value on top of the evaluation stack to signed int8 and
- * extends it to int32, throwing OverflowException on overflow.
- */
- final val Conv_Ovf_I1 = OpCode.Conv_Ovf_I1
-
- /**
- * Converts the signed value on top of the evaluation stack to signed int16 and
- * extends it to int32, throwing OverflowException on overflow.
- */
- final val Conv_Ovf_I2 = OpCode.Conv_Ovf_I2
-
- /**
- * Converts the signed value on top of the evaluation stack to signed int32,
- * throwing OverflowException on overflow.
- */
- final val Conv_Ovf_I4 = OpCode.Conv_Ovf_I4
-
- /**
- * Converts the signed value on top of the evaluation stack to signed int64,
- * throwing OverflowException on overflow.
- */
- final val Conv_Ovf_I8 = OpCode.Conv_Ovf_I8
-
- /**
- * Converts the signed value on top of the evaluation stack to unsigned int8 and
- * extends it to int32, throwing OverflowException on overflow.
- */
- final val Conv_Ovf_U1 = OpCode.Conv_Ovf_U1
-
- /**
- * Converts the signed value on top of the evaluation stack to unsigned int16 and
- * extends it to int32, throwing OverflowException on overflow.
- */
- final val Conv_Ovf_U2 = OpCode.Conv_Ovf_U2
-
- /**
- * Converts the signed value on top of the evaluation stack to unsigned int32,
- * throwing OverflowException on overflow.
- */
- final val Conv_Ovf_U4 = OpCode.Conv_Ovf_U4
-
- /**
- * Converts the signed value on top of the evaluation stack to unsigned int64,
- * throwing OverflowException on overflow.
- */
- final val Conv_Ovf_U8 = OpCode.Conv_Ovf_U8
-
- /**
- * Retrieves the address (type &) embedded in a typed reference.
- */
- final val Refanyval = OpCode.Refanyval
-
- /**
- * Retrieves the type token embedded in a typed reference.
- */
- final val Refanytype = OpCode.Refanytype
-
- /**
- * Throws ArithmeticException if value is not a finite number.
- */
- final val Ckfinite = OpCode.Ckfinite
-
- /**
- * Pushes a typed reference to an instance of a specific type onto the evaluation stack.
- */
- final val Mkrefany = OpCode.Mkrefany
-
- /**
- * Converts a metadata token to its runtime representation, pushing it onto the evaluation stack.
- */
- final val Ldtoken = OpCode.Ldtoken
-
- /**
- * Converts the value on top of the evaluation stack to unsigned int8, and extends it to int32.
- */
- final val Conv_U1 = OpCode.Conv_U1
-
- /**
- * Converts the value on top of the evaluation stack to unsigned int16, and extends it to int32.
- */
- final val Conv_U2 = OpCode.Conv_U2
-
- /**
- * Converts the value on top of the evaluation stack to natural int.
- */
- final val Conv_I = OpCode.Conv_I
-
- /**
- * Converts the signed value on top of the evaluation stack to signed natural int,
- * throwing OverflowException on overflow.
- */
- final val Conv_Ovf_I = OpCode.Conv_Ovf_I
-
- /**
- * Converts the signed value on top of the evaluation stack to unsigned natural int,
- * throwing OverflowException on overflow.
- */
- final val Conv_Ovf_U = OpCode.Conv_Ovf_U
-
- /**
- * Adds two integers, performs an overflow check, and pushes the result
- * onto the evaluation stack.
- */
- final val Add_Ovf = OpCode.Add_Ovf
-
- /**
- * Adds two unsigned integer values, performs an overflow check, and pushes the result
- * onto the evaluation stack.
- */
- final val Add_Ovf_Un = OpCode.Add_Ovf_Un
-
- /**
- * Multiplies two integer values, performs an overflow check, and pushes the result
- * onto the evaluation stack.
- */
- final val Mul_Ovf = OpCode.Mul_Ovf
-
- /**
- * Multiplies two unsigned integer values, performs an overflow check,
- * and pushes the result onto the evaluation stack.
- */
- final val Mul_Ovf_Un = OpCode.Mul_Ovf_Un
-
- /**
- * Subtracts one integer value from another, performs an overflow check,
- * and pushes the result onto the evaluation stack.
- */
- final val Sub_Ovf = OpCode.Sub_Ovf
-
- /**
- * Subtracts one unsigned integer value from another, performs an overflow check,
- * and pushes the result onto the evaluation stack.
- */
- final val Sub_Ovf_Un = OpCode.Sub_Ovf_Un
-
- /**
- * Transfers control from the fault or finally clause of an exception block back to
- * the Common Language Infrastructure (CLI) exception handler.
- */
- final val Endfinally = OpCode.Endfinally
-
- /**
- * Exits a protected region of code, unconditionally transferring control
- * to a specific target instruction.
- */
- final val Leave = OpCode.Leave
-
- /**
- * Exits a protected region of code, unconditionally transferring control
- * to a target instruction (short form).
- */
- final val Leave_S = OpCode.Leave_S
-
- /**
- * Stores a value of type natural int at a supplied address.
- */
- final val Stind_I = OpCode.Stind_I
-
- /**
- * Converts the value on top of the evaluation stack to unsigned natural int,
- * and extends it to natural int.
- */
- final val Conv_U = OpCode.Conv_U
-
- /**
- * Returns an unmanaged pointer to the argument list of the current method.
- */
- final val Arglist = OpCode.Arglist
-
- /**
- * Compares two values. If they are equal, the integer value 1 (int32) is pushed
- * onto the evaluation stack; otherwise 0 (int32) is pushed onto the evaluation stack.
- */
- final val Ceq = OpCode.Ceq
-
- /**
- * Compares two values. If the first value is greater than the second,
- * the integer value 1 (int32) is pushed onto the evaluation stack;
- * otherwise 0 (int32) is pushed onto the evaluation stack.
- */
- final val Cgt = OpCode.Cgt
-
- /**
- * Compares two unsigned or unordered values. If the first value is greater than
- * the second, the integer value 1 (int32) is pushed onto the evaluation stack;
- * otherwise 0 (int32) is pushed onto the evaluation stack.
- */
- final val Cgt_Un = OpCode.Cgt_Un
-
- /**
- * Compares two values. If the first value is less than the second,
- * the integer value 1 (int32) is pushed onto the evaluation stack;
- * otherwise 0 (int32) is pushed onto the evaluation stack.
- */
- final val Clt = OpCode.Clt
-
- /**
- * Compares the unsigned or unordered values value1 and value2. If value1 is
- * less than value2, then the integer value 1 (int32) is pushed onto the
- * evaluation stack; otherwise 0 (int32) is pushed onto the evaluation stack.
- */
- final val Clt_Un = OpCode.Clt_Un
-
- /**
- * Pushes an unmanaged pointer (type natural int) to the native code implementing
- * a specific method onto the evaluation stack.
- */
- final val Ldftn = OpCode.Ldftn
-
- /**
- * Pushes an unmanaged pointer (type natural int) to the native code implementing
- * a particular virtual method associated with a specified object onto the evaluation stack.
- */
- final val Ldvirtftn = OpCode.Ldvirtftn
-
- /**
- * Loads an argument (referenced by a specified index value) onto the stack.
- */
- final val Ldarg = OpCode.Ldarg
-
- /**
- * Loads an argument address onto the evaluation stack.
- */
- final val Ldarga = OpCode.Ldarga
-
- /**
- * Loads the local variable at a specific index onto the evaluation stack.
- */
- final val Ldloc = OpCode.Ldloc
-
- /**
- * Loads the address of the local variable at a specific index onto the evaluation stack.
- */
- final val Ldloca = OpCode.Ldloca
-
- /**
- * Stores the value on top of the evaluation stack in the argument slot at a specified index.
- */
- final val Starg = OpCode.Starg
-
- /**
- * Pops the current value from the top of the evaluation stack and stores it in
- * the local variable list at a specified index.
- */
- final val Stloc = OpCode.Stloc
-
- /**
- * Allocates a certain number of bytes from the local dynamic memory pool and pushes the
- * address (a transient pointer, type *) of the first allocated Byte onto the evaluation stack.
- */
- final val Localloc = OpCode.Localloc
-
- /**
- * Transfers control from the filter clause of an exception back to the
- * Common Language Infrastructure (CLI) exception handler.
- */
- final val Endfilter = OpCode.Endfilter
-
- /**
- * Indicates that an address currently atop the evaluation stack might not be aligned
- * to the natural size of the immediately following ldind, stind, ldfld, stfld, ldobj,
- * stobj, initblk, or cpblk instruction.
- */
- final val Unaligned = OpCode.Unaligned
-
- /**
- * Specifies that an address currently atop the evaluation stack might be volatile,
- * and the results of reading that location cannot be cached or that multiple stores
- * to that location cannot be suppressed.
- */
- final val Volatile = OpCode.Volatile
-
- /**
- * Performs a postfixed method call instruction such that the current method's stack
- * frame is removed before the actual call instruction is executed.
- */
- final val Tailcall = OpCode.Tailcall
-
- /**
- * Initializes all the fields of the object at a specific address to a null reference
- * or a 0 of the appropriate primitive type.
- */
- final val Initobj = OpCode.Initobj
-
- /**
- * Copies a specified number of bytes from a source address to a destination address.
- */
- final val Cpblk = OpCode.Cpblk
-
- /**
- * Initializes a specified block of memory at a specific address to a given size
- * and initial value.
- */
- final val Initblk = OpCode.Initblk
-
- /**
- * Rethrows the current exception.
- */
- final val Rethrow = OpCode.Rethrow
-
- /**
- * Pushes the size, in bytes, of a supplied value type onto the evaluation stack.
- */
- final val Sizeof = OpCode.Sizeof
-
- //##########################################################################
-}
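The constants above are thin wrappers over the CIL instruction set, so their semantics are easiest to see in ordinary code. Below is a minimal, self-contained Scala sketch (not part of the removed API; names are illustrative) of what the overflow-checking conversions such as Conv_Ovf_I1 and Conv_Ovf_U1 do to the value on top of the evaluation stack; the CLR raises OverflowException, approximated here with ArithmeticException.

object ConvOvfSketch {
  // Narrows an Int to the signed int8 range, failing on overflow (Conv_Ovf_I1).
  // The CLR throws System.OverflowException; ArithmeticException stands in for it here.
  def convOvfI1(value: Int): Byte =
    if (value < Byte.MinValue || value > Byte.MaxValue)
      throw new ArithmeticException("overflow converting " + value + " to int8")
    else value.toByte

  // Same check for unsigned int8 (Conv_Ovf_U1); the result is widened back to an Int,
  // just as the CLR widens it back to int32 on the evaluation stack.
  def convOvfU1(value: Int): Int =
    if (value < 0 || value > 255)
      throw new ArithmeticException("overflow converting " + value + " to unsigned int8")
    else value

  def main(args: Array[String]): Unit = {
    println(convOvfI1(100))   // 100
    println(convOvfU1(200))   // 200
    // convOvfI1(200) would throw ArithmeticException
  }
}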
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/emit/ParameterBuilder.scala b/src/msil/ch/epfl/lamp/compiler/msil/emit/ParameterBuilder.scala
deleted file mode 100644
index 8f9d81a8b0..0000000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/emit/ParameterBuilder.scala
+++ /dev/null
@@ -1,44 +0,0 @@
-/*
- * System.Reflection.Emit-like API for writing .NET assemblies to MSIL
- */
-
-
-package ch.epfl.lamp.compiler.msil.emit
-
-import ch.epfl.lamp.compiler.msil.Type
-import ch.epfl.lamp.compiler.msil.ConstructorInfo
-import ch.epfl.lamp.compiler.msil.ParameterInfo
-import java.io.IOException
-
-/**
- * Creates or associates parameter information.
- * Parameter attributes need to be consistent with the method signature.
- * If you specify Out attributes for a parameter, you should ensure that
- * the type of that method parameter is a ByRef type.
- *
- * @author Nikolay Mihaylov
- * @version 1.0
- */
-class ParameterBuilder(name: String, tpe: Type, attr: Int, pos: Int)
- extends ParameterInfo(name, tpe, attr, pos)
- with ICustomAttributeSetter
- with Visitable
-{
-
- //##########################################################################
-
- /** Sets a custom attribute. */
- def SetCustomAttribute(constr: ConstructorInfo, value: Array[Byte]) {
- addCustomAttribute(constr, value)
- }
-
- //##########################################################################
-
- /** The apply method for a visitor */
- @throws(classOf[IOException])
- def apply(v: Visitor) {
- v.caseParameterBuilder(this)
- }
-
- //##########################################################################
-}
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/emit/SingleFileILPrinterVisitor.scala b/src/msil/ch/epfl/lamp/compiler/msil/emit/SingleFileILPrinterVisitor.scala
deleted file mode 100644
index 5d59d4d25a..0000000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/emit/SingleFileILPrinterVisitor.scala
+++ /dev/null
@@ -1,93 +0,0 @@
-/*
- * System.Reflection.Emit-like API for writing .NET assemblies in MSIL
- */
-
-
-package ch.epfl.lamp.compiler.msil.emit
-
-import java.io.FileWriter
-import java.io.BufferedWriter
-import java.io.PrintWriter
-import java.io.IOException
-import java.util.Iterator
-import java.util.HashMap
-import java.util.Arrays
-
-import ch.epfl.lamp.compiler.msil._
-import ch.epfl.lamp.compiler.msil.emit
-import ch.epfl.lamp.compiler.msil.util.Table
-
-/**
- * The MSIL printer Visitor. It prints a complete
- * assembly in a single file that can be compiled by ilasm.
- *
- * @author Nikolay Mihaylov
- * @author Daniel Lorch
- * @version 1.0
- */
-final class SingleFileILPrinterVisitor(_fileName: String) extends ILPrinterVisitor {
- var fileName: String = _fileName
-
- out = new PrintWriter(new BufferedWriter(new FileWriter(fileName)))
-
- /**
- * Visit an AssemblyBuilder
- */
- @throws(classOf[IOException])
- def caseAssemblyBuilder(assemblyBuilder: AssemblyBuilder) {
- ILPrinterVisitor.currAssembly = assemblyBuilder
-
- // first get the entryPoint
- this.entryPoint = assemblyBuilder.EntryPoint
-
- // all external assemblies
- as = assemblyBuilder.getExternAssemblies()
- scala.util.Sorting.quickSort(as)(assemblyNameComparator) // Arrays.sort(as, assemblyNameComparator)
-
- assemblyBuilder.generatedFiles += fileName
- printAssemblyBoilerplate()
-
- // print each module
- var m: Array[Module] = assemblyBuilder.GetModules()
- nomembers = true
- for(i <- 0 until m.length) {
- print(m(i).asInstanceOf[ModuleBuilder])
- }
-
- nomembers = false
- for(i <- 0 until m.length) {
- print(m(i).asInstanceOf[ModuleBuilder])
- }
- // close out file
- out.close()
- ILPrinterVisitor.currAssembly = null
- }
-
- /**
- * Visit a ModuleBuilder
- */
- @throws(classOf[IOException])
- def caseModuleBuilder(module: ModuleBuilder) {
- // print module declaration
- currentModule = module
- if (nomembers) {
- print(".module \'"); print(module.Name); println("\'")
- printAttributes(module)
- }
-
- if (!module.globalsCreated)
- module.CreateGlobalFunctions()
-
- var m: Array[MethodInfo] = module.GetMethods()
- for(i <- 0 until m.length) {
- print(m(i).asInstanceOf[MethodBuilder])
- }
-
- var t: Array[Type] = module.GetTypes()
- for(i <- 0 until t.length) {
- print(t(i).asInstanceOf[TypeBuilder])
- }
- currentModule = null
- }
-
-} // class SingleFileILPrinterVisitor
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/emit/TypeBuilder.scala b/src/msil/ch/epfl/lamp/compiler/msil/emit/TypeBuilder.scala
deleted file mode 100644
index 57dc883898..0000000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/emit/TypeBuilder.scala
+++ /dev/null
@@ -1,261 +0,0 @@
-/*
- * System.Reflection.Emit-like API for writing .NET assemblies to MSIL
- */
-
-
-package ch.epfl.lamp.compiler.msil.emit
-
-import ch.epfl.lamp.compiler.msil._
-
-import ch.epfl.lamp.compiler.msil.util.PECustomMod
-
-import java.io.IOException
-
-/**
- * Defines and creates new instances of classes during runtime.
- *
- * @author Nikolay Mihaylov
- * @version 1.0
- */
-class TypeBuilder (module: Module, attributes: Int, fullName: String, baseType: Type, interfaces: Array[Type], declType: Type)
- extends Type(module, attributes, fullName, baseType, interfaces, declType, 0)
- with ICustomAttributeSetter
- with Visitable
-{
- import TypeBuilder._
-
- //##########################################################################
- // public members
-
- /** 'Bakes' the type. */
- def CreateType(): Type = {
- fields = fieldBuilders.toArray // (new Array[FieldInfo](fieldBuilders.size())).asInstanceOf[Array[FieldInfo]]
- methods = methodBuilders.toArray // (new Array[MethodInfo](methodBuilders.size())).asInstanceOf[Array[MethodInfo]]
- constructors = constructorBuilders.toArray // (new Array[ConstructorInfo](constructorBuilders.size())).asInstanceOf[Array[ConstructorInfo]]
- nestedTypes = nestedTypeBuilders.toArray // (new Array[Type](nestedTypeBuilders.size())).asInstanceOf[Array[Type]]
-
- raw = false
- if (DeclaringType == null)
- Module.asInstanceOf[ModuleBuilder].addType(this)
- return this
- }
-
- /**
- * Adds a new field to the class, with the given name, attributes and field type. The location has no custom mods.
- */
- def DefineField(name: String, fieldType: Type, attrs: Short): FieldBuilder = {
- val fieldTypeWithCustomMods = new PECustomMod(fieldType, null)
- DefineField(name, fieldTypeWithCustomMods, attrs)
- }
-
- /**
- * Adds a new field to the class, with the given name, attributes and (field type, custom mods) combination.
- */
- def DefineField(name: String, fieldTypeWithMods: PECustomMod, attrs: Short): FieldBuilder = {
- val field: FieldBuilder = new FieldBuilder(name, this, attrs, fieldTypeWithMods)
- fieldBuilders += field
- return field
- }
-
- /**
- * Adds a new method to the class, with the given name and
- * method signature.
- */
- def DefineMethod(name: String, attrs: Short, returnType: Type, paramTypes: Array[Type]): MethodBuilder = {
- val method = new MethodBuilder(name, this, attrs, returnType, paramTypes)
- val methods = methodBuilders.iterator
- while(methods.hasNext) {
- val m = methods.next().asInstanceOf[MethodInfo]
- if (methodsEqual(m, method)) {
- throw new RuntimeException("["+ Assembly() + "] Method has already been defined: " + m)
- }
- }
- methodBuilders += method
- return method
- }
-
- /**
- * Adds a new constructor to the class, with the given attributes
- * and signature.
- */
- def DefineConstructor(attrs: Short, callingConvention: Short, paramTypes: Array[Type]): ConstructorBuilder = {
- val constr = new ConstructorBuilder(this, attrs, paramTypes)
- val iter = constructorBuilders.iterator
- while(iter.hasNext) {
- val c = iter.next().asInstanceOf[ConstructorInfo]
- if (constructorsEqual(c, constr)) {
- throw new RuntimeException("["+ Assembly() + "] Constructor has already been defined: " + c)
- }
- }
- constructorBuilders += constr
- return constr
- }
-
- /**
- * Defines a nested type given its name.
- */
- def DefineNestedType(name: String, attributes: Int, baseType: Type, interfaces: Array[Type]): TypeBuilder = {
- val nested = nestedTypeBuilders.iterator
- while(nested.hasNext) {
- val nt = nested.next
- if (nt.Name.equals(name)) {
- val message = "Nested type " + name + " has already been defined: " + nt
- throw new RuntimeException(message)
- }
- }
- val t = new TypeBuilder(Module, attributes, name, baseType, interfaces, this)
- nestedTypeBuilders += t
- return t
- }
-
- /** Get the field with the corresponding name. */
- override def GetField(name: String): FieldInfo = {
- testRaw(name)
- return super.GetField(name)
- }
-
- /** Get all fields of the current Type. */
- override def GetFields(): Array[FieldInfo] = {
- testRaw("<GetFields>")
- return super.GetFields()
- }
-
- /**
- * Searches for a public instance constructor whose parameters
- * match the types in the specified array.
- */
- override def GetConstructor(params: Array[Type]): ConstructorInfo = {
- testRaw(".ctor" + types2String(params))
- return super.GetConstructor(params)
- }
-
- /**
- * Returns all the public constructors defined for the current Type.
- */
- override def GetConstructors(): Array[ConstructorInfo] = {
- testRaw("<GetConstructors>")
- return super.GetConstructors()
- }
-
- /**
- * Searches for the specified public method whose parameters
- * match the specified argument types.
- */
- override def GetMethod(name: String, params: Array[Type]): MethodInfo = {
- testRaw(name + types2String(params))
- return super.GetMethod(name, params)
- }
-
- /** Returns all the public methods of the current Type. */
- override def GetMethods(): Array[MethodInfo] = {
- testRaw("<GetMethods>")
- return super.GetMethods()
- }
-
- /** Searches for the nested type with the specified name. */
- override def GetNestedType(name: String): Type = {
- testRaw(name)
- super.GetNestedType(name)
- }
-
- /** Returns all the types nested within the current Type. */
- override def GetNestedTypes(): Array[Type] = {
- testRaw("<GetNestedTypes>")
- super.GetNestedTypes()
- }
-
- /** Returns a Type object that represents a one-dimensional array of the current type */
- def MakeArrayType(): Type = {
- Type.mkArray(this, 1)
- }
-
- /** Sets a custom attribute. */
- def SetCustomAttribute(constr: ConstructorInfo, value: Array[Byte]) {
- addCustomAttribute(constr, value)
- }
-
- def setPosition(sourceLine: Int, sourceFilename: String) {
- this.sourceLine = sourceLine
- this.sourceFilename = sourceFilename
- }
-
- def setSourceFilepath(sourceFilepath: String) {
- this.sourceFilepath = sourceFilepath
- }
-
- //##########################################################################
- // protected members
-
- var sourceLine: Int = _
- var sourceFilename: String = _
- var sourceFilepath: String = _
-
- var fieldBuilders = scala.collection.mutable.ArrayBuffer.empty[FieldBuilder]
- var methodBuilders = scala.collection.mutable.ArrayBuffer.empty[MethodBuilder]
- var constructorBuilders = scala.collection.mutable.ArrayBuffer.empty[ConstructorBuilder]
- var nestedTypeBuilders = scala.collection.mutable.ArrayBuffer.empty[TypeBuilder]
-
- // shows if the type is 'raw', i.e. still subject to changes
- private var raw = true
-
- // throws an exception if the type is 'raw',
- // i.e. not finalized by call to CreateType
- protected def testRaw(member: String) {
- if (raw)
- throw new RuntimeException("Not supported for TypeBuilder before CreateType(): " +
- FullName + "::" + member)
- }
-
- //##########################################################################
- // public members not part of the Reflection.Emit.TypeBuilder interface.
-
- /** The apply method for a visitor. */
- @throws(classOf[IOException])
- def apply(v: Visitor) {
- v.caseTypeBuilder(this)
- }
-
- //##########################################################################
-
-} // class TypeBuilder
-
-object TypeBuilder {
- def types2String(types: Array[Type]): String = {
- var s = new StringBuffer("(")
- for(i <- 0 until types.length) {
- if (i > 0) s.append(", ")
- s.append(types(i))
- }
- s.append(")")
- return s.toString()
- }
-
- def methodsEqual(m1: MethodInfo, m2: MethodInfo): Boolean = {
- if (!m1.Name.equals(m2.Name))
- return false
- if (m1.ReturnType != m2.ReturnType)
- return false
- val p1 = m1.GetParameters()
- val p2 = m2.GetParameters()
- if (p1.length != p2.length)
- return false
- for(i <- 0 until p1.length)
- if (p1(i).ParameterType != p2(i).ParameterType)
- return false
- return true
- }
-
- def constructorsEqual(c1: ConstructorInfo, c2: ConstructorInfo): Boolean = {
- if (c1.IsStatic != c2.IsStatic)
- return false
- val p1 = c1.GetParameters()
- val p2 = c2.GetParameters()
- if (p1.length != p2.length)
- return false
- for(i <- 0 until p1.length)
- if (p1(i).ParameterType != p2(i).ParameterType)
- return false
- return true
- }
-
-}
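TypeBuilder above stays "raw" until CreateType() bakes it, and testRaw rejects member queries (GetField, GetMethods, and so on) before that point. The following is a tiny, self-contained Scala sketch of the same guard, with hypothetical names and purely for illustration.

class BakedBuilder {
  private var raw = true                                        // still subject to changes
  private val members = scala.collection.mutable.ArrayBuffer.empty[String]

  def defineMember(name: String): this.type = { members += name; this }

  def createType(): this.type = { raw = false; this }           // "bakes" the builder

  def getMembers: List[String] = {
    if (raw) throw new RuntimeException("Not supported before createType()")
    members.toList
  }
}

object BakedBuilderDemo extends App {
  val b = new BakedBuilder().defineMember("x").defineMember("y")
  // calling b.getMembers here would throw: the builder has not been baked yet
  println(b.createType().getMembers)                            // List(x, y)
}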
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/emit/Visitable.scala b/src/msil/ch/epfl/lamp/compiler/msil/emit/Visitable.scala
deleted file mode 100644
index 28ec801dd4..0000000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/emit/Visitable.scala
+++ /dev/null
@@ -1,24 +0,0 @@
-/*
- * System.Reflection.Emit-like API for writing .NET assemblies to MSIL
- */
-
-
-package ch.epfl.lamp.compiler.msil.emit
-
-import java.io.IOException
-
-/**
- * The Visitable interface
- */
-trait Visitable {
-
- //##########################################################################
-
- /**
- * the visitable method to apply a visitor
- */
- @throws(classOf[IOException])
- def apply(v: Visitor): Unit
-
- //##########################################################################
-}
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/emit/Visitor.scala b/src/msil/ch/epfl/lamp/compiler/msil/emit/Visitor.scala
deleted file mode 100644
index d4b84cdd4e..0000000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/emit/Visitor.scala
+++ /dev/null
@@ -1,58 +0,0 @@
-/*
- * System.Reflection.Emit-like API for writing .NET assemblies to MSIL
- */
-
-
-package ch.epfl.lamp.compiler.msil.emit
-
-import java.io.IOException
-
-/**
- * The Visitor interface to walk through the MSIL code Builder hierarchy.
- */
-trait Visitor {
-
- //##########################################################################
-
- /** Visit an AssemblyBuilder */
- @throws(classOf[IOException])
- def caseAssemblyBuilder(assemblyBuilder: AssemblyBuilder): Unit
-
- /** Visit a ModuleBuilder */
- @throws(classOf[IOException])
- def caseModuleBuilder(moduleBuilder: ModuleBuilder): Unit
-
- /** Visit a TypeBuilder */
- @throws(classOf[IOException])
- def caseTypeBuilder(typeBuilder: TypeBuilder): Unit
-
- /** Visit a FieldBuilder */
- @throws(classOf[IOException])
- def caseFieldBuilder(fieldBuilder: FieldBuilder): Unit
-
- /** Visit a ConstructorBuilder */
- @throws(classOf[IOException])
- def caseConstructorBuilder(constructorBuilder: ConstructorBuilder): Unit
-
- /** Visit a MethodBuilder */
- @throws(classOf[IOException])
- def caseMethodBuilder(methodBuilder: MethodBuilder): Unit
-
- /** Visit a ParameterBuilder */
- @throws(classOf[IOException])
- def caseParameterBuilder(parameterBuilder: ParameterBuilder): Unit
-
- /** Visit an ILGenerator */
- @throws(classOf[IOException])
- def caseILGenerator(iLGenerator: ILGenerator): Unit
-
- /** Visit an OpCode */
- @throws(classOf[IOException])
- def caseOpCode(opCode: OpCode): Unit
-
- /** Visit a LocalBuilder */
- @throws(classOf[IOException])
- def caseLocalBuilder(localBuilder: LocalBuilder): Unit
-
- //##########################################################################
-}
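The removed emit classes all follow this Visitable/Visitor pairing: each builder's apply forwards to the matching caseXxx method, and ILPrinterVisitor walks the hierarchy that way. Here is a self-contained Scala sketch of the same pattern with made-up node types, for illustration only.

trait Node { def accept(v: NodeVisitor): Unit }

trait NodeVisitor {
  def caseLeaf(leaf: Leaf): Unit
  def caseBranch(branch: Branch): Unit
}

final case class Leaf(value: Int) extends Node {
  def accept(v: NodeVisitor): Unit = v.caseLeaf(this)
}

final case class Branch(left: Node, right: Node) extends Node {
  def accept(v: NodeVisitor): Unit = v.caseBranch(this)
}

// A printing visitor, analogous in spirit to ILPrinterVisitor walking the builders.
class PrintVisitor extends NodeVisitor {
  def caseLeaf(leaf: Leaf): Unit = println("leaf " + leaf.value)
  def caseBranch(branch: Branch): Unit = { branch.left.accept(this); branch.right.accept(this) }
}

object VisitorDemo extends App {
  Branch(Leaf(1), Leaf(2)).accept(new PrintVisitor)   // prints "leaf 1" then "leaf 2"
}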
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/tests/CustomAttributesTest.java b/src/msil/ch/epfl/lamp/compiler/msil/tests/CustomAttributesTest.java
deleted file mode 100644
index 9a6e28a545..0000000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/tests/CustomAttributesTest.java
+++ /dev/null
@@ -1,31 +0,0 @@
-
-package ch.epfl.lamp.compiler.msil.tests;
-
-import ch.epfl.lamp.compiler.msil.*;
-import ch.epfl.lamp.compiler.msil.util.Table;
-
-import java.io.PrintStream;
-
-public class CustomAttributesTest {
- public static void main(String[] args) {
- if (args.length < 1) {
- System.err.println("You must supply a filename!");
- System.exit(1);
- }
-
- Assembly assem = Assembly.LoadFrom(args[0]);
- Type.initMSCORLIB(assem);
-
- testCustomAttributes();
- }
-
- public static void testCustomAttributes() {
- Object[] attrs = Type.GetType("System.ObsoleteAttribute")
- .GetCustomAttributes(false);
- assert attrs != null;
- for (int i = 0; i < attrs.length; i++) {
- System.out.println("\t" + attrs[i]);
- }
- }
-
-}
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/tests/JavaTypeTest.java b/src/msil/ch/epfl/lamp/compiler/msil/tests/JavaTypeTest.java
deleted file mode 100644
index 96ec1bfeea..0000000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/tests/JavaTypeTest.java
+++ /dev/null
@@ -1,18 +0,0 @@
-
-package ch.epfl.lamp.compiler.msil.tests;
-
-import ch.epfl.lamp.compiler.msil.*;
-import ch.epfl.lamp.compiler.msil.util.VJSAssembly;
-
-public class JavaTypeTest {
-
- public static void main(String[] args) {
- if (args.length < 1) {
- System.err.println("usage: java test.JavaTypeTest classname");
- System.exit(1);
- }
-
- Type type = VJSAssembly.VJSLIB.GetType(args[0]);
- MembersTest.dumpType(System.out, type);
- }
-}
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/tests/MembersTest.java b/src/msil/ch/epfl/lamp/compiler/msil/tests/MembersTest.java
deleted file mode 100644
index 37a5c6ea90..0000000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/tests/MembersTest.java
+++ /dev/null
@@ -1,100 +0,0 @@
-
-package ch.epfl.lamp.compiler.msil.tests;
-
-import ch.epfl.lamp.compiler.msil.*;
-import ch.epfl.lamp.compiler.msil.util.Table;
-
-import java.io.PrintStream;
-
-public class MembersTest {
-
- public static void main(String[] args) {
- if (args.length < 1) {
- System.err.println
- ("usage: java test.MembersTest assembly [classname]");
- System.exit(1);
- }
-
- Assembly mscorlib = Assembly.LoadFrom("mscorlib.dll");
- Type.initMSCORLIB(mscorlib);
- Assembly assem = Assembly.LoadFrom(args[0]);
- if (args.length > 1) {
- Type type = assem.GetType(args[1]);
- if (type != null)
- dumpMember(System.out, type);
- else System.err.println("Cannot find type " + args[1]
- + " in " + assem);
- } else {
- Type[] types = assem.GetTypes();
- System.out.println("Number of types in assembly " + assem
- + " -> " + types.length);
- dumpCustomAttributes(System.out, "assembly: ", assem);
- Module[] modules = assem.GetModules();
- for (int i = 0; i < modules.length; i++) {
- dumpCustomAttributes(System.out, "module " + modules[i] + ": ",
- modules[i]);
- }
- dumpMembers(System.out, types);
- }
- }
-
- public static final void dumpMember(PrintStream out, MemberInfo member) {
- try {
- if (member.MemberType() == MemberTypes.TypeInfo
- || member.MemberType() == MemberTypes.NestedType) {
- Type type = (Type)member;
- dumpCustomAttributes(out, "", type);
- out.print(TypeAttributes.accessModsToString(type.Attributes));
- out.print(type.IsInterface() ? " interface " : " class ");
- out.print(type);
- if (type.BaseType() != null)
- out.println(" extends " + type.BaseType());
- Type[] ifaces = type.GetInterfaces();
- if (ifaces.length > 0) {
- out.print("\timplements ");
- for (int i = 0; i < ifaces.length; i++) {
- out.print(ifaces[i]);
- if (i < (ifaces.length - 1))
- out.print(", ");
- }
- out.println();
- }
- out.println("{");
- int all = BindingFlags.Public | BindingFlags.DeclaredOnly// | BindingFlags.NonPublic
- | BindingFlags.Instance | BindingFlags.Static;
- dumpMembers(out, type.GetNestedTypes());
- dumpMembers(out, type.GetFields(all));
- dumpMembers(out, type.GetConstructors(all));
- dumpMembers(out, type.GetMethods(all));
- dumpMembers(out, type.GetProperties(all));
- dumpMembers(out, type.GetEvents());
- out.println("}");
- } else {
- dumpCustomAttributes(out, "", member);
- out.print(MemberTypes.toString(member.MemberType()));
- out.print(": "); out.print(member);
- out.println();
- }
- } catch (Throwable e) {
- String message = MemberTypes.toString(member.MemberType())
- + ": " + member;
- throw new RuntimeException(message, e);
- }
- }
-
- public static void dumpCustomAttributes(PrintStream out,
- String prefix,
- ICustomAttributeProvider att)
- {
- Object[] attrs = att.GetCustomAttributes(false);
- for (int j = 0; j < attrs.length; j++)
- out.println(prefix + attrs[j]);
- }
-
- public static void dumpMembers(PrintStream out, MemberInfo[] members) {
- for (int i = 0; i < members.length; i++) {
- dumpMember(out, members[i]);
- }
- }
-
-}
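MembersTest loads an assembly and recursively dumps each type's fields, constructors and methods. A rough JVM analogue is sketched below (illustrative only; it uses java.lang.reflect rather than the removed msil API) to produce the same kind of dump for a class on the classpath.

object JvmMembersDump {
  def dump(className: String): Unit = {
    val cls = Class.forName(className)
    val ext = Option(cls.getSuperclass).map(s => " extends " + s.getName).getOrElse("")
    println((if (cls.isInterface) "interface " else "class ") + cls.getName + ext)
    cls.getDeclaredFields.foreach(f => println("  field:  " + f))
    cls.getDeclaredConstructors.foreach(c => println("  ctor:   " + c))
    cls.getDeclaredMethods.foreach(m => println("  method: " + m))
  }

  def main(args: Array[String]): Unit =
    dump(if (args.nonEmpty) args(0) else "java.lang.String")
}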
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/tests/TableDump.java b/src/msil/ch/epfl/lamp/compiler/msil/tests/TableDump.java
deleted file mode 100644
index 1df389b011..0000000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/tests/TableDump.java
+++ /dev/null
@@ -1,311 +0,0 @@
-
-package ch.epfl.lamp.compiler.msil.tests;
-
-import ch.epfl.lamp.compiler.msil.PEFile;
-import ch.epfl.lamp.compiler.msil.util.Table;
-import ch.epfl.lamp.compiler.msil.util.Table.*;
-
-import java.io.PrintStream;
-import java.io.FileNotFoundException;
-
-public class TableDump extends PEFile {
-
- //##########################################################################
-
- public TableDump(String filename) throws FileNotFoundException {
- super(filename);
- }
-
- /***/
- public void dump(PrintStream out) {
- out.println("CLI RVA: " + CLI_RVA);
- out.println("Optional header size: " + optHeaderSize);
- out.println("Number of sections: " + numOfSections);
- out.println();
-
- for (int i = 0; i < sections.length; i++) {
- sections[i].dump(out);
- out.println();
- }
-
- out.println("MetaData Offset: 0x" + Integer.toHexString(posMetadata));
- out.println("Number of streams: " + numOfStreams);
-
- out.println("#~ stream"); Meta.dump(out); out.println();
- out.println("#Strings stream"); Strings.dump(out); out.println();
- if (US != null) {
- out.println("#US stream"); US.dump(out); out.println();
- }
- out.println("#GUID stream"); GUID.dump(out); out.println();
- out.println("#Blob stream"); Blob.dump(out); out.println();
-
- out.println("Heap Sizes IndexedSeq = 0x0" + Integer.toHexString(heapSizes));
- out.println();
-
- for(int i = 0; i < Table.MAX_NUMBER; i++)
- if(getTable(i).rows > 0) {
- dump(out, getTable(i));
- out.println();
- }
-
- }
-
- /** Dumps the contents of this table. */
- public void dump(PrintStream out, Table table) {
- out.println("Table:" + " ID = 0x" + byte2hex(table.id));
- out.println("\tname = " + table.getTableName());
- out.println("\trows = " + table.rows);
- //out.println("\tStart pos in file = 0x" + Long.toHexString(table.start));
- for (int i = 1; i <= table.rows; i++)
- dumpRow(out, table, i);
- }
-
- public void dumpIndex(PrintStream out, int tableSetId, int index) {
- int tableId = Table.getTableId(tableSetId, index);
- int row = Table.getTableIndex(tableSetId, index);
- out.print(getTable(tableId).getTableName());
- out.print('[');
- out.print(getTable(tableId).isShort ? short2hex(row) : int2hex(row));
- out.print(']');
- }
-
- public void dumpRow(PrintStream out, Table table, int row) {
- table.readRow(row);
- out.print(table.getTableName());
- out.print("[" + short2hex(row) + "]: ");
- dumpRow(out, table);
- out.println();
- }
-
- /** Prints the current content of the fields of the class. */
- public void dumpRow(PrintStream out, Table table) {
- if (table instanceof ModuleDef) {
- ModuleDef t = (ModuleDef)table;
- out.print("Generation = 0x" + short2hex(t.Generation));
- out.print("; Name = " + getString(t.Name));
- //out.print("; Mvid = (" + bytes2hex(getGUID(Mvid)) + ")");
- } else if (table instanceof TypeRef) {
- TypeRef t = (TypeRef)table;
- out.print("FullName = " + t.getFullName());
- out.print("; ResolutionScope = 0x" + int2hex(t.ResolutionScope));
- } else if (table instanceof TypeDef) {
- TypeDef t = (TypeDef)table;
- out.print("Flags = 0x"); out.print(int2hex(t.Flags));
- out.print("; FullName = "); out.print(t.getFullName());
- out.print("; Extends = ");
- dumpIndex(out, Table._TypeDefOrRef, t.Extends);
- out.print("; FieldList = "); out.print(t.FieldList);
- out.print("; MethodList = "); out.print(t.MethodList);
- } else if (table instanceof FieldTrans) {
- FieldTrans t = (FieldTrans)table;
- out.print("Field = "); out.print(t.Field);
- } else if (table instanceof FieldDef) {
- FieldDef t = (FieldDef)table;
- out.print("Flags = 0x" + short2hex(t.Flags));
- out.print("; Name = " + t.getName());
- out.print("; Signature = (" +
- bytes2hex(getBlob(t.Signature)) + ")");
- } else if (table instanceof MethodTrans) {
- MethodTrans t = (MethodTrans)table;
- out.print("Method = "); out.print(t.Method);
- } else if (table instanceof MethodDef) {
- MethodDef t = (MethodDef)table;
- out.print("Flags = 0x" + short2hex(t.Flags));
- out.print("; Name = " + t.getName());
- out.print("; ParamList = " + t.ParamList);
- out.print("; Signature = (" +
- bytes2hex(getBlob(t.Signature)) + ")");
- } else if (table instanceof ParamDef) {
- ParamDef t = (ParamDef)table;
- out.print("Flags = 0x" + short2hex(t.Flags));
- out.print("; Name = " + t.getName());
- out.print("; Sequence = " + t.Sequence);
- } else if (table instanceof InterfaceImpl) {
- InterfaceImpl t = (InterfaceImpl)table;
- out.print("Class = 0x" + short2hex(t.Class));// + " (ref to: ");
- //TypeDef td = (TypeDef) getTable(TypeDef.ID);
- //td.readRow(Class);
- //td.dumpRow(out);
- out.print("; Interface = 0x" + short2hex(t.Interface));
- } else if (table instanceof MemberRef) {
- MemberRef t = (MemberRef)table;
- out.print("Name = " + t.getName());
- out.print("; Signature = (" +
- bytes2hex(getBlob(t.Signature)) + ")");
- out.print("; Class = " + t.Class);
- } else if (table instanceof Constant) {
- Constant t = (Constant)table;
- out.print("Parent = "); dumpIndex(out, Table._HasConstant, t.Parent);
- out.print("; Type = 0x" + byte2hex(t.Type));
- out.print("; Value = (" + bytes2hex(getBlob(t.Value)));
- out.print("); Value = " + t.getValue());
- } else if (table instanceof CustomAttribute) {
- CustomAttribute t = (CustomAttribute)table;
- //out.print("Parent = 0x" + int2hex(t.Parent));
- out.print("Parent = ");
- dumpIndex(out, Table._HasCustomAttribute, t.Parent);
- //out.print("; Type = 0x" + short2hex(t.Type));
- out.print("; Type = ");
- dumpIndex(out, Table._CustomAttributeType, t.Type);
- out.print("; Value = (" + bytes2hex(t.getValue()) + ")");
- } else if (table instanceof FieldMarshal) {
- FieldMarshal t = (FieldMarshal)table;
- out.print("NativeType = (");
- out.print(bytes2hex(getBlob(t.NativeType)) + ")");
- } else if (table instanceof DeclSecurity) {
- DeclSecurity t = (DeclSecurity)table;
- out.print("Action = 0x" + short2hex(t.Action));
- out.print("; PermissionSet = (" +
- bytes2hex(getBlob(t.PermissionSet)) + ")");
- } else if (table instanceof ClassLayout) {
- ClassLayout t = (ClassLayout)table;
- out.print("PackingSize = 0x" + short2hex(t.PackingSize));
- out.print("; ClassSize = 0x" + int2hex(t.ClassSize));
- out.print(": Parent = " + t.Parent + " (ref to: ");
- dumpRow(out, this.TypeDef(t.Parent));
- out.print(")");
- } else if (table instanceof FieldLayout) {
- FieldLayout t = (FieldLayout)table;
- out.print("Offset = 0x" + int2hex(t.Offset));
- out.print("; Field = (ref to: ");
- dumpRow(out, this.FieldDef(t.Field));
- out.print(")");
- } else if (table instanceof StandAloneSig) {
- StandAloneSig t = (StandAloneSig)table;
- out.print("StandAloneSig: Signature = (" +
- bytes2hex(getBlob(t.Signature)) + ")");
- } else if (table instanceof EventMap) {
- EventMap t = (EventMap)table;
- out.print("Parent = 0x" + int2hex(t.Parent) + " (ref to: ");
- dumpRow(out, this.TypeDef(t.Parent));
- out.print("); EventList = 0x"); out.print(int2hex(t.EventList));
- } else if (table instanceof EventDef) {
- EventDef t = (EventDef)table;
- out.print("EventFlags = 0x" + short2hex(t.EventFlags));
- out.print("; Name = " + t.getName());
- out.print("; EventType = 0x" + int2hex(t.EventType));
- } else if (table instanceof PropertyMap) {
- PropertyMap t = (PropertyMap)table;
- out.print("Parent = " + t.Parent + " (ref to: ");
- dumpRow(out, this.TypeDef(t.Parent));
- out.print(")");
- } else if (table instanceof PropertyDef) {
- PropertyDef t = (PropertyDef)table;
- out.print("Flags = 0x" + short2hex(t.Flags));
- out.print("; Name = " + t.getName());
- out.print("; Type = (" + bytes2hex(getBlob(t.Type)) + ")");
- } else if (table instanceof MethodSemantics) {
- MethodSemantics t = (MethodSemantics)table;
- out.print("Semantics = 0x" + short2hex(t.Semantics));
- out.print("; Method = 0x" + int2hex(t.Method) + " (ref to: ");
- dumpRow(out, this.MethodDef(t.Method));
- out.print("); Association = 0x" + int2hex(t.Association));
- } else if (table instanceof MethodImpl) {
- MethodImpl t = (MethodImpl)table;
- out.print("Class = (ref to: ");
- dumpRow(out, this.TypeDef(t.Class));
- out.print(")");
- } else if (table instanceof ModuleRef) {
- ModuleRef t = (ModuleRef)table;
- out.print("Name = " + t.getName());
- } else if (table instanceof TypeSpec) {
- TypeSpec t = (TypeSpec)table;
- out.print("Signature = (" +
- bytes2hex(getBlob(t.Signature)) + ")");
- } else if (table instanceof ImplMap) {
- ImplMap t = (ImplMap)table;
- out.print("ImportName = " + getString(t.ImportName));
- } else if (table instanceof FieldRVA) {
- FieldRVA t = (FieldRVA)table;
- out.print("RVA = 0x" + int2hex(t.RVA));
- out.print("; Field = (ref to: ");
- dumpRow(out, this.FieldDef(t.Field));
- out.print(")");
- } else if (table instanceof AssemblyDef) {
- AssemblyDef t = (AssemblyDef)table;
- out.print("Flags = 0x" + int2hex(t.Flags));
- out.print(" ; Name = " + getString(t.Name));
- out.print("; Culture = " + getString(t.Culture));
- out.print(" ; Version = " + t.MajorVersion + ".");
- out.print(t.MinorVersion + "." + t.BuildNumber);
- out.print("." + t.RevisionNumber);
- out.print("; HashAlgId = 0x" + int2hex(t.HashAlgId));
- out.print("; PublicKey = (");
- out.print(bytes2hex(getBlob(t.PublicKey)) + ")");
- } else if (table instanceof AssemblyProcessor) {
- AssemblyProcessor t = (AssemblyProcessor)table;
- out.print("Processor = 0x" + int2hex(t.Processor));
- } else if (table instanceof AssemblyOS) {
- AssemblyOS t = (AssemblyOS)table;
- out.print("!?!");
- } else if (table instanceof AssemblyRef) {
- AssemblyRef t = (AssemblyRef)table;
- out.print("Flags = 0x" + int2hex(t.Flags));
- out.print("; Name = " + getString(t.Name));
- out.print("; Culture = " + getString(t.Culture));
- out.print("; Version = " + t.MajorVersion + "." + t.MinorVersion);
- out.print("." + t.BuildNumber + "." + t.RevisionNumber);
- out.print("; PublicKeyOrToken = (" +
- bytes2hex(getBlob(t.PublicKeyOrToken)) + ")");
- out.print("; HashValue = (" +
- bytes2hex(getBlob(t.HashValue)) + ")");
- } else if (table instanceof AssemblyRefProcessor) {
- AssemblyRefProcessor t = (AssemblyRefProcessor)table;
- out.print("!?!");
- } else if (table instanceof AssemblyRefOS) {
- AssemblyRefOS t = (AssemblyRefOS)table;
- out.print("!?!");
- } else if (table instanceof FileDef) {
- FileDef t = (FileDef)table;
- out.print("Flags = 0x" + int2hex(t.Flags));
- out.print("; Name = " + t.getName());
- out.print("; HashValue = (" + bytes2hex(getBlob(t.HashValue)) +")");
- } else if (table instanceof ExportedType) {
- ExportedType t = (ExportedType)table;
- out.print("FullName = " + t.getFullName());
- } else if (table instanceof ManifestResource) {
- ManifestResource t = (ManifestResource)table;
- out.print("Name = " + getString(t.Name));
- out.print("; Flags = 0x" + int2hex(t.Flags));
- } else if (table instanceof NestedClass) {
- NestedClass t = (NestedClass)table;
- out.print(this.TypeDef(t.EnclosingClass).getFullName());
- out.print("/");
- out.print(this.TypeDef(t.NestedClass).getFullName());
- } else
- throw new RuntimeException("Unknown table " + table.getClass());
- }
-
- //##########################################################################
-
- public static void main(String[] args) {
- if (args.length < 1) {
- System.err.println("You must supply a filename!");
- System.exit(1);
- }
-
- TableDump file = null;
- try {
- file = new TableDump(args[0]);
- } catch (FileNotFoundException e) { e.printStackTrace(); }
-
- if (args.length > 1) {
- nextarg:
- for (int i = 1; i < args.length; i++) {
- String name = args[i];
- for (int tableId = 0; tableId < Table.MAX_NUMBER; tableId++) {
- Table table = file.getTable(tableId);
- if ((table.rows > 0) && name.equals(table.getTableName())) {
- file.dump(System.out, table);
- System.out.println();
- continue nextarg;
- }
- }
- System.err.println("No such table: " + name);
- }
- } else
- file.dump(System.out);
- }
-
- //##########################################################################
-}
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/tests/Test.java b/src/msil/ch/epfl/lamp/compiler/msil/tests/Test.java
deleted file mode 100644
index 2c5946a734..0000000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/tests/Test.java
+++ /dev/null
@@ -1,92 +0,0 @@
-
-package test;
-
-import ch.epfl.lamp.compiler.msil.*;
-import ch.epfl.lamp.compiler.msil.util.Table;
-
-import java.io.PrintStream;
-
-public class Test {
- public static void main(String[] args) {
- if (args.length < 1) {
- System.err.println("You must supply a filename!");
- System.exit(1);
- }
-
- Assembly assem = Assembly.LoadFrom(args[0]);
- Type.initMSCORLIB(assem);
-
- //"System.Collections.ArrayList"
- if (args.length >= 2) {
- Type t = Type.GetType(args[1]);
- dumpType(System.out, t);
- } else {
- dumpAssembly(assem);
- }
- }
-
-
- public static void dumpAssembly(Assembly assem) {
- Module[] modules = assem.GetModules();
-// System.out.println("Modules in assembly " + assem +
-// " (" + modules.length + ")");
-// for (int i = 0; i < modules.length; i++) {
-// System.out.println("\t" + modules[i]);
-// }
-
- Type[] types = modules[0].GetTypes();
-// System.out.println("Types in assembly " + assem +
-// " (" + types.length + ")");
- for (int i = 0; i < types.length; i++) {
- System.out.println("#" + i + " -> " + types[i]);
- types[i].completeType();
- }
- }
-
- public static final void dumpType(PrintStream out, Type type) {
- out.println("Type = " + type);
- out.println("Name = " + type.Name);
- out.println("Namespace = " + type.Namespace);
- out.println("FullName = " + type.FullName);
- out.println("Attributes = " + TypeAttributes.toString(type.Attributes));
- out.println("BaseType = " + type.BaseType);
- Type[] ifaces = type.GetInterfaces();
- if (ifaces != null) {
- for (int i = 0; i < ifaces.length; i++)
- out.println("\timplements " + ifaces[i]);
- }
- out.println("Assembly = " + type.Assembly);
- out.println("Module = " + type.Module);
- out.println("DeclaringType = " + type.DeclaringType);
- out.println("IsInterface = " + type.IsInterface);
- out.println("IsAbstract = " + type.IsAbstract);
-
- FieldInfo[] fields = type.GetFields(BindingFlags.Instance
- | BindingFlags.Static
- | BindingFlags.NonPublic);
- out.println("\nFields (" + fields.length + "):");
- for (int i = 0; i < fields.length; i++) {
- out.println("\t" + fields[i]);
- out.println("\t\tDeclaringType = " + fields[i].DeclaringType);
- out.println("\t\tReflectedType = " + fields[i].ReflectedType);
- }
-
- ConstructorInfo[] constrs = type.GetConstructors();
- out.println("\nConstructors (" + constrs.length + "):");
- for (int i = 0; i < constrs.length; i++) {
- out.println("\t" + constrs[i]);
- }
-
-// MethodInfo[] methods = type.GetMethods(BindingFlags.Instance
-// | BindingFlags.Static
-// | BindingFlags.Public
-// | BindingFlags.NonPublic);
- MethodInfo[] methods = type.GetMethods();
- out.println("\nMethods (" + methods.length + "):");
- for (int i = 0; i < methods.length; i++) {
- out.println("\t" + methods[i]);
- out.println("\t\tDeclaringType = " + methods[i].DeclaringType);
- out.println("\t\tReflectedType = " + methods[i].ReflectedType);
- }
- }
-}
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/util/PECustomMod.java b/src/msil/ch/epfl/lamp/compiler/msil/util/PECustomMod.java
deleted file mode 100644
index 56519e8487..0000000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/util/PECustomMod.java
+++ /dev/null
@@ -1,23 +0,0 @@
-package ch.epfl.lamp.compiler.msil.util;
-
-import ch.epfl.lamp.compiler.msil.Type;
-import ch.epfl.lamp.compiler.msil.CustomModifier;
-
-/**
- * A PECustomMod holds the info parsed from metadata per the CustomMod production in Sec. 23.2.7, Partition II.
- * */
-public final class PECustomMod {
-
- public final Type marked;
- public final CustomModifier[] cmods;
-
- /** Terminology:
- the CustomModifier(s) are markers,
- and the msil.Type is a type marked by those markers. */
- public PECustomMod(Type marked, CustomModifier[] cmods) {
- this.marked = marked;
- this.cmods = cmods;
- }
-
-}
-
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/util/PESection.java b/src/msil/ch/epfl/lamp/compiler/msil/util/PESection.java
deleted file mode 100644
index 454a94e55c..0000000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/util/PESection.java
+++ /dev/null
@@ -1,57 +0,0 @@
-/*
- * System.Reflection-like API for access to .NET assemblies (DLL & EXE)
- */
-
-
-package ch.epfl.lamp.compiler.msil.util;
-
-import ch.epfl.lamp.compiler.msil.PEFile;
-
-import java.io.PrintStream;
-
-/** Describes a section from a PE/COFF file
- *
- * @author Nikolay Mihaylov
- * @version 1.0
- */
-public final class PESection {
-
- private final PEFile file;
- private final long sectionStart;
-
- public final String name;
- public final int virtAddr;
- public final int virtSize;
- public final int realAddr;
- public final int realSize;
- public final int flags;
-
- private static final byte[] buf = new byte[8];
-
- public PESection(PEFile file) {
- this.file = file;
- sectionStart = file.pos();
- file.read(buf);
- int i;
- for(i = 7; (i >= 0) && (0 == buf[i]); i--);
- name = new String(buf, 0, i + 1);
- virtSize = file.readInt();
- virtAddr = file.readInt();
- realSize = file.readInt();
- realAddr = file.readInt();
- file.skip(3 * PEFile.INT_SIZE);
- flags = file.readInt();
- }
-
-
- public void dump(PrintStream out) {
- out.println("Section name: " + name +
- " (name.length=" + name.length() + ")");
- out.println("Virtual Address: 0x" + PEFile.int2hex(virtAddr));
- out.println("Virtual Size: 0x" + PEFile.int2hex(virtSize));
- out.println("Real Address: 0x" + PEFile.int2hex(realAddr));
- out.println("Real Size: 0x" + PEFile.int2hex(realSize));
- out.println("Flags: 0x" + PEFile.int2hex(flags));
- }
-
-} // class PESection
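The constructor above reads the section name from a fixed 8-byte, NUL-padded ASCII field and trims the trailing zero bytes. A standalone Scala sketch of just that decoding step:

object SectionNameSketch {
  def decodeName(buf: Array[Byte]): String = {
    require(buf.length == 8, "PE/COFF section names are stored in 8 bytes")
    var i = 7
    while (i >= 0 && buf(i) == 0) i -= 1          // drop trailing NUL padding
    new String(buf, 0, i + 1, "US-ASCII")
  }

  def main(args: Array[String]): Unit = {
    val raw = ".text\u0000\u0000\u0000".getBytes("US-ASCII")
    println(decodeName(raw))                      // .text
  }
}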
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/util/PEStream.java b/src/msil/ch/epfl/lamp/compiler/msil/util/PEStream.java
deleted file mode 100644
index 649d9e74f2..0000000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/util/PEStream.java
+++ /dev/null
@@ -1,199 +0,0 @@
-/*
- * System.Reflection-like API for access to .NET assemblies (DLL & EXE)
- */
-
-
-package ch.epfl.lamp.compiler.msil.util;
-
-import ch.epfl.lamp.compiler.msil.PEFile;
-import ch.epfl.lamp.compiler.msil.PEFile.Sig;
-
-import java.io.PrintStream;
-import java.io.IOException;
-
-import java.nio.ByteBuffer;
-import java.nio.channels.FileChannel;
-
-/**
- * Implements support for CLI streams within a PE file.
- *
- * @author Nikolay Mihaylov
- * @version 1.0
- */
-public final class PEStream implements Signature {
-
- //##########################################################################
- // Members
-
- /** The name of the stream. */
- public final String name;
-
- /** The offset of the stream from the beginning of the file. */
- public final int offset;
-
- /** The size of the stream in bytes; shall be multiple of 4. */
- public final int size;
-
- private final PEFile file;
-
- private final ByteBuffer buffer;
-
- //##########################################################################
-
- /** The PEStream class constructor.
- * @param file - the PEFile to which this stream belongs
- */
- public PEStream(PEFile file) {
- this.file = file;
- offset = file.fromRVA(file.rvaMetadata + file.readInt());
- size = file.readInt();
- buffer = file.getBuffer(offset, size);
-
- int i = 0;
- byte [] _buf = new byte [16];
- do {
- _buf[i] = (byte) file.readByte();
- i++;
- } while(0 != _buf[i-1]);
- name = new String(_buf, 0, i - 1);
-
- file.align(PEFile.INT_SIZE, file.posMetadata);
- //assert size % 4 == 0;
- }
-
- /** Move to the specified position in the stream. */
- private void seek(int pos) {
- try {
- buffer.position(pos);
- } catch (IllegalArgumentException e) {
- System.err.println("\nSeek failed in file " + file
- + " for position " + pos
- + " of stream " + name + " (" + buffer + ")");
- throw e;
- }
- }
-
- /** Return a string from the specified position in the stream. */
- public String getString(int pos) {
- seek(pos);
- buffer.mark();
- int i;
- for (i = 0; getByte() != 0; i++);
- byte[] buf = new byte[i];
- buffer.reset(); // go back to the marked position
- buffer.get(buf);
- try {
- return new String(buf, "UTF-8");
- } catch (java.io.UnsupportedEncodingException e) {
- throw new RuntimeException(e);
- }
- }
-
- /** Read a byte from the stream. */
- public int getByte() {
- return (buffer.get() + 0x0100) & 0xff;
- }
-
- /** Return the GUID at the given position in the stream. */
- public byte[] getGUID(int pos) {
- seek(pos);
- byte[] buf = new byte[32]; // 128-bit GUID
- try {
- buffer.get(buf);
- } catch (Exception e) {
- System.err.println();
- System.err.println("PEStream.getBlob(): Exception for pos = " +
- pos + " and buf.length = " + buf.length);
- System.err.println("\tbuffer = " + buffer);
- e.printStackTrace();
- throw new RuntimeException();
- }
- return buf;
- }
-
- public int readLength() {
- int length = getByte();
- if ((length & 0x80) != 0) {
- length = ((length & 0x7f) << 8) | getByte();
- if ((length & 0x4000) != 0)
- length = ((length & 0x3fff) << 16) | (getByte()<<8) | getByte();
- }
- return length;
- }
-
- /** Return a blob from the specified position in the stream. */
- public byte[] getBlob(int pos) {
- seek(pos);
- // the length indicates the number of bytes
- // AFTER the encoded size of the blob
- int length = readLength();
- byte[] buf = new byte[length];
- buffer.get(buf);
- return buf;
- }
-
- /***/
- public Sig getSignature(int pos) {
- seek(pos);
- return file.newSignature(buffer);
- }
-
- /**
- */
- public Object getConstant(int type, int pos) {
- Object val = null;
- seek(pos);
- int length = readLength(); // skip over the blob length field
- switch (type) {
- case ELEMENT_TYPE_BOOLEAN:
- assert length == 1;
- return buffer.get() == 0 ? Boolean.FALSE : Boolean.TRUE;
- case ELEMENT_TYPE_CHAR:
- assert length == 2 : "length == " + length;
- return new Character(buffer.getChar());
- case ELEMENT_TYPE_I1:
- case ELEMENT_TYPE_U1: // TODO U1 not the same as I1
- assert length == 1;
- return new Byte(buffer.get());
- case ELEMENT_TYPE_I2:
- case ELEMENT_TYPE_U2:
- assert length == 2;
- return new Short(buffer.getShort());
- case ELEMENT_TYPE_I4:
- case ELEMENT_TYPE_U4:
- assert length == 4;
- return new Integer(buffer.getInt());
- case ELEMENT_TYPE_I8:
- case ELEMENT_TYPE_U8:
- assert length == 8;
- return new Long(buffer.getLong());
- case ELEMENT_TYPE_R4:
- assert length == 4;
- return new Float(buffer.getFloat());
- case ELEMENT_TYPE_R8:
- assert length == 8;
- return new Double(buffer.getDouble());
- case ELEMENT_TYPE_STRING:
-// length /= 2;
-// char[] chars = new char[length];
-// for (int i = 0; i < length; i++)
-// chars[i] = buffer.getChar();
-// val = new String(chars);
- try {
- return new String(getBlob(pos), "UTF-16LE");
- } catch(java.io.UnsupportedEncodingException e) {
- throw new RuntimeException(e);
- }
- default: throw new RuntimeException("Illegal constant type: " + type);
- }
- }
-
- public void dump(PrintStream out) {
- out.println("Stream name: " + name + " (length " +
- name.length() + " characters)");
- out.println("Stream offset: 0x" + PEFile.int2hex(offset));
- out.println("Stream size: 0x" + PEFile.int2hex(size));
- }
-
- //##########################################################################
-} // class PEStream
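
PEStream.readLength() above decodes the ECMA-335 compressed length that prefixes #Blob and #US entries (1, 2 or 4 bytes depending on the leading bits of the first byte). Below is a minimal standalone sketch of the same scheme, assuming the buffer is positioned at the first length byte; the class and method names are illustrative and not taken from the deleted sources.

    import java.nio.ByteBuffer;

    // Minimal sketch of the compressed-length decoding performed by
    // PEStream.readLength(): prefixes 0xxxxxxx, 10xxxxxx, 110xxxxx.
    public class BlobLengthDemo {
        static int readCompressedLength(ByteBuffer buf) {
            int b0 = buf.get() & 0xff;
            if ((b0 & 0x80) == 0)
                return b0;                                             // 1-byte form, 0..0x7F
            if ((b0 & 0x40) == 0)
                return ((b0 & 0x3f) << 8) | (buf.get() & 0xff);        // 2-byte form, 0..0x3FFF
            return ((b0 & 0x1f) << 24) | ((buf.get() & 0xff) << 16)    // 4-byte form
                 | ((buf.get() & 0xff) << 8) | (buf.get() & 0xff);
        }

        public static void main(String[] args) {
            // 0x03 -> length 3; 0x80 0x80 -> length 128; the blob bytes follow the length.
            System.out.println(readCompressedLength(ByteBuffer.wrap(new byte[]{0x03})));                   // 3
            System.out.println(readCompressedLength(ByteBuffer.wrap(new byte[]{(byte)0x80, (byte)0x80}))); // 128
        }
    }
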
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/util/Signature.java b/src/msil/ch/epfl/lamp/compiler/msil/util/Signature.java
deleted file mode 100644
index d5dc0ff32c..0000000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/util/Signature.java
+++ /dev/null
@@ -1,129 +0,0 @@
-/*
- * System.Reflection-like API for access to .NET assemblies (DLL & EXE)
- */
-
-
-package ch.epfl.lamp.compiler.msil.util;
-
-import ch.epfl.lamp.compiler.msil.Type;
-
-/**
- * Constants used in CLI metadata signatures: element types, calling conventions, and related designators.
- *
- * @author Nikolay Mihaylov
- * @version 1.0
- */
-public interface Signature {
-
- //##########################################################################
-
- /** Marks end of a list. */
- public static final int ELEMENT_TYPE_END = 0x00;
- /** void */
- public static final int ELEMENT_TYPE_VOID = 0x01;
- /** boolean */
- public static final int ELEMENT_TYPE_BOOLEAN = 0x02;
- /** char */
- public static final int ELEMENT_TYPE_CHAR = 0x03;
- /** signed byte */
- public static final int ELEMENT_TYPE_I1 = 0x04;
- /** byte */
- public static final int ELEMENT_TYPE_U1 = 0x05;
- /** short */
- public static final int ELEMENT_TYPE_I2 = 0x06;
- /** unsigned short */
- public static final int ELEMENT_TYPE_U2 = 0x07;
- /** int */
- public static final int ELEMENT_TYPE_I4 = 0x08;
- /** unsigned int */
- public static final int ELEMENT_TYPE_U4 = 0x09;
- /** long */
- public static final int ELEMENT_TYPE_I8 = 0x0a;
- /** unsigned long */
- public static final int ELEMENT_TYPE_U8 = 0x0b;
- /** float */
- public static final int ELEMENT_TYPE_R4 = 0x0c;
- /** double */
- public static final int ELEMENT_TYPE_R8 = 0x0d;
- /** string */
- public static final int ELEMENT_TYPE_STRING = 0x0e;
- /** Followed by <type> token. */
- public static final int ELEMENT_TYPE_PTR = 0x0f;
- /** Followed by <type> token. */
- public static final int ELEMENT_TYPE_BYREF = 0x10;
- /** Followed by <type> token */
- public static final int ELEMENT_TYPE_VALUETYPE = 0x11;
- /** Followed by <type> token */
- public static final int ELEMENT_TYPE_CLASS = 0x12;
-
- public static final int ELEMENT_TYPE_VAR = 0x13;
-
- /**
- * <type> <rank> <boundsCount> <bound1> ... <loCount> <lo1> ...
- */
- public static final int ELEMENT_TYPE_ARRAY = 0x14;
-
- public static final int ELEMENT_TYPE_GENERICINST = 0x15;
- /***/
- public static final int ELEMENT_TYPE_TYPEDBYREF = 0x16;
- /** System.IntPtr */
- public static final int ELEMENT_TYPE_I = 0x18;
- /** System.UIntPtr */
- public static final int ELEMENT_TYPE_U = 0x19;
- /** Followed by full method signature. */
- public static final int ELEMENT_TYPE_FNPTR = 0x1b;
- /** System.Object. */
- public static final int ELEMENT_TYPE_OBJECT = 0x1c;
- /** Single-dim array with 0 lower bound. */
- public static final int ELEMENT_TYPE_SZARRAY = 0x1d;
-
- public static final int ELEMENT_TYPE_MVAR = 0x1e;
-
- /** Required modifier : followed by a TypeDef or TypeRef token. */
- public static final int ELEMENT_TYPE_CMOD_REQD = 0x1f;
- /** Optional modifier : followed by a TypeDef or TypeRef token. */
- public static final int ELEMENT_TYPE_CMOD_OPT = 0x20;
- /** Implemented within the CLI. */
- public static final int ELEMENT_TYPE_INTERNAL = 0x21;
- /** Or'd with following element types. */
- public static final int ELEMENT_TYPE_MODIFIER = 0x40;
- /** Sentinel for varargs method signature. */
- public static final int ELEMENT_TYPE_SENTINEL = 0x41;
- /**Denotes a local variable that points at a pinned object. */
- public static final int ELEMENT_TYPE_PINNED = 0x45;
-
- //##########################################################################
- // signature designators
-
- public static final int HASTHIS = 0x20;
- public static final int EXPLICITTHIS = 0x40;
- public static final int DEFAULT = 0x00;
- public static final int VARARG = 0x05;
- public static final int GENERIC = 0x10;
- public static final int SENTINEL = 0x41;
- public static final int C = 0x01;
- public static final int STDCALL = 0x02;
- public static final int THISCALL = 0x03;
- public static final int FASTCALL = 0x04;
- public static final int FIELD = 0x06;
- public static final int PROPERTY = 0x08;
- public static final int LOCAL_SIG = 0x07;
-
- //##########################################################################
- // extra IDs used in the serialization format of named arguments
- // to custom attributes. Reverse-engineered from compiled C# example
-
- /** What follows is a string with the full name of the type. */
- public static final int X_ELEMENT_TYPE_TYPE = 0x50;
-
- /** What follows is a string with the full name of the enumeration type*/
- public static final int X_ELEMENT_TYPE_ENUM = 0x55;
-
- /** The named argument specifies a field. */
- public static final int X_ELEMENT_KIND_FIELD = 0x53;
-
- /** The named argument specifies a property. */
- public static final int X_ELEMENT_KIND_PROPERTY = 0x54;
-
- //##########################################################################
-} // interface Signature
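
The element-type codes above drive signature and constant decoding (see PEStream.getConstant earlier in this diff). The hedged sketch below maps a few of those codes to readable names for debugging output; the class is hypothetical and hard-codes the constant values rather than implementing the deleted interface.

    // Minimal sketch: turn a handful of the ELEMENT_TYPE_* codes defined above
    // into readable names, e.g. for dumping decoded signatures.
    public class ElementTypeNames {
        static String name(int elementType) {
            switch (elementType) {
                case 0x02: return "bool";     // ELEMENT_TYPE_BOOLEAN
                case 0x08: return "int32";    // ELEMENT_TYPE_I4
                case 0x0e: return "string";   // ELEMENT_TYPE_STRING
                case 0x1c: return "object";   // ELEMENT_TYPE_OBJECT
                case 0x1d: return "szarray";  // ELEMENT_TYPE_SZARRAY (single-dim, zero lower bound)
                default:   return "0x" + Integer.toHexString(elementType);
            }
        }

        public static void main(String[] args) {
            System.out.println(name(0x08) + ", " + name(0x1d)); // prints: int32, szarray
        }
    }
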
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/util/Table.java b/src/msil/ch/epfl/lamp/compiler/msil/util/Table.java
deleted file mode 100644
index 1f43b8c2fa..0000000000
--- a/src/msil/ch/epfl/lamp/compiler/msil/util/Table.java
+++ /dev/null
@@ -1,1859 +0,0 @@
-/*
- * System.Reflection-like API for access to .NET Assemblies
- */
-
-
-package ch.epfl.lamp.compiler.msil.util;
-
-import ch.epfl.lamp.compiler.msil.PEFile;
-import ch.epfl.lamp.compiler.msil.PEFile.Sig;
-
-import java.io.PrintStream;
-import java.nio.ByteBuffer;
-import java.nio.MappedByteBuffer;
-
-/**
- * Represents a table in a .NET assembly
- *
- * @author Nikolay Mihaylov
- * @version 1.0
- */
-public abstract class Table {
-
- //##########################################################################
-
- public static final int MAX_NUMBER = 64;
-
- public static final long VALID_TABLES_MASK = 0x03ff3fb7ff57L;
-
- //##########################################################################
- // fields and methods for handling predefined sets of tables
-
- public static final int TABLE_SET_LENGTH = 13;
-
- public static final int _TypeDefOrRef = 0;
- public static final int _HasConstant = 1;
- public static final int _HasCustomAttribute = 2;
- public static final int _HasFieldMarshal = 3;
- public static final int _HasDeclSecurity = 4;
- public static final int _MemberRefParent = 5;
- public static final int _HasSemantics = 6;
- public static final int _MethodDefOrRef = 7;
- public static final int _MemberForwarded = 8;
- public static final int _Implementation = 9;
- public static final int _CustomAttributeType = 10;
- public static final int _ResolutionScope = 11;
- public static final int _TypeOrMethodDef = 12;
-
-
- public static final int[][] TableSet = new int[TABLE_SET_LENGTH][];
-
- static {
- TableSet[_TypeDefOrRef] =
- new int[] {TypeDef.ID, TypeRef.ID, TypeSpec.ID};
- TableSet[_HasConstant] =
- new int[] {FieldDef.ID, ParamDef.ID, PropertyDef.ID};
- TableSet[_HasCustomAttribute] =
- new int[] {MethodDef.ID, FieldDef.ID, TypeRef.ID, TypeDef.ID,
- ParamDef.ID, InterfaceImpl.ID, MemberRef.ID, ModuleDef.ID,
- -1, PropertyDef.ID, EventDef.ID, -1, ModuleRef.ID,
- TypeSpec.ID, AssemblyDef.ID, AssemblyRef.ID,
- FileDef.ID, ExportedType.ID, ManifestResource.ID};
- TableSet[_HasFieldMarshal] =
- new int[] {FieldDef.ID, ParamDef.ID};
- TableSet[_HasDeclSecurity] =
- new int[] {TypeDef.ID, MethodDef.ID, AssemblyDef.ID};
- TableSet[_MemberRefParent] =
- new int[] {-1, TypeRef.ID, ModuleRef.ID, MethodDef.ID, TypeSpec.ID};
- TableSet[_HasSemantics] =
- new int[] {EventDef.ID, PropertyDef.ID};
- TableSet[_MethodDefOrRef] =
- new int[] {MethodDef.ID, MemberRef.ID};
- TableSet[_MemberForwarded] =
- new int[] {FieldDef.ID, MethodDef.ID};
- TableSet[_Implementation] =
- new int[] {FileDef.ID, AssemblyRef.ID, ExportedType.ID};
- TableSet[_CustomAttributeType] =
- new int[] {-1, -1, MethodDef.ID, MemberRef.ID, -1};
- TableSet[_ResolutionScope] =
- new int[] {ModuleDef.ID, ModuleRef.ID, AssemblyRef.ID, TypeRef.ID};
- TableSet[_TypeOrMethodDef] =
- new int[]{TypeDef.ID, MethodDef.ID};
- }
-
- public static final int[] NoBits =
- new int[]{2, 2, 5, 1, 2, 3, 1, 1, 1, 2, 3, 2, 1};
-
- public static int getMask(int tableSetId) {
- return (1 << NoBits[tableSetId]) - 1;
- }
-
- public static int getTableId(int tableSet, int index) {
- return TableSet[tableSet][index & getMask(tableSet)];
- }
-
- public static int getTableIndex(int tableSet, int index) {
- return index >> NoBits[tableSet];
- }
-
- public static int encodeIndex(int index, int tableSetId, int tableId) {
- int[] tableSet = TableSet[tableSetId];
- for (int i = 0; i < tableSet.length; i++) {
- if (tableSet[i] == tableId)
- return (index << NoBits[tableSetId]) | i;
- }
- throw new RuntimeException("Cannot find table #" + tableId +
- " in table set #" + tableSetId);
- }
-
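
The helpers above implement CLI coded indexes: the low NoBits[tableSetId] bits choose the target table within the set and the remaining bits carry the 1-based row number. A minimal round-trip sketch under those same assumptions follows (2 tag bits for TypeDefOrRef; the class name is illustrative only).

    // Minimal round-trip sketch of the TypeDefOrRef coded index handled by
    // getTableId/getTableIndex/encodeIndex above: the low 2 bits are the tag
    // (0 = TypeDef 0x02, 1 = TypeRef 0x01, 2 = TypeSpec 0x1b), the rest is the row.
    public class CodedIndexDemo {
        static final int[] TYPE_DEF_OR_REF = {0x02, 0x01, 0x1b};
        static final int TAG_BITS = 2;

        public static void main(String[] args) {
            int coded = 0x0d;                                        // 0b1101
            int tableId = TYPE_DEF_OR_REF[coded & ((1 << TAG_BITS) - 1)];
            int row = coded >> TAG_BITS;
            System.out.println("table 0x" + Integer.toHexString(tableId) + ", row " + row); // table 0x1, row 3

            int reencoded = (row << TAG_BITS) | 1;                   // tag 1 = TypeRef, mirrors encodeIndex()
            System.out.println(reencoded == coded);                  // true
        }
    }
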
- //##########################################################################
-
- private static final String [] tableName = {
- "Module", "TypeRef", "TypeDef", " FieldTrans",
- "Field", "MethodTrans", "Method", "",
- "Param", "InterfaceImpl", "MemberRef", "Constant",
- "CustomAttribute", "FieldMarshal", "DeclSecurity","ClassLayout",
- "FieldLayout", "StandAloneSig", "EventMap", "",
- "Event", "PropertyMap", "", "Property",
- "MethodSemantics", "MethodImpl", "ModuleRef", "TypeSpec",
- "ImplMap", "FieldRVA", "", "",
- "Assembly", "AssemblyProcessor","AssemblyOS", "AssemblyRef",
- "AssemblyRefProcessor","AssemblyRefOS", "File", "ExportedType",
- "ManifestResource", "NestedClass", "GenericParam", "MethodSpec",
- "GenericParamConstraint", "", "", "",
- "", "", "", "",
- "", "", "", "",//0x30-0x37
- "", "", "", "",
- "", "", "", "" //0x37-0x3f
- };
-
- /** Creates a table with the given id and number of rows.
- */
- public static Table newTable(PEFile file, int id, int rows) {
- Table table = null;
- switch(id) {
- case ModuleDef.ID: table = new ModuleDef(file, rows); break;
- case TypeRef.ID: table = new TypeRef(file, rows); break;
- case TypeDef.ID: table = new TypeDef(file, rows); break;
- case FieldTrans.ID: table = new FieldTrans(file, rows); break;
- case FieldDef.ID: table = new FieldDef(file, rows); break;
- case MethodTrans.ID: table = new MethodTrans(file, rows); break;
- case MethodDef.ID: table = new MethodDef(file, rows); break;
- case ParamDef.ID: table = new ParamDef(file, rows); break;
- case InterfaceImpl.ID: table = new InterfaceImpl(file, rows); break;
- case MemberRef.ID: table = new MemberRef(file, rows); break;
- case Constant.ID: table = new Constant(file, rows); break;
- case CustomAttribute.ID: table = new CustomAttribute(file, rows); break;
- case FieldMarshal.ID: table = new FieldMarshal(file, rows); break;
- case DeclSecurity.ID: table = new DeclSecurity(file, rows); break;
- case ClassLayout.ID: table = new ClassLayout(file, rows); break;
- case FieldLayout.ID: table = new FieldLayout(file, rows); break;
- case StandAloneSig.ID: table = new StandAloneSig(file, rows); break;
- case EventMap.ID: table = new EventMap(file, rows); break;
- case EventDef.ID: table = new EventDef(file, rows); break;
- case PropertyMap.ID: table = new PropertyMap(file, rows); break;
- case PropertyDef.ID: table = new PropertyDef(file, rows); break;
- case MethodSemantics.ID: table = new MethodSemantics(file, rows); break;
- case MethodImpl.ID: table = new MethodImpl(file, rows); break;
- case ModuleRef.ID: table = new ModuleRef(file, rows); break;
- case TypeSpec.ID: table = new TypeSpec(file, rows); break;
- case ImplMap.ID: table = new ImplMap(file, rows); break;
- case FieldRVA.ID: table = new FieldRVA(file, rows); break;
- case AssemblyDef.ID: table = new AssemblyDef(file, rows); break;
- case AssemblyProcessor.ID: table = new AssemblyProcessor(file, rows); break;
- case AssemblyOS.ID: table = new AssemblyOS(file, rows); break;
- case AssemblyRef.ID: table = new AssemblyRef(file, rows); break;
- case AssemblyRefProcessor.ID:
- table = new AssemblyRefProcessor(file, rows); break;
- case AssemblyRefOS.ID: table = new AssemblyRefOS(file, rows); break;
- case FileDef.ID: table = new FileDef(file, rows); break;
- case ExportedType.ID: table = new ExportedType(file, rows); break;
- case ManifestResource.ID: table = new ManifestResource(file, rows); break;
- case NestedClass.ID: table = new NestedClass(file, rows); break;
- case GenericParam.ID:
- table = new GenericParam(file, rows);
- break;
- case MethodSpec.ID:
- table = new MethodSpec(file, rows);
- break;
- case GenericParamConstraint.ID:
- table = new GenericParamConstraint(file, rows);
- break;
- default:
- table = new Empty(id);
- }
-// System.out.println("created table " + table.getName() + " with "
-// + table.rows + " rows");
- return table;
- }
-
-
- //##########################################################################
- // public fields
-
- /** Number of rows in the table. */
- public final int rows;
-
- /** Table ID as specified in Partition II. */
- public final int id;
-
- /** The file to which the table belongs. */
- protected final PEFile file;
-
- /** Memory mapped buffer wrapping the table. */
- protected ByteBuffer buffer;
-
- /**
- * Specifies whether a new memory-mapped byte buffer should be created
- * for this table.
- */
- protected boolean newMapping = false;
-
- /** Tells whether the table is indexed by 2-byte (short) integer
- * or by 4-byte integer. */
- public final boolean isShort;
-
- private int rowSize = -1;
-
- // the starting position of the table relative to the beginning of the file
- private long start = -1;
-
- // the number of the row that can currently be accessed via the fields of the table
- private int currentRow = 0;
-
- //##########################################################################
-
- protected Table(PEFile file, int id, int rows) {
- this.file = file;
- this.id = id;
- this.rows = rows;//file.readInt();
- this.isShort = rows < (1 << 16);
-// assert ((1L << id) & VALID_TABLES_MASK) != 0
-// : "Table does not have a vaid ID: " + byte2hex(id);
- }
-
- /**
- * Additional table initialization.
- * @return the starting position of the next table in the stream.
- */
- public final long init(long start) {
- if (rows < 1)
- return start;
- if (this.start == -1)
- this.start = start;
- else throw new RuntimeException
- ("Cannot re-initialize table \'" + getTableName() + "\'");
- rowSize = getRowSize();
- int size = rows * rowSize();
- buffer = this.newMapping ? file.mapBuffer(start, size)
- : file.getBuffer(start, size);
- return start + size;
- }
-
-
- public final String getTableName() {
- return 0 <= id && id < MAX_NUMBER ? tableName[id] : "<NoTable>";
- }
-
- /**
- * @return the size of the row in bytes
- */
- public final int rowSize() {
- return rowSize;
- }
-
- /**
- * if the underlying buffer is memory-mapped, load its contents into memory
- */
- public void load() {
- if (buffer instanceof MappedByteBuffer)
- ((MappedByteBuffer)buffer).load();
- }
-
- /***/
- public final int readByte() {
- return (buffer.get() + 0x100) & 0xff;
- }
-
- /***/
- public final int readShort() {
- return (buffer.getShort() + 0x10000) & 0xffff;
- }
-
- /***/
- public final int readInt() {
- return buffer.getInt();
- }
-
- /***/
- public final int readStringIndex() {
- return file.StringIsShort ? readShort() : readInt();
- }
-
- /***/
- public final int readBlobIndex() {
- return file.BlobIsShort ? readShort() : readInt();
- }
-
- /***/
- public final int readGUIDIndex() {
- return file.GUIDIsShort ? readShort() : readInt();
- }
-
- /***/
- public final int readTableIndex(int tableId) {
- return file.getTable(tableId).isShort ? readShort() : readInt();
- }
-
- /***/
- public final int readTableSetIndex(int tableSetId) {
- return file.indexSize[tableSetId] == 2 ? readShort() : readInt();
- }
-
- /** Read the specified row and populate the fields of the instance. */
- public final void readRow(int row) {
- seekRow(row);
- int lastSeek = buffer.position();
- populateFields();
- int rowSizeRead = (int) (buffer.position() - lastSeek);
- if (rowSizeRead != rowSize())
- throw new RuntimeException("Table ID=0x" + PEFile.byte2hex(id) +
- ": read row size = " + rowSizeRead +
- "; expected row size = " + rowSize());
- currentRow = row;
- }
-
- /** Seeks in the file the position of the specified row. */
- protected final void seekRow(int row) {
- assert row > 0 && row <= rows
- : "Index " + row + " is not within the table with #rows = " + rows;
- buffer.position((row - 1)* rowSize());
- }
-
- public final int currentRow() { return currentRow; }
-
- public final void nextRow() { readRow(currentRow() + 1); }
-
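
The row accessors above are 1-based and stateful: readRow(i) seeks to row i and fills the subclass's public fields in place. Below is a hedged usage sketch that walks a TypeDef table through this API, assuming an already-opened PEFile and assuming getTable(int) returns the Table instance for that id (it is used that way in readTableIndex above); the wrapper class and method names are made up.

    import ch.epfl.lamp.compiler.msil.PEFile;
    import ch.epfl.lamp.compiler.msil.util.Table;

    // Hedged usage sketch (not from the deleted sources): print the full name
    // of every type defined in an assembly by iterating the TypeDef table.
    public class TypeDefWalker {
        static void printTypeNames(PEFile file) {
            Table.TypeDef typeDefs = (Table.TypeDef) file.getTable(Table.TypeDef.ID); // cast assumed
            for (int row = 1; row <= typeDefs.rows; row++) {   // rows are 1-based (see seekRow above)
                typeDefs.readRow(row);                         // populates Flags, Name, Namespace, ...
                System.out.println(typeDefs.getFullName());
            }
        }
    }
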
- //##########################################################################
- // abstract members
-
- /** Assigns values to the fields of the class. */
- protected abstract void populateFields();
-
- /** Returns the size of a row in bytes. */
- protected abstract int getRowSize();
-
- //##########################################################################
- // a table with 0 rows
-
- private static final class Empty extends Table {
- public Empty(int id) {
- super(null, id, 0);
- }
- protected int getRowSize() { return 0; }
- protected void populateFields() {
- throw new RuntimeException("Table 0x" + PEFile.byte2hex(id));
- }
- }
-
- //##########################################################################
- // table Module; ID=0x00; p115, 21.27
-
- public static final class ModuleDef extends Table {
- public static final int ID = 0x00;
-
- /** 2-byte value; reserved - shall be 0. */
- public int Generation;
-
- /** Index into #String. */
- public int Name;
-
- /** Index into #GUID; used to distinguish between
- * two versions of the same module. */
- public int Mvid;
-
- /** Index into #GUID; reserved - shall be 0. */
- public int EncId;
-
- /** Index into #GUID; reserved - shall be 0. */
- public int EncBaseId;
-
- public ModuleDef(PEFile file, int rows) { super(file, ID, rows); }
-
- protected void populateFields() {
- Generation = readShort();
- Name = readStringIndex();
- Mvid = readGUIDIndex();
- EncId = readGUIDIndex();
- EncBaseId = readGUIDIndex();
- }
-
- protected int getRowSize() {
- return 2 + file.getStringIndexSize() + 3*file.getGUIDIndexSize();
- }
-
- public String getName() {
- return file.getString(Name);
- }
-
- } // class ModuleDef
-
- //##########################################################################
- // table TypeRef; ID=0x01; p125, 21.35
-
- public static final class TypeRef extends Table {
- public static final int ID = 0x1;
-
- /** A ResolutionScope coded index. */
- public int ResolutionScope;
-
- /** Index into #String. */
- public int Name;
-
- /** Index into #String. */
- public int Namespace;
-
- public TypeRef(PEFile file, int rows) { super(file, ID, rows); }
-
- protected void populateFields() {
- ResolutionScope = readTableSetIndex(_ResolutionScope);
- Name = readStringIndex();
- Namespace = readStringIndex();
- }
-
- protected int getRowSize() {
- return file.getTableSetIndexSize(_ResolutionScope) +
- 2 * file.getStringIndexSize();
- }
-
- public String getFullName() {
- String namespace = file.getString(Namespace);
- return namespace.length() == 0 ? file.getString(Name)
- : namespace + "." + file.getString(Name);
- }
-
- } // class TypeRef
-
- //##########################################################################
- // table TypeDef; ID=0x02; p120, 21.34
-
- public static final class TypeDef extends Table {
- public static final int ID = 0x02;
-
- /** 4-byte bitmask of type TypeAttributes (22.1.14). */
- public int Flags;
-
- /** Index into #String. */
- public int Name;
-
- /** Index into #String. */
- public int Namespace;
-
- /** TypeDefOrRef coded index. */
- public int Extends;
-
- /** Index into Field table.
- */
- public int FieldList;
-
- /** Index into Method table. */
- public int MethodList;
-
-
- public TypeDef(PEFile file, int rows) {
- super(file, ID, rows);
- this.newMapping = true;
- }
-
- public String getFullName() {
- String namespace = file.getString(Namespace);
- return namespace.length() == 0 ? file.getString(Name)
- : namespace + "." + file.getString(Name);
- }
-
- protected void populateFields() {
- Flags = readInt();
- Name = readStringIndex();
- Namespace = readStringIndex();
- Extends = readTableSetIndex(_TypeDefOrRef);
- FieldList = readTableIndex(FieldDef.ID);
- MethodList = readTableIndex(MethodDef.ID);
- }
-
- protected int getRowSize() {
- return 4 + 2*file.getStringIndexSize() +
- file.getTableSetIndexSize(_TypeDefOrRef) +
- file.getTableIndexSize(FieldDef.ID) +
- file.getTableIndexSize(MethodDef.ID);
- }
-
- } // class TypeDef
-
- //##########################################################################
- // Table FieldTrans; ID=0x03; undocumented
-
- /**
- * Undocumented table. Appears to be used for translating the Field entry
- * in the TypeDef(0x02) table into the real entry in the Field(0x04) table.
- */
- public static final class FieldTrans extends Table {
- public static final int ID = 0x03;
-
- public int Field;
-
- public FieldTrans(PEFile file, int rows) {
- super(file, ID, rows);
- newMapping = true;
- }
-
- protected void populateFields() {
- Field = readTableIndex(FieldDef.ID);
- }
-
- protected int getRowSize() {
- return file.getTableIndexSize(FieldDef.ID);
- }
-
- }
-
- //##########################################################################
- // table Field; ID=0x04; p102, 21.15
-
- public static final class FieldDef extends Table {
- public static final int ID = 0x04;
-
- /** 2-byte bitmask of type FieldAttributes (22.1.5). */
- public int Flags;
-
- /** Index into #String. */
- public int Name;
-
- /** Index into #Blob. */
- public int Signature;
-
- public FieldDef(PEFile file, int rows) {
- super(file, ID, rows);
- newMapping = true;
- }
-
- protected void populateFields() {
- Flags = readShort();
- Name = readStringIndex();
- Signature = readBlobIndex();
- }
-
- protected int getRowSize() {
- return 2 + file.getStringIndexSize() + file.getBlobIndexSize();
- }
-
- public String getName() { return file.getString(Name); }
-
- public Sig getSignature() { return file.getSignature(Signature); }
-
- } //class FieldDef
-
- //##########################################################################
- // Table MethodTrans; ID=0x05; undocumented
-
- /**
- * Undocumented table. Appears to be used for translating the Method entry
- * in the TypeDef(0x02) table into the real entry in the Method(0x06) table.
- */
- public static final class MethodTrans extends Table {
- public static final int ID = 0x05;
-
- public int Method;
-
- public MethodTrans(PEFile file, int rows) {
- super(file, ID, rows);
- newMapping = true;
- }
-
- protected void populateFields() {
- Method = readTableIndex(MethodDef.ID);
- }
-
- protected int getRowSize() {
- return file.getTableIndexSize(MethodDef.ID);
- }
-
- }
-
- //##########################################################################
- // table MethodDef; ID=0x06; p110, 21.24
-
- public static final class MethodDef extends Table {
- public static final int ID = 0x06;
-
- /** 4-byte constant. */
- public int RVA;
-
- /** 2-byte bitmask of type MethodImplAttributes (22.1.10). */
- public int ImplFlags;
-
- /** 2-byte bitmask of type MethodAttributes (22.1.9). */
- public int Flags;
-
- /** Index into #String. */
- public int Name;
-
- /** Index into #Blob. */
- public int Signature;
-
- /** Index into Param Table. */
- public int ParamList;
-
- public MethodDef(PEFile file, int rows) {
- super(file, ID, rows);
- newMapping = true;
- }
-
- protected void populateFields() {
- RVA = readInt();
- ImplFlags = readShort();
- Flags = readShort();
- Name = readStringIndex();
- Signature = readBlobIndex();
- ParamList = readTableIndex(ParamDef.ID);
- }
-
- protected int getRowSize() {
- return 8 + file.getStringIndexSize() + file.getBlobIndexSize() +
- file.getTableIndexSize(ParamDef.ID);
- }
-
- public String getName() { return file.getString(Name); }
-
- public Sig getSignature() { return file.getSignature(Signature); }
- } // class Method
-
- //##########################################################################
- // table Param; ID=0x08; p116, 21.30
-
- public static final class ParamDef extends Table {
- public static final int ID = 0x08;
-
- /** 2-byte bitmask of type ParamAttributes (22.1.12). */
- public int Flags;
-
- /** 2-byte constant. */
- public int Sequence;
-
- /** Index into #String. */
- public int Name;
-
- public ParamDef(PEFile file, int rows) {
- super(file, ID, rows);
- newMapping = true;
- }
-
- protected void populateFields() {
- Flags = readShort();
- Sequence = readShort();
- Name = readStringIndex();
- }
-
- protected int getRowSize() { return 4 + file.getStringIndexSize(); }
-
- public String getName() { return file.getString(Name); }
-
- } // class Param
-
- //##########################################################################
- // table InterfaceImpl, ID=0x09; p107, 21.21
-
- public static final class InterfaceImpl extends Table {
- public static final int ID = 0x09;
-
- /** Index into TypeDef table. */
- public int Class;
-
- /** Index into TypeDefOrRef table set. */
- public int Interface;
-
- public InterfaceImpl(PEFile file, int rows) { super(file, ID, rows); }
-
- protected void populateFields() {
- Class = readTableIndex(TypeDef.ID);
- Interface = readTableSetIndex(_TypeDefOrRef);
- }
-
- protected int getRowSize() {
- return file.getTableIndexSize(TypeDef.ID) +
- file.getTableSetIndexSize(_TypeDefOrRef);
- }
-
- /** Finds the index of the first InterfaceImpl entry for the given type.
- * @param targetIndex - index in the TypeDef table - the type to look for
- * @return the index of the first interface for the given type;
- * 0 if the type doesn't implement any interfaces
- */
-
- // binary search implementation
-// public int findType(int targetIndex) {
-// int l = 1, h = rows;
-// int classIndex;
-// while (l <= h) {
-// int mid = (l + h) / 2;
-// seekRow(mid);
-// classIndex = readTableIndex(TypeDef.ID);
-// if (targetIndex <= classIndex) h = mid - 1;
-// else l = mid + 1;
-// }
-// return (targetIndex == classIndex) ? h : 0;
-// }
-
- //linear search implementation
- public int findType(int targetIndex) {
- for (int i = 1; i <= rows; i++) {
- seekRow(i);
- if (targetIndex == readTableIndex(TypeDef.ID))
- return i;
- }
- return 0;
- }
-
- } // class InterfaceImpl
-
- //##########################################################################
- // table MemberRef; ID=0x0a; p109, 21.23
-
- public static final class MemberRef extends Table {
- public static final int ID = 0x0a;
-
- /** Index into MemberRefParent table set. */
- public int Class;
-
- /** Index into #String. */
- public int Name;
-
- /** Index into #Blob. */
- public int Signature;
-
- public MemberRef(PEFile file, int rows) { super(file, ID, rows); }
-
- protected void populateFields() {
- Class = readTableSetIndex(_MemberRefParent);
- Name = readStringIndex();
- Signature = readBlobIndex();
- }
-
- protected int getRowSize() {
- return file.getTableSetIndexSize(_MemberRefParent) +
- file.getStringIndexSize() + file.getBlobIndexSize();
- }
-
- public String getName() {
- return file.getString(Name);
- }
-
- public Sig getSignature() {
- return file.getSignature(Signature);
- }
-
- } // class MemberRef
-
- //##########################################################################
- // table Constant; ID=0x0b; p95, 21.9
-
- public static final class Constant extends Table {
- public static final int ID = 0x0b;
-
- /** 1-byte constant followed by 1-byte padding 0 (see 22.1.15). */
- public int Type;
-
- /** Index into HasConst table set. */
- public int Parent;
-
- /** Index into #Blob. */
- public int Value;
-
- public Constant(PEFile file, int rows) { super(file, ID, rows); }
-
- protected void populateFields() {
- Type = readShort();
- Parent = readTableSetIndex(_HasConstant);
- Value = readBlobIndex();
- }
-
- protected int getRowSize() {
- return 2 + file.getTableSetIndexSize(_HasConstant) +
- file.getBlobIndexSize();
- }
-
- public Object getValue() {
- if (Type == Signature.ELEMENT_TYPE_CLASS)
- return null;
- return file.Blob.getConstant(Type, Value);
- }
-
-
- } // class Constant
-
- //##########################################################################
- // table CustomAttribute; ID=0x0c; p95, 21.10
-
- public static final class CustomAttribute extends Table {
- public static final int ID = 0x0c;
-
- /** Index into any metadata table, except the CustomAttribute itself;
- * more precisely - index into HasCustomAttribute table set.
- */
- public int Parent;
-
- /** Index into the CustomAttributeType table set. */
- public int Type;
-
- /** Index into #Blob. */
- public int Value;
-
- public CustomAttribute(PEFile file, int rows) { super(file, ID, rows); }
-
- protected void populateFields() {
- Parent = readTableSetIndex(_HasCustomAttribute);
- Type = readTableSetIndex(_CustomAttributeType);
- Value = readBlobIndex();
- }
-
- protected int getRowSize() {
- return file.getTableSetIndexSize(_HasCustomAttribute) +
- file.getTableSetIndexSize(_CustomAttributeType) +
- file.getBlobIndexSize();
- }
-
- public byte[] getValue() {
- return Value == 0 ? null : file.getBlob(Value);
- }
- } // class CustomAttribute
-
- //##########################################################################
- // table FieldMarshal; ID=0x0d; p105, 21.17
-
- public static final class FieldMarshal extends Table {
- public static final int ID = 0x0d;
-
- /** Index into HasFieldMarshal table set. */
- public int Parent;
-
- /** Index into #Blob. */
- public int NativeType;
-
- public FieldMarshal(PEFile file, int rows) { super(file, ID, rows); }
-
- protected void populateFields() {
- Parent = readTableSetIndex(_HasFieldMarshal);
- NativeType = readBlobIndex();
- }
-
- protected int getRowSize() {
- return file.getTableSetIndexSize(_HasFieldMarshal) +
- file.getBlobIndexSize();
- }
-
- } // class FieldMarshal
-
- //##########################################################################
- // table DeclSecurity; ID=0x0e; p97, 21.11
-
- public static final class DeclSecurity extends Table {
- public static final int ID = 0x0e;
-
- /** 2-byte value. */
- public int Action;
-
- /** Index into HasDeclSecurity table set. */
- public int Parent;
-
- /** Index into #Blob. */
- public int PermissionSet;
-
- public DeclSecurity(PEFile file, int rows) { super(file, ID, rows); }
-
- protected void populateFields() {
- Action = readShort();
- Parent = readTableSetIndex(_HasDeclSecurity);
- PermissionSet = readBlobIndex();
- }
-
- protected int getRowSize() {
- return 2 + file.getTableSetIndexSize(_HasDeclSecurity) +
- file.getBlobIndexSize();
- }
-
- } // class DeclSecurity
-
- //##########################################################################
- // table ClassLayout; ID=0x0f, p92, 21.8
-
- public static final class ClassLayout extends Table {
- public static final int ID = 0x0f;
-
- /** 2-byte constant. */
- public int PackingSize;
-
- /** 4-byte constant. */
- public int ClassSize;
-
- /** Index into TypeDef table. */
- public int Parent;
-
- public ClassLayout(PEFile file, int rows) { super(file, ID, rows); }
-
- protected void populateFields() {
- PackingSize = readShort();
- ClassSize = readInt();
- Parent = readTableIndex(TypeDef.ID);
- }
-
- protected int getRowSize() {
- return 6 + file.getTableIndexSize(TypeDef.ID);
- }
-
- } // class ClassLayout
-
- //##########################################################################
- // table FieldLayout; ID=0x10; p104, 21.16
-
- public static final class FieldLayout extends Table {
- public static final int ID = 0x10;
-
- /** 4-byte constant. */
- public int Offset;
-
- /** Index into the Field table. */
- public int Field;
-
- public FieldLayout(PEFile file, int rows) { super(file, ID, rows); }
-
- protected void populateFields() {
- Offset = readInt();
- Field = readTableIndex(FieldDef.ID);
- }
-
- protected int getRowSize() {
- return 4 + file.getTableIndexSize(FieldDef.ID);
- }
-
- } // class FieldLayout
-
- //##########################################################################
- // table StandAloneSig; ID=0x11; p119, 21.33
-
- public static final class StandAloneSig extends Table {
- public static final int ID = 0x11;
-
- /** Index into #Blob. */
- public int Signature;
-
- public StandAloneSig(PEFile file, int rows) { super(file, ID, rows); }
-
- protected void populateFields() {
- Signature = readBlobIndex();
- }
-
- protected int getRowSize() { return file.getBlobIndexSize(); }
-
- } // class StandAloneSig
-
- //##########################################################################
- // table EventMap; ID=0x12; p99, 21.12
-
- public static final class EventMap extends Table {
- public static final int ID = 0x12;
-
- /** Index into the TypeDef table. */
- public int Parent;
-
- /** Index into the Event table. */
- public int EventList;
-
- public EventMap(PEFile file, int rows) { super(file, ID, rows); }
-
- protected void populateFields() {
- Parent = readTableIndex(TypeDef.ID);
- EventList = readTableIndex(EventDef.ID);
- }
-
- protected int getRowSize() {
- return file.getTableIndexSize(TypeDef.ID) +
- file.getTableIndexSize(EventDef.ID);
- }
-
- } // class EventMap
-
- //##########################################################################
- // table Event; ID=0x14; p99, 21.13
-
- public static final class EventDef extends Table {
- public static final int ID = 0x14;
-
- /** 2-byte bitmask of type EventAttribute (22.1.4). */
- public int EventFlags;
-
- /** Index into #String. */
- public int Name;
-
- /** Index into TypeDefOrRef table set. [This corresponds to the Type
- * of the event; it is not the Type that owns the event]
- */
- public int EventType;
-
- public EventDef(PEFile file, int rows) { super(file, ID, rows); }
-
- protected void populateFields() {
- EventFlags = readShort();
- Name = readStringIndex();
- EventType = readTableSetIndex(_TypeDefOrRef);
- }
-
- protected int getRowSize() {
- return 2 + file.getStringIndexSize() +
- file.getTableSetIndexSize(_TypeDefOrRef);
- }
-
- public String getName() { return file.getString(Name); }
-
- } // class EventDef
-
- //##########################################################################
- // table PropertyMap; ID=0x15; p119, 21.32
-
- public static final class PropertyMap extends Table {
- public static final int ID = 0x15;
-
- /** Index into the TypeDef table. */
- public int Parent;
-
- /** Index into the Property table. */
- public int PropertyList;
-
- public PropertyMap(PEFile file, int rows) { super(file, ID, rows); }
-
- protected void populateFields() {
- Parent = readTableIndex(TypeDef.ID);
- PropertyList = readTableIndex(PropertyDef.ID);
- }
-
- protected int getRowSize() {
- return file.getTableIndexSize(TypeDef.ID) +
- file.getTableIndexSize(PropertyDef.ID);
- }
-
- } // class PropertyMap
-
- //##########################################################################
- // table Property; ID=0x17; p117, 21.31
-
- public static final class PropertyDef extends Table {
- public static final int ID = 0x17;
-
- /** 2-byte bitmask of type PropertyAttributes (22.1.13). */
- public int Flags;
-
- /** Index into #String. */
- public int Name;
-
- /** Index into #Blob. (Indexes the signature in the #Blob) */
- public int Type;
-
- public PropertyDef(PEFile file, int rows) { super(file, ID, rows); }
-
- protected void populateFields() {
- Flags = readShort();
- Name = readStringIndex();
- Type = readBlobIndex();
- }
-
- protected int getRowSize() {
- return 2 + file.getStringIndexSize() +
- file.getBlobIndexSize();
- }
-
- public String getName() { return file.getString(Name); }
-
- public Sig getSignature() { return file.getSignature(Type); }
-
- } // class PropertyDef
-
- //##########################################################################
- // table MethodSemantics; ID=0x18; p114, 21.26
-
- public static final class MethodSemantics extends Table {
- public static final int ID = 0x18;
-
- /** 2-byte bitmask of type MethodSemanticsAttribute (22.1.11). */
- public int Semantics;
-
- /** Index into the Method table. */
- public int Method;
-
- /** Index into Event or Property table (HasSemantics table set). */
- public int Association;
-
- public MethodSemantics(PEFile file, int rows) { super(file, ID, rows); }
-
- protected void populateFields() {
- Semantics = readShort();
- Method = readTableIndex(MethodDef.ID);
- Association = readTableSetIndex(_HasSemantics);
- }
-
- protected int getRowSize() {
- return 2 + file.getTableIndexSize(MethodDef.ID) +
- file.getTableSetIndexSize(_HasSemantics);
- }
-
- public boolean isGetter() { return (Semantics & Getter) != 0; }
- public boolean isSetter() { return (Semantics & Setter) != 0; }
- public boolean isOther() { return (Semantics & Other) != 0; }
- public boolean isAddOn() { return (Semantics & AddOn) != 0; }
- public boolean isRemoveOn() { return (Semantics & RemoveOn) != 0; }
- public boolean isFire() { return (Semantics & Fire) != 0; }
-
- private static final short Setter = (short)0x0001;
- private static final short Getter = (short)0x0002;
- private static final short Other = (short)0x0004;
- private static final short AddOn = (short)0x0008;
- private static final short RemoveOn = (short)0x0010;
- private static final short Fire = (short)0x0020;
-
- } // class MethodSemantics
-
-
- //##########################################################################
- // table MethodImpl; ID=0x19; p113, 21.25
-
- public static final class MethodImpl extends Table {
- public static final int ID = 0x19;
-
- /** Index into the TypeDef table. */
- public int Class;
-
- /** Index into MethodDefOrRef table set. */
- public int MethodBody;
-
- /** Index into MethodDefOrRef table set. */
- public int MethodDeclaration;
-
- public MethodImpl(PEFile file, int rows) { super(file, ID, rows); }
-
- protected void populateFields() {
- Class = readTableIndex(TypeDef.ID);
- MethodBody = readTableSetIndex(_MethodDefOrRef);
- MethodDeclaration = readTableSetIndex(_MethodDefOrRef);
- }
-
- protected int getRowSize() {
- return file.getTableIndexSize(TypeDef.ID) +
- 2 * file.getTableSetIndexSize(_MethodDefOrRef);
- }
-
- } // class MethodImpl
-
- //##########################################################################
- // table ModuleRef; ID=0x1a; p116, 21.28
-
- public static final class ModuleRef extends Table {
- public static final int ID = 0x1a;
-
- /** Index into #String. */
- public int Name;
-
- public ModuleRef(PEFile file, int rows) { super(file, ID, rows); }
-
- protected void populateFields() {
- Name = readStringIndex();
- }
-
- protected int getRowSize() { return file.getStringIndexSize(); }
-
- public String getName() { return file.getString(Name); }
-
- } // class ModuleRef
-
- //##########################################################################
- // table TypeSpec; ID=0x1b; p126, 21.36
-
- public static final class TypeSpec extends Table {
- public static final int ID = 0x1b;
-
- /** Index into #Blob, where the blob is formatted
- * as specified in 22.2.15
- */
- public int Signature;
-
- public TypeSpec(PEFile file, int rows) { super(file, ID, rows); }
-
- protected void populateFields() {
- Signature = readBlobIndex();
- }
-
- protected int getRowSize() { return file.getBlobIndexSize(); }
-
- public Sig getSignature() { return file.getSignature(Signature); }
- } // class TypeSpec
-
- //##########################################################################
- // table ImplMap; ID=0x1c; p107, 21.20
-
- public static final class ImplMap extends Table {
- public static final int ID = 0x1c;
-
- /** 2-byte bitmask of type PInvokeAttributes (22.1.7). */
- public int MappingFlags;
-
- /** Index into MemberForwarded table set. */
- public int MemberForwarded;
-
- /** Index into #String. */
- public int ImportName;
-
- /** Index into the ModuleRef table. */
- public int ImportScope;
-
- public ImplMap(PEFile file, int rows) { super(file, ID, rows); }
-
- protected void populateFields() {
- MappingFlags = readShort();
- MemberForwarded = readTableSetIndex(_MemberForwarded);
- ImportName = readStringIndex();
- ImportScope = readTableIndex(ModuleRef.ID);
- }
-
- protected int getRowSize() {
- return 2 + file.getTableSetIndexSize(_MemberForwarded) +
- file.getStringIndexSize() +
- file.getTableIndexSize(ModuleRef.ID);
- }
-
- } // class ImplMap
-
- //##########################################################################
- // table FieldRVA; ID=0x1d; p106, 21.18
-
- public static final class FieldRVA extends Table {
- public static final int ID = 0x1d;
-
- /** 4-byte constant. */
- public int RVA;
-
- /** Index into the Field table. */
- public int Field;
-
- public FieldRVA(PEFile file, int rows) { super(file, ID, rows); }
-
- protected void populateFields() {
- RVA = readInt();
- Field = readTableIndex(Table.FieldDef.ID);
- }
-
- protected int getRowSize() {
- return 4 + file.getTableIndexSize(FieldDef.ID);
- }
-
- }
-
- //##########################################################################
- // table Assembly; ID=0x20; p90, 21.2
-
- public static final class AssemblyDef extends Table {
- public static final int ID = 0x20;
-
- /** 4-byte constant of type AssemblyHashAlgorithm, clause 22.1.1 */
- public int HashAlgId;
-
- /** 2-byte constant */
- public int MajorVersion;
-
- /** 2-byte constant */
- public int MinorVersion;
-
- /** 2-byte constant */
- public int BuildNumber;
-
- /** 2-byte constant */
- public int RevisionNumber;
-
- /** 4-byte constant */
- public int Flags;
-
- /** index into #Blob */
- public int PublicKey;
-
- /** index into #String */
- public int Name;
-
- /** index into #String */
- public int Culture;
-
- public AssemblyDef(PEFile file, int rows) { super(file, ID, rows); }
-
- protected void populateFields() {
- HashAlgId = readInt();
- MajorVersion = readShort();
- MinorVersion = readShort();
- BuildNumber = readShort();
- RevisionNumber = readShort();
- Flags = readInt();
- PublicKey = readBlobIndex();
- Name = readStringIndex();
- Culture = readStringIndex();
- }
-
- protected int getRowSize() {
- return 16 + file.getBlobIndexSize() + 2*file.getStringIndexSize();
- }
-
- } // class AssemblyDef
-
- //##########################################################################
- // table AssemblyProcessor; ID=0x21; p91, 21.4
-
- public static final class AssemblyProcessor extends Table {
- public static final int ID = 0x21;
-
- /** 4-byte constant. */
- public int Processor;
-
- public AssemblyProcessor(PEFile file, int rows) { super(file, ID, rows); }
-
- protected void populateFields() {
- Processor = readInt();
- }
-
- protected int getRowSize() { return 4; }
-
- }
-
- //##########################################################################
- // table AssemblyOS; ID = 0x22; p90, 21.3
-
- public static final class AssemblyOS extends Table {
- public static final int ID = 0x22;
-
- /** 4-byte constant. */
- public int OSPlatformID;
-
- /** 4-byte constant. */
- public int OSMajorVersion;
-
- /** 4-byte constant. */
- public int OSMinorVersion;
-
- public AssemblyOS(PEFile file, int rows) { super(file, ID, rows); }
-
- protected void populateFields() {
- OSPlatformID = readInt();
- OSMajorVersion = readInt();
- OSMinorVersion = readInt();
- }
-
- protected int getRowSize() { return 12; }
-
- }
-
- //##########################################################################
- // table AssemblyRef; ID = 0x23; pp91, 21.5
-
- public static final class AssemblyRef extends Table {
- public static final int ID = 0x23;
-
- /** 2-byte constant. */
- public int MajorVersion;
-
- /** 2-byte constant. */
- public int MinorVersion;
-
- /** 2-byte constant. */
- public int BuildNumber;
-
- /** 2-byte constant. */
- public int RevisionNumber;
-
- /** 4-byte bitmask of type AssemblyFlags (22.1.2). */
- public int Flags;
-
- /** index into #Blob. */
- public int PublicKeyOrToken;
-
- /** index into #String. */
- public int Name;
-
- /** index into #String. */
- public int Culture;
-
- /** index into #Blob. */
- public int HashValue;
-
- public AssemblyRef(PEFile file, int rows) { super(file, ID, rows); }
-
- protected void populateFields() {
- MajorVersion = readShort();
- MinorVersion = readShort();
- BuildNumber = readShort();
- RevisionNumber = readShort();
- Flags = readInt();
- PublicKeyOrToken = readBlobIndex();
- Name = readStringIndex();
- Culture = readStringIndex();
- HashValue = readBlobIndex();
- }
-
- protected int getRowSize() {
- return 12 + 2*file.getBlobIndexSize() + 2*file.getStringIndexSize();
- }
-
- public String getName() { return file.getString(Name); }
- }
-
- //##########################################################################
- // table AssemblyRefProcessor; ID=0x24; p92, 21.7
-
- public static final class AssemblyRefProcessor extends Table {
- public static final int ID = 0x24;
-
- /** 4-byte constant. */
- public int Processor;
-
- /** Index into the AssemblyRef table. */
- public int AssemblyRef;
-
- public AssemblyRefProcessor(PEFile file, int rows) { super(file, ID, rows); }
-
- protected void populateFields() {
- Processor = readInt();
- AssemblyRef = readTableIndex(Table.AssemblyRef.ID);
- }
-
- protected int getRowSize() {
- return 4 + file.getTableIndexSize(Table.AssemblyRef.ID);
- }
-
- } // class AssemblyRefProcessor
-
- //##########################################################################
- // table AssemblyRefOS; ID=0x25; p92, 21.6
-
- public static final class AssemblyRefOS extends Table {
- public static final int ID = 0x25;
-
- /** 4-byte constant. */
- public int OSPlatformId;
-
- /** 4-byte constant. */
- public int OSMajorVersion;
-
- /** 4-byte constant. */
- public int OSMinorVersion;
-
- /** Index into the AssemblyRef table. */
- public int AssemblyRef;
-
- public AssemblyRefOS(PEFile file, int rows) { super(file, ID, rows); }
-
- protected void populateFields() {
- OSPlatformId = readInt();
- OSMajorVersion = readInt();
- OSMinorVersion = readInt();
- AssemblyRef = readTableIndex(Table.AssemblyRef.ID);
- }
-
- protected int getRowSize() {
- return 12 + file.getTableIndexSize(Table.AssemblyRef.ID);
- }
-
- } // class AssemblyRefOS
-
- //##########################################################################
- // table File; ID=0x26; p106, 21.19
-
- public static final class FileDef extends Table {
- public static final int ID = 0x26;
-
- /** 4-byte bitmask of type FileAttributes (22.1.6). */
- public int Flags;
-
- /** Index into #String. */
- public int Name;
-
- /** Index into #Blob. */
- public int HashValue;
-
- public FileDef(PEFile file, int rows) { super(file, ID, rows); }
-
- protected void populateFields() {
- Flags = readInt();
- Name = readStringIndex();
- HashValue = readBlobIndex();
- }
-
- protected int getRowSize() {
- return 4 + file.getStringIndexSize() + file.getBlobIndexSize();
- }
-
- public String getName() {
- return file.getString(Name);
- }
-
- } // class FileDef
-
- //##########################################################################
- // table ExportedType; ID=0x27; p100, 21.14
-
- public static final class ExportedType extends Table {
- public static final int ID = 0x27;
-
- /** 4-byte bitmask of type TypeAttribute (22.1.6). */
- public int Flags;
-
- /** 4-byte index into a TypeDef table of
- * another module in this assembly.
- */
- public int TypeDefId;
-
- /** Index into #String. */
- public int TypeName;
-
- /** Index into #String. */
- public int TypeNamespace;
-
- /** Index into one of two tables as follows:
- * - 'File' table, where that entry says which module
- * in the current assembly holds the TypeDef
- * - 'ExportedType' table, where that entry is
- * the enclosing Type of the current nested Type
- */
- public int Implementation;
-
- public ExportedType(PEFile file, int rows) { super(file, ID, rows); }
-
- protected void populateFields() {
- Flags = readInt();
- TypeDefId = readInt();
- TypeName = readStringIndex();
- TypeNamespace = readStringIndex();
- Implementation = readTableSetIndex(_Implementation);
- }
-
- protected int getRowSize() {
- return 8 + 2*file.getStringIndexSize() +
- file.getTableSetIndexSize(_Implementation);
- }
-
- public String getFullName() {
- String namespace = file.getString(TypeNamespace);
- return namespace.length() == 0 ? file.getString(TypeName)
- : namespace + "." + file.getString(TypeName);
- }
-
- } // class ExportedType
-
- //##########################################################################
- // table ManifestResource; ID=0x28; p108, 21.22
-
- public static final class ManifestResource extends Table {
- public static final int ID = 0x28;
-
- /** 4-byte constant. */
- public int Offset;
-
- /** 4-byte bitmask of type ManifestResourceAttributes (22.1.8). */
- public int Flags;
-
- /** Index into #String. */
- public int Name;
-
- /** Index into the Implementation table set. */
- public int Implementation;
-
- public ManifestResource(PEFile file, int rows) { super(file, ID, rows); }
-
- protected void populateFields() {
- Offset = readInt();
- Flags = readInt();
- Name = readStringIndex();
- Implementation = readTableSetIndex(_Implementation);
- }
-
- protected int getRowSize() {
- return 8 + file.getStringIndexSize() +
- file.getTableSetIndexSize(_Implementation);
- }
-
- } // class ManifestResource
-
- //##########################################################################
- // table NestedClass; ID=0x29; p116, 21.29
-
- public static final class NestedClass extends Table {
- public static final int ID = 0x29;
-
- /** Index into the TypeDef table. */
- public int NestedClass;
-
- /** Index into the TypeDef table. */
- public int EnclosingClass;
-
- public NestedClass(PEFile file, int rows) { super(file, ID, rows); }
-
- protected void populateFields() {
- NestedClass = readTableIndex(TypeDef.ID);
- EnclosingClass = readTableIndex(TypeDef.ID);
- }
-
- protected int getRowSize() {
- return 2 * file.getTableIndexSize(TypeDef.ID);
- }
-
- } // class NestedClass
-
- //##########################################################################
- // table GenericParam; ID=0x2a; p137, 22.20
-
- public static final class GenericParam extends Table {
- public static final int ID = 0x2a;
-
- public int Number;
- public int Flags;
- public int Owner; // a TypeOrMethodDef (Sec 24.2.6) coded index
- public int Name; // a non-null index into the String heap
-
- private java.util.Map /*<Integer, java.util.Set<Integer>>*/ GenericParamIdxesForMethodDefIdx =
- new java.util.HashMap();
- private java.util.Map /*<Integer, java.util.Set<Integer>>*/ GenericParamIdxesForTypeDefIdx =
- new java.util.HashMap();
-
- private void addToMap(int key, int value, java.util.Map IdxesForIdx) {
- java.util.Set /*<Integer>*/ bucket = (java.util.Set)IdxesForIdx.get(Integer.valueOf(key));
- if(bucket == null) {
- bucket = new java.util.HashSet();
- IdxesForIdx.put(Integer.valueOf(key), bucket);
- }
- bucket.add(Integer.valueOf(value));
- }
-
- /** Indexes of rows in the GenericParam table representing type parameters defined by the type given by
- * its row index TypeDefIdx (in the TypeDef table).
- * No need to position the current record before invoking this method. */
- public int[] getTVarIdxes(int TypeDefIdx) {
- if(!mapsPopulated) {
- initMaps();
- }
- java.util.Set bucket = (java.util.Set)GenericParamIdxesForTypeDefIdx.get(Integer.valueOf(TypeDefIdx));
- if(bucket == null) {
- bucket = java.util.Collections.EMPTY_SET;
- }
- int[] res = new int[bucket.size()];
- java.util.Iterator /*<Integer>*/ it = bucket.iterator();
- for(int i = 0; i < bucket.size(); i++) {
- res[i] = ((Integer)it.next()).intValue();
- }
- return res;
- }
-
- /** Indexes of rows in the GenericParam table representing type parameters defined by the method given by
- * its row index MethodDefIdx (in the MethodDef table)
- * No need to position the current record before invoking this method. */
- public int[] getMVarIdxes(int MethodDefIdx) {
- if(!mapsPopulated) {
- initMaps();
- }
- java.util.Set bucket = (java.util.Set)GenericParamIdxesForMethodDefIdx.get(Integer.valueOf(MethodDefIdx));
- if(bucket == null) {
- bucket = java.util.Collections.EMPTY_SET;
- }
- int[] res = new int[bucket.size()];
- java.util.Iterator /*<Integer>*/ it = bucket.iterator();
- for(int i = 0; i < bucket.size(); i++) {
- res[i] = ((Integer)it.next()).intValue();
- }
- return res;
- }
-
- private boolean mapsPopulated = false;
-
- private void initMaps() {
- mapsPopulated = true;
- for (int currentParamRow = 1; currentParamRow <= rows; currentParamRow++) {
- int currentOwner = file.GenericParam(currentParamRow).Owner;
- int targetTableId = Table.getTableId(Table._TypeOrMethodDef, currentOwner);
- int targetRow = currentOwner >> Table.NoBits[Table._TypeOrMethodDef];
- if(targetTableId == TypeDef.ID){
- addToMap(targetRow, currentParamRow, GenericParamIdxesForTypeDefIdx);
- } else if(targetTableId == MethodDef.ID) {
- addToMap(targetRow, currentParamRow, GenericParamIdxesForMethodDefIdx);
- } else {
- throw new RuntimeException();
- }
- }
- }
-
- public GenericParam(PEFile file, int rows) {
- super(file, ID, rows);
- this.newMapping = true;
- }
-
- protected void populateFields() {
- Number = readShort();
- Flags = readShort();
- Owner = readTableSetIndex(_TypeOrMethodDef);
- Name = readStringIndex();
- }
-
- /** This method assumes populateFields() has just been called to set Flags for the current record. */
- public boolean isInvariant() {
- /* 23.1.7 Flags for Generic Parameters [GenericParamAttributes] */
- return (Flags & 0x0003) == 0;
- }
-
- /** This method assumes populateFields() has just been called to set Flags for the current record. */
- public boolean isCovariant() {
- /* 23.1.7 Flags for Generic Parameters [GenericParamAttributes] */
- return (Flags & 0x0003) == 1;
- }
-
- /** This method assumes populateFields() has been just called to set Flags for the current record */
- public boolean isContravariant() {
-      /* 23.1.7 Flags for Generic Parameters [GenericParamAttributes] */
- return (Flags & 0x0003) == 2;
- }
-
- /** This method assumes populateFields() has been just called to set Flags for the current record */
- public boolean isReferenceType() {
-      /* 23.1.7 Flags for Generic Parameters [GenericParamAttributes] */
- return (Flags & 0x001C) == 4;
- }
-
- /** This method assumes populateFields() has been just called to set Flags for the current record */
- public boolean isValueType() {
-      /* 23.1.7 Flags for Generic Parameters [GenericParamAttributes] */
- return (Flags & 0x001C) == 8;
- }
-
- /** This method assumes populateFields() has been just called to set Flags for the current record */
- public boolean hasDefaultConstructor() {
-      /* 23.1.7 Flags for Generic Parameters [GenericParamAttributes] */
- return (Flags & 0x001C) == 0x0010;
- }
-
- protected int getRowSize() {
- return 2 + 2 + file.getTableSetIndexSize(_TypeOrMethodDef) + file.getStringIndexSize();
- /* Columns:
- Number (2 bytes),
- Flags (2 bytes),
- Owner (coded token of type TypeOrMethodDef),
- Name (offset in the #Strings stream).
- */
- }
-
- public String getName() {
- return file.getString(Name);
- }
-
- } // class GenericParam
-
-
- //##########################################################################
- // table GenericParamConstraint; ID=0x2c; p139, 22.20
-
- public static final class GenericParamConstraint extends Table {
- public static final int ID = 0x2c;
-
- public int Owner; // an index into the GenericParam table
- public int Constraint; // a TypeDefOrRef (Sec 24.2.6) coded index
-
- public GenericParamConstraint(PEFile file, int rows) {
- super(file, ID, rows);
- this.newMapping = true;
- }
-
- protected void populateFields() {
- Owner = readTableIndex(GenericParam.ID);
- Constraint = readTableSetIndex(_TypeDefOrRef);
- }
-
- protected int getRowSize() {
- return file.getTableIndexSize(GenericParam.ID) + file.getTableSetIndexSize(_TypeDefOrRef);
- /* Columns:
- Owner (RID in the GenericParam table),
- Constraint (coded token of type TypeDefOrRef).
- */
- }
-
- private boolean mapPopulated = false;
-
- /** Indexes of rows (in the TypeDef, TypeRef, or TypeSpec tables) denoting the base class (if any)
- * and interfaces (if any) that the generic parameter (of TVar or MVar kind) should support, where
- * that generic parameter is represented by its index into the GenericParam table. */
- public int[] getTypeDefOrRefIdxes(int genParamIdx) {
- if(!mapPopulated) {
- initMap();
- }
- java.util.Set bucket = (java.util.Set)TypeDefOrRefIdxesForGenParamIdx.get(Integer.valueOf(genParamIdx));
- if(bucket == null) {
- bucket = java.util.Collections.EMPTY_SET;
- }
- int[] res = new int[bucket.size()];
- java.util.Iterator /*<Integer>*/ it = bucket.iterator();
- for(int i = 0; i < bucket.size(); i++) {
- res[i] = ((Integer)it.next()).intValue();
- }
- return res;
- }
-
-
- private void initMap() {
- mapPopulated = true;
- for (int currentConstraintRow = 1; currentConstraintRow <= rows; currentConstraintRow++) {
- int targetGenericParam = file.GenericParamConstraint(currentConstraintRow).Owner;
- int value = file.GenericParamConstraint.Constraint;
- addToMap(targetGenericParam, value);
- }
- }
-
- private java.util.Map /*<Integer, java.util.Set<Integer>>*/ TypeDefOrRefIdxesForGenParamIdx =
- new java.util.HashMap();
-
- private void addToMap(int key, int value) {
- java.util.Set /*<Integer>*/ bucket = (java.util.Set)TypeDefOrRefIdxesForGenParamIdx.get(Integer.valueOf(key));
- if(bucket == null) {
- bucket = new java.util.HashSet();
- TypeDefOrRefIdxesForGenParamIdx.put(Integer.valueOf(key), bucket);
- }
- bucket.add(Integer.valueOf(value));
- }
-
- } // class GenericParamConstraint
-
- //##########################################################################
- // table MethodSpec; ID=0x2b; p149, in Sec. 22.29 of Partition II
-
- public static final class MethodSpec extends Table {
- public static final int ID = 0x2b;
-
- /* an index into the MethodDef or MemberRef table, specifying which generic method this row is an instantiation of.
- A MethodDefOrRef (Sec. 24.2.6) coded index */
- public int Method;
-
- /* an index into the Blob heap (Sec. 23.2.15), holding the signature of this instantiation */
- public int Instantiation;
-
- public MethodSpec(PEFile file, int rows) {
- super(file, ID, rows);
- this.newMapping = true;
- }
-
- protected void populateFields() {
- Method = readTableSetIndex(_MethodDefOrRef);
- Instantiation = readBlobIndex();
- }
-
- protected int getRowSize() {
- return file.getTableSetIndexSize(_MethodDefOrRef) + file.getBlobIndexSize();
- }
-
-
- } // class MethodSpec
- //##########################################################################
-
-} // class Table
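
The removed GenericParam code above decodes each row's Owner, a TypeOrMethodDef coded index, and buckets the row under either a TypeDef or a MethodDef. The sketch below is not part of the patch; it redoes that decoding in plain Scala under the ECMA-335 convention that a two-table coded index uses its low bit as the tag (0 = TypeDef, 1 = MethodDef) and the remaining bits as the row, with table ids 0x02 and 0x06. The owner values fed to it are invented.

    object CodedIndexSketch {
      val TypeDefId   = 0x02                       // metadata table ids per ECMA-335
      val MethodDefId = 0x06
      // TypeOrMethodDef references two tables, so one tag bit: 0 = TypeDef, 1 = MethodDef
      def decode(owner: Int): (Int, Int) =
        (if ((owner & 1) == 0) TypeDefId else MethodDefId, owner >> 1)
      // group GenericParam row numbers (1-based) by owning TypeDef/MethodDef row,
      // the same shape initMaps builds with its HashMap-of-HashSet buckets
      def buckets(owners: Seq[Int]): (Map[Int, Set[Int]], Map[Int, Set[Int]]) = {
        val decoded = owners.zipWithIndex map { case (o, i) => (decode(o), i + 1) }
        def group(xs: Seq[((Int, Int), Int)]) =
          xs groupBy { case ((_, row), _) => row } map { case (r, v) => (r, v.map(_._2).toSet) }
        val (tv, mv) = decoded partition { case ((id, _), _) => id == TypeDefId }
        (group(tv), group(mv))
      }
    }
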
diff --git a/src/partest/README b/src/partest/README
index 0434aa7499..17594dbb1e 100644
--- a/src/partest/README
+++ b/src/partest/README
@@ -24,7 +24,6 @@ Other arguments:
* --run next files test the interpreter and all backends
* --jvm next files test the JVM backend
* --res next files test the resident compiler
- * --buildmanager next files test the build manager
* --shootout next files are shootout tests
* --script next files test the script runner
* ''-Dpartest.scalac_opts=...'' -> add compiler options
diff --git a/src/partest/scala/tools/partest/CompilerTest.scala b/src/partest/scala/tools/partest/CompilerTest.scala
index 848deef8c5..df4a81dee2 100644
--- a/src/partest/scala/tools/partest/CompilerTest.scala
+++ b/src/partest/scala/tools/partest/CompilerTest.scala
@@ -21,7 +21,7 @@ abstract class CompilerTest extends DirectTest {
lazy val global: Global = newCompiler()
lazy val units: List[global.CompilationUnit] = compilationUnits(global)(sources: _ *)
import global._
- import definitions._
+ import definitions.{ compilerTypeFromTag }
override def extraSettings = "-usejavacp -d " + testOutput.path
@@ -32,7 +32,6 @@ abstract class CompilerTest extends DirectTest {
def sources: List[String] = List(code)
// Utility functions
-
class MkType(sym: Symbol) {
def apply[M](implicit t: ru.TypeTag[M]): Type =
if (sym eq NoSymbol) NoType
@@ -50,7 +49,7 @@ abstract class CompilerTest extends DirectTest {
}
class SymsInPackage(pkgName: String) {
- def pkg = rootMirror.getRequiredPackage(pkgName)
+ def pkg = rootMirror.getPackage(pkgName)
def classes = allMembers(pkg) filter (_.isClass)
def modules = allMembers(pkg) filter (_.isModule)
def symbols = classes ++ terms filterNot (_ eq NoSymbol)
diff --git a/src/partest/scala/tools/partest/DirectTest.scala b/src/partest/scala/tools/partest/DirectTest.scala
index e2dac2fd55..7f9ca3a321 100644
--- a/src/partest/scala/tools/partest/DirectTest.scala
+++ b/src/partest/scala/tools/partest/DirectTest.scala
@@ -6,7 +6,6 @@
package scala.tools.partest
import scala.tools.nsc._
-import io.Directory
import util.{ SourceFile, BatchSourceFile, CommandLineParser }
import reporters.{Reporter, ConsoleReporter}
@@ -21,8 +20,8 @@ abstract class DirectTest extends App {
def show(): Unit
// the test file or dir, and output directory
- def testPath = io.File(sys.props("partest.test-path"))
- def testOutput = io.Directory(sys.props("partest.output"))
+ def testPath = SFile(sys.props("partest.test-path"))
+ def testOutput = Directory(sys.props("partest.output"))
// override to add additional settings with strings
def extraSettings: String = ""
@@ -42,10 +41,7 @@ abstract class DirectTest extends App {
newCompiler(settings)
}
- def newCompiler(settings: Settings): Global = {
- if (settings.Yrangepos.value) new Global(settings, reporter(settings)) with interactive.RangePositions
- else new Global(settings, reporter(settings))
- }
+ def newCompiler(settings: Settings): Global = Global(settings, reporter(settings))
def reporter(settings: Settings): Reporter = new ConsoleReporter(settings)
diff --git a/src/partest/scala/tools/partest/IcodeTest.scala b/src/partest/scala/tools/partest/IcodeTest.scala
index f5333cc5f9..b12ec0de61 100644
--- a/src/partest/scala/tools/partest/IcodeTest.scala
+++ b/src/partest/scala/tools/partest/IcodeTest.scala
@@ -5,9 +5,7 @@
package scala.tools.partest
-import scala.tools.nsc._
-import nest.FileUtil._
-import io.Directory
+import scala.tools.partest.nest.FileUtil.compareContents
/** A trait for testing icode. All you need is this in a
* partest source file:
diff --git a/src/partest/scala/tools/partest/JavapTest.scala b/src/partest/scala/tools/partest/JavapTest.scala
new file mode 100644
index 0000000000..3cb3dc6ca8
--- /dev/null
+++ b/src/partest/scala/tools/partest/JavapTest.scala
@@ -0,0 +1,26 @@
+
+package scala.tools.partest
+
+import scala.util.{Try,Success,Failure}
+import java.lang.System.{out => sysout}
+
+/** A trait for testing repl's javap command
+ * or possibly examining its output.
+ */
+abstract class JavapTest extends ReplTest {
+
+  /** Your Assertion Here, whatever you want to affirm.
+ * Assertions must be satisfied by all flavors of javap
+ * and should not be fragile with respect to compiler output.
+ */
+ def yah(res: Seq[String]): Boolean
+
+ def baddies = List(":javap unavailable", ":javap not yet working")
+
+ // give it a pass if javap is broken
+ override def show() = try {
+ val res = eval().toSeq
+ val unsupported = res exists (s => baddies exists (s contains _))
+ assert ((unsupported || yah(res)), res.mkString("","\n","\n"))
+ } catch { case ae: AssertionError => ae.printStackTrace(sysout) }
+}
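
A hypothetical usage of the new JavapTest trait, not included in the patch: the assertion only inspects the javap listing, and the `code` member is assumed to be the usual ReplTest/DirectTest hook for the REPL transcript. The object name, the :javap target, and the matched string are invented.

    object Test extends JavapTest {
      // transcript fed to the REPL; `code` is assumed to come from ReplTest/DirectTest
      def code = """
        |:javap -p scala.Option
      """.stripMargin
      // pass as soon as any line of the javap output mentions "apply"
      def yah(res: Seq[String]) = res exists (_ contains "apply")
    }
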
diff --git a/src/partest/scala/tools/partest/PartestDefaults.scala b/src/partest/scala/tools/partest/PartestDefaults.scala
index a21c602d14..8478edeb4d 100644
--- a/src/partest/scala/tools/partest/PartestDefaults.scala
+++ b/src/partest/scala/tools/partest/PartestDefaults.scala
@@ -1,15 +1,11 @@
package scala.tools
package partest
-import nsc.io.{ File, Path, Directory }
-import scala.tools.util.PathResolver
-import nsc.Properties.{ propOrElse, propOrNone, propOrEmpty }
-import java.lang.Runtime.getRuntime
+import scala.concurrent.duration.Duration
+import scala.tools.nsc.Properties.{ propOrElse, propOrNone, propOrEmpty }
+import java.lang.Runtime.{ getRuntime => runtime }
object PartestDefaults {
- import nsc.Properties._
- private def wrapAccessControl[T](body: => Option[T]): Option[T] =
- try body catch { case _: java.security.AccessControlException => None }
def testRootName = propOrNone("partest.root")
def srcDirName = propOrElse("partest.srcdir", "files")
@@ -25,7 +21,8 @@ object PartestDefaults {
def testBuild = propOrNone("partest.build")
def errorCount = propOrElse("partest.errors", "0").toInt
- def numThreads = propOrNone("partest.threads") map (_.toInt) getOrElse getRuntime.availableProcessors
+ def numThreads = propOrNone("partest.threads") map (_.toInt) getOrElse runtime.availableProcessors
+ def waitTime = propOrNone("partest.timeout") map (Duration.apply) getOrElse Duration("4 hours")
- def timeout = "1200000"
+ //def timeout = "1200000" // per-test timeout
}
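
A small sketch, not in the patch, of how the new waitTime default behaves: scala.concurrent.duration.Duration parses strings such as "4 hours", and a partest.timeout system property (read via propOrNone above) overrides the default.

    import scala.concurrent.duration.Duration
    object WaitTimeSketch extends App {
      val waitTime = sys.props.get("partest.timeout") map (Duration(_)) getOrElse Duration("4 hours")
      println(waitTime.toMillis)   // 14400000 when no property is set
    }
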
diff --git a/src/partest/scala/tools/partest/PartestTask.scala b/src/partest/scala/tools/partest/PartestTask.scala
index 0199400ada..b5b09a753a 100644
--- a/src/partest/scala/tools/partest/PartestTask.scala
+++ b/src/partest/scala/tools/partest/PartestTask.scala
@@ -10,14 +10,10 @@ package scala.tools
package partest
import scala.util.Properties.setProp
-import scala.tools.nsc.io.{ Directory, Path => SPath }
-import nsc.util.ClassPath
-import util.PathResolver
import scala.tools.ant.sabbus.CompilationPathProperty
-import java.io.File
import java.lang.reflect.Method
import org.apache.tools.ant.Task
-import org.apache.tools.ant.types.{Path, Reference, FileSet}
+import org.apache.tools.ant.types.{ Reference, FileSet}
import org.apache.tools.ant.types.Commandline.Argument
/** An Ant task to execute the Scala test suite (NSC).
@@ -26,97 +22,42 @@ import org.apache.tools.ant.types.Commandline.Argument
* - `srcdir`,
* - `classpath`,
* - `classpathref`,
- * - `showlog`,
- * - `showdiff`,
* - `erroronfailed`,
* - `javacmd`,
* - `javaccmd`,
* - `scalacopts`,
- * - `timeout`,
* - `debug`,
* - `junitreportdir`.
*
* It also takes the following parameters as nested elements:
* - `compilationpath`.
- * - `postests`,
- * - `negtests`,
- * - `runtests`,
- * - `jvmtests`,
- * - `residenttests`,
- * - `buildmanagertests`,
- * - `shootouttests`,
- * - `scalaptests`,
- * - `scalachecktests`,
- * - `specializedtests`,
- * - `instrumentedtests`,
- * - `presentationtests`,
- * - `scripttests`.
*
 * @author Philipp Haller
*/
class PartestTask extends Task with CompilationPathProperty {
-
- def addConfiguredPosTests(input: FileSet) {
- posFiles = Some(input)
- }
-
- def addConfiguredNegTests(input: FileSet) {
- negFiles = Some(input)
- }
-
- def addConfiguredRunTests(input: FileSet) {
- runFiles = Some(input)
- }
-
- def addConfiguredJvmTests(input: FileSet) {
- jvmFiles = Some(input)
- }
-
- def addConfiguredResidentTests(input: FileSet) {
- residentFiles = Some(input)
- }
-
- def addConfiguredBuildManagerTests(input: FileSet) {
- buildManagerFiles = Some(input)
- }
-
- def addConfiguredScalacheckTests(input: FileSet) {
- scalacheckFiles = Some(input)
- }
-
- def addConfiguredScriptTests(input: FileSet) {
- scriptFiles = Some(input)
- }
-
- def addConfiguredShootoutTests(input: FileSet) {
- shootoutFiles = Some(input)
- }
-
- def addConfiguredScalapTests(input: FileSet) {
- scalapFiles = Some(input)
- }
-
- def addConfiguredSpecializedTests(input: FileSet) {
- specializedFiles = Some(input)
- }
-
- def addConfiguredInstrumentedTests(input: FileSet) {
- instrumentedFiles = Some(input)
- }
-
- def addConfiguredPresentationTests(input: FileSet) {
- presentationFiles = Some(input)
- }
-
- def addConfiguredAntTests(input: FileSet) {
- antFiles = Some(input)
- }
-
+ type Path = org.apache.tools.ant.types.Path
+
+ private var kinds: List[String] = Nil
+ private var classpath: Option[Path] = None
+ private var debug = false
+ private var errorOnFailed: Boolean = true
+ private var jUnitReportDir: Option[File] = None
+ private var javaccmd: Option[File] = None
+ private var javacmd: Option[File] = Option(sys.props("java.home")) map (x => new File(x, "bin/java"))
+ private var scalacArgs: Option[Seq[Argument]] = None
+ private var srcDir: Option[String] = None
+ private var colors: Int = 0
def setSrcDir(input: String) {
srcDir = Some(input)
}
+ def setColors(input: String) {
+ try colors = input.toInt catch { case _: NumberFormatException => () }
+ if (colors > 0)
+ sys.props("partest.colors") = colors.toString
+ }
+
def setClasspath(input: Path) {
if (classpath.isEmpty)
classpath = Some(input)
@@ -132,15 +73,6 @@ class PartestTask extends Task with CompilationPathProperty {
def setClasspathref(input: Reference) {
createClasspath().setRefid(input)
}
-
- def setShowLog(input: Boolean) {
- showLog = input
- }
-
- def setShowDiff(input: Boolean) {
- showDiff = input
- }
-
def setErrorOnFailed(input: Boolean) {
errorOnFailed = input
}
@@ -149,6 +81,10 @@ class PartestTask extends Task with CompilationPathProperty {
javacmd = Some(input)
}
+ def setKinds(input: String) {
+ kinds = words(input)
+ }
+
def setJavacCmd(input: File) {
javaccmd = Some(input)
}
@@ -164,10 +100,6 @@ class PartestTask extends Task with CompilationPathProperty {
a
}
- def setTimeout(delay: String) {
- timeout = Some(delay)
- }
-
def setDebug(input: Boolean) {
debug = input
}
@@ -176,176 +108,35 @@ class PartestTask extends Task with CompilationPathProperty {
jUnitReportDir = Some(input)
}
- private var classpath: Option[Path] = None
- private var srcDir: Option[String] = None
- private var javacmd: Option[File] = None
- private var javaccmd: Option[File] = None
- private var showDiff: Boolean = false
- private var showLog: Boolean = false
- private var runFailed: Boolean = false
- private var posFiles: Option[FileSet] = None
- private var negFiles: Option[FileSet] = None
- private var runFiles: Option[FileSet] = None
- private var jvmFiles: Option[FileSet] = None
- private var residentFiles: Option[FileSet] = None
- private var buildManagerFiles: Option[FileSet] = None
- private var scalacheckFiles: Option[FileSet] = None
- private var scriptFiles: Option[FileSet] = None
- private var shootoutFiles: Option[FileSet] = None
- private var scalapFiles: Option[FileSet] = None
- private var specializedFiles: Option[FileSet] = None
- private var instrumentedFiles: Option[FileSet] = None
- private var presentationFiles: Option[FileSet] = None
- private var antFiles: Option[FileSet] = None
- private var errorOnFailed: Boolean = false
- private var scalacArgs: Option[Seq[Argument]] = None
- private var timeout: Option[String] = None
- private var jUnitReportDir: Option[File] = None
- private var debug = false
-
- def fileSetToDir(fs: FileSet) = Directory(fs getDir getProject)
- def fileSetToArray(fs: FileSet): Array[SPath] = {
- val root = fileSetToDir(fs)
- (fs getDirectoryScanner getProject).getIncludedFiles map (root / _)
- }
-
- private def getFiles(fileSet: Option[FileSet]): Array[File] = fileSet match {
- case None => Array()
- case Some(fs) => fileSetToArray(fs) filterNot (_ hasExtension "log") map (_.jfile)
- }
-
- private def getFilesAndDirs(fileSet: Option[FileSet]): Array[File] = fileSet match {
- case None => Array()
- case Some(fs) =>
- def shouldExclude(name: String) = (name endsWith ".obj") || (name startsWith ".")
- // println("----> " + fileSet)
-
- val fileTests = getFiles(Some(fs)) filterNot (x => shouldExclude(x.getName))
- val dirResult = getDirs(Some(fs)) filterNot (x => shouldExclude(x.getName))
- // println("dirs: " + dirResult.toList)
- // println("files: " + fileTests.toList)
-
- dirResult ++ fileTests
- }
-
- private def getDirs(fileSet: Option[FileSet]): Array[File] = fileSet match {
- case None => Array()
- case Some(fs) =>
- def shouldExclude(name: String) = (name endsWith ".obj") || (name startsWith ".")
-
- val dirTests: Iterator[SPath] = fileSetToDir(fs).dirs filterNot (x => shouldExclude(x.name))
- val dirResult = dirTests.toList.toArray map (_.jfile)
-
- dirResult
- }
-
-
- private def getPosFiles = getFilesAndDirs(posFiles)
- private def getNegFiles = getFilesAndDirs(negFiles)
- private def getRunFiles = getFilesAndDirs(runFiles)
- private def getJvmFiles = getFilesAndDirs(jvmFiles)
- private def getResidentFiles = getFiles(residentFiles)
- private def getBuildManagerFiles = getFilesAndDirs(buildManagerFiles)
- private def getScalacheckFiles = getFilesAndDirs(scalacheckFiles)
- private def getScriptFiles = getFiles(scriptFiles)
- private def getShootoutFiles = getFiles(shootoutFiles)
- private def getScalapFiles = getFiles(scalapFiles)
- private def getSpecializedFiles = getFiles(specializedFiles)
- private def getInstrumentedFiles = getFilesAndDirs(instrumentedFiles)
- private def getPresentationFiles = getDirs(presentationFiles)
- private def getAntFiles = getFiles(antFiles)
-
override def execute() {
- val opts = getProject().getProperties() get "env.PARTEST_OPTS"
- if (opts != null && opts.toString != "")
- opts.toString.split(" ") foreach { propDef =>
- log("setting system property " + propDef)
- val kv = propDef split "="
- val key = kv(0) substring 2
- val value = kv(1)
- setProp(key, value)
- }
-
- if (isPartestDebug || debug) {
- setProp("partest.debug", "true")
- nest.NestUI._verbose = true
+ if (debug || sys.props.contains("partest.debug")) {
+ nest.NestUI.setDebug()
}
srcDir foreach (x => setProp("partest.srcdir", x))
val classpath = this.compilationPath getOrElse sys.error("Mandatory attribute 'compilationPath' is not set.")
-
- val scalaLibrary = {
- (classpath.list map { fs => new File(fs) }) find { f =>
- f.getName match {
- case "scala-library.jar" => true
- case "library" if (f.getParentFile.getName == "classes") => true
- case _ => false
- }
- }
- } getOrElse sys.error("Provided classpath does not contain a Scala library.")
-
- val scalaReflect = {
- (classpath.list map { fs => new File(fs) }) find { f =>
- f.getName match {
- case "scala-reflect.jar" => true
- case "reflect" if (f.getParentFile.getName == "classes") => true
- case _ => false
- }
- }
- } getOrElse sys.error("Provided classpath does not contain a Scala reflection library.")
-
- val scalaCompiler = {
- (classpath.list map { fs => new File(fs) }) find { f =>
- f.getName match {
- case "scala-compiler.jar" => true
- case "compiler" if (f.getParentFile.getName == "classes") => true
- case _ => false
- }
- }
- } getOrElse sys.error("Provided classpath does not contain a Scala compiler.")
-
- val scalaPartest = {
- (classpath.list map { fs => new File(fs) }) find { f =>
- f.getName match {
- case "scala-partest.jar" => true
- case "partest" if (f.getParentFile.getName == "classes") => true
- case _ => false
- }
- }
- } getOrElse sys.error("Provided classpath does not contain a Scala partest.")
-
- val scalaActors = {
- (classpath.list map { fs => new File(fs) }) find { f =>
- f.getName match {
- case "scala-actors.jar" => true
- case "actors" if (f.getParentFile.getName == "classes") => true
- case _ => false
- }
- }
- } getOrElse sys.error("Provided classpath does not contain a Scala actors.")
+ val cpfiles = classpath.list map { fs => new File(fs) } toList
+ def findCp(name: String) = cpfiles find (f =>
+ (f.getName == s"scala-$name.jar")
+ || (f.absolutePathSegments endsWith Seq("classes", name))
+ ) getOrElse sys.error(s"Provided classpath does not contain a Scala $name element.")
+
+ val scalaLibrary = findCp("library")
+ val scalaReflect = findCp("reflect")
+ val scalaCompiler = findCp("compiler")
+ val scalaPartest = findCp("partest")
+ val scalaActors = findCp("actors")
def scalacArgsFlat: Option[Seq[String]] = scalacArgs map (_ flatMap { a =>
val parts = a.getParts
- if(parts eq null) Seq[String]() else parts.toSeq
+ if (parts eq null) Nil else parts.toSeq
})
val antRunner = new scala.tools.partest.nest.AntRunner
val antFileManager = antRunner.fileManager
- // this is a workaround for https://issues.scala-lang.org/browse/SI-5433
- // when that bug is fixed, this paragraph of code can be safely removed
- // we hack into the classloader that will become parent classloader for scalac
- // this way we ensure that reflective macro lookup will pick correct Code.lift
- val loader = getClass.getClassLoader.asInstanceOf[org.apache.tools.ant.AntClassLoader]
- val path = new org.apache.tools.ant.types.Path(getProject())
- val newClassPath = ClassPath.join(nest.PathSettings.srcCodeLib.toString, loader.getClasspath)
- path.setPath(newClassPath)
- loader.setClassPath(path)
-
- antFileManager.showDiff = showDiff
- antFileManager.showLog = showLog
- antFileManager.failed = runFailed
+ // antFileManager.failed = runFailed
antFileManager.CLASSPATH = ClassPath.join(classpath.list: _*)
antFileManager.LATEST_LIB = scalaLibrary.getAbsolutePath
antFileManager.LATEST_REFLECT = scalaReflect.getAbsolutePath
@@ -356,54 +147,35 @@ class PartestTask extends Task with CompilationPathProperty {
javacmd foreach (x => antFileManager.JAVACMD = x.getAbsolutePath)
javaccmd foreach (x => antFileManager.JAVAC_CMD = x.getAbsolutePath)
scalacArgsFlat foreach (antFileManager.SCALAC_OPTS ++= _)
- timeout foreach (antFileManager.timeout = _)
-
- type TFSet = (Array[File], String, String)
- val testFileSets = List(
- (getPosFiles, "pos", "Compiling files that are expected to build"),
- (getNegFiles, "neg", "Compiling files that are expected to fail"),
- (getRunFiles, "run", "Compiling and running files"),
- (getJvmFiles, "jvm", "Compiling and running files"),
- (getResidentFiles, "res", "Running resident compiler scenarii"),
- (getBuildManagerFiles, "buildmanager", "Running Build Manager scenarii"),
- (getScalacheckFiles, "scalacheck", "Running scalacheck tests"),
- (getScriptFiles, "script", "Running script files"),
- (getShootoutFiles, "shootout", "Running shootout tests"),
- (getScalapFiles, "scalap", "Running scalap tests"),
- (getSpecializedFiles, "specialized", "Running specialized files"),
- (getInstrumentedFiles, "instrumented", "Running instrumented files"),
- (getPresentationFiles, "presentation", "Running presentation compiler test files"),
- (getAntFiles, "ant", "Running ant task tests")
- )
-
- def runSet(set: TFSet): (Int, Int, Iterable[String]) = {
- val (files, name, msg) = set
+
+ def runSet(kind: String, files: Array[File]): (Int, Int, List[String]) = {
if (files.isEmpty) (0, 0, List())
else {
- log(msg)
- val results: Iterable[(String, TestState)] = antRunner.reflectiveRunTestsForFiles(files, name)
- val (succs, fails) = resultsToStatistics(results)
+ log(s"Running ${files.length} tests in '$kind' at $now")
+ // log(s"Tests: ${files.toList}")
+ val results = antRunner.reflectiveRunTestsForFiles(files, kind)
+ val (passed, failed) = results partition (_.isOk)
+ val numPassed = passed.size
+ val numFailed = failed.size
+ def failedMessages = failed map (_.longStatus)
- val failed: Iterable[String] = results collect {
- case (path, TestState.Fail) => path + " [FAILED]"
-      case (path, TestState.Timeout)  => path + " [TIMEOUT]"
- }
+ log(s"Completed '$kind' at $now")
// create JUnit Report xml files if directory was specified
jUnitReportDir foreach { d =>
d.mkdir
- val report = testReport(name, results, succs, fails)
- scala.xml.XML.save(d.getAbsolutePath+"/"+name+".xml", report)
+ val report = testReport(kind, results, numPassed, numFailed)
+ scala.xml.XML.save(d.getAbsolutePath+"/"+kind+".xml", report)
}
- (succs, fails, failed)
+ (numPassed, numFailed, failedMessages)
}
}
- val _results = testFileSets map runSet
- val allSuccesses = _results map (_._1) sum
- val allFailures = _results map (_._2) sum
+ val _results = kinds map (k => runSet(k, TestKinds testsFor k map (_.jfile) toArray))
+ val allSuccesses = _results map (_._1) sum
+ val allFailures = _results map (_._2) sum
val allFailedPaths = _results flatMap (_._3)
def f = if (errorOnFailed && allFailures > 0) (sys error _) else log(_: String)
@@ -418,20 +190,17 @@ class PartestTask extends Task with CompilationPathProperty {
f(msg)
}
- private def oneResult(res: (String, TestState)) =
- <testcase name={res._1}>{
- res._2 match {
- case TestState.Ok => scala.xml.NodeSeq.Empty
- case TestState.Fail => <failure message="Test failed"/>
- case TestState.Timeout => <failure message="Test timed out"/>
- }
+ private def oneResult(res: TestState) =
+ <testcase name={res.testIdent}>{
+ if (res.isOk) scala.xml.NodeSeq.Empty
+ else <failure message="Test failed"/>
}</testcase>
- private def testReport(kind: String, results: Iterable[(String, TestState)], succs: Int, fails: Int) =
+ private def testReport(kind: String, results: Iterable[TestState], succs: Int, fails: Int) =
<testsuite name={kind} tests={(succs + fails).toString} failures={fails.toString}>
<properties/>
{
- results.map(oneResult(_))
+ results map oneResult
}
</testsuite>
}
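
For reference only, not part of the patch: the shape of the per-kind JUnit report the reworked task writes when jUnitReportDir is set, shown for a hypothetical run with one passing and one failing test. The test names are invented; the structure mirrors the testReport and oneResult builders above.

    import scala.xml._
    object ReportSketch extends App {
      val report =
        <testsuite name="pos" tests="2" failures="1">
          <properties/>
          <testcase name="pos/t1000.scala"/>
          <testcase name="pos/t2000.scala"><failure message="Test failed"/></testcase>
        </testsuite>
      XML.save("pos.xml", report)   // same call the task uses, into jUnitReportDir
    }
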
diff --git a/src/partest/scala/tools/partest/SecurityTest.scala b/src/partest/scala/tools/partest/SecurityTest.scala
index 2d6f61d0b1..1f1c8a95ea 100644
--- a/src/partest/scala/tools/partest/SecurityTest.scala
+++ b/src/partest/scala/tools/partest/SecurityTest.scala
@@ -10,23 +10,10 @@ import java.util._
abstract class SecurityTest extends App {
def throwIt(x: Any) = throw new AccessControlException("" + x)
-
- def readPerm(p: PropertyPermission) = p.getActions contains "read"
- def writePerm(p: PropertyPermission) = p.getActions contains "write"
def propertyCheck(p: PropertyPermission): Unit = throwIt(p)
def check(perm: Permission): Unit = perm match {
case p: PropertyPermission => propertyCheck(p)
case _ => ()
}
-
- lazy val sm = new SecurityManager {
- // these two are the choke points for all permissions checks
- override def checkPermission(perm: Permission): Unit = check(perm)
- override def checkPermission(perm: Permission, context: Object): Unit = check(perm)
- }
- def securityOn(): Boolean = {
- try { System.setSecurityManager(sm) ; true }
- catch { case _: SecurityException => false }
- }
}
diff --git a/src/partest/scala/tools/partest/TestKinds.scala b/src/partest/scala/tools/partest/TestKinds.scala
new file mode 100644
index 0000000000..ec682690ca
--- /dev/null
+++ b/src/partest/scala/tools/partest/TestKinds.scala
@@ -0,0 +1,67 @@
+package scala.tools
+package partest
+
+import nest.PathSettings.srcDir
+
+object TestKinds {
+ val standardKinds = "pos neg run jvm res buildmanager scalacheck scalap specialized instrumented presentation ant" split "\\s+" toList
+ val standardArgs = standardKinds map ("--" + _)
+
+ def denotesTestFile(p: Path) = p.isFile && p.hasExtension("scala", "res", "xml")
+ def denotesTestDir(p: Path) = kindOf(p) match {
+ case "res" => false
+ case _ => p.isDirectory && p.extension == ""
+ }
+ def denotesTestPath(p: Path) = denotesTestDir(p) || denotesTestFile(p)
+
+ // TODO
+ def isTestForPartest(p: Path) = (
+ (p.name == "intentional-failure.scala")
+ || (p.path contains "test-for-partest")
+ )
+
+ def kindOf(p: Path) = {
+ p.toAbsolute.segments takeRight 2 head
+
+ // (srcDir relativize p.toCanonical).segments match {
+ // case (".." :: "scaladoc" :: xs) => xs.head
+ // case xs => xs.head
+ // }
+ }
+ def logOf(p: Path) = {
+ p.parent / s"${p.stripExtension}-${kindOf(p)}.log"
+ // p.parent / s"${p.stripExtension}.log"
+ }
+
+ // true if a test path matches the --grep expression.
+ private def pathMatchesExpr(path: Path, expr: String) = {
+ // Matches the expression if any source file contains the expr,
+ // or if the checkfile contains it, or if the filename contains
+ // it (the last is case-insensitive.)
+ def matches(p: Path) = (
+ (p.path.toLowerCase contains expr.toLowerCase)
+ || (p.fileContents contains expr)
+ )
+ def candidates = {
+ (path changeExtension "check") +: {
+ if (path.isFile) List(path)
+ else path.toDirectory.deepList() filter (_.isJavaOrScala) toList
+ }
+ }
+
+ (candidates exists matches)
+ }
+
+ def groupedTests(paths: List[Path]): List[(String, List[Path])] =
+ (paths.distinct groupBy kindOf).toList sortBy (standardKinds indexOf _._1)
+
+ /** Includes tests for testing partest. */
+ private def allTestsForKind(kind: String): List[Path] =
+ (srcDir / kind toDirectory).list.toList filter denotesTestPath
+
+ def testsForPartest: List[Path] = standardKinds flatMap allTestsForKind filter isTestForPartest
+ def testsFor(kind: String): List[Path] = allTestsForKind(kind) filterNot isTestForPartest
+ def grepFor(expr: String): List[Path] = standardTests filter (t => pathMatchesExpr(t, expr))
+ def standardTests: List[Path] = standardKinds flatMap testsFor
+ def failedTests: List[Path] = standardTests filter (p => logOf(p).isFile)
+}
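
An illustration of what the new kindOf and logOf compute for a typical test file, re-derived with java.io so it runs without the partest path enrichments; the path below is invented and this is not part of the patch.

    import java.io.File
    object KindOfSketch extends App {
      val p    = new File("test/files/pos/t1234.scala").getAbsoluteFile
      val kind = p.getParentFile.getName                        // segments takeRight 2 head  => "pos"
      val stem = p.getName.stripSuffix(".scala")
      val log  = new File(p.getParentFile, s"$stem-$kind.log")  // => .../pos/t1234-pos.log
      println((kind, log))
    }
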
diff --git a/src/partest/scala/tools/partest/TestState.scala b/src/partest/scala/tools/partest/TestState.scala
new file mode 100644
index 0000000000..dbe8a222a5
--- /dev/null
+++ b/src/partest/scala/tools/partest/TestState.scala
@@ -0,0 +1,65 @@
+package scala.tools.partest
+
+import scala.tools.nsc.FatalError
+import scala.tools.nsc.util.stackTraceString
+
+sealed abstract class TestState {
+ def testFile: File
+ def what: String
+ def reason: String
+ def transcript: List[String]
+
+ def isOk = false
+ def isSkipped = false
+ def testIdent = testFile.testIdent
+ def transcriptString = transcript mkString EOL
+
+ def identAndReason = testIdent + reasonString
+ def status = s"$what - $identAndReason"
+ def longStatus = status + transcriptString
+ def reasonString = if (reason == "") "" else s" [$reason]"
+
+ def shortStatus = if (isOk) "ok" else "!!"
+
+ override def toString = status
+}
+
+object TestState {
+ case class Uninitialized(testFile: File) extends TestState {
+ def what = "uninitialized"
+ def reason = what
+ def transcript = Nil
+ override def shortStatus = "??"
+ }
+ case class Pass(testFile: File) extends TestState {
+ def what = "pass"
+ override def isOk = true
+ def transcript: List[String] = Nil
+ def reason = ""
+ }
+ case class Updated(testFile: File) extends TestState {
+ def what = "updated"
+ override def isOk = true
+ def transcript: List[String] = Nil
+ def reason = "updated check file"
+ override def shortStatus = "++"
+ }
+ case class Skip(testFile: File, reason: String) extends TestState {
+ def what = "skip"
+ override def isOk = true
+ override def isSkipped = true
+ def transcript: List[String] = Nil
+ override def shortStatus = "--"
+ }
+ case class Fail(testFile: File, reason: String, transcript: List[String]) extends TestState {
+ def what = "fail"
+ }
+ case class Crash(testFile: File, caught: Throwable, transcript: List[String]) extends TestState {
+ def what = "crash"
+ def reason = s"caught $caught_s - ${caught.getMessage}"
+
+ private def caught_s = (caught.getClass.getName split '.').last
+ private def stack_s = stackTraceString(caught)
+ override def transcriptString = nljoin(super.transcriptString, caught_s)
+ }
+}
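
A usage sketch, not in the patch, assuming scala-partest on the classpath: a summary in the console-runner style built only from members the sealed hierarchy above defines (isOk, shortStatus, identAndReason).

    import scala.tools.partest.TestState
    object SummarySketch {
      def summarize(states: List[TestState]): String = {
        val (ok, ko) = states partition (_.isOk)
        val counts   = s"${ok.size} passed, ${ko.size} failed"
        if (ko.isEmpty) counts
        else counts + ko.map(st => "\n  " + st.shortStatus + " " + st.identAndReason).mkString
      }
    }
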
diff --git a/src/partest/scala/tools/partest/TestUtil.scala b/src/partest/scala/tools/partest/TestUtil.scala
index 9bfd444180..5c177ac962 100644
--- a/src/partest/scala/tools/partest/TestUtil.scala
+++ b/src/partest/scala/tools/partest/TestUtil.scala
@@ -24,14 +24,6 @@ trait TestUtil {
}
def nanos(body: => Unit): Long = alsoNanos(body)._1
- def verifySpeed(body1: => Unit, body2: => Unit, acceptableMultiple: Double) = {
- val t1 = nanos(body1).toDouble
- val t2 = nanos(body2).toDouble
- val mult = if (t1 > t2) t1 / t2 else t2 / t1
-
- assert(mult <= acceptableMultiple, "Performance difference too great: multiple = " + mult)
- }
-
def intercept[T <: Exception : ClassTag](code: => Unit): Unit =
try {
code
@@ -41,6 +33,6 @@ trait TestUtil {
}
}
+// Used in tests.
object TestUtil extends TestUtil {
-
}
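
Not in the patch: how the surviving intercept helper is typically used; it is expected to succeed only when the block throws the named exception type. The object name is invented.

    import scala.tools.partest.TestUtil.intercept
    object InterceptSketch extends App {
      // require throws IllegalArgumentException, so this check passes
      intercept[IllegalArgumentException] { require(false, "boom") }
    }
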
diff --git a/src/partest/scala/tools/partest/instrumented/Instrumentation.scala b/src/partest/scala/tools/partest/instrumented/Instrumentation.scala
index 8a284b313b..18dd740208 100644
--- a/src/partest/scala/tools/partest/instrumented/Instrumentation.scala
+++ b/src/partest/scala/tools/partest/instrumented/Instrumentation.scala
@@ -78,6 +78,7 @@ object Instrumentation {
!t.className.startsWith("scala/util/DynamicVariable")
}
+ // Used in tests.
def printStatistics(stats: Statistics = getStatistics, filter: MethodCallTrace => Boolean = standardFilter): Unit = {
val stats = getStatistics
println("Method call statistics:")
diff --git a/src/partest/scala/tools/partest/nest/AntRunner.scala b/src/partest/scala/tools/partest/nest/AntRunner.scala
index 93045b8c1d..1d3b79171b 100644
--- a/src/partest/scala/tools/partest/nest/AntRunner.scala
+++ b/src/partest/scala/tools/partest/nest/AntRunner.scala
@@ -10,24 +10,21 @@
package scala.tools.partest
package nest
-import java.io.File
-import scala.tools.nsc.io.{ Directory }
-
class AntRunner extends DirectRunner {
val fileManager = new FileManager {
- var JAVACMD: String = "java"
- var JAVAC_CMD: String = "javac"
- var CLASSPATH: String = _
- var LATEST_LIB: String = _
- var LATEST_REFLECT: String = _
- var LATEST_COMP: String = _
- var LATEST_PARTEST: String = _
- var LATEST_ACTORS: String = _
- val testRootPath: String = "test"
- val testRootDir: Directory = Directory(testRootPath)
+ var JAVACMD: String = "java"
+ var JAVAC_CMD: String = "javac"
+ var CLASSPATH: String = _
+ var LATEST_LIB: String = _
+ var LATEST_REFLECT: String = _
+ var LATEST_COMP: String = _
+ var LATEST_PARTEST: String = _
+ var LATEST_ACTORS: String = _
+ val testRootPath: String = "test"
+ val testRootDir: Directory = Directory(testRootPath)
}
- def reflectiveRunTestsForFiles(kindFiles: Array[File], kind: String) =
+ def reflectiveRunTestsForFiles(kindFiles: Array[File], kind: String): List[TestState] =
runTestsForFiles(kindFiles.toList, kind)
}
diff --git a/src/partest/scala/tools/partest/nest/CompileManager.scala b/src/partest/scala/tools/partest/nest/CompileManager.scala
deleted file mode 100644
index 3d902d6d00..0000000000
--- a/src/partest/scala/tools/partest/nest/CompileManager.scala
+++ /dev/null
@@ -1,164 +0,0 @@
-/* NEST (New Scala Test)
- * Copyright 2007-2013 LAMP/EPFL
- * @author Philipp Haller
- */
-
-// $Id$
-
-package scala.tools.partest
-package nest
-
-import scala.tools.nsc.{ Global, Settings, CompilerCommand, FatalError, io }
-import scala.tools.nsc.io.{ File => SFile }
-import scala.tools.nsc.interactive.RangePositions
-import scala.tools.nsc.reporters.{ Reporter, ConsoleReporter }
-import scala.tools.nsc.util.{ ClassPath, FakePos }
-import scala.tools.nsc.Properties.{ setProp, propOrEmpty }
-import scala.tools.util.PathResolver
-import io.Path
-import java.io.{ File, BufferedReader, PrintWriter, FileReader, Writer, FileWriter, StringWriter }
-import File.pathSeparator
-
-sealed abstract class CompilationOutcome {
- def merge(other: CompilationOutcome): CompilationOutcome
- def isPositive = this eq CompileSuccess
- def isNegative = this eq CompileFailed
-}
-case object CompileSuccess extends CompilationOutcome {
- def merge(other: CompilationOutcome) = other
-}
-case object CompileFailed extends CompilationOutcome {
- def merge(other: CompilationOutcome) = if (other eq CompileSuccess) this else other
-}
-case object CompilerCrashed extends CompilationOutcome {
- def merge(other: CompilationOutcome) = this
-}
-
-class ExtConsoleReporter(settings: Settings, val writer: PrintWriter) extends ConsoleReporter(settings, Console.in, writer) {
- shortname = true
-}
-
-class TestSettings(cp: String, error: String => Unit) extends Settings(error) {
- def this(cp: String) = this(cp, _ => ())
-
- nowarnings.value = false
- encoding.value = "UTF-8"
- classpath.value = cp
-}
-
-abstract class SimpleCompiler {
- def compile(out: Option[File], files: List[File], kind: String, log: File): CompilationOutcome
-}
-
-class DirectCompiler(val fileManager: FileManager) extends SimpleCompiler {
- def newGlobal(settings: Settings, reporter: Reporter): Global =
- if (settings.Yrangepos.value)
- new Global(settings, reporter) with RangePositions
- else
- new Global(settings, reporter)
-
- def newGlobal(settings: Settings, logWriter: FileWriter): Global =
- newGlobal(settings, new ExtConsoleReporter(settings, new PrintWriter(logWriter)))
-
- def newSettings(): TestSettings = new TestSettings(fileManager.LATEST_LIB)
- def newSettings(outdir: String): TestSettings = {
- val cp = ClassPath.join(fileManager.LATEST_LIB, outdir)
- val s = new TestSettings(cp)
- s.outdir.value = outdir
-
- s
- }
-
- private def updatePluginPath(options: String): String = {
- val dir = fileManager.testRootDir
- def absolutize(path: String) = Path(path) match {
- case x if x.isAbsolute => x.path
- case x => (fileManager.testRootDir / x).toAbsolute.path
- }
-
- val (opt1, opt2) = (options split "\\s").toList partition (_ startsWith "-Xplugin:")
- val plugins = opt1 map (_ stripPrefix "-Xplugin:") flatMap (_ split pathSeparator) map absolutize
- val pluginOption = if (opt1.isEmpty) Nil else List("-Xplugin:" + (plugins mkString pathSeparator))
-
- (opt2 ::: pluginOption) mkString " "
- }
-
- def compile(out: Option[File], files: List[File], kind: String, log: File): CompilationOutcome = {
- val testSettings = out match {
- case Some(f) => newSettings(f.getAbsolutePath)
- case _ => newSettings()
- }
- val logWriter = new FileWriter(log)
-
- // check whether there is a ".flags" file
- val logFile = basename(log.getName)
- val flagsFileName = "%s.flags" format (logFile.substring(0, logFile.lastIndexOf("-")))
- val argString = (io.File(log).parent / flagsFileName) ifFile (x => updatePluginPath(x.slurp())) getOrElse ""
-
- // slurp local flags (e.g., "A_1.flags")
- val fstFile = SFile(files(0))
- def isInGroup(num: Int) = fstFile.stripExtension endsWith ("_" + num)
- val inGroup = (1 to 9) flatMap (group => if (isInGroup(group)) List(group) else List())
- val localFlagsList = if (inGroup.nonEmpty) {
- val localArgString = (fstFile.parent / (fstFile.stripExtension + ".flags")) ifFile (x => updatePluginPath(x.slurp())) getOrElse ""
- localArgString.split(' ').toList.filter(_.length > 0)
- } else List()
-
- val allOpts = fileManager.SCALAC_OPTS.toList ::: argString.split(' ').toList.filter(_.length > 0) ::: localFlagsList
- val args = allOpts.toList
-
- NestUI.verbose("scalac options: "+allOpts)
-
- val command = new CompilerCommand(args, testSettings)
- val global = newGlobal(command.settings, logWriter)
- val testRep: ExtConsoleReporter = global.reporter.asInstanceOf[ExtConsoleReporter]
-
- val testFileFn: (File, FileManager) => TestFile = kind match {
- case "pos" => PosTestFile.apply
- case "neg" => NegTestFile.apply
- case "run" => RunTestFile.apply
- case "jvm" => JvmTestFile.apply
- case "shootout" => ShootoutTestFile.apply
- case "scalap" => ScalapTestFile.apply
- case "scalacheck" => ScalaCheckTestFile.apply
- case "specialized" => SpecializedTestFile.apply
- case "instrumented" => InstrumentedTestFile.apply
- case "presentation" => PresentationTestFile.apply
- case "ant" => AntTestFile.apply
- }
- val test: TestFile = testFileFn(files.head, fileManager)
- if (!test.defineSettings(command.settings, out.isEmpty)) {
- testRep.error(FakePos("partest"), test.flags match {
- case Some(flags) => "bad flags: " + flags
- case _ => "bad settings: " + command.settings
- })
- }
-
- val toCompile = files map (_.getPath)
-
- try {
- NestUI.verbose("compiling "+toCompile)
- NestUI.verbose("with classpath: "+global.classPath.toString)
- NestUI.verbose("and java classpath: "+ propOrEmpty("java.class.path"))
- try new global.Run compile toCompile
- catch {
- case FatalError(msg) =>
- testRep.error(null, "fatal error: " + msg)
- return CompilerCrashed
- }
-
- testRep.printSummary()
- testRep.writer.close()
- }
- finally logWriter.close()
-
- if (testRep.hasErrors) CompileFailed
- else CompileSuccess
- }
-}
-
-class CompileManager(val fileManager: FileManager) {
- private def newCompiler = new DirectCompiler(fileManager)
- def attemptCompile(outdir: Option[File], sources: List[File], kind: String, log: File): CompilationOutcome =
- newCompiler.compile(outdir, sources, kind, log)
-}
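
The deleted DirectCompiler absolutized -Xplugin entries against the test root before compiling. The sketch below redoes that transformation with java.io.File only, as illustration; testRootDir and the options string are invented and this is not part of the patch.

    import java.io.File
    import java.io.File.{ pathSeparator => sep }
    object PluginPathSketch extends App {
      val testRootDir = new File("test")
      def absolutize(p: String) = {
        val f = new File(p)
        (if (f.isAbsolute) f else new File(testRootDir, p)).getAbsolutePath
      }
      def updatePluginPath(options: String): String = {
        val (plug, rest) = options.split("\\s+").toList partition (_ startsWith "-Xplugin:")
        val plugins   = plug map (_ stripPrefix "-Xplugin:") flatMap (_ split sep) map absolutize
        val pluginOpt = if (plugins.isEmpty) Nil else List("-Xplugin:" + (plugins mkString sep))
        (rest ::: pluginOpt) mkString " "
      }
      println(updatePluginPath("-deprecation -Xplugin:plugins/sample.jar"))
    }
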
diff --git a/src/partest/scala/tools/partest/nest/ConsoleFileManager.scala b/src/partest/scala/tools/partest/nest/ConsoleFileManager.scala
index 08e709de90..b436675d3a 100644
--- a/src/partest/scala/tools/partest/nest/ConsoleFileManager.scala
+++ b/src/partest/scala/tools/partest/nest/ConsoleFileManager.scala
@@ -3,23 +3,16 @@
* @author Philipp Haller
*/
-// $Id$
+
package scala.tools.partest
package nest
-import java.io.{ File, FilenameFilter, IOException, StringWriter }
+import java.io.{ FilenameFilter, IOException }
import java.net.URI
import scala.util.Properties.{ propOrElse, scalaCmd, scalacCmd }
-import scala.tools.util.PathResolver
import scala.tools.nsc.{ io, util }
-import util.{ ClassPath }
-import io.{ Path, Directory }
-import File.pathSeparator
-import ClassPath.{ join }
import PathResolver.{ Environment, Defaults }
-import RunnerUtils._
-
class ConsoleFileManager extends FileManager {
var testBuild: Option[String] = PartestDefaults.testBuild
@@ -57,7 +50,7 @@ class ConsoleFileManager extends FileManager {
var JAVAC_CMD = PartestDefaults.javacCmd
- NestUI.verbose("CLASSPATH: "+CLASSPATH)
+ vlog("CLASSPATH: "+CLASSPATH)
if (!srcDir.isDirectory) {
NestUI.failure("Source directory \"" + srcDir.path + "\" not found")
@@ -72,38 +65,33 @@ class ConsoleFileManager extends FileManager {
}
def findLatest() {
- NestUI.verbose("test parent: "+testParent)
+ vlog("test parent: "+testParent)
- def prefixFileWith(parent: File, relPath: String) = (io.File(parent) / relPath).toCanonical
+ def prefixFileWith(parent: File, relPath: String) = (SFile(parent) / relPath).toCanonical
def prefixFile(relPath: String) = (testParent / relPath).toCanonical
if (!testClasses.isEmpty) {
testClassesDir = Path(testClasses.get).toCanonical.toDirectory
- NestUI.verbose("Running with classes in "+testClassesDir)
+ vlog("Running with classes in "+testClassesDir)
- latestFile = testClassesDir.parent / "bin"
latestLibFile = testClassesDir / "library"
latestActorsFile = testClassesDir / "library" / "actors"
latestReflectFile = testClassesDir / "reflect"
latestCompFile = testClassesDir / "compiler"
latestPartestFile = testClassesDir / "partest"
- latestFjbgFile = testParent / "lib" / "fjbg.jar"
}
else if (testBuild.isDefined) {
val dir = Path(testBuild.get)
- NestUI.verbose("Running on "+dir)
- latestFile = dir / "bin"
+ vlog("Running on "+dir)
latestLibFile = dir / "lib/scala-library.jar"
latestActorsFile = dir / "lib/scala-actors.jar"
latestReflectFile = dir / "lib/scala-reflect.jar"
latestCompFile = dir / "lib/scala-compiler.jar"
latestPartestFile = dir / "lib/scala-partest.jar"
- latestFjbgFile = testParent / "lib" / "fjbg.jar"
}
else {
def setupQuick() {
- NestUI.verbose("Running build/quick")
- latestFile = prefixFile("build/quick/bin")
+ vlog("Running build/quick")
latestLibFile = prefixFile("build/quick/classes/library")
latestActorsFile = prefixFile("build/quick/classes/library/actors")
latestReflectFile = prefixFile("build/quick/classes/reflect")
@@ -112,9 +100,8 @@ class ConsoleFileManager extends FileManager {
}
def setupInst() {
- NestUI.verbose("Running dist (installed)")
+ vlog("Running dist (installed)")
val p = testParent.getParentFile
- latestFile = prefixFileWith(p, "bin")
latestLibFile = prefixFileWith(p, "lib/scala-library.jar")
latestActorsFile = prefixFileWith(p, "lib/scala-actors.jar")
latestReflectFile = prefixFileWith(p, "lib/scala-reflect.jar")
@@ -123,8 +110,7 @@ class ConsoleFileManager extends FileManager {
}
def setupDist() {
- NestUI.verbose("Running dists/latest")
- latestFile = prefixFile("dists/latest/bin")
+ vlog("Running dists/latest")
latestLibFile = prefixFile("dists/latest/lib/scala-library.jar")
latestActorsFile = prefixFile("dists/latest/lib/scala-actors.jar")
latestReflectFile = prefixFile("dists/latest/lib/scala-reflect.jar")
@@ -133,8 +119,7 @@ class ConsoleFileManager extends FileManager {
}
def setupPack() {
- NestUI.verbose("Running build/pack")
- latestFile = prefixFile("build/pack/bin")
+ vlog("Running build/pack")
latestLibFile = prefixFile("build/pack/lib/scala-library.jar")
latestActorsFile = prefixFile("build/pack/lib/scala-actors.jar")
latestReflectFile = prefixFile("build/pack/lib/scala-reflect.jar")
@@ -142,11 +127,6 @@ class ConsoleFileManager extends FileManager {
latestPartestFile = prefixFile("build/pack/lib/scala-partest.jar")
}
- val dists = testParent / "dists"
- val build = testParent / "build"
- // in case of an installed dist, testRootDir is one level deeper
- val bin = testParent.parent / "bin"
-
def mostRecentOf(base: String, names: String*) =
names map (x => prefixFile(base + "/" + x).lastModified) reduceLeft (_ max _)
@@ -165,8 +145,6 @@ class ConsoleFileManager extends FileManager {
// run setup based on most recent time
pairs(pairs.keys max)()
-
- latestFjbgFile = prefixFile("lib/fjbg.jar")
}
LATEST_LIB = latestLibFile.getAbsolutePath
@@ -182,20 +160,18 @@ class ConsoleFileManager extends FileManager {
var LATEST_PARTEST: String = ""
var LATEST_ACTORS: String = ""
- var latestFile: File = _
var latestLibFile: File = _
var latestActorsFile: File = _
var latestReflectFile: File = _
var latestCompFile: File = _
var latestPartestFile: File = _
- var latestFjbgFile: File = _
- def latestScalapFile: File = (latestLibFile.parent / "scalap.jar").jfile
+ //def latestScalapFile: File = (latestLibFile.parent / "scalap.jar").jfile
+ //def latestScalapFile: File = new File(latestLibFile.getParentFile, "scalap.jar")
var testClassesDir: Directory = _
// initialize above fields
findLatest()
- var testFiles: List[io.Path] = Nil
-
+ /*
def getFiles(kind: String, cond: Path => Boolean): List[File] = {
def ignoreDir(p: Path) = List("svn", "obj") exists (p hasExtension _)
@@ -204,10 +180,10 @@ class ConsoleFileManager extends FileManager {
if (dir.isDirectory) NestUI.verbose("look in %s for tests" format dir)
else NestUI.failure("Directory '%s' not found" format dir)
- val files =
- if (testFiles.nonEmpty) testFiles filter (_.parent isSame dir)
- else dir.list filterNot ignoreDir filter cond toList
+ val files = dir.list filterNot ignoreDir filter cond toList
( if (failed) files filter (x => logFileExists(x, kind)) else files ) map (_.jfile)
}
+ */
+ var latestFjbgFile: File = _
}
diff --git a/src/partest/scala/tools/partest/nest/ConsoleRunner.scala b/src/partest/scala/tools/partest/nest/ConsoleRunner.scala
index e016fb7c92..8161e53bf9 100644
--- a/src/partest/scala/tools/partest/nest/ConsoleRunner.scala
+++ b/src/partest/scala/tools/partest/nest/ConsoleRunner.scala
@@ -3,90 +3,121 @@
* @author Philipp Haller
*/
-// $Id$
-
-package scala.tools.partest
+package scala.tools
+package partest
package nest
-import java.io.{File, PrintStream, FileOutputStream, BufferedReader,
- InputStreamReader, StringWriter, PrintWriter}
import utils.Properties._
-import RunnerUtils._
import scala.tools.nsc.Properties.{ versionMsg, setProp }
import scala.tools.nsc.util.CommandLineParser
-import scala.tools.nsc.io
-import io.{ Path }
import scala.collection.{ mutable, immutable }
+import PathSettings.srcDir
+import TestKinds._
+import scala.reflect.internal.util.Collections.distinctBy
class ConsoleRunner extends DirectRunner {
- import PathSettings.{ srcDir, testRoot }
-
- case class TestSet(kind: String, filter: Path => Boolean, msg: String)
- private def stdFilter(p: Path) = p.isDirectory || (p hasExtension "scala")
- private def antFilter(p: Path) = p.isFile && (p endsWith "build.xml")
-
- val testSets = {
- val pathFilter: Path => Boolean = x => x.isDirectory || (x hasExtension "scala")
-
- List(
- TestSet("pos", stdFilter, "Testing compiler (on files whose compilation should succeed)"),
- TestSet("neg", stdFilter, "Testing compiler (on files whose compilation should fail)"),
- TestSet("run", stdFilter, "Testing interpreter and backend"),
- TestSet("jvm", stdFilter, "Testing JVM backend"),
- TestSet("res", x => x.isFile && (x hasExtension "res"), "Testing resident compiler"),
- TestSet("buildmanager", _.isDirectory, "Testing Build Manager"),
- TestSet("shootout", stdFilter, "Testing shootout tests"),
- TestSet("script", stdFilter, "Testing script tests"),
- TestSet("scalacheck", stdFilter, "Testing ScalaCheck tests"),
- TestSet("scalap", _.isDirectory, "Run scalap decompiler tests"),
- TestSet("specialized", stdFilter, "Testing specialized tests"),
- TestSet("instrumented", stdFilter, "Testing instrumented tests"),
- TestSet("presentation", _.isDirectory, "Testing presentation compiler tests."),
- TestSet("ant", antFilter, "Run Ant task tests.")
- )
- }
+ import NestUI._
+ import NestUI.color._
+
+ // So we can ctrl-C a test run and still hear all
+ // the buffered failure info.
+ scala.sys addShutdownHook issueSummaryReport()
var fileManager: ConsoleFileManager = _
- private var testFiles: List[File] = List()
- private val errors = PartestDefaults.errorCount
- private val testSetKinds = testSets map (_.kind)
- private val testSetArgs = testSets map ("--" + _.kind)
- private val testSetArgMap = testSetArgs zip testSets toMap
+ private var totalTests = 0
+ private val passedTests = mutable.ListBuffer[TestState]()
+ private val failedTests = mutable.ListBuffer[TestState]()
+
+ def comment(s: String) = echo(magenta("# " + s))
+ def levyJudgment() = {
+ if (totalTests == 0) echoMixed("No tests to run.")
+ else if (elapsedMillis == 0) echoMixed("Test Run ABORTED")
+ else if (isSuccess) echoPassed("Test Run PASSED")
+ else echoFailed("Test Run FAILED")
+ }
- def denotesTestSet(arg: String) = testSetArgs contains arg
+ def passFailString(passed: Int, failed: Int, skipped: Int): String = {
+ val total = passed + failed + skipped
+ val isSuccess = failed == 0
+ def p0 = s"$passed/$total"
+ def p = ( if (isSuccess) bold(green(p0)) else p0 ) + " passed"
+ def f = if (failed == 0) "" else bold(red("" + failed)) + " failed"
+ def s = if (skipped == 0) "" else bold(yellow("" + skipped)) + " skipped"
- private def printVersion() { NestUI outline (versionMsg + "\n") }
+ oempty(p, f, s) mkString ", "
+ }
+
+ private var summarizing = false
+ private var elapsedMillis = 0L
+ private var expectedFailures = 0
+ private def isSuccess = failedTests.size == expectedFailures
+
+ def issueSummaryReport() {
+ // Don't run twice
+ if (!summarizing) {
+ summarizing = true
+
+ val passed0 = passedTests.toList
+ val failed0 = failedTests.toList
+ val passed = passed0.size
+ val failed = failed0.size
+ val skipped = totalTests - (passed + failed)
+ val passFail = passFailString(passed, failed, skipped)
+ val elapsed = if (elapsedMillis > 0) " (elapsed time: " + elapsedString(elapsedMillis) + ")" else ""
+ val message = passFail + elapsed
+
+ if (failed0.nonEmpty) {
+ if (isPartestVerbose) {
+ echo(bold(cyan("##### Transcripts from failed tests #####\n")))
+ failed0 foreach { state =>
+ comment("partest " + state.testFile)
+ echo(state.transcriptString + "\n")
+ }
+ }
+
+ def files_s = failed0.map(_.testFile).mkString(""" \""" + "\n ")
+ echo("# Failed test paths (this command will update checkfiles)")
+ echo("test/partest --update-check \\\n " + files_s + "\n")
+ }
+
+ echo(message)
+ levyJudgment()
+ }
+ }
private val unaryArgs = List(
- "--pack", "--all", "--verbose", "--show-diff", "--show-log",
+ "--pack", "--all",
+ "--terse", "--verbose", "--show-diff", "--show-log", "--self-test",
"--failed", "--update-check", "--version", "--ansi", "--debug", "--help"
- ) ::: testSetArgs
+ ) ::: standardArgs
private val binaryArgs = List(
- "--grep", "--srcpath", "--buildpath", "--classpath"
+ "--grep", "--srcpath", "--buildpath", "--classpath", "--timeout"
)
- // true if a test path matches the --grep expression.
- private def pathMatchesExpr(path: Path, expr: String) = {
- def pred(p: Path) = file2String(p.toFile) contains expr
- def srcs = path.toDirectory.deepList() filter (_.hasExtension("scala", "java"))
-
- (path.isFile && pred(path)) ||
- (path.isDirectory && srcs.exists(pred)) ||
- (pred(path changeExtension "check"))
- }
-
def main(argstr: String) {
val parsed = CommandLineParser(argstr) withUnaryArgs unaryArgs withBinaryArgs binaryArgs
- val args = onlyValidTestPaths(parsed.residualArgs)
- /** Early return on no args, version, or invalid args */
- if (argstr == "") return NestUI.usage()
- if (parsed isSet "--version") return printVersion
- if (parsed isSet "--help") return NestUI.usage()
+ if (parsed isSet "--debug") NestUI.setDebug()
+ if (parsed isSet "--verbose") NestUI.setVerbose()
+ if (parsed isSet "--terse") NestUI.setTerse()
+ if (parsed isSet "--show-diff") NestUI.setDiffOnFail()
+
+ // Early return on no args, version, or invalid args
+ if (parsed isSet "--version") return echo(versionMsg)
+ if ((argstr == "") || (parsed isSet "--help")) return NestUI.usage()
+
+ val (individualTests, invalid) = parsed.residualArgs map (p => Path(p)) partition denotesTestPath
+ if (invalid.nonEmpty) {
+ if (isPartestVerbose)
+ invalid foreach (p => echoWarning(s"Discarding invalid test path " + p))
+ else if (!isPartestTerse)
+ echoWarning(s"Discarding ${invalid.size} invalid test paths")
+ }
parsed get "--srcpath" foreach (x => setProp("partest.srcdir", x))
+ parsed get "--timeout" foreach (x => setProp("partest.timeout", x))
fileManager =
if (parsed isSet "--buildpath") new ConsoleFileManager(parsed("--buildpath"))
@@ -94,146 +125,102 @@ class ConsoleRunner extends DirectRunner {
else if (parsed isSet "--pack") new ConsoleFileManager("build/pack")
else new ConsoleFileManager // auto detection, see ConsoleFileManager.findLatest
- def argNarrowsTests(x: String) = denotesTestSet(x) || denotesTestPath(x)
-
- NestUI._verbose = parsed isSet "--verbose"
- fileManager.showDiff = true
- // parsed isSet "--show-diff"
fileManager.updateCheck = parsed isSet "--update-check"
- fileManager.showLog = parsed isSet "--show-log"
fileManager.failed = parsed isSet "--failed"
- if (parsed isSet "--ansi") NestUI initialize NestUI.MANY
- if (parsed isSet "--timeout") fileManager.timeout = parsed("--timeout")
- if (parsed isSet "--debug") setProp("partest.debug", "true")
+ val partestTests = (
+ if (parsed isSet "--self-test") TestKinds.testsForPartest
+ else Nil
+ )
- def addTestFile(file: File) = {
- if (!file.exists)
- NestUI.failure("Test file '%s' not found, skipping.\n" format file)
- else {
- NestUI.verbose("adding test file " + file)
- testFiles +:= file
- }
- }
+ val grepExpr = parsed get "--grep" getOrElse ""
// If --grep is given we suck in every file it matches.
-
- val grepOption = parsed get "--grep"
- val grepPaths = grepOption.toList flatMap { expr =>
- val subjectDirs = testSetKinds map (srcDir / _ toDirectory)
- val testPaths = subjectDirs flatMap (_.files filter stdFilter)
- val paths = testPaths filter (p => pathMatchesExpr(p, expr))
-
+ var grepMessage = ""
+ val greppedTests = if (grepExpr == "") Nil else {
+ val paths = grepFor(grepExpr)
if (paths.isEmpty)
- NestUI.failure("--grep string '%s' matched no tests." format expr)
+ echoWarning(s"grep string '$grepExpr' matched no tests.\n")
- paths map (_.jfile)
+ paths.sortBy(_.toString)
}
- val grepMessage = grepOption map (x => "Argument '%s' matched %d test(s)".format(x, grepPaths.size)) getOrElse ""
- grepPaths foreach addTestFile
- args foreach (x => addTestFile(new File(x)))
-
- // If no file arguments were given, we assume --all
- val enabledTestSets: List[TestSet] = {
- val enabledArgs = testSetArgs filter parsed.isSet
-
- if (args.isEmpty && !(parsed isSet "--grep") && (enabledArgs.isEmpty || (parsed isSet "--all"))) testSets
- else enabledArgs map testSetArgMap
- }
+ val isRerun = parsed isSet "--failed"
+ val rerunTests = if (isRerun) TestKinds.failedTests else Nil
+ def miscTests = partestTests ++ individualTests ++ greppedTests ++ rerunTests
+ val givenKinds = standardArgs filter parsed.isSet
+ val kinds = (
+ if (parsed isSet "--all") standardKinds
+ else if (givenKinds.nonEmpty) givenKinds map (_ stripPrefix "--")
+ else if (invalid.isEmpty && miscTests.isEmpty && !isRerun) standardKinds // If no kinds, --grep, or individual tests were given, assume --all
+ else Nil
+ )
+ val kindsTests = kinds flatMap testsFor
val dir =
if (fileManager.testClasses.isDefined) fileManager.testClassesDir
else fileManager.testBuildFile getOrElse {
fileManager.latestCompFile.getParentFile.getParentFile.getAbsoluteFile
}
- val vmBin = javaHome + File.separator + "bin"
- val vmName = "%s (build %s, %s)".format(javaVmName, javaVmVersion, javaVmInfo)
- val vmOpts = fileManager.JAVA_OPTS
-
- NestUI.verbose("enabled test sets: " + (enabledTestSets map (_.kind) mkString " "))
-
- List(
- "Scala compiler classes in: " + dir,
- "Scala version is: " + versionMsg,
- "Scalac options are: " + fileManager.SCALAC_OPTS,
- "Java binaries in: " + vmBin,
- "Java runtime is: " + vmName,
- "Java options are: " + vmOpts,
- "Source directory is: " + srcDir,
- ""
- ) foreach (x => NestUI verbose (x + "\n"))
-
- NestUI.verbose("available processors: " + Runtime.getRuntime().availableProcessors())
-
- // Dragged down here so it isn't buried under the banner.
- if (grepMessage != "")
- NestUI.normal(grepMessage + "\n")
-
- val ((successes, failures), elapsedMillis) = timed(testCheckAll(enabledTestSets))
- val total = successes + failures
-
- val elapsedSecs = elapsedMillis/1000
- val elapsedMins = elapsedSecs/60
- val elapsedHrs = elapsedMins/60
- val dispMins = elapsedMins - elapsedHrs * 60
- val dispSecs = elapsedSecs - elapsedMins * 60
-
- val dispElapsed = {
- def form(num: Long) = if (num < 10) "0"+num else ""+num
- form(elapsedHrs)+":"+form(dispMins)+":"+form(dispSecs)
+ def testContributors = {
+ List(
+ if (partestTests.isEmpty) "" else "partest self-tests",
+ if (rerunTests.isEmpty) "" else "previously failed tests",
+ if (kindsTests.isEmpty) "" else s"${kinds.size} named test categories",
+ if (greppedTests.isEmpty) "" else s"${greppedTests.size} tests matching '$grepExpr'",
+ if (individualTests.isEmpty) "" else "specified tests"
+ ) filterNot (_ == "") mkString ", "
}
- if (failures == 0)
- NestUI.success("All of "+total+" tests were successful (elapsed time: "+dispElapsed+")\n")
- else
- NestUI.failure(failures+" of "+total+" tests failed (elapsed time: "+dispElapsed+")\n")
+ def banner = {
+ val vmBin = javaHome + fileSeparator + "bin"
+ val vmName = "%s (build %s, %s)".format(javaVmName, javaVmVersion, javaVmInfo)
+ val vmOpts = fileManager.JAVA_OPTS
+
+ s"""|Scala compiler classes in: $dir
+ |Scala version is: $versionMsg
+ |Scalac options are: ${fileManager.SCALAC_OPTS mkString " "}
+ |Java binaries in: $vmBin
+ |Java runtime is: $vmName
+ |Java options are: $vmOpts
+ |Source directory is: $srcDir
+ |Available processors: ${Runtime.getRuntime().availableProcessors()}
+ |Java Classpath: ${sys.props("java.class.path")}
+ """.stripMargin
+ }
- System exit ( if (failures == errors) 0 else 1 )
- }
+ chatty(banner)
- def runTests(testSet: TestSet): (Int, Int) = {
- val TestSet(kind, filter, msg) = testSet
+ val allTests: List[Path] = distinctBy(miscTests ++ kindsTests)(_.toCanonical) sortBy (_.toString)
+ val grouped = (allTests groupBy kindOf).toList sortBy (x => standardKinds indexOf x._1)
- fileManager.getFiles(kind, filter) match {
- case Nil => NestUI.verbose("test dir empty\n") ; (0, 0)
- case files =>
- NestUI.verbose("test files: "+files)
- NestUI.outline("\n"+msg+"\n")
- resultsToStatistics(runTestsForFiles(files, kind))
+ totalTests = allTests.size
+ expectedFailures = propOrNone("partest.errors") match {
+ case Some(num) => num.toInt
+ case _ => 0
}
- }
-
- /**
- * @return (success count, failure count)
- */
- def testCheckAll(enabledSets: List[TestSet]): (Int, Int) = {
- def kindOf(f: File) = {
- (srcDir relativize Path(f).toCanonical).segments match {
- case (".." :: "scaladoc" :: xs) => xs.head
- case xs => xs.head
+ val expectedFailureMessage = if (expectedFailures == 0) "" else s" (expecting $expectedFailures to fail)"
+ echo(s"Selected $totalTests tests drawn from $testContributors$expectedFailureMessage\n")
+
+ val (_, millis) = timed {
+ for ((kind, paths) <- grouped) {
+ val num = paths.size
+ val ss = if (num == 1) "" else "s"
+ comment(s"starting $num test$ss in $kind")
+ val results = runTestsForFiles(paths map (_.jfile), kind)
+ val (passed, failed) = results partition (_.isOk)
+
+ passedTests ++= passed
+ failedTests ++= failed
+ if (failed.nonEmpty) {
+ comment(passFailString(passed.size, failed.size, 0) + " in " + kind)
+ }
+ echo("")
}
}
-
- val (valid, invalid) = testFiles partition (x => testSetKinds contains kindOf(x))
- invalid foreach (x => NestUI.failure(
- "Invalid test file '%s', skipping.\n".format(x) +
- "(Test kind '%s' not in known set '%s')".format(kindOf(x), testSetKinds))
- )
-
- val grouped = (valid groupBy kindOf).toList sortBy (x => testSetKinds indexOf x._1)
- val runTestsFileLists =
- for ((kind, files) <- grouped) yield {
- NestUI.outline("\nTesting individual files\n")
- resultsToStatistics(runTestsForFiles(files, kind))
- }
-
- if (enabledSets.nonEmpty)
- NestUI.verbose("Run sets: "+enabledSets)
-
- val results = runTestsFileLists ::: (enabledSets map runTests)
-
- (results map (_._1) sum, results map (_._2) sum)
+ this.elapsedMillis = millis
+ issueSummaryReport()
+ System exit ( if (isSuccess) 0 else 1 )
}
}
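
The rewritten main above funnels four sources of tests (named kinds, --grep matches, explicit paths, previous failures) into one list, deduplicated by canonical path and grouped by kind. The following standalone sketch, with made-up paths and simplified stand-ins for partest's distinctBy and kindOf, illustrates that shape only; it is not part of the patch:

    // Illustrative sketch; paths and helper names are placeholders.
    object SelectionSketch extends App {
      def distinctBy[A, B](xs: List[A])(f: A => B): List[A] = {
        val seen = scala.collection.mutable.Set[B]()
        xs filter (x => seen add f(x))                  // keep the first occurrence per key
      }
      val kindsTests      = List("test/files/pos/t1.scala", "test/files/run/t2.scala")
      val greppedTests    = List("test/files/run/t2.scala")   // deliberate duplicate
      val individualTests = List("test/files/neg/t3.scala")
      val all     = distinctBy(individualTests ++ greppedTests ++ kindsTests)(identity).sorted
      val grouped = all groupBy (p => p.split("/")(2))        // crude kindOf: third path segment
      grouped foreach { case (kind, paths) => println(s"$kind -> ${paths.size} test(s)") }
    }
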
diff --git a/src/partest/scala/tools/partest/nest/DirectCompiler.scala b/src/partest/scala/tools/partest/nest/DirectCompiler.scala
new file mode 100644
index 0000000000..8e5ff2abc4
--- /dev/null
+++ b/src/partest/scala/tools/partest/nest/DirectCompiler.scala
@@ -0,0 +1,105 @@
+/* NEST (New Scala Test)
+ * Copyright 2007-2013 LAMP/EPFL
+ * @author Philipp Haller
+ */
+
+package scala.tools.partest
+package nest
+
+import scala.tools.nsc.{ Global, Settings, CompilerCommand, FatalError }
+import scala.tools.nsc.reporters.{ Reporter, ConsoleReporter }
+import scala.tools.nsc.util.{ FakePos, stackTraceString }
+import scala.tools.nsc.Properties.{ setProp, propOrEmpty }
+import scala.reflect.io.AbstractFile
+import scala.reflect.internal.util.Position
+import java.io.{ BufferedReader, PrintWriter, FileReader, Writer, FileWriter }
+
+class ExtConsoleReporter(settings: Settings, val writer: PrintWriter) extends ConsoleReporter(settings, Console.in, writer) {
+ shortname = true
+ // override def error(pos: Position, msg: String): Unit
+}
+
+class TestSettings(cp: String, error: String => Unit) extends Settings(error) {
+ def this(cp: String) = this(cp, _ => ())
+
+ nowarnings.value = false
+ encoding.value = "UTF-8"
+ classpath.value = cp
+}
+
+class PartestGlobal(settings: Settings, reporter: Reporter) extends Global(settings, reporter) {
+ // override def abort(msg: String): Nothing
+ // override def globalError(msg: String): Unit
+ // override def supplementErrorMessage(msg: String): String
+}
+class DirectCompiler(val fileManager: FileManager) {
+ def newGlobal(settings: Settings, reporter: Reporter): PartestGlobal =
+ new PartestGlobal(settings, reporter)
+
+ def newGlobal(settings: Settings, logWriter: FileWriter): Global =
+ newGlobal(settings, new ExtConsoleReporter(settings, new PrintWriter(logWriter)))
+
+ def newSettings(): TestSettings = new TestSettings(fileManager.LATEST_LIB)
+ def newSettings(outdir: String): TestSettings = {
+ val cp = ClassPath.join(fileManager.LATEST_LIB, outdir)
+ val s = new TestSettings(cp)
+ s.outdir.value = outdir
+ s
+ }
+
+ def compile(runner: Runner, opts0: List[String], sources: List[File]): TestState = {
+ import runner.{ sources => _, _ }
+
+ val testSettings = new TestSettings(ClassPath.join(fileManager.LATEST_LIB, outDir.getPath))
+ val logWriter = new FileWriter(logFile)
+ val srcDir = if (testFile.isDirectory) testFile else Path(testFile).parent.jfile
+ val opts = fileManager.updatePluginPath(opts0, AbstractFile getDirectory outDir, AbstractFile getDirectory srcDir)
+ val command = new CompilerCommand(opts, testSettings)
+ val global = newGlobal(testSettings, logWriter)
+ val reporter = global.reporter.asInstanceOf[ExtConsoleReporter]
+ def errorCount = reporter.ERROR.count
+
+ def defineSettings(s: Settings) = {
+ s.outputDirs setSingleOutput outDir.getPath
+ // adding codelib.jar to the classpath
+ // codelib provides the possibility to override standard reify
+ // this shields the massive amount of reification tests from changes in the API
+ prependToClasspaths(s, codelib)
+ s.classpath append fileManager.CLASSPATH // adding this why?
+
+ // add the instrumented library version to classpath
+ if (kind == "specialized")
+ prependToClasspaths(s, speclib)
+
+ // check that option processing succeeded
+ opts0.isEmpty || command.ok
+ }
+
+ if (!defineSettings(testSettings))
+ if (opts0.isEmpty)
+ reporter.error(null, s"bad settings: $testSettings")
+ else
+ reporter.error(null, opts0.mkString("bad options: ", space, ""))
+
+ def ids = sources.map(_.testIdent) mkString space
+ vlog(s"% scalac $ids")
+
+ def execCompile() =
+ if (command.shouldStopWithInfo) {
+ logWriter append (command getInfoMessage global)
+ runner genFail "compilation stopped with info"
+ } else {
+ new global.Run compile sources.map(_.getPath)
+ if (!reporter.hasErrors) runner.genPass()
+ else {
+ reporter.printSummary()
+ reporter.writer.close()
+ runner.genFail(s"compilation failed with $errorCount errors")
+ }
+ }
+
+ try { execCompile() }
+ catch { case t: Throwable => reporter.error(null, t.getMessage) ; runner.genCrash(t) }
+ finally { logWriter.close() }
+ }
+}
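
DirectCompiler above wraps an in-process scalac run: build a Settings with the test classpath, attach a reporter that writes to the test's log, compile with a Global#Run, then consult reporter.hasErrors. A minimal standalone sketch of that loop, assuming scala-compiler is on the classpath and using placeholder paths, not the runner's real wiring:

    import java.io.{ PrintWriter, StringWriter }
    import scala.tools.nsc.{ Global, Settings }
    import scala.tools.nsc.reporters.ConsoleReporter

    object CompileSketch extends App {
      val log      = new StringWriter
      val settings = new Settings(msg => sys.error(msg))
      settings.outdir.value    = "/tmp/partest-out"   // placeholder output dir
      settings.usejavacp.value = true                 // sketch only; the real runner builds an explicit classpath
      val reporter = new ConsoleReporter(settings, Console.in, new PrintWriter(log, true))
      val global   = new Global(settings, reporter)
      new global.Run compile List("/tmp/Test.scala")  // placeholder source file
      println(if (reporter.hasErrors) s"failed:\n$log" else "passed")
    }
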
diff --git a/src/partest/scala/tools/partest/nest/DirectRunner.scala b/src/partest/scala/tools/partest/nest/DirectRunner.scala
deleted file mode 100644
index 32ef8b41ea..0000000000
--- a/src/partest/scala/tools/partest/nest/DirectRunner.scala
+++ /dev/null
@@ -1,75 +0,0 @@
-/* NEST (New Scala Test)
- * Copyright 2007-2013 LAMP/EPFL
- * @author Philipp Haller
- */
-
-// $Id$
-
-package scala.tools.partest
-package nest
-
-import java.io.File
-import scala.util.Properties.setProp
-import scala.tools.nsc.util.ScalaClassLoader
-import scala.tools.nsc.io.Path
-import scala.collection.{ mutable, immutable }
-import java.util.concurrent._
-import scala.collection.convert.decorateAll._
-
-case class TestRunParams(val scalaCheckParentClassLoader: ScalaClassLoader)
-
-trait DirectRunner {
- def fileManager: FileManager
-
- import PartestDefaults.numThreads
-
- def denotesTestFile(arg: String) = Path(arg).hasExtension("scala", "res", "xml")
- def denotesTestDir(arg: String) = Path(arg).ifDirectory(_.files.nonEmpty) exists (x => x)
- def denotesTestPath(arg: String) = denotesTestDir(arg) || denotesTestFile(arg)
-
- /** No duplicate, no empty directories, don't mess with this unless
- * you like partest hangs.
- */
- def onlyValidTestPaths[T](args: List[T]): List[T] = {
- args.distinct filter (arg => denotesTestPath("" + arg) || {
- NestUI.warning("Discarding invalid test path '%s'\n" format arg)
- false
- })
- }
- def runTestsForFiles(_kindFiles: List[File], kind: String): immutable.Map[String, TestState] = {
- System.setProperty("line.separator", "\n")
-
- // @partest maintainer: we cannot create a fresh file manager here
- // since the FM must respect --buildpath and --classpath from the command line
- // for example, see how it's done in ReflectiveRunner
- //val consFM = new ConsoleFileManager
- //import consFM.{ latestCompFile, latestLibFile, latestPartestFile }
- val latestCompFile = new File(fileManager.LATEST_COMP)
- val latestReflectFile = new File(fileManager.LATEST_REFLECT)
- val latestLibFile = new File(fileManager.LATEST_LIB)
- val latestPartestFile = new File(fileManager.LATEST_PARTEST)
- val latestActorsFile = new File(fileManager.LATEST_ACTORS)
- val scalacheckURL = PathSettings.scalaCheck.toURL
- val scalaCheckParentClassLoader = ScalaClassLoader.fromURLs(
- scalacheckURL :: (List(latestCompFile, latestReflectFile, latestLibFile, latestActorsFile, latestPartestFile).map(_.toURI.toURL))
- )
-
- val kindFiles = onlyValidTestPaths(_kindFiles)
- val pool = Executors.newFixedThreadPool(numThreads)
- val manager = new RunnerManager(kind, fileManager, TestRunParams(scalaCheckParentClassLoader))
- val futures = kindFiles map (f => (f, pool submit callable(manager runTest f))) toMap
-
- pool.shutdown()
- try if (!pool.awaitTermination(4, TimeUnit.HOURS))
- NestUI.warning("Thread pool timeout elapsed before all tests were complete!")
- catch { case t: InterruptedException =>
- NestUI.warning("Thread pool was interrupted")
- t.printStackTrace()
- }
-
- for ((file, future) <- futures) yield {
- val state = if (future.isCancelled) TestState.Timeout else future.get
- (file.getAbsolutePath, state)
- }
- }
-}
diff --git a/src/partest/scala/tools/partest/nest/FileManager.scala b/src/partest/scala/tools/partest/nest/FileManager.scala
index 70fdb33c6a..230ada4803 100644
--- a/src/partest/scala/tools/partest/nest/FileManager.scala
+++ b/src/partest/scala/tools/partest/nest/FileManager.scala
@@ -12,8 +12,7 @@ import java.io.{File, FilenameFilter, IOException, StringWriter,
FileInputStream, FileOutputStream, BufferedReader,
FileReader, PrintWriter, FileWriter}
import java.net.URI
-import scala.tools.nsc.io.{ Path, Directory, File => SFile }
-import scala.sys.process._
+import scala.reflect.io.AbstractFile
import scala.collection.mutable
trait FileUtil {
@@ -62,6 +61,23 @@ trait FileManager extends FileUtil {
var LATEST_PARTEST: String
var LATEST_ACTORS: String
+ protected def relativeToLibrary(what: String): String = {
+ def jarname = if (what startsWith "scala") s"$what.jar" else s"scala-$what.jar"
+ if (LATEST_LIB endsWith ".jar")
+ (SFile(LATEST_LIB).parent / jarname).toAbsolute.path
+ else
+ (SFile(LATEST_LIB).parent.parent / "classes" / what).toAbsolute.path
+ }
+ def latestScaladoc = relativeToLibrary("scaladoc")
+ def latestInteractive = relativeToLibrary("interactive")
+ def latestScalapFile = relativeToLibrary("scalap")
+ def latestPaths = List(
+ LATEST_LIB, LATEST_REFLECT, LATEST_COMP, LATEST_PARTEST, LATEST_ACTORS,
+ latestScalapFile, latestScaladoc, latestInteractive
+ )
+ def latestFiles = latestPaths map (p => new java.io.File(p))
+ def latestUrls = latestFiles map (_.toURI.toURL)
+
var showDiff = false
var updateCheck = false
var showLog = false
@@ -69,18 +85,11 @@ trait FileManager extends FileUtil {
var SCALAC_OPTS = PartestDefaults.scalacOpts.split(' ').toSeq
var JAVA_OPTS = PartestDefaults.javaOpts
- var timeout = PartestDefaults.timeout
- // how can 15 minutes not be enough? What are you doing, run/lisp.scala?
- // You complete in 11 seconds on my machine.
- var oneTestTimeout = 60 * 60 * 1000
/** Only when --debug is given. */
lazy val testTimings = new mutable.HashMap[String, Long]
def recordTestTiming(name: String, milliseconds: Long) =
synchronized { testTimings(name) = milliseconds }
- def showTestTimings() {
- testTimings.toList sortBy (-_._2) foreach { case (k, v) => println("%s: %s".format(k, v)) }
- }
def getLogFile(dir: File, fileBase: String, kind: String): File =
new File(dir, fileBase + "-" + kind + ".log")
@@ -121,4 +130,34 @@ trait FileManager extends FileUtil {
f.printlnAll(f.lines.toList map replace: _*)
}
+
+ /** Massage args to merge plugins and fix paths.
+ * Plugin path can be relative to test root, or cwd is out.
+ * While we're at it, mix in the baseline options, too.
+ * That's how ant passes in the plugins dir.
+ */
+ def updatePluginPath(args: List[String], out: AbstractFile, srcdir: AbstractFile): List[String] = {
+ val dir = testRootDir
+ // The given path, or the output dir if ".", or a temp dir if output is virtual (since plugin loading doesn't like virtual)
+ def pathOrCwd(p: String) =
+ if (p == ".") {
+ val plugxml = "scalac-plugin.xml"
+ val pout = if (out.isVirtual) Directory.makeTemp() else Path(out.path)
+ val srcpath = Path(srcdir.path)
+ val pd = (srcpath / plugxml).toFile
+ if (pd.exists) pd copyTo (pout / plugxml)
+ pout
+ } else Path(p)
+ def absolutize(path: String) = pathOrCwd(path) match {
+ case x if x.isAbsolute => x.path
+ case x => (dir / x).toAbsolute.path
+ }
+
+ val xprefix = "-Xplugin:"
+ val (xplugs, others) = args partition (_ startsWith xprefix)
+ val Xplugin = if (xplugs.isEmpty) Nil else List(xprefix +
+ (xplugs map (_ stripPrefix xprefix) flatMap (_ split pathSeparator) map absolutize mkString pathSeparator)
+ )
+ SCALAC_OPTS.toList ::: others ::: Xplugin
+ }
}
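
updatePluginPath above collects every -Xplugin: argument, makes each entry absolute against the test root (or against a copy of the output dir for "."), and re-emits them as a single merged -Xplugin: option after the baseline scalac options. A standalone sketch of the merging step, with an invented test root and argument list:

    object PluginArgsSketch extends App {
      val testRoot = "/home/me/scala/test"            // placeholder for the real test root
      val sep      = java.io.File.pathSeparator
      val xprefix  = "-Xplugin:"
      val args     = List("-Xplugin:plugins/foo.jar", "-deprecation", "-Xplugin:/abs/bar.jar")
      def absolutize(p: String) = if (new java.io.File(p).isAbsolute) p else s"$testRoot/$p"
      val (xplugs, others) = args partition (_ startsWith xprefix)
      val merged =
        if (xplugs.isEmpty) Nil
        else List(xprefix + (xplugs map (_ stripPrefix xprefix) map absolutize mkString sep))
      // on Unix: List(-deprecation, -Xplugin:/home/me/scala/test/plugins/foo.jar:/abs/bar.jar)
      println(others ::: merged)
    }
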
diff --git a/src/partest/scala/tools/partest/nest/NestUI.scala b/src/partest/scala/tools/partest/nest/NestUI.scala
index 70db6d0ed1..564270e531 100644
--- a/src/partest/scala/tools/partest/nest/NestUI.scala
+++ b/src/partest/scala/tools/partest/nest/NestUI.scala
@@ -3,14 +3,42 @@
* @author Philipp Haller
*/
-// $Id$
-
package scala.tools.partest
package nest
import java.io.PrintWriter
+class Colors(enabled: => Boolean) {
+ import Console._
+
+ val bold = colored(BOLD)
+ val yellow = colored(YELLOW)
+ val green = colored(GREEN)
+ val blue = colored(BLUE)
+ val red = colored(RED)
+ val red_b = colored(RED_B)
+ val green_b = colored(GREEN_B)
+ val cyan = colored(CYAN)
+ val magenta = colored(MAGENTA)
+
+ private def colored(code: String): String => String =
+ s => if (enabled) code + s + RESET else s
+}
+
object NestUI {
+ private val testNum = new java.util.concurrent.atomic.AtomicInteger(1)
+ @volatile private var testNumberFmt = "%3d"
+ // @volatile private var testNumber = 1
+ private def testNumber = testNumberFmt format testNum.getAndIncrement()
+ def resetTestNumber(max: Int = -1) {
+ testNum set 1
+ val width = if (max > 0) max.toString.length else 3
+ testNumberFmt = s"%${width}d"
+ }
+
+ var colorEnabled = sys.props contains "partest.colors"
+ val color = new Colors(colorEnabled)
+ import color._
val NONE = 0
val SOME = 1
@@ -22,11 +50,66 @@ object NestUI {
private var _warning = ""
private var _default = ""
+ private var dotCount = 0
+ private val DotWidth = 72
+
+ def leftFlush() {
+ if (dotCount != 0) {
+ normal("\n")
+ dotCount = 0
+ }
+ }
+
+ def statusLine(state: TestState) = {
+ import state._
+ import TestState._
+ val colorizer = state match {
+ case _: Skip => yellow
+ case _: Updated => cyan
+ case s if s.isOk => green
+ case _ => red
+ }
+ val word = bold(colorizer(state.shortStatus))
+ f"$word $testNumber - $testIdent%-40s$reasonString"
+ }
+
+ def reportTest(state: TestState) = {
+ if (isTerse && state.isOk) {
+ if (dotCount >= DotWidth) {
+ outline("\n.")
+ dotCount = 1
+ }
+ else {
+ outline(".")
+ dotCount += 1
+ }
+ }
+ else {
+ echo(statusLine(state))
+ if (!state.isOk && isDiffy) {
+ val differ = bold(red("% ")) + "diff "
+ state.transcript find (_ startsWith differ) foreach (echo(_))
+ }
+ }
+ }
+
+ def echo(message: String): Unit = synchronized {
+ leftFlush()
+ print(message + "\n")
+ }
+ def chatty(msg: String) = if (isVerbose) echo(msg)
+
+ def echoSkipped(msg: String) = echo(yellow(msg))
+ def echoPassed(msg: String) = echo(bold(green(msg)))
+ def echoFailed(msg: String) = echo(bold(red(msg)))
+ def echoMixed(msg: String) = echo(bold(yellow(msg)))
+ def echoWarning(msg: String) = echo(bold(red(msg)))
+
def initialize(number: Int) = number match {
case MANY =>
_outline = Console.BOLD + Console.BLACK
_success = Console.BOLD + Console.GREEN
- _failure = Console.BOLD + Console.RED
+ _failure = Console.BOLD + Console.RED
_warning = Console.BOLD + Console.YELLOW
_default = Console.RESET
case SOME =>
@@ -54,9 +137,6 @@ object NestUI {
}
def warning(msg: String) = print(_warning + msg + _default)
- def warning(msg: String, wr: PrintWriter) = synchronized {
- wr.print(_warning + msg + _default)
- }
def normal(msg: String) = print(_default + msg)
def normal(msg: String, wr: PrintWriter) = synchronized {
@@ -64,10 +144,7 @@ object NestUI {
}
def usage() {
- println("Usage: NestRunner [<options>] [<testfile> ..] [<resfile>]")
- println(" <testfile>: list of files ending in '.scala'")
- println(" <resfile>: a file not ending in '.scala'")
- println(" <options>:")
+ println("Usage: NestRunner [options] [test test ...]")
println
println(" Test categories:")
println(" --all run all tests")
@@ -76,18 +153,13 @@ object NestUI {
println(" --run run interpreter and backend tests")
println(" --jvm run JVM backend tests")
println(" --res run resident compiler tests")
- println(" --buildmanager run Build Manager tests")
println(" --scalacheck run ScalaCheck tests")
- println(" --script run script runner tests")
- println(" --shootout run shootout tests")
println(" --instrumented run instrumented tests")
println(" --presentation run presentation compiler tests")
- println(" --grep <expr> run all tests whose source file contains <expr>")
println
println(" Other options:")
println(" --pack pick compiler/reflect/library in build/pack, and run all tests")
- println(" --show-log show log")
- println(" --show-diff show diff between log and check file")
+ println(" --grep <expr> run all tests whose source file contains <expr>")
println(" --failed run only those tests that failed during the last run")
println(" --update-check instead of failing tests with output change, update checkfile. (Use with care!)")
println(" --verbose show progress information")
@@ -105,17 +177,32 @@ object NestUI {
var _verbose = false
var _debug = false
+ var _terse = false
+ var _diff = false
+ def isVerbose = _verbose
+ def isDebug = _debug
+ def isTerse = _terse
+ def isDiffy = _diff
+
+ def setVerbose() {
+ _verbose = true
+ }
+ def setDebug() {
+ _debug = true
+ }
+ def setTerse() {
+ _terse = true
+ }
+ def setDiffOnFail() {
+ _diff = true
+ }
def verbose(msg: String) {
- if (_verbose) {
- outline("debug: ")
- println(msg)
- }
+ if (isVerbose)
+ System.err.println(msg)
}
def debug(msg: String) {
- if (isPartestDebug) {
- outline("debug: ")
- println(msg)
- }
+ if (isDebug)
+ System.err.println(msg)
}
}
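
The new Colors class takes its enabled flag by name, so the decision to emit ANSI codes is re-made on every call rather than frozen at construction time. A small self-contained sketch of that behaviour (names below are illustrative):

    object ColorSketch extends App {
      class Colors(enabled: => Boolean) {
        import Console._
        private def colored(code: String): String => String =
          s => if (enabled) code + s + RESET else s
        val green = colored(GREEN)
      }
      var useColor = false
      val color = new Colors(useColor)    // by-name: re-reads useColor on each call
      println(color.green("plain"))       // no escape codes
      useColor = true
      println(color.green("coloured"))    // wrapped in GREEN ... RESET
    }
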
diff --git a/src/partest/scala/tools/partest/nest/PathSettings.scala b/src/partest/scala/tools/partest/nest/PathSettings.scala
index 0ba34777a0..030c515947 100644
--- a/src/partest/scala/tools/partest/nest/PathSettings.scala
+++ b/src/partest/scala/tools/partest/nest/PathSettings.scala
@@ -5,11 +5,9 @@
package scala.tools.partest
package nest
-import scala.tools.nsc.Properties.{ setProp, propOrEmpty, propOrNone, propOrElse }
import scala.tools.nsc.util.ClassPath
-import scala.tools.nsc.io
-import io.{ Path, File, Directory }
-import RunnerUtils._
+import scala.tools.nsc.io.{ Path, File, Directory }
+import Path._
object PathSettings {
import PartestDefaults.{ testRootDir, srcDirName }
@@ -19,6 +17,8 @@ object PathSettings {
private def findJar(d: Directory, name: String): Option[File] = findJar(d.files, name)
private def findJar(files: Iterator[File], name: String): Option[File] =
files filter (_ hasExtension "jar") find { _.name startsWith name }
+ private def findJarOrFail(name: String, ds: Directory*): File = findJar(ds flatMap (_.files) iterator, name) getOrElse
+ sys.error(s"'${name}.jar' not found in '${ds map (_.path) mkString ", "}'.")
// Directory <root>/test
lazy val testRoot: Directory = testRootDir getOrElse {
@@ -27,8 +27,8 @@ object PathSettings {
candidates find isPartestDir getOrElse sys.error("Directory 'test' not found.")
}
- // Directory <root>/test/files
- lazy val srcDir = Directory(testRoot / srcDirName toCanonical)
+ // Directory <root>/test/files or .../scaladoc
+ def srcDir = Directory(testRoot / srcDirName toCanonical)
// Directory <root>/test/files/lib
lazy val srcLibDir = Directory(srcDir / "lib")
@@ -73,8 +73,14 @@ object PathSettings {
sys.error("No scalacheck jar found in '%s' or '%s'".format(buildPackLibDir, srcLibDir))
}
+ lazy val testInterface: File = findJarOrFail("test-interface", buildPackLibDir, srcLibDir)
+
lazy val diffUtils: File =
findJar(buildPackLibDir.files, "diffutils") getOrElse sys.error(s"No diffutils.jar found in '$buildPackLibDir'.")
+
+ /** The platform-specific support jar, `tools.jar`.
+ */
+ lazy val platformTools: Option[File] = PathResolver.SupplementalLocations.platformTools
}
class PathSettings() {
diff --git a/src/partest/scala/tools/partest/nest/ReflectiveRunner.scala b/src/partest/scala/tools/partest/nest/ReflectiveRunner.scala
index 700667afcf..734affa153 100644
--- a/src/partest/scala/tools/partest/nest/ReflectiveRunner.scala
+++ b/src/partest/scala/tools/partest/nest/ReflectiveRunner.scala
@@ -3,8 +3,6 @@
* @author Philipp Haller
*/
-// $Id$
-
package scala.tools.partest
package nest
@@ -12,7 +10,6 @@ import scala.tools.nsc.Properties.{ setProp, propOrEmpty }
import scala.tools.nsc.util.ClassPath
import scala.tools.nsc.io
import io.Path
-import RunnerUtils._
import java.net.URLClassLoader
/* This class is used to load an instance of DirectRunner using
@@ -28,6 +25,12 @@ class ReflectiveRunner {
// was used to start the runner.
val sepRunnerClassName = "scala.tools.partest.nest.ConsoleRunner"
+ private def searchPath(option: String, as: List[String]): Option[String] = as match {
+ case `option` :: r :: _ => Some(r)
+ case _ :: rest => searchPath(option, rest)
+ case Nil => None
+ }
+
def main(args: String) {
val argList = (args.split("\\s")).toList
@@ -47,23 +50,18 @@ class ReflectiveRunner {
else // auto detection
new ConsoleFileManager
- import fileManager.
- { latestCompFile, latestReflectFile, latestLibFile, latestPartestFile, latestFjbgFile, latestScalapFile, latestActorsFile }
- val files =
- Array(latestCompFile, latestReflectFile, latestLibFile, latestPartestFile, latestFjbgFile, latestScalapFile, latestActorsFile) map (x => io.File(x))
-
- val sepUrls = files map (_.toURL)
- // this seems to be the core classloader that determines which classes can be found when running partest from the test/partest script
- var sepLoader = new URLClassLoader(sepUrls, null)
-
// this is a workaround for https://issues.scala-lang.org/browse/SI-5433
+ // when that bug is fixed, the addition of PathSettings.srcCodeLib can be removed
// we hack into the classloader that will become parent classloader for scalac
// this way we ensure that reflective macro lookup will pick correct Code.lift
- // it's also used to inject diffutils into the classpath when running partest from the test/partest script
- sepLoader = new URLClassLoader((PathSettings.srcCodeLib +: (PathSettings.diffUtils +: files)) map (_.toURL), null)
+ // it's also used to inject diffutils into the classpath when running partest from the test/partest script
+ val srcCodeLibAndDiff = List(PathSettings.srcCodeLib, PathSettings.diffUtils, PathSettings.testInterface)
+ val sepUrls = srcCodeLibAndDiff.map(_.toURI.toURL) ::: fileManager.latestUrls
+ // this seems to be the core classloader that determines which classes can be found when running partest from the test/partest script
+ val sepLoader = new URLClassLoader(sepUrls.toArray, null)
if (isPartestDebug)
- println("Loading classes from:\n" + sepUrls.mkString("\n"))
+ println("Loading classes from:\n " + fileManager.latestUrls.mkString("\n "))
// @partest maintainer: it seems to me that commented lines are incorrect
// if classPath is not empty, then it has been provided by the --classpath option
@@ -74,11 +72,11 @@ class ReflectiveRunner {
// case Some(cp) => Nil
// case _ => files.toList map (_.path)
//}
- val paths = files.toList map (_.path)
- val newClasspath = ClassPath.join(paths: _*)
+ setProp("java.class.path", ClassPath.join(fileManager.latestPaths: _*))
- setProp("java.class.path", newClasspath)
+ // don't let partest find pluginsdir; in ant build, standard plugin has dedicated test suite
+ //setProp("scala.home", latestLibFile.parent.parent.path)
setProp("scala.home", "")
if (isPartestDebug)
diff --git a/src/partest/scala/tools/partest/nest/Runner.scala b/src/partest/scala/tools/partest/nest/Runner.scala
new file mode 100644
index 0000000000..1034d724d2
--- /dev/null
+++ b/src/partest/scala/tools/partest/nest/Runner.scala
@@ -0,0 +1,900 @@
+/* NEST (New Scala Test)
+ * Copyright 2007-2013 LAMP/EPFL
+ * @author Paul Phillips
+ */
+package scala.tools.partest
+package nest
+
+import java.io.{ Console => _, _ }
+import java.net.URL
+import java.nio.charset.{ Charset, CharsetDecoder, CharsetEncoder, CharacterCodingException, CodingErrorAction => Action }
+import java.util.concurrent.Executors
+import java.util.concurrent.TimeUnit.NANOSECONDS
+import scala.collection.mutable.ListBuffer
+import scala.concurrent.duration.Duration
+import scala.io.Codec
+import scala.reflect.internal.FatalError
+import scala.sys.process.{ Process, ProcessLogger }
+import scala.tools.nsc.Properties.{ envOrElse, isWin, jdkHome, javaHome, propOrElse, propOrEmpty, setProp }
+import scala.tools.nsc.{ Settings, CompilerCommand, Global }
+import scala.tools.nsc.io.{ AbstractFile, PlainFile }
+import scala.tools.nsc.reporters.ConsoleReporter
+import scala.tools.nsc.util.{ Exceptional, ScalaClassLoader, stackTraceString }
+import scala.tools.scalap.Main.decompileScala
+import scala.tools.scalap.scalax.rules.scalasig.ByteCode
+import scala.util.{ Try, Success, Failure }
+import ClassPath.{ join, split }
+import PartestDefaults.{ javaCmd, javacCmd }
+import TestState.{ Pass, Fail, Crash, Uninitialized, Updated }
+
+trait PartestRunSettings {
+ def gitPath: Path
+ def reportPath: Path
+ def logPath: Path
+
+ def testPaths: List[Path]
+
+ def gitDiffOptions: List[String]
+ def extraScalacOptions: List[String]
+ def extraJavaOptions: List[String]
+}
+
+class TestTranscript {
+ import NestUI.color._
+ private val buf = ListBuffer[String]()
+ private def pass(s: String) = bold(green("% ")) + s
+ private def fail(s: String) = bold(red("% ")) + s
+
+ def add(action: String): this.type = { buf += action ; this }
+ def append(text: String) { val s = buf.last ; buf.trimEnd(1) ; buf += (s + text) }
+
+ // Colorize prompts according to pass/fail
+ def fail: List[String] = buf.toList match {
+ case Nil => Nil
+ case xs => (xs.init map pass) :+ fail(xs.last)
+ }
+}
+
+/** Run a single test. Rubber meets road. */
+class Runner(val testFile: File, fileManager: FileManager, val testRunParams: TestRunParams) {
+ import fileManager._
+
+ // Override to true to have the outcome of this test displayed
+ // whether it passes or not; in general only failures are reported,
+ // except for a . per passing test to show progress.
+ def isEnumeratedTest = false
+
+ private var _lastState: TestState = null
+ private var _transcript = new TestTranscript
+
+ def lastState = if (_lastState == null) Uninitialized(testFile) else _lastState
+ def setLastState(s: TestState) = _lastState = s
+ def transcript: List[String] = _transcript.fail ++ logFile.fileLines
+ def pushTranscript(msg: String) = _transcript add msg
+
+ val parentFile = testFile.getParentFile
+ val kind = parentFile.getName
+ val fileBase = basename(testFile.getName)
+ val logFile = new File(parentFile, s"$fileBase-$kind.log")
+ val outFile = logFile changeExtension "obj"
+ val checkFile = testFile changeExtension "check"
+ val flagsFile = testFile changeExtension "flags"
+ val testIdent = testFile.testIdent // e.g. pos/t1234
+
+ lazy val outDir = { outFile.mkdirs() ; outFile }
+
+ type RanOneTest = (Boolean, LogContext)
+
+ def showCrashInfo(t: Throwable) {
+    System.err.println(s"Crashed running test $testIdent: $t")
+ if (!isPartestTerse)
+ System.err.println(stackTraceString(t))
+ }
+ protected def crashHandler: PartialFunction[Throwable, TestState] = {
+ case t: InterruptedException =>
+ genTimeout()
+ case t: Throwable =>
+ showCrashInfo(t)
+ logFile.appendAll(stackTraceString(t))
+ genCrash(t)
+ }
+
+ def genPass() = Pass(testFile)
+ def genFail(reason: String) = Fail(testFile, reason, _transcript.fail)
+ def genTimeout() = Fail(testFile, "timed out", _transcript.fail)
+ def genCrash(caught: Throwable) = Crash(testFile, caught, _transcript.fail)
+ def genUpdated() = Updated(testFile)
+
+ def speclib = PathSettings.srcSpecLib.toString // specialization lib
+ def codelib = PathSettings.srcCodeLib.toString // reify lib
+
+ // Prepend to a classpath, but without incurring duplicate entries
+ def prependTo(classpath: String, path: String): String = {
+ val segments = ClassPath split classpath
+
+ if (segments startsWith path) classpath
+ else ClassPath.join(path :: segments distinct: _*)
+ }
+
+ def prependToJavaClasspath(path: String) {
+ val jcp = sys.props.getOrElse("java.class.path", "")
+ prependTo(jcp, path) match {
+ case `jcp` =>
+ case cp => sys.props("java.class.path") = cp
+ }
+ }
+ def prependToClasspaths(s: Settings, path: String) {
+ prependToJavaClasspath(path)
+ val scp = s.classpath.value
+ prependTo(scp, path) match {
+ case `scp` =>
+ case cp => s.classpath.value = cp
+ }
+ }
+
+ private def workerError(msg: String): Unit = System.err.println("Error: " + msg)
+
+ def javac(files: List[File]): TestState = {
+ // compile using command-line javac compiler
+ val args = Seq(
+ javacCmd,
+ "-d",
+ outDir.getAbsolutePath,
+ "-classpath",
+ join(outDir.toString, CLASSPATH)
+ ) ++ files.map(_.getAbsolutePath)
+
+ pushTranscript(args mkString " ")
+ val captured = StreamCapture(runCommand(args, logFile))
+ if (captured.result) genPass() else {
+ logFile appendAll captured.stderr
+ genFail("java compilation failed")
+ }
+ }
+
+ def testPrompt = kind match {
+ case "res" => "nsc> "
+ case _ => "% "
+ }
+
+ /** Evaluate an action body and update the test state.
+ * @param failFn optionally map a result to a test state.
+ */
+ def nextTestAction[T](body: => T)(failFn: PartialFunction[T, TestState]): T = {
+ val result = body
+ setLastState( if (failFn isDefinedAt result) failFn(result) else genPass() )
+ result
+ }
+ def nextTestActionExpectTrue(reason: String, body: => Boolean): Boolean = (
+ nextTestAction(body) { case false => genFail(reason) }
+ )
+ def nextTestActionFailing(reason: String): Boolean = nextTestActionExpectTrue(reason, false)
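
    // Standalone sketch (not from the patch) of the nextTestAction pattern above:
    // run a body and map only "interesting" results to a failure state through a
    // partial function; anything else counts as a pass. The State type here is a
    // simplified stand-in for partest's TestState.
    object NextActionSketch extends App {
      sealed trait State
      case object Pass extends State
      case class Fail(why: String) extends State
      var lastState: State = Pass
      def nextTestAction[T](body: => T)(failFn: PartialFunction[T, State]): T = {
        val result = body
        lastState = if (failFn isDefinedAt result) failFn(result) else Pass
        result
      }
      nextTestAction(42 % 2) { case 1 => Fail("expected an even number") }
      println(lastState)                  // Pass
      nextTestAction("boom") { case s: String if s.nonEmpty => Fail(s) }
      println(lastState)                  // Fail(boom)
    }
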
+
+ private def assembleTestCommand(outDir: File, logFile: File): List[String] = {
+ // check whether there is a ".javaopts" file
+ val argsFile = testFile changeExtension "javaopts"
+ val argString = file2String(argsFile)
+ if (argString != "")
+ NestUI.verbose("Found javaopts file '%s', using options: '%s'".format(argsFile, argString))
+
+ val testFullPath = testFile.getAbsolutePath
+
+ // Note! As this currently functions, JAVA_OPTS must precede argString
+ // because when an option is repeated to java only the last one wins.
+ // That means until now all the .javaopts files were being ignored because
+ // they all attempt to change options which are also defined in
+ // partest.java_opts, leading to debug output like:
+ //
+ // debug: Found javaopts file 'files/shootout/message.scala-2.javaopts', using options: '-Xss32k'
+ // debug: java -Xss32k -Xss2m -Xms256M -Xmx1024M -classpath [...]
+ val extras = if (isPartestDebug) List("-Dpartest.debug=true") else Nil
+ val propertyOptions = List(
+ "-Dfile.encoding=UTF-8",
+ "-Djava.library.path="+logFile.getParentFile.getAbsolutePath,
+ "-Dpartest.output="+outDir.getAbsolutePath,
+ "-Dpartest.lib="+LATEST_LIB,
+ "-Dpartest.reflect="+LATEST_REFLECT,
+ "-Dpartest.cwd="+outDir.getParent,
+ "-Dpartest.test-path="+testFullPath,
+ "-Dpartest.testname="+fileBase,
+ "-Djavacmd="+javaCmd,
+ "-Djavaccmd="+javacCmd,
+ "-Duser.language=en",
+ "-Duser.country=US"
+ ) ++ extras
+
+ val classpath = if (extraClasspath != "") join(extraClasspath, CLASSPATH) else CLASSPATH
+
+ javaCmd +: (
+ (JAVA_OPTS.split(' ') ++ extraJavaOptions.split(' ') ++ argString.split(' ')).map(_.trim).filter(_ != "").toList ++ Seq(
+ "-classpath",
+ join(outDir.toString, classpath)
+ ) ++ propertyOptions ++ Seq(
+ "scala.tools.nsc.MainGenericRunner",
+ "-usejavacp",
+ "Test",
+ "jvm"
+ )
+ )
+ }
+
+ /** Runs command redirecting standard out and
+ * error out to output file.
+ */
+ private def runCommand(args: Seq[String], outFile: File): Boolean = {
+ //(Process(args) #> outFile !) == 0 or (Process(args) ! pl) == 0
+ val pl = ProcessLogger(outFile)
+ val nonzero = 17 // rounding down from 17.3
+ def run: Int = {
+ val p = Process(args) run pl
+ try p.exitValue
+ catch {
+ case e: InterruptedException =>
+ NestUI verbose s"Interrupted waiting for command to finish (${args mkString " "})"
+ p.destroy
+ nonzero
+ case t: Throwable =>
+ NestUI verbose s"Exception waiting for command to finish: $t (${args mkString " "})"
+ p.destroy
+ throw t
+ }
+ finally pl.close()
+ }
+ (pl buffer run) == 0
+ }
+
+ private def execTest(outDir: File, logFile: File): Boolean = {
+ val cmd = assembleTestCommand(outDir, logFile)
+
+ pushTranscript((cmd mkString s" \\$EOL ") + " > " + logFile.getName)
+ nextTestAction(runCommand(cmd, logFile)) {
+ case false =>
+ _transcript append EOL + logFile.fileContents
+ genFail("non-zero exit code")
+ }
+ }
+
+ override def toString = s"""Test($testIdent, lastState = $lastState)"""
+
+ // result is unused
+ def newTestWriters() = {
+ val swr = new StringWriter
+ val wr = new PrintWriter(swr, true)
+ // diff = ""
+
+ ((swr, wr))
+ }
+
+ def fail(what: Any) = {
+ NestUI.verbose("scalac: compilation of "+what+" failed\n")
+ false
+ }
+
+ /** Filter the diff for conditional blocks.
+ * The check file can contain lines of the form:
+ * `#partest java7`
+ * where the line contains a conventional flag name.
+ * In the diff output, these lines have the form:
+ * `> #partest java7`
+ * Blocks which don't apply are filtered out,
+ * and what remains is the desired diff.
+ * Line edit commands such as `0a1,6` don't count
+ * as diff, so return a nonempty diff only if
+ * material diff output was seen.
+ * Filtering the diff output (instead of every check
+ * file) means that we only post-process a test that
+ * might be failing, in the normal case.
+ */
+ def diffilter(d: String) = {
+ import scala.util.Properties.javaVersion
+ val prefix = "#partest"
+ val margin = "> "
+ val leader = margin + prefix
+ // use lines in block so labeled? Default to sorry, Charlie.
+ def retainOn(f: String) = {
+ val (invert, token) =
+ if (f startsWith "!") (true, f drop 1) else (false, f)
+ val cond = token match {
+ case "java7" => javaVersion startsWith "1.7"
+ case "java6" => javaVersion startsWith "1.6"
+ case "true" => true
+ case _ => false
+ }
+ if (invert) !cond else cond
+ }
+ if (d contains prefix) {
+ val sb = new StringBuilder
+ var retain = true // use the current line
+ var material = false // saw a line of diff
+ for (line <- d.lines)
+ if (line startsWith leader) {
+ val rest = (line stripPrefix leader).trim
+ retain = retainOn(rest)
+ } else if (retain) {
+ if (line startsWith margin) material = true
+ sb ++= line
+ sb ++= EOL
+ }
+ if (material) sb.toString else ""
+ } else d
+ }
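
    // Sketch of the conditional check-file filtering described above, with the
    // flag table reduced to java6/java7 and a hard-coded javaVersion. The lines
    // prefixed "> " mimic check-file side diff output; all data is invented.
    object DiffFilterSketch extends App {
      val javaVersion = "1.7.0"
      def keep(flag: String) = flag match {
        case "java7" => javaVersion startsWith "1.7"
        case "java6" => javaVersion startsWith "1.6"
        case _       => false
      }
      val diff = List(
        "2a3,4",
        "> #partest java6",
        "> old message",
        "> #partest java7",
        "> new message"
      )
      var retain = true
      val filtered = diff filter { line =>
        if (line startsWith "> #partest") { retain = keep(line.stripPrefix("> #partest").trim); false }
        else retain
      }
      filtered foreach println            // keeps "2a3,4" and "> new message"
    }
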
+
+ def currentDiff = (
+ if (checkFile.canRead) diffilter(compareFiles(logFile, checkFile))
+ else compareContents(augmentString(file2String(logFile)).lines.toList, Nil)
+ )
+
+ val gitRunner = List("/usr/local/bin/git", "/usr/bin/git") map (f => new java.io.File(f)) find (_.canRead)
+ val gitDiffOptions = "--ignore-space-at-eol --no-index " + propOrEmpty("partest.git_diff_options")
+ // --color=always --word-diff
+
+ def gitDiff(f1: File, f2: File): Option[String] = {
+ try gitRunner map { git =>
+ val cmd = s"$git diff $gitDiffOptions $f1 $f2"
+ val diff = Process(cmd).lines_!.drop(4).map(_ + "\n").mkString
+
+ "\n" + diff
+ }
+ catch { case t: Exception => None }
+ }
+
+ /** Normalize the log output by applying test-specific filters
+ * and fixing filesystem-specific paths.
+ *
+ * Line filters are picked up from `filter: pattern` at the top of sources.
+ * The filtered line is detected with a simple "contains" test,
+ * and yes, "filter" means "filter out" in this context.
+ *
+ * File paths are detected using the absolute path of the test root.
+ * A string that looks like a file path is normalized by replacing
+ * the leading segments (the root) with "$ROOT" and by replacing
+ * any Windows backslashes with the one true file separator char.
+ */
+ def normalizeLog() {
+ // Apply judiciously; there are line comments in the "stub implementations" error output.
+ val slashes = """[/\\]+""".r
+ def squashSlashes(s: String) = slashes replaceAllIn (s, "/")
+
+ // this string identifies a path and is also snipped from log output.
+ // to preserve more of the path, could use fileManager.testRootPath
+ val elided = parentFile.getAbsolutePath
+
+ // something to mark the elision in the log file (disabled)
+ val ellipsis = "" //".../" // using * looks like a comment
+
+ // no spaces in test file paths below root, because otherwise how to detect end of path string?
+ val pathFinder = raw"""(?i)\Q${elided}${File.separator}\E([\${File.separator}\w]*)""".r
+ def canonicalize(s: String): String = (
+ pathFinder replaceAllIn (s, m => ellipsis + squashSlashes(m group 1))
+ )
+
+ def masters = {
+ val files = List(new File(parentFile, "filters"), new File(PathSettings.srcDir.path, "filters"))
+ files filter (_.exists) flatMap (_.fileLines) map (_.trim) filter (s => !(s startsWith "#"))
+ }
+ val filters = toolArgs("filter", split = false) ++ masters
+ val elisions = ListBuffer[String]()
+ //def lineFilter(s: String): Boolean = !(filters exists (s contains _))
+ def lineFilter(s: String): Boolean = (
+ filters map (_.r) forall { r =>
+ val res = (r findFirstIn s).isEmpty
+ if (!res) elisions += s
+ res
+ }
+ )
+
+ logFile.mapInPlace(canonicalize)(lineFilter)
+ if (isPartestVerbose && elisions.nonEmpty) {
+ import NestUI.color._
+ val emdash = bold(yellow("--"))
+ pushTranscript(s"filtering ${logFile.getName}$EOL${elisions mkString (emdash, EOL + emdash, EOL)}")
+ }
+ }
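
    // Small sketch of the "filter: pattern" elision above: each filter is used as
    // a regex and any log line it matches is dropped before diffing. The filters
    // and log lines below are made up for illustration.
    object LogFilterSketch extends App {
      val filters  = List("""at .*\.java:\d+""", "OpenJDK.*warning")
      val logLines = List(
        "error: something broke",
        "\tat Foo.bar(Foo.java:42)",
        "OpenJDK 64-Bit Server VM warning: ...",
        "expected output"
      )
      def lineFilter(s: String) = filters map (_.r) forall (r => (r findFirstIn s).isEmpty)
      logLines filter lineFilter foreach println   // only the first and last lines survive
    }
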
+
+ def diffIsOk: Boolean = {
+ // always normalize the log first
+ normalizeLog()
+ val diff = currentDiff
+ // if diff is not empty, is update needed?
+ val updating: Option[Boolean] = (
+ if (diff == "") None
+ else Some(fileManager.updateCheck)
+ )
+ pushTranscript(s"diff $logFile $checkFile")
+ nextTestAction(updating) {
+ case Some(true) =>
+ NestUI.verbose("Updating checkfile " + checkFile)
+ checkFile writeAll file2String(logFile)
+ genUpdated()
+ case Some(false) =>
+ // Get a word-highlighted diff from git if we can find it
+ val bestDiff = if (updating.isEmpty) "" else {
+ if (checkFile.canRead)
+ gitDiff(logFile, checkFile) getOrElse {
+ s"diff $logFile $checkFile\n$diff"
+ }
+ else diff
+ }
+ _transcript append bestDiff
+ genFail("output differs")
+ // TestState.fail("output differs", "output differs",
+ // genFail("output differs")
+ // TestState.Fail("output differs", bestDiff)
+ case None => genPass() // redundant default case
+ } getOrElse true
+ }
+
+ /** 1. Creates log file and output directory.
+ * 2. Runs script function, providing log file and output directory as arguments.
+ * 2b. or, just run the script without context and return a new context
+ */
+ def runInContext(body: => Boolean): (Boolean, LogContext) = {
+ val (swr, wr) = newTestWriters()
+ val succeeded = body
+ (succeeded, LogContext(logFile, swr, wr))
+ }
+
+ /** Grouped files in group order, and lex order within each group. */
+ def groupedFiles(sources: List[File]): List[List[File]] = (
+ if (sources.tail.nonEmpty) {
+ val grouped = sources groupBy (_.group)
+ grouped.keys.toList.sorted map (k => grouped(k) sortBy (_.getName))
+ }
+ else List(sources)
+ )
+
+ /** Source files for the given test file. */
+ def sources(file: File): List[File] = (
+ if (file.isDirectory)
+ file.listFiles.toList filter (_.isJavaOrScala)
+ else
+ List(file)
+ )
+
+ def newCompiler = new DirectCompiler(fileManager)
+
+ def attemptCompile(sources: List[File]): TestState = {
+ val state = newCompiler.compile(this, flagsForCompilation(sources), sources)
+ if (!state.isOk)
+ _transcript append ("\n" + file2String(logFile))
+
+ state
+ }
+
+ // snort or scarf all the contributing flags files
+ def flagsForCompilation(sources: List[File]): List[String] = {
+ def argsplitter(s: String) = words(s) filter (_.nonEmpty)
+ val perTest = argsplitter(flagsFile.fileContents)
+ val perGroup = if (testFile.isDirectory) {
+ sources flatMap { f => SFile(Path(f) changeExtension "flags").safeSlurp map argsplitter getOrElse Nil }
+ } else Nil
+ perTest ++ perGroup
+ }
+
+ def toolArgs(tool: String, split: Boolean = true): List[String] = {
+ def argsplitter(s: String) = if (split) words(s) filter (_.nonEmpty) else List(s)
+ def argsFor(f: File): List[String] = {
+ import scala.util.matching.Regex
+ val p = new Regex(s"(?:.*\\s)?${tool}:(?:\\s*)(.*)?", "args")
+ val max = 10
+ val src = Path(f).toFile.chars(codec)
+ val args = try {
+ src.getLines take max collectFirst {
+ case s if (p findFirstIn s).nonEmpty => for (m <- p findFirstMatchIn s) yield m group "args"
+ }
+ } finally src.close()
+ args.flatten map argsplitter getOrElse Nil
+ }
+ sources(testFile) flatMap argsFor
+ }
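
    // Sketch of how per-tool arguments are scraped from a test source header,
    // e.g. a "// scalacheck: ..." comment in the first few lines. The regex
    // mirrors the one above; the sample header is invented.
    object ToolArgsSketch extends App {
      val tool = "scalacheck"
      val p = new scala.util.matching.Regex(s"(?:.*\\s)?${tool}:(?:\\s*)(.*)?", "args")
      val header = List(
        "// scalacheck: -minSuccessfulTests 10",
        "object Test"
      )
      val args = header collectFirst {
        case s if (p findFirstIn s).nonEmpty => (p findFirstMatchIn s).map(_ group "args").getOrElse("")
      }
      println(args map (_.split("\\s+").toList) getOrElse Nil)   // List(-minSuccessfulTests, 10)
    }
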
+
+ abstract class CompileRound {
+ def fs: List[File]
+ def result: TestState
+ def description: String
+
+ def fsString = fs map (_.toString stripPrefix parentFile.toString + "/") mkString " "
+ def isOk = result.isOk
+ def mkScalacString(): String = {
+ val flags = file2String(flagsFile) match {
+ case "" => ""
+ case s => " " + s
+ }
+      s"""scalac$flags $fsString"""
+ }
+ override def toString = description + ( if (result.isOk) "" else "\n" + result.status )
+ }
+ case class OnlyJava(fs: List[File]) extends CompileRound {
+ def description = s"""javac $fsString"""
+ lazy val result = { pushTranscript(description) ; javac(fs) }
+ }
+ case class OnlyScala(fs: List[File]) extends CompileRound {
+ def description = mkScalacString()
+ lazy val result = { pushTranscript(description) ; attemptCompile(fs) }
+ }
+ case class ScalaAndJava(fs: List[File]) extends CompileRound {
+ def description = mkScalacString()
+ lazy val result = { pushTranscript(description) ; attemptCompile(fs) }
+ }
+
+ def compilationRounds(file: File): List[CompileRound] = (
+ (groupedFiles(sources(file)) map mixedCompileGroup).flatten
+ )
+ def mixedCompileGroup(allFiles: List[File]): List[CompileRound] = {
+ val (scalaFiles, javaFiles) = allFiles partition (_.isScala)
+ val isMixed = javaFiles.nonEmpty && scalaFiles.nonEmpty
+ val round1 = if (scalaFiles.isEmpty) None else Some(ScalaAndJava(allFiles))
+ val round2 = if (javaFiles.isEmpty) None else Some(OnlyJava(javaFiles))
+ val round3 = if (!isMixed) None else Some(OnlyScala(scalaFiles))
+
+ List(round1, round2, round3).flatten
+ }
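
    // Sketch of the round selection above for a mixed group: scalac first sees
    // all sources, javac then compiles the Java files against that output, and
    // scalac finally recompiles the Scala files alone. File names are made up.
    object RoundsSketch extends App {
      def rounds(files: List[String]): List[(String, List[String])] = {
        val (scalaFiles, javaFiles) = files partition (_ endsWith ".scala")
        val isMixed = javaFiles.nonEmpty && scalaFiles.nonEmpty
        List(
          if (scalaFiles.isEmpty) None else Some("scalac" -> files),
          if (javaFiles.isEmpty)  None else Some("javac"  -> javaFiles),
          if (!isMixed)           None else Some("scalac" -> scalaFiles)
        ).flatten
      }
      rounds(List("A.scala", "B.java")) foreach println
      // (scalac,List(A.scala, B.java)) then (javac,List(B.java)) then (scalac,List(A.scala))
    }
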
+
+ def runNegTest() = runInContext {
+ val rounds = compilationRounds(testFile)
+
+ // failing means Does Not Compile
+ val failing = rounds find (x => nextTestActionExpectTrue("compilation failed", x.isOk) == false)
+
+ // which means passing if it checks and didn't crash the compiler
+ // or, OK, we'll let you crash the compiler with a FatalError if you supply a check file
+ def checked(r: CompileRound) = r.result match {
+ case Crash(_, t, _) if !checkFile.canRead || !t.isInstanceOf[FatalError] => false
+ case _ => diffIsOk
+ }
+
+ failing map (checked) getOrElse nextTestActionFailing("expected compilation failure")
+ }
+
+ def runTestCommon(andAlso: => Boolean): (Boolean, LogContext) = runInContext {
+ compilationRounds(testFile).forall(x => nextTestActionExpectTrue("compilation failed", x.isOk)) && andAlso
+ }
+
+ // Apache Ant 1.6 or newer
+ def ant(args: Seq[String], output: File): Boolean = {
+ val antDir = Directory(envOrElse("ANT_HOME", "/opt/ant/"))
+ val antLibDir = Directory(antDir / "lib")
+ val antLauncherPath = SFile(antLibDir / "ant-launcher.jar").path
+ val antOptions =
+ if (NestUI._verbose) List("-verbose", "-noinput")
+ else List("-noinput")
+ val cmd = javaCmd +: (
+ JAVA_OPTS.split(' ').map(_.trim).filter(_ != "") ++ Seq(
+ "-classpath",
+ antLauncherPath,
+ "org.apache.tools.ant.launch.Launcher"
+ ) ++ antOptions ++ args
+ )
+
+ runCommand(cmd, output)
+ }
+
+ def runAntTest(): (Boolean, LogContext) = {
+ val (swr, wr) = newTestWriters()
+
+ val succeeded = try {
+ val binary = "-Dbinary="+(
+ if (fileManager.LATEST_LIB endsWith "build/quick/classes/library") "quick"
+ else if (fileManager.LATEST_LIB endsWith "build/pack/lib/scala-library.jar") "pack"
+ else if (fileManager.LATEST_LIB endsWith "dists/latest/lib/scala-library.jar/") "latest"
+ else "installed"
+ )
+ val args = Array(binary, "-logfile", logFile.getPath, "-file", testFile.getPath)
+ NestUI.verbose("ant "+args.mkString(" "))
+
+ pushTranscript(s"ant ${args.mkString(" ")}")
+ nextTestActionExpectTrue("ant failed", ant(args, logFile)) && diffIsOk
+ }
+ catch { // *catch-all*
+ case e: Exception =>
+ NestUI.warning("caught "+e)
+ false
+ }
+
+ (succeeded, LogContext(logFile, swr, wr))
+ }
+
+ def extraClasspath = kind match {
+ case "specialized" => PathSettings.srcSpecLib.toString
+ case _ => ""
+ }
+ def extraJavaOptions = kind match {
+ case "instrumented" => "-javaagent:"+PathSettings.instrumentationAgentLib
+ case _ => ""
+ }
+
+ def runScalacheckTest() = runTestCommon {
+ NestUI verbose f"compilation of $testFile succeeded%n"
+
+ // this classloader is test specific: its parent contains library classes and others
+ val loader = {
+ import PathSettings.scalaCheck
+ val locations = List(outDir, scalaCheck.jfile) map (_.getAbsoluteFile.toURI.toURL)
+ ScalaClassLoader.fromURLs(locations, getClass.getClassLoader)
+ }
+ val logWriter = new PrintStream(new FileOutputStream(logFile), true)
+
+ def runInFramework(): Boolean = {
+ import org.scalatools.testing._
+ val f: Framework = loader.instantiate[Framework]("org.scalacheck.ScalaCheckFramework")
+ val logger = new Logger {
+ def ansiCodesSupported = false //params.env.isSet("colors")
+ def error(msg: String) = logWriter println msg
+ def warn(msg: String) = logWriter println msg
+ def info(msg: String) = logWriter println msg
+ def debug(msg: String) = logWriter println msg
+ def trace(t: Throwable) = t printStackTrace logWriter
+ }
+ var bad = 0
+ val handler = new EventHandler {
+ // testName, description, result, error
+ // Result = Success, Failure, Error, Skipped
+ def handle(event: Event): Unit = event.result match {
+ case Result.Success =>
+ //case Result.Skipped => // an exhausted test is skipped, therefore bad
+ case _ => bad += 1
+ }
+ }
+ val loggers = Array(logger)
+ val r = f.testRunner(loader, loggers).asInstanceOf[Runner2] // why?
+ val claas = "Test"
+ val fingerprint = f.tests collectFirst { case x: SubclassFingerprint if x.isModule => x }
+ val args = toolArgs("scalacheck")
+ vlog(s"Run $testFile with args $args")
+ // set the context class loader for scaladoc/scalacheck tests (FIX ME)
+ ScalaClassLoader(testRunParams.scalaCheckParentClassLoader).asContext {
+ r.run(claas, fingerprint.get, handler, args.toArray) // synchronous?
+ }
+ val ok = (bad == 0)
+ if (!ok) _transcript append logFile.fileContents
+ ok
+ }
+ try nextTestActionExpectTrue("ScalaCheck test failed", runInFramework()) finally logWriter.close()
+ }
+
+ def runResidentTest() = {
+ // simulate resident compiler loop
+ val prompt = "\nnsc> "
+ val (swr, wr) = newTestWriters()
+
+ NestUI.verbose(this+" running test "+fileBase)
+ val dir = parentFile
+ val resFile = new File(dir, fileBase + ".res")
+
+ // run compiler in resident mode
+ // $SCALAC -d "$os_dstbase".obj -Xresident -sourcepath . "$@"
+ val sourcedir = logFile.getParentFile.getAbsoluteFile
+ val sourcepath = sourcedir.getAbsolutePath+File.separator
+ NestUI.verbose("sourcepath: "+sourcepath)
+
+ val argList = List(
+ "-d", outDir.getAbsoluteFile.getPath,
+ "-Xresident",
+ "-sourcepath", sourcepath)
+
+ // configure input/output files
+ val logOut = new FileOutputStream(logFile)
+ val logWriter = new PrintStream(logOut, true)
+ val resReader = new BufferedReader(new FileReader(resFile))
+ val logConsoleWriter = new PrintWriter(new OutputStreamWriter(logOut), true)
+
+ // create compiler
+ val settings = new Settings(workerError)
+ settings.sourcepath.value = sourcepath
+ settings.classpath.value = fileManager.CLASSPATH
+ val reporter = new ConsoleReporter(settings, scala.Console.in, logConsoleWriter)
+ val command = new CompilerCommand(argList, settings)
+ object compiler extends Global(command.settings, reporter)
+
+ def resCompile(line: String): Boolean = {
+ // NestUI.verbose("compiling "+line)
+ val cmdArgs = (line split ' ').toList map (fs => new File(dir, fs).getAbsolutePath)
+ // NestUI.verbose("cmdArgs: "+cmdArgs)
+ val sett = new Settings(workerError)
+ sett.sourcepath.value = sourcepath
+ val command = new CompilerCommand(cmdArgs, sett)
+ // "scalac " + command.files.mkString(" ")
+ pushTranscript("scalac " + command.files.mkString(" "))
+ nextTestActionExpectTrue(
+ "compilation failed",
+ command.ok && {
+ (new compiler.Run) compile command.files
+ !reporter.hasErrors
+ }
+ )
+ }
+ def loop(): Boolean = {
+ logWriter.print(prompt)
+ resReader.readLine() match {
+ case null | "" => logWriter.close() ; true
+ case line => resCompile(line) && loop()
+ }
+ }
+ // res/t687.res depends on ignoring its compilation failure
+ // and just looking at the diff, so I made them all do that
+ // because this is long enough.
+ if (!Output.withRedirected(logWriter)(try loop() finally resReader.close()))
+ setLastState(genPass())
+
+ (diffIsOk, LogContext(logFile, swr, wr))
+ }
+
+ def run(): TestState = {
+ if (kind == "neg" || (kind endsWith "-neg")) runNegTest()
+ else kind match {
+ case "pos" => runTestCommon(true)
+ case "ant" => runAntTest()
+ case "scalacheck" => runScalacheckTest()
+ case "res" => runResidentTest()
+ case "scalap" => runScalapTest()
+ case "script" => runScriptTest()
+ case _ => runTestCommon(execTest(outDir, logFile) && diffIsOk)
+ }
+
+ lastState
+ }
+
+ def runScalapTest() = runTestCommon {
+ val isPackageObject = testFile.getName startsWith "package"
+ val className = testFile.getName.stripSuffix(".scala").capitalize + (if (!isPackageObject) "" else ".package")
+ val loader = ScalaClassLoader.fromURLs(List(outDir.toURI.toURL), this.getClass.getClassLoader)
+ val byteCode = ByteCode forClass (loader loadClass className)
+ val result = decompileScala(byteCode.bytes, isPackageObject)
+
+ logFile writeAll result
+ diffIsOk
+ }
+ def runScriptTest() = {
+ import scala.sys.process._
+ val (swr, wr) = newTestWriters()
+
+ val args = file2String(testFile changeExtension "args")
+ val cmdFile = if (isWin) testFile changeExtension "bat" else testFile
+ val succeeded = (((cmdFile + " " + args) #> logFile !) == 0) && diffIsOk
+
+ (succeeded, LogContext(logFile, swr, wr))
+ }
+
+ def cleanup() {
+ if (lastState.isOk)
+ logFile.delete()
+ if (!isPartestDebug)
+ Directory(outDir).deleteRecursively()
+ }
+}
+
+case class TestRunParams(val scalaCheckParentClassLoader: ScalaClassLoader)
+
+/** Extended by Ant- and ConsoleRunner for running a set of tests. */
+trait DirectRunner {
+ def fileManager: FileManager
+
+ import PartestDefaults.{ numThreads, waitTime }
+
+ Thread.setDefaultUncaughtExceptionHandler(
+ new Thread.UncaughtExceptionHandler {
+ def uncaughtException(thread: Thread, t: Throwable) {
+ val t1 = Exceptional unwrap t
+ System.err.println(s"Uncaught exception on thread $thread: $t1")
+ t1.printStackTrace()
+ }
+ }
+ )
+ def runTestsForFiles(kindFiles: List[File], kind: String): List[TestState] = {
+
+ NestUI.resetTestNumber(kindFiles.size)
+
+ // this special class loader is for the benefit of scaladoc tests, which need a class path
+ import PathSettings.{ testInterface, scalaCheck }
+ val allUrls = scalaCheck.toURL :: testInterface.toURL :: fileManager.latestUrls
+ val parentClassLoader = ScalaClassLoader fromURLs allUrls
+ // add scalacheck.jar to a special classloader, but use our loader as parent with test-interface
+ //val parentClassLoader = ScalaClassLoader fromURLs (List(scalaCheck.toURL), getClass().getClassLoader)
+ val pool = Executors newFixedThreadPool numThreads
+ val manager = new RunnerManager(kind, fileManager, TestRunParams(parentClassLoader))
+ val futures = kindFiles map (f => pool submit callable(manager runTest f))
+
+ pool.shutdown()
+ Try (pool.awaitTermination(waitTime) {
+ throw TimeoutException(waitTime)
+ }) match {
+ case Success(_) => futures map (_.get)
+ case Failure(e) =>
+ e match {
+ case TimeoutException(d) =>
+ NestUI warning "Thread pool timeout elapsed before all tests were complete!"
+ case ie: InterruptedException =>
+ NestUI warning "Thread pool was interrupted"
+ ie.printStackTrace()
+ }
+ pool.shutdownNow() // little point in continuing
+ // try to get as many completions as possible, in case someone cares
+ val results = for (f <- futures) yield {
+ try {
+ Some(f.get(0, NANOSECONDS))
+ } catch {
+ case _: Throwable => None
+ }
+ }
+ results.flatten
+ }
+ }
+}
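+
+// A minimal usage sketch (names hypothetical): a concrete runner mixes in
+// DirectRunner, implements `def fileManager`, and then calls
+//   val states: List[TestState] = runTestsForFiles(runFiles, "run")
+// collecting one TestState per completed test, even if the pool times out.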
+
+case class TimeoutException(duration: Duration) extends RuntimeException
+
+class LogContext(val file: File, val writers: Option[(StringWriter, PrintWriter)])
+
+object LogContext {
+ def apply(file: File, swr: StringWriter, wr: PrintWriter): LogContext = {
+ require (file != null)
+ new LogContext(file, Some((swr, wr)))
+ }
+ def apply(file: File): LogContext = new LogContext(file, None)
+}
+
+object Output {
+ object outRedirect extends Redirecter(out)
+ object errRedirect extends Redirecter(err)
+
+ System.setOut(outRedirect)
+ System.setErr(errRedirect)
+
+ import scala.util.DynamicVariable
+ private def out = java.lang.System.out
+ private def err = java.lang.System.err
+ private val redirVar = new DynamicVariable[Option[PrintStream]](None)
+
+ class Redirecter(stream: PrintStream) extends PrintStream(new OutputStream {
+ def write(b: Int) = withStream(_ write b)
+
+ private def withStream(f: PrintStream => Unit) = f(redirVar.value getOrElse stream)
+
+ override def write(b: Array[Byte]) = withStream(_ write b)
+ override def write(b: Array[Byte], off: Int, len: Int) = withStream(_.write(b, off, len))
+ override def flush = withStream(_.flush)
+ override def close = withStream(_.close)
+ })
+
+ // this supports thread-safe nested output redirects
+ def withRedirected[T](newstream: PrintStream)(func: => T): T = {
+ // note down old redirect destination
+ // this may be None in which case outRedirect and errRedirect print to stdout and stderr
+ val saved = redirVar.value
+ // set new redirecter
+ // this one will redirect both out and err to newstream
+ redirVar.value = Some(newstream)
+
+ try func
+ finally {
+ newstream.flush()
+ redirVar.value = saved
+ }
+ }
+}
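+
+// A minimal usage sketch (aLog and bLog are hypothetical PrintStreams): since
+// the redirect target lives in a DynamicVariable, calls nest and each thread
+// sees its own innermost target.
+//
+//   Output.withRedirected(aLog) {
+//     System.out.println("written to aLog")
+//     Output.withRedirected(bLog) { System.out.println("written to bLog") }
+//     System.out.println("back to aLog")
+//   }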
+
+/** Use a Runner to run a test. */
+class RunnerManager(kind: String, fileManager: FileManager, params: TestRunParams) {
+ import fileManager._
+ fileManager.CLASSPATH += File.pathSeparator + PathSettings.scalaCheck
+ fileManager.CLASSPATH += File.pathSeparator + PathSettings.diffUtils // needed to put diffutils on test/partest's classpath
+
+ def runTest(testFile: File): TestState = {
+ val runner = new Runner(testFile, fileManager, params)
+
+    // when the "--failed" option is provided, execute the test only if a log
+    // file is present (which means it failed before)
+ if (fileManager.failed && !runner.logFile.canRead)
+ runner.genPass()
+ else {
+ val (state, elapsed) =
+ try timed(runner.run())
+ catch {
+ case t: Throwable => throw new RuntimeException(s"Error running $testFile", t)
+ }
+ NestUI.reportTest(state)
+ runner.cleanup()
+ state
+ }
+ }
+}
diff --git a/src/partest/scala/tools/partest/nest/RunnerManager.scala b/src/partest/scala/tools/partest/nest/RunnerManager.scala
deleted file mode 100644
index f80f6f38fd..0000000000
--- a/src/partest/scala/tools/partest/nest/RunnerManager.scala
+++ /dev/null
@@ -1,862 +0,0 @@
-/* NEST (New Scala Test)
- * Copyright 2007-2013 LAMP/EPFL
- * @author Philipp Haller
- */
-
-package scala.tools.partest
-package nest
-
-import java.io._
-import java.net.URL
-import java.util.{ Timer, TimerTask }
-
-import scala.tools.nsc.Properties.{ jdkHome, javaHome, propOrElse }
-import scala.util.Properties.{ envOrElse, isWin }
-import scala.tools.nsc.{ Settings, CompilerCommand, Global }
-import scala.tools.nsc.io.{ AbstractFile, PlainFile, Path, Directory, File => SFile }
-import scala.tools.nsc.reporters.ConsoleReporter
-import scala.tools.nsc.util.{ ClassPath, FakePos, ScalaClassLoader, stackTraceString }
-import ClassPath.{ join, split }
-import scala.tools.scalap.scalax.rules.scalasig.ByteCode
-import scala.collection.{ mutable, immutable }
-import scala.tools.nsc.interactive.{ BuildManager, RefinedBuildManager }
-import scala.sys.process._
-import java.util.concurrent.{ Executors, TimeUnit, TimeoutException }
-import PartestDefaults.{ javaCmd, javacCmd }
-
-class LogContext(val file: File, val writers: Option[(StringWriter, PrintWriter)])
-
-object LogContext {
- def apply(file: File, swr: StringWriter, wr: PrintWriter): LogContext = {
- require (file != null)
- new LogContext(file, Some((swr, wr)))
- }
- def apply(file: File): LogContext = new LogContext(file, None)
-}
-
-object Output {
- object outRedirect extends Redirecter(out)
- object errRedirect extends Redirecter(err)
-
- System.setOut(outRedirect)
- System.setErr(errRedirect)
-
- import scala.util.DynamicVariable
- private def out = java.lang.System.out
- private def err = java.lang.System.err
- private val redirVar = new DynamicVariable[Option[PrintStream]](None)
-
- class Redirecter(stream: PrintStream) extends PrintStream(new OutputStream {
- def write(b: Int) = withStream(_ write b)
-
- private def withStream(f: PrintStream => Unit) = f(redirVar.value getOrElse stream)
-
- override def write(b: Array[Byte]) = withStream(_ write b)
- override def write(b: Array[Byte], off: Int, len: Int) = withStream(_.write(b, off, len))
- override def flush = withStream(_.flush)
- override def close = withStream(_.close)
- })
-
- // this supports thread-safe nested output redirects
- def withRedirected[T](newstream: PrintStream)(func: => T): T = {
- // note down old redirect destination
- // this may be None in which case outRedirect and errRedirect print to stdout and stderr
- val saved = redirVar.value
- // set new redirecter
- // this one will redirect both out and err to newstream
- redirVar.value = Some(newstream)
-
- try func
- finally {
- newstream.flush()
- redirVar.value = saved
- }
- }
-}
-
-class RunnerManager(kind: String, val fileManager: FileManager, params: TestRunParams) {
- import fileManager._
-
- val compileMgr = new CompileManager(fileManager)
- fileManager.CLASSPATH += File.pathSeparator + PathSettings.scalaCheck
- fileManager.CLASSPATH += File.pathSeparator + PathSettings.diffUtils // needed to put diffutils on test/partest's classpath
-
- private def compareFiles(f1: File, f2: File): String =
- try fileManager.compareFiles(f1, f2)
- catch { case t: Exception => t.toString }
-
- /** This does something about absolute paths and file separator
- * chars before diffing.
- */
- private def replaceSlashes(dir: File, s: String): String = {
- val base = (dir.getAbsolutePath + File.separator).replace('\\', '/')
- var regex = """\Q%s\E""" format base
- if (isWin) regex = "(?i)" + regex
- s.replace('\\', '/').replaceAll(regex, "")
- }
-
- private def workerError(msg: String): Unit = System.err.println("Error: " + msg)
-
- private def printInfoStart(file: File, printer: PrintWriter) {
- NestUI.outline("testing: ", printer)
- val filesdir = file.getAbsoluteFile.getParentFile.getParentFile
- val testdir = filesdir.getParentFile
- val totalWidth = 56
- val name = {
- // 1. try with [...]/files/run/test.scala
- val name = file.getAbsolutePath drop testdir.getAbsolutePath.length
- if (name.length <= totalWidth) name
- // 2. try with [...]/run/test.scala
- else file.getAbsolutePath drop filesdir.getAbsolutePath.length
- }
- NestUI.normal("[...]%s%s".format(name, " " * (totalWidth - name.length)), printer)
- }
-
- private def printInfoEnd(success: Boolean, printer: PrintWriter) {
- NestUI.normal("[", printer)
- if (success) NestUI.success(" OK ", printer)
- else NestUI.failure("FAILED", printer)
- NestUI.normal("]\n", printer)
- }
-
- private def printInfoTimeout(printer: PrintWriter) {
- NestUI.normal("[", printer)
- NestUI.failure("TIMOUT", printer)
- NestUI.normal("]\n", printer)
- }
-
- private def javac(outDir: File, files: List[File], output: File): CompilationOutcome = {
- // compile using command-line javac compiler
- val args = Seq(
- javacCmd,
- "-d",
- outDir.getAbsolutePath,
- "-classpath",
- join(outDir.toString, CLASSPATH)
- ) ++ files.map("" + _)
-
- try if (runCommand(args, output)) CompileSuccess else CompileFailed
- catch exHandler(output, "javac command failed:\n" + args.map(" " + _ + "\n").mkString + "\n", CompilerCrashed)
- }
-
- /** Runs command redirecting standard out and error out to output file.
- * Overloaded to accept a sequence of arguments.
- */
- private def runCommand(args: Seq[String], outFile: File): Boolean = {
- NestUI.verbose("running command:\n"+args.map(" " + _ + "\n").mkString)
- runCommandImpl(Process(args), outFile)
- }
-
- /** Runs command redirecting standard out and error out to output file.
- * Overloaded to accept a single string = concatenated command + arguments.
- */
- private def runCommand(command: String, outFile: File): Boolean = {
- NestUI.verbose("running command:"+command)
- runCommandImpl(Process(command), outFile)
- }
-
- private def runCommandImpl(process: => ProcessBuilder, outFile: File): Boolean = {
- val exitCode = (process #> outFile !)
- // normalize line endings
- // System.getProperty("line.separator") should be "\n" here
- // so reading a file and writing it back should convert all CRLFs to LFs
- SFile(outFile).printlnAll(SFile(outFile).lines.toList: _*)
- exitCode == 0
- }
-
- @inline private def isJava(f: File) = SFile(f) hasExtension "java"
- @inline private def isScala(f: File) = SFile(f) hasExtension "scala"
- @inline private def isJavaOrScala(f: File) = isJava(f) || isScala(f)
-
- private def outputLogFile(logFile: File) {
- val lines = SFile(logFile).lines
- if (lines.nonEmpty) {
- NestUI.normal("Log file '" + logFile + "': \n")
- lines foreach (x => NestUI.normal(x + "\n"))
- }
- }
- private def logStackTrace(logFile: File, t: Throwable, msg: String): Boolean = {
- SFile(logFile).writeAll(msg, stackTraceString(t))
- outputLogFile(logFile) // if running the test threw an exception, output log file
- false
- }
-
- private def exHandler[T](logFile: File, msg: String, value: T): PartialFunction[Throwable, T] = {
- case e: Exception => logStackTrace(logFile, e, msg) ; value
- }
-
- class Runner(testFile: File) {
- var testDiff: String = ""
- var passed: Option[Boolean] = None
-
- val fileBase = basename(testFile.getName)
- val logFile = fileManager.getLogFile(testFile, kind)
- val parent = testFile.getParentFile
- val outDir = new File(parent, "%s-%s.obj".format(fileBase, kind))
- def toDelete = if (isPartestDebug) Nil else List(
- if (passed exists (x => x)) Some(logFile) else None,
- if (outDir.isDirectory) Some(outDir) else None
- ).flatten
-
- private def createOutputDir(): File = {
- outDir.mkdirs()
- outDir
- }
-
- private def execTest(outDir: File, logFile: File, classpathPrefix: String = "", javaOpts: String = ""): Boolean = {
- // check whether there is a ".javaopts" file
- val argsFile = new File(logFile.getParentFile, fileBase + ".javaopts")
- val argString = file2String(argsFile)
- if (argString != "")
- NestUI.verbose("Found javaopts file '%s', using options: '%s'".format(argsFile, argString))
-
- val testFullPath = {
- val d = new File(logFile.getParentFile, fileBase)
- if (d.isDirectory) d.getAbsolutePath
- else {
- val f = new File(logFile.getParentFile, fileBase + ".scala")
- if (f.isFile) f.getAbsolutePath
- else ""
- }
- }
-
- // Note! As this currently functions, JAVA_OPTS must precede argString
- // because when an option is repeated to java only the last one wins.
- // That means until now all the .javaopts files were being ignored because
- // they all attempt to change options which are also defined in
- // partest.java_opts, leading to debug output like:
- //
- // debug: Found javaopts file 'files/shootout/message.scala-2.javaopts', using options: '-Xss32k'
- // debug: java -Xss32k -Xss2m -Xms256M -Xmx1024M -classpath [...]
- val extras = if (isPartestDebug) List("-Dpartest.debug=true") else Nil
- val propertyOptions = List(
- "-Dfile.encoding=UTF-8",
- "-Djava.library.path="+logFile.getParentFile.getAbsolutePath,
- "-Dpartest.output="+outDir.getAbsolutePath,
- "-Dpartest.lib="+LATEST_LIB,
- "-Dpartest.reflect="+LATEST_REFLECT,
- "-Dpartest.comp="+LATEST_COMP,
- "-Dpartest.cwd="+outDir.getParent,
- "-Dpartest.test-path="+testFullPath,
- "-Dpartest.testname="+fileBase,
- "-Djavacmd="+javaCmd,
- "-Djavaccmd="+javacCmd,
- "-Duser.language=en",
- "-Duser.country=US"
- ) ++ extras
-
- val classpath = if (classpathPrefix != "") join(classpathPrefix, CLASSPATH) else CLASSPATH
- val cmd = javaCmd +: (
- (JAVA_OPTS.split(' ') ++ javaOpts.split(' ') ++ argString.split(' ')).map(_.trim).filter(_ != "") ++ Seq(
- "-classpath",
- join(outDir.toString, classpath)
- ) ++ propertyOptions ++ Seq(
- "scala.tools.nsc.MainGenericRunner",
- "-usejavacp",
- "Test",
- "jvm"
- )
- )
-
- runCommand(cmd, logFile)
- }
-
- private def getCheckFilePath(dir: File, suffix: String = "") = {
- def chkFile(s: String) = (Directory(dir) / "%s%s.check".format(fileBase, s)).toFile
-
- if (chkFile("").isFile || suffix == "") chkFile("")
- else chkFile("-" + suffix)
- }
- private def getCheckFile(dir: File) = Some(getCheckFilePath(dir, kind)) filter (_.canRead)
-
- private def compareOutput(dir: File, logFile: File): String = {
- val checkFile = getCheckFilePath(dir, kind)
- val diff =
- if (checkFile.canRead) compareFiles(logFile, checkFile.jfile)
- else file2String(logFile)
-
- // if check file exists, compare with log file
- if (diff != "" && fileManager.updateCheck) {
- NestUI.verbose("Updating checkfile " + checkFile.jfile)
- val toWrite = if (checkFile.exists) checkFile else getCheckFilePath(dir, "")
- toWrite writeAll file2String(logFile)
- ""
- }
- else diff
- }
-
- def newTestWriters() = {
- val swr = new StringWriter
- val wr = new PrintWriter(swr, true)
- // diff = ""
-
- ((swr, wr))
- }
-
- def fail(what: Any) = {
- NestUI.verbose("scalac: compilation of "+what+" failed\n")
- false
- }
- def diffCheck(testFile: File, diff: String) = {
- testDiff = diff
- testDiff == ""
- }
-
- /** 1. Creates log file and output directory.
- * 2. Runs script function, providing log file and output directory as arguments.
- */
- def runInContext(file: File, script: (File, File) => Boolean): (Boolean, LogContext) = {
- val (swr, wr) = newTestWriters()
- printInfoStart(file, wr)
-
- NestUI.verbose(this+" running test "+fileBase)
- val outDir = createOutputDir()
- NestUI.verbose("output directory: "+outDir)
-
- // run test-specific code
- val succeeded = try {
- if (isPartestDebug) {
- val (result, millis) = timed(script(logFile, outDir))
- fileManager.recordTestTiming(file.getPath, millis)
- result
- }
- else script(logFile, outDir)
- }
- catch exHandler(logFile, "", false)
-
- (succeeded, LogContext(logFile, swr, wr))
- }
-
- def groupedFiles(dir: File): List[List[File]] = {
- val testFiles = dir.listFiles.toList filter isJavaOrScala
-
- def isInGroup(f: File, num: Int) = SFile(f).stripExtension endsWith ("_" + num)
- val groups = (0 to 9).toList map (num => (testFiles filter (f => isInGroup(f, num))).sorted)
- val noGroupSuffix = (testFiles filterNot (groups.flatten contains)).sorted
-
- noGroupSuffix :: groups filterNot (_.isEmpty)
- }
-
- def compileFilesIn(dir: File, logFile: File, outDir: File): CompilationOutcome = {
- def compileGroup(g: List[File]): CompilationOutcome = {
- val (scalaFiles, javaFiles) = g partition isScala
- val allFiles = javaFiles ++ scalaFiles
-
- List(1, 2, 3).foldLeft(CompileSuccess: CompilationOutcome) {
- case (CompileSuccess, 1) if scalaFiles.nonEmpty => compileMgr.attemptCompile(Some(outDir), allFiles, kind, logFile) // java + scala
- case (CompileSuccess, 2) if javaFiles.nonEmpty => javac(outDir, javaFiles, logFile) // java
- case (CompileSuccess, 3) if scalaFiles.nonEmpty => compileMgr.attemptCompile(Some(outDir), scalaFiles, kind, logFile) // scala
- case (outcome, _) => outcome
- }
- }
- groupedFiles(dir).foldLeft(CompileSuccess: CompilationOutcome) {
- case (CompileSuccess, files) => compileGroup(files)
- case (outcome, _) => outcome
- }
- }
-
- def runTestCommon(file: File, expectFailure: Boolean)(
- onSuccess: (File, File) => Boolean,
- onFail: (File, File) => Unit = (_, _) => ()): (Boolean, LogContext) =
- {
- runInContext(file, (logFile: File, outDir: File) => {
- val outcome = (
- if (file.isDirectory) compileFilesIn(file, logFile, outDir)
- else compileMgr.attemptCompile(None, List(file), kind, logFile)
- )
- val result = (
- if (expectFailure) outcome.isNegative
- else outcome.isPositive
- )
-
- if (result) onSuccess(logFile, outDir)
- else { onFail(logFile, outDir) ; false }
- })
- }
-
- def runJvmTest(file: File): (Boolean, LogContext) =
- runTestCommon(file, expectFailure = false)((logFile, outDir) => {
- val dir = file.getParentFile
-
- // adding codelib.jar to the classpath
- // codelib provides the possibility to override standard reify
- // this shields the massive amount of reification tests from changes in the API
- execTest(outDir, logFile, PathSettings.srcCodeLib.toString) && {
- // cannot replace paths here since this also inverts slashes
- // which affects a bunch of tests
- //fileManager.mapFile(logFile, replaceSlashes(dir, _))
- diffCheck(file, compareOutput(dir, logFile))
- }
- })
-
- // Apache Ant 1.6 or newer
- def ant(args: Seq[String], output: File): Boolean = {
- val antDir = Directory(envOrElse("ANT_HOME", "/opt/ant/"))
- val antLibDir = Directory(antDir / "lib")
- val antLauncherPath = SFile(antLibDir / "ant-launcher.jar").path
- val antOptions =
- if (NestUI._verbose) List("-verbose", "-noinput")
- else List("-noinput")
- val cmd = javaCmd +: (
- JAVA_OPTS.split(' ').map(_.trim).filter(_ != "") ++ Seq(
- "-classpath",
- antLauncherPath,
- "org.apache.tools.ant.launch.Launcher"
- ) ++ antOptions ++ args
- )
-
- try runCommand(cmd, output)
- catch exHandler(output, "ant command '" + cmd + "' failed:\n", false)
- }
-
- def runAntTest(file: File): (Boolean, LogContext) = {
- val (swr, wr) = newTestWriters()
- printInfoStart(file, wr)
-
- NestUI.verbose(this+" running test "+fileBase)
-
- val succeeded = try {
- val binary = "-Dbinary="+(
- if (fileManager.LATEST_LIB endsWith "build/quick/classes/library") "quick"
- else if (fileManager.LATEST_LIB endsWith "build/pack/lib/scala-library.jar") "pack"
- else if (fileManager.LATEST_LIB endsWith "dists/latest/lib/scala-library.jar/") "latest"
- else "installed"
- )
- val args = Array(binary, "-logfile", logFile.path, "-file", file.path)
- NestUI.verbose("ant "+args.mkString(" "))
- ant(args, logFile) && diffCheck(file, compareOutput(file.getParentFile, logFile))
- }
- catch { // *catch-all*
- case e: Exception =>
- NestUI.verbose("caught "+e)
- false
- }
-
- (succeeded, LogContext(logFile, swr, wr))
- }
-
- def runSpecializedTest(file: File): (Boolean, LogContext) =
- runTestCommon(file, expectFailure = false)((logFile, outDir) => {
- val dir = file.getParentFile
-
- // adding the instrumented library to the classpath
- ( execTest(outDir, logFile, PathSettings.srcSpecLib.toString) &&
- diffCheck(file, compareOutput(dir, logFile))
- )
- })
-
- def runInstrumentedTest(file: File): (Boolean, LogContext) =
- runTestCommon(file, expectFailure = false)((logFile, outDir) => {
- val dir = file.getParentFile
-
- // adding the javagent option with path to instrumentation agent
- execTest(outDir, logFile, javaOpts = "-javaagent:"+PathSettings.instrumentationAgentLib) &&
- diffCheck(file, compareOutput(dir, logFile))
- })
-
- def processSingleFile(file: File): (Boolean, LogContext) = kind match {
- case "scalacheck" =>
- val succFn: (File, File) => Boolean = { (logFile, outDir) =>
- NestUI.verbose("compilation of "+file+" succeeded\n")
-
- val outURL = outDir.getAbsoluteFile.toURI.toURL
- val logWriter = new PrintStream(new FileOutputStream(logFile), true)
-
- Output.withRedirected(logWriter) {
- // this classloader is test specific: its parent contains library classes and others
- ScalaClassLoader.fromURLs(List(outURL), params.scalaCheckParentClassLoader).run("Test", Nil)
- }
-
- NestUI.verbose(file2String(logFile))
- // obviously this must be improved upon
- val lines = SFile(logFile).lines map (_.trim) filterNot (_ == "") toBuffer;
- lines.forall(x => !x.startsWith("!")) || {
- NestUI.normal("ScalaCheck test failed. Output:\n")
- lines foreach (x => NestUI.normal(x + "\n"))
- false
- }
- }
- runTestCommon(file, expectFailure = false)(
- succFn,
- (logFile, outDir) => outputLogFile(logFile)
- )
-
- case "pos" =>
- runTestCommon(file, expectFailure = false)(
- (logFile, outDir) => true,
- (_, _) => ()
- )
-
- case "neg" =>
- runTestCommon(file, expectFailure = true)((logFile, outDir) => {
- // compare log file to check file
- val dir = file.getParentFile
-
- // diff is contents of logFile
- fileManager.mapFile(logFile, replaceSlashes(dir, _))
- diffCheck(file, compareOutput(dir, logFile))
- })
-
- case "run" | "jvm" =>
- runJvmTest(file)
-
- case "specialized" =>
- runSpecializedTest(file)
-
- case "instrumented" =>
- runInstrumentedTest(file)
-
- case "presentation" =>
- runJvmTest(file) // for the moment, it's exactly the same as for a run test
-
- case "ant" =>
- runAntTest(file)
-
- case "buildmanager" =>
- val (swr, wr) = newTestWriters()
- printInfoStart(file, wr)
- val (outDir, testFile, changesDir) = {
- if (!file.isDirectory)
- (null, null, null)
- else {
- NestUI.verbose(this+" running test "+fileBase)
- val outDir = createOutputDir()
- val testFile = new File(file, fileBase + ".test")
- val changesDir = new File(file, fileBase + ".changes")
-
- if (changesDir.isFile || !testFile.isFile) {
- // if changes exists then it has to be a dir
- if (!testFile.isFile) NestUI.verbose("invalid build manager test file")
- if (changesDir.isFile) NestUI.verbose("invalid build manager changes directory")
- (null, null, null)
- }
- else {
- copyTestFiles(file, outDir)
- NestUI.verbose("outDir: "+outDir)
- NestUI.verbose("logFile: "+logFile)
- (outDir, testFile, changesDir)
- }
- }
- }
- if (outDir == null)
- return (false, LogContext(logFile))
-
- // Pre-conditions satisfied
- val sourcepath = outDir.getAbsolutePath+File.separator
-
- // configure input/output files
- val logWriter = new PrintStream(new FileOutputStream(logFile), true)
- val testReader = new BufferedReader(new FileReader(testFile))
- val logConsoleWriter = new PrintWriter(logWriter, true)
-
- // create proper settings for the compiler
- val settings = new Settings(workerError)
- settings.outdir.value = outDir.getAbsoluteFile.getAbsolutePath
- settings.sourcepath.value = sourcepath
- settings.classpath.value = fileManager.CLASSPATH
- settings.Ybuildmanagerdebug.value = true
-
- // simulate Build Manager loop
- val prompt = "builder > "
- val reporter = new ConsoleReporter(settings, scala.Console.in, logConsoleWriter)
- val bM: BuildManager =
- new RefinedBuildManager(settings) {
- override protected def newCompiler(settings: Settings) =
- new BuilderGlobal(settings, reporter)
- }
-
- def testCompile(line: String): Boolean = {
- NestUI.verbose("compiling " + line)
- val args = (line split ' ').toList
- val command = new CompilerCommand(args, settings)
- command.ok && {
- bM.update(filesToSet(settings.sourcepath.value, command.files), Set.empty)
- !reporter.hasErrors
- }
- }
-
- val updateFiles = (line: String) => {
- NestUI.verbose("updating " + line)
- (line split ' ').toList forall (u =>
- (u split "=>").toList match {
- case origFileName::(newFileName::Nil) =>
- val newFile = new File(changesDir, newFileName)
- if (newFile.isFile) {
- val v = overwriteFileWith(new File(outDir, origFileName), newFile)
- if (!v)
- NestUI.verbose("'update' operation on " + u + " failed")
- v
- } else {
- NestUI.verbose("File " + newFile + " is invalid")
- false
- }
- case a =>
- NestUI.verbose("Other =: " + a)
- false
- }
- )
- }
-
- def loop(): Boolean = {
- testReader.readLine() match {
- case null | "" =>
- NestUI.verbose("finished")
- true
- case s if s startsWith ">>update " =>
- updateFiles(s stripPrefix ">>update ") && loop()
- case s if s startsWith ">>compile " =>
- val files = s stripPrefix ">>compile "
- logWriter.println(prompt + files)
- // In the end, it can finish with an error
- if (testCompile(files)) loop()
- else {
- val t = testReader.readLine()
- (t == null) || (t == "")
- }
- case s =>
- NestUI.verbose("wrong command in test file: " + s)
- false
- }
- }
-
- Output.withRedirected(logWriter) {
- try loop()
- finally testReader.close()
- }
- fileManager.mapFile(logFile, replaceSlashes(new File(sourcepath), _))
-
- (diffCheck(file, compareOutput(file, logFile)), LogContext(logFile, swr, wr))
-
- case "res" => {
- // simulate resident compiler loop
- val prompt = "\nnsc> "
-
- val (swr, wr) = newTestWriters()
- printInfoStart(file, wr)
-
- NestUI.verbose(this+" running test "+fileBase)
- val dir = file.getParentFile
- val outDir = createOutputDir()
- val resFile = new File(dir, fileBase + ".res")
- NestUI.verbose("outDir: "+outDir)
- NestUI.verbose("logFile: "+logFile)
- //NestUI.verbose("logFileErr: "+logFileErr)
- NestUI.verbose("resFile: "+resFile)
-
- // run compiler in resident mode
- // $SCALAC -d "$os_dstbase".obj -Xresident -sourcepath . "$@"
- val sourcedir = logFile.getParentFile.getAbsoluteFile
- val sourcepath = sourcedir.getAbsolutePath+File.separator
- NestUI.verbose("sourcepath: "+sourcepath)
-
- val argList = List(
- "-d", outDir.getAbsoluteFile.getPath,
- "-Xresident",
- "-sourcepath", sourcepath)
-
- // configure input/output files
- val logOut = new FileOutputStream(logFile)
- val logWriter = new PrintStream(logOut, true)
- val resReader = new BufferedReader(new FileReader(resFile))
- val logConsoleWriter = new PrintWriter(new OutputStreamWriter(logOut), true)
-
- // create compiler
- val settings = new Settings(workerError)
- settings.sourcepath.value = sourcepath
- settings.classpath.value = fileManager.CLASSPATH
- val reporter = new ConsoleReporter(settings, scala.Console.in, logConsoleWriter)
- val command = new CompilerCommand(argList, settings)
- object compiler extends Global(command.settings, reporter)
-
- val resCompile = (line: String) => {
- NestUI.verbose("compiling "+line)
- val cmdArgs = (line split ' ').toList map (fs => new File(dir, fs).getAbsolutePath)
- NestUI.verbose("cmdArgs: "+cmdArgs)
- val sett = new Settings(workerError)
- sett.sourcepath.value = sourcepath
- val command = new CompilerCommand(cmdArgs, sett)
- command.ok && {
- (new compiler.Run) compile command.files
- !reporter.hasErrors
- }
- }
-
- def loop(action: String => Boolean): Boolean = {
- logWriter.print(prompt)
- resReader.readLine() match {
- case null | "" => logWriter.flush() ; true
- case line => action(line) && loop(action)
- }
- }
-
- Output.withRedirected(logWriter) {
- try loop(resCompile)
- finally resReader.close()
- }
- fileManager.mapFile(logFile, replaceSlashes(dir, _))
-
- (diffCheck(file, compareOutput(dir, logFile)), LogContext(logFile, swr, wr))
- }
-
- case "shootout" =>
- val (swr, wr) = newTestWriters()
- printInfoStart(file, wr)
-
- NestUI.verbose(this+" running test "+fileBase)
- val outDir = createOutputDir()
-
- // 2. define file {outDir}/test.scala that contains code to compile/run
- val testFile = new File(outDir, "test.scala")
- NestUI.verbose("outDir: "+outDir)
- NestUI.verbose("logFile: "+logFile)
- NestUI.verbose("testFile: "+testFile)
-
- // 3. cat {test}.scala.runner {test}.scala > testFile
- val runnerFile = new File(parent, fileBase+".scala.runner")
- val bodyFile = new File(parent, fileBase+".scala")
- SFile(testFile).writeAll(
- file2String(runnerFile),
- file2String(bodyFile)
- )
-
- // 4. compile testFile
- val ok = compileMgr.attemptCompile(None, List(testFile), kind, logFile) eq CompileSuccess
- NestUI.verbose("compilation of " + testFile + (if (ok) "succeeded" else "failed"))
- val result = ok && {
- execTest(outDir, logFile) && {
- NestUI.verbose(this+" finished running "+fileBase)
- diffCheck(file, compareOutput(parent, logFile))
- }
- }
-
- (result, LogContext(logFile, swr, wr))
-
- case "scalap" =>
- runInContext(file, (logFile: File, outDir: File) => {
- val sourceDir = Directory(if (file.isFile) file.getParent else file)
- val sources = sourceDir.files filter (_ hasExtension "scala") map (_.jfile) toList
- val results = sourceDir.files filter (_.name == "result.test") map (_.jfile) toList
-
- if (sources.length != 1 || results.length != 1) {
- NestUI.warning("Misconfigured scalap test directory: " + sourceDir + " \n")
- false
- }
- else {
- val resFile = results.head
- // 2. Compile source file
-
- if (!compileMgr.attemptCompile(Some(outDir), sources, kind, logFile).isPositive) {
- NestUI.normal("compilerMgr failed to compile %s to %s".format(sources mkString ", ", outDir))
- false
- }
- else {
- // 3. Decompile file and compare results
- val isPackageObject = sourceDir.name startsWith "package"
- val className = sourceDir.name.capitalize + (if (!isPackageObject) "" else ".package")
- val url = outDir.toURI.toURL
- val loader = ScalaClassLoader.fromURLs(List(url), this.getClass.getClassLoader)
- val clazz = loader.loadClass(className)
-
- val byteCode = ByteCode.forClass(clazz)
- val result = scala.tools.scalap.Main.decompileScala(byteCode.bytes, isPackageObject)
-
- SFile(logFile) writeAll result
- diffCheck(file, compareFiles(logFile, resFile))
- }
- }
- })
-
- case "script" =>
- val (swr, wr) = newTestWriters()
- printInfoStart(file, wr)
-
- NestUI.verbose(this+" running test "+fileBase)
-
- // check whether there is an args file
- val argsFile = new File(file.getParentFile, fileBase+".args")
- NestUI.verbose("argsFile: "+argsFile)
- val argString = file2String(argsFile)
- val succeeded = try {
- val cmdString =
- if (isWin) {
- val batchFile = new File(file.getParentFile, fileBase+".bat")
- NestUI.verbose("batchFile: "+batchFile)
- batchFile.getAbsolutePath
- }
- else file.getAbsolutePath
-
- val ok = runCommand(cmdString+argString, logFile)
- ( ok && diffCheck(file, compareOutput(file.getParentFile, logFile)) )
- }
- catch { case e: Exception => NestUI.verbose("caught "+e) ; false }
-
- (succeeded, LogContext(logFile, swr, wr))
- }
-
- private def crashContext(t: Throwable): LogContext = {
- try {
- logStackTrace(logFile, t, "Possible compiler crash during test of: " + testFile + "\n")
- LogContext(logFile)
- }
- catch { case t: Throwable => LogContext(null) }
- }
-
- def run(): (Boolean, LogContext) = {
- val result = try processSingleFile(testFile) catch { case t: Throwable => (false, crashContext(t)) }
- passed = Some(result._1)
- result
- }
-
- def reportResult(writers: Option[(StringWriter, PrintWriter)]) {
- writers foreach { case (swr, wr) =>
- if (passed.isEmpty) printInfoTimeout(wr)
- else printInfoEnd(passed.get, wr)
- wr.flush()
- swr.flush()
- NestUI.normal(swr.toString)
-
- if (passed exists (x => !x)) {
- if (fileManager.showDiff || isPartestDebug)
- NestUI.normal(testDiff)
- if (fileManager.showLog)
- showLog(logFile)
- }
- }
- toDelete foreach (_.deleteRecursively())
- }
- }
-
- def runTest(testFile: File): TestState = {
- val runner = new Runner(testFile)
- // when option "--failed" is provided execute test only if log
- // is present (which means it failed before)
- if (fileManager.failed && !runner.logFile.canRead)
- return TestState.Ok
-
- // sys addShutdownHook cleanup()
- val ((success, ctx), elapsed) = timed(runner.run())
- val state = if (success) TestState.Ok else TestState.Fail
-
- runner.reportResult(ctx.writers)
- state
- }
-
- private def filesToSet(pre: String, fs: List[String]): Set[AbstractFile] =
- fs flatMap (s => Option(AbstractFile getFile (pre + s))) toSet
-
- private def copyTestFiles(testDir: File, destDir: File) {
- val invalidExts = List("changes", "svn", "obj")
- testDir.listFiles.toList filter (
- f => (isJavaOrScala(f) && f.isFile) ||
- (f.isDirectory && !(invalidExts.contains(SFile(f).extension)))) foreach
- { f => fileManager.copyFile(f, destDir) }
- }
-
- private def showLog(logFile: File) {
- file2String(logFile) match {
- case "" if logFile.canRead => ()
- case "" => NestUI.failure("Couldn't open log file: " + logFile + "\n")
- case s => NestUI.normal(s)
- }
- }
-}
diff --git a/src/partest/scala/tools/partest/nest/RunnerUtils.scala b/src/partest/scala/tools/partest/nest/RunnerUtils.scala
deleted file mode 100644
index 6707a9338a..0000000000
--- a/src/partest/scala/tools/partest/nest/RunnerUtils.scala
+++ /dev/null
@@ -1,29 +0,0 @@
-/* NEST (New Scala Test)
- * Copyright 2007-2013 LAMP/EPFL
- * @author Philipp Haller
- */
-
-// $Id$
-
-package scala.tools.partest
-package nest
-
-object RunnerUtils {
- def splitArgs(str: String) = str split "\\s" filterNot (_ == "") toList
-
- def searchPath(option: String, as: List[String]): Option[String] = as match {
- case `option` :: r :: _ => Some(r)
- case _ :: rest => searchPath(option, rest)
- case Nil => None
- }
-
- def searchAndRemovePath(option: String, as: List[String]) = (as indexOf option) match {
- case -1 => (None, as)
- case idx => (Some(as(idx + 1)), (as take idx) ::: (as drop (idx + 2)))
- }
-
- def searchAndRemoveOption(option: String, as: List[String]) = (as indexOf option) match {
- case -1 => (false, as)
- case idx => (true, (as take idx) ::: (as drop (idx + 1)))
- }
-}
diff --git a/src/partest/scala/tools/partest/nest/SBTRunner.scala b/src/partest/scala/tools/partest/nest/SBTRunner.scala
index 20f9c701d5..1cf3aa858f 100644
--- a/src/partest/scala/tools/partest/nest/SBTRunner.scala
+++ b/src/partest/scala/tools/partest/nest/SBTRunner.scala
@@ -1,3 +1,6 @@
+/* NEST (New Scala Test)
+ * Copyright 2007-2013 LAMP/EPFL
+ */
package scala.tools.partest
package nest
@@ -21,7 +24,7 @@ object SBTRunner extends DirectRunner {
val testRootDir: Directory = Directory(testRootPath)
}
- def reflectiveRunTestsForFiles(kindFiles: Array[File], kind: String):java.util.Map[String, TestState] = {
+ def reflectiveRunTestsForFiles(kindFiles: Array[File], kind: String): java.util.List[TestState] = {
def failedOnlyIfRequired(files:List[File]):List[File]={
if (fileManager.failed) files filter (x => fileManager.logFileExists(x, kind)) else files
}
@@ -33,7 +36,7 @@ object SBTRunner extends DirectRunner {
scalacOptions: Seq[String] = Seq(),
justFailedTests: Boolean = false)
- def mainReflect(args: Array[String]): java.util.Map[String, String] = {
+ def mainReflect(args: Array[String]): java.util.List[TestState] = {
setProp("partest.debug", "true")
val Argument = new scala.util.matching.Regex("-(.*)")
@@ -46,7 +49,7 @@ object SBTRunner extends DirectRunner {
case x => sys.error("Unknown command line options: " + x)
}
val config = parseArgs(args, CommandLineOptions())
- fileManager.SCALAC_OPTS ++= config.scalacOptions
+ fileManager.SCALAC_OPTS = config.scalacOptions
fileManager.CLASSPATH = config.classpath getOrElse sys.error("No classpath set")
def findClasspath(jar: String, name: String): Option[String] = {
@@ -67,22 +70,14 @@ object SBTRunner extends DirectRunner {
// TODO - Make this a flag?
//fileManager.updateCheck = true
// Now run and report...
- val runs = config.tests.filterNot(_._2.isEmpty)
- (for {
- (testType, files) <- runs
- (path, result) <- reflectiveRunTestsForFiles(files,testType).asScala
- } yield (path, fixResult(result))).seq.asJava
- }
- def fixResult(result: TestState): String = result match {
- case TestState.Ok => "OK"
- case TestState.Fail => "FAIL"
- case TestState.Timeout => "TIMEOUT"
+ val runs = config.tests.filterNot(_._2.isEmpty)
+ val result = runs.toList flatMap { case (kind, files) => reflectiveRunTestsForFiles(files, kind).asScala }
+
+ result.asJava
}
+
def main(args: Array[String]): Unit = {
- val failures = (
- for ((path, result) <- mainReflect(args).asScala ; if result != TestState.Ok) yield
- path + ( if (result == TestState.Fail) " [FAILED]" else " [TIMEOUT]" )
- )
+ val failures = mainReflect(args).asScala collect { case s if !s.isOk => s.longStatus }
// Re-list all failures so we can go figure out what went wrong.
failures foreach System.err.println
if(!failures.isEmpty) sys.exit(1)
diff --git a/src/partest/scala/tools/partest/nest/StreamCapture.scala b/src/partest/scala/tools/partest/nest/StreamCapture.scala
new file mode 100644
index 0000000000..dc155b1787
--- /dev/null
+++ b/src/partest/scala/tools/partest/nest/StreamCapture.scala
@@ -0,0 +1,53 @@
+/* NEST (New Scala Test)
+ * Copyright 2007-2013 LAMP/EPFL
+ * @author Paul Phillips
+ */
+package scala.tools.partest
+package nest
+
+import java.io.{ Console => _, _ }
+
+object StreamCapture {
+ case class Captured[T](stdout: String, stderr: String, result: T) {
+ override def toString = s"""
+ |result: $result
+ |[stdout]
+ |$stdout
+ |[stderr]
+ |$stderr""".stripMargin.trim
+ }
+
+ private def mkStream = {
+ val swr = new StringWriter
+ val wr = new PrintWriter(swr, true)
+ val ostream = new PrintStream(new OutputStream { def write(b: Int): Unit = wr write b }, true) // autoFlush = true
+
+ (ostream, () => { ostream.close() ; swr.toString })
+ }
+
+ def savingSystem[T](body: => T): T = {
+ val savedOut = System.out
+ val savedErr = System.err
+ try body
+ finally {
+ System setErr savedErr
+ System setOut savedOut
+ }
+ }
+
+ def apply[T](body: => T): Captured[T] = {
+ val (outstream, stdoutFn) = mkStream
+ val (errstream, stderrFn) = mkStream
+
+ val result = savingSystem {
+ System setOut outstream
+ System setErr errstream
+ Console.withOut(outstream) {
+ Console.withErr(errstream) {
+ body
+ }
+ }
+ }
+ Captured(stdoutFn(), stderrFn(), result)
+ }
+}
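+
+// A minimal usage sketch: StreamCapture swaps System.out/err and Console.out/err
+// for the duration of `body` and returns whatever was printed alongside the result.
+//
+//   val captured = StreamCapture { println("to stdout"); System.err.println("to stderr"); 42 }
+//   // captured.result == 42; captured.stdout == "to stdout\n" (modulo line separator)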
diff --git a/src/partest/scala/tools/partest/nest/TestFile.scala b/src/partest/scala/tools/partest/nest/TestFile.scala
deleted file mode 100644
index 87177772ab..0000000000
--- a/src/partest/scala/tools/partest/nest/TestFile.scala
+++ /dev/null
@@ -1,81 +0,0 @@
-/* NEST (New Scala Test)
- * Copyright 2007-2013 LAMP/EPFL
- * @author Philipp Haller
- */
-
-// $Id$
-
-package scala.tools.partest
-package nest
-
-import java.io.{ File => JFile }
-import scala.tools.nsc.Settings
-import scala.tools.nsc.util.ClassPath
-import scala.tools.nsc.io._
-import scala.util.Properties.{ propIsSet, propOrElse, setProp }
-
-trait TestFileCommon {
- def file: JFile
- def kind: String
-
- val dir = file.toAbsolute.parent
- val fileBase = file.stripExtension
- val flags = dir / (fileBase + ".flags") ifFile (f => f.slurp().trim)
-
- lazy val objectDir = dir / (fileBase + "-" + kind + ".obj") createDirectory true
- def setOutDirTo = objectDir
-}
-
-abstract class TestFile(val kind: String) extends TestFileCommon {
- def file: JFile
- def fileManager: FileManager
-
- def defineSettings(settings: Settings, setOutDir: Boolean) = {
- settings.classpath append dir.path
- if (setOutDir)
- settings.outputDirs setSingleOutput setOutDirTo.path
-
- // adding codelib.jar to the classpath
- // codelib provides the possibility to override standard reify
- // this shields the massive amount of reification tests from changes in the API
- settings.classpath prepend PathSettings.srcCodeLib.toString
- if (propIsSet("java.class.path")) setProp("java.class.path", PathSettings.srcCodeLib.toString + ";" + propOrElse("java.class.path", ""))
-
- // have to catch bad flags somewhere
- (flags forall (f => settings.processArgumentString(f)._1)) && {
- settings.classpath append fileManager.CLASSPATH
- true
- }
- }
-
- override def toString(): String = "%s %s".format(kind, file)
-}
-
-case class PosTestFile(file: JFile, fileManager: FileManager) extends TestFile("pos")
-case class NegTestFile(file: JFile, fileManager: FileManager) extends TestFile("neg")
-case class RunTestFile(file: JFile, fileManager: FileManager) extends TestFile("run")
-case class BuildManagerTestFile(file: JFile, fileManager: FileManager) extends TestFile("bm")
-case class ScalaCheckTestFile(file: JFile, fileManager: FileManager) extends TestFile("scalacheck")
-case class JvmTestFile(file: JFile, fileManager: FileManager) extends TestFile("jvm")
-case class ShootoutTestFile(file: JFile, fileManager: FileManager) extends TestFile("shootout") {
- override def setOutDirTo = file.parent
-}
-case class ScalapTestFile(file: JFile, fileManager: FileManager) extends TestFile("scalap") {
- override def setOutDirTo = file.parent
-}
-case class SpecializedTestFile(file: JFile, fileManager: FileManager) extends TestFile("specialized") {
- override def defineSettings(settings: Settings, setOutDir: Boolean): Boolean = {
- super.defineSettings(settings, setOutDir) && {
- // add the instrumented library version to classpath
- settings.classpath prepend PathSettings.srcSpecLib.toString
- // @partest maintainer: if we use a custom Scala build (specified via --classpath)
- // then the classes provided by it will come earlier than instrumented.jar in the resulting classpath
- // this entire classpath business needs a thorough solution
- if (propIsSet("java.class.path")) setProp("java.class.path", PathSettings.srcSpecLib.toString + ";" + propOrElse("java.class.path", ""))
- true
- }
- }
-}
-case class PresentationTestFile(file: JFile, fileManager: FileManager) extends TestFile("presentation")
-case class AntTestFile(file: JFile, fileManager: FileManager) extends TestFile("ant")
-case class InstrumentedTestFile(file: JFile, fileManager: FileManager) extends TestFile("instrumented")
diff --git a/src/partest/scala/tools/partest/package.scala b/src/partest/scala/tools/partest/package.scala
index d38ce692d7..5a1afeb77f 100644
--- a/src/partest/scala/tools/partest/package.scala
+++ b/src/partest/scala/tools/partest/package.scala
@@ -4,35 +4,124 @@
package scala.tools
-import java.io.{ FileNotFoundException, File => JFile }
-import nsc.io.{ Path, Directory, File => SFile }
-import scala.tools.util.PathResolver
-import nsc.Properties.{ propOrElse, propOrNone, propOrEmpty }
+import java.util.concurrent.{ Callable, ExecutorService }
+import scala.concurrent.duration.Duration
import scala.sys.process.javaVmArguments
-import java.util.concurrent.Callable
+import scala.tools.partest.nest.NestUI
+import scala.tools.nsc.util.{ ScalaClassLoader, Exceptional }
-package partest {
- class TestState {
- def isOk = this eq TestState.Ok
- def isFail = this eq TestState.Fail
- def isTimeout = this eq TestState.Timeout
+package object partest {
+ type File = java.io.File
+ type SFile = scala.reflect.io.File
+ type Directory = scala.reflect.io.Directory
+ type Path = scala.reflect.io.Path
+ type PathResolver = scala.tools.util.PathResolver
+ type ClassPath[T] = scala.tools.nsc.util.ClassPath[T]
+ type StringWriter = java.io.StringWriter
+
+ val SFile = scala.reflect.io.File
+ val Directory = scala.reflect.io.Directory
+ val Path = scala.reflect.io.Path
+ val PathResolver = scala.tools.util.PathResolver
+ val ClassPath = scala.tools.nsc.util.ClassPath
+
+ val space = "\u0020"
+ val EOL = scala.compat.Platform.EOL
+ def onull(s: String) = if (s == null) "" else s
+ def oempty(xs: String*) = xs filterNot (x => x == null || x == "")
+ def ojoin(xs: String*): String = oempty(xs: _*) mkString space
+ def nljoin(xs: String*): String = oempty(xs: _*) mkString EOL
+
+ implicit val codec = scala.io.Codec.UTF8
+
+ def setUncaughtHandler() = {
+ Thread.setDefaultUncaughtExceptionHandler(
+ new Thread.UncaughtExceptionHandler {
+ def uncaughtException(thread: Thread, t: Throwable) {
+ val t1 = Exceptional unwrap t
+ System.err.println(s"Uncaught exception on thread $thread: $t1")
+ t1.printStackTrace()
+ }
+ }
+ )
+ }
+
+ /** Sources have a numerical group, specified by name_7 and so on. */
+ private val GroupPattern = """.*_(\d+)""".r
+
+ implicit class FileOps(val f: File) {
+ private def sf = SFile(f)
+
+ def testIdent = {
+ f.toString split """[/\\]+""" takeRight 2 mkString "/" // e.g. pos/t1234
+ }
+
+ def mapInPlace(mapFn: String => String)(filterFn: String => Boolean = _ => true): Unit =
+ writeAll(fileLines filter filterFn map (x => mapFn(x) + EOL): _*)
+
+ def appendAll(strings: String*): Unit = sf.appendAll(strings: _*)
+ def writeAll(strings: String*): Unit = sf.writeAll(strings: _*)
+ def absolutePathSegments: List[String] = f.getAbsolutePath split """[/\\]+""" toList
+
+ def isJava = f.isFile && (sf hasExtension "java")
+ def isScala = f.isFile && (sf hasExtension "scala")
+ def isJavaOrScala = isJava || isScala
+
+ def extension = sf.extension
+ def hasExtension(ext: String) = sf hasExtension ext
+ def changeExtension(ext: String): File = (sf changeExtension ext).jfile
+
+ /** The group number for this source file, or -1 for no group. */
+ def group: Int =
+ sf.stripExtension match {
+ case GroupPattern(g) if g.toInt >= 0 => g.toInt
+ case _ => -1
+ }
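+    // For illustration (hypothetical names): new File("t1234_7.scala").group == 7,
+    // while new File("t1234.scala").group == -1 since there is no _<digits> suffix.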
+
+ def fileContents: String = try sf.slurp() catch { case _: java.io.FileNotFoundException => "" }
+ def fileLines: List[String] = augmentString(fileContents).lines.toList
}
- object TestState {
- val Ok = new TestState
- val Fail = new TestState
- val Timeout = new TestState
+
+ implicit class PathOps(p: Path) extends FileOps(p.jfile) { }
+
+ implicit class Copier(val f: SFile) extends AnyVal {
+ def copyTo(dest: Path): Unit = dest.toFile writeAll f.slurp(scala.io.Codec.UTF8)
}
-}
-package object partest {
- import nest.NestUI
+ implicit class LoaderOps(val loader: ClassLoader) extends AnyVal {
+ import scala.util.control.Exception.catching
+ /** Like ScalaClassLoader.create for the case where the result type is
+ * available to the current class loader, implying that the current
+ * loader is a parent of `loader`.
+ */
+ def instantiate[A >: Null](name: String): A = (
+ catching(classOf[ClassNotFoundException], classOf[SecurityException]) opt
+ (loader loadClass name).newInstance.asInstanceOf[A] orNull
+ )
+ }
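+
+  // A usage sketch with a hypothetical class name: assuming org.example.Probe is
+  // on `loader`'s class path, has a no-arg constructor, and its supertype is
+  // visible to the current loader,
+  //   val r: Runnable = loader.instantiate[Runnable]("org.example.Probe")
+  // returns a fresh instance, or null if loading fails with ClassNotFoundException
+  // or SecurityException.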
- implicit private[partest] def temporaryPath2File(x: Path): JFile = x.jfile
- implicit private[partest] def temporaryFile2Path(x: JFile): Path = Path(x)
+ implicit class ExecutorOps(val executor: ExecutorService) {
+ def awaitTermination[A](wait: Duration)(failing: => A = ()): Option[A] = (
+ if (executor awaitTermination (wait.length, wait.unit)) None
+ else Some(failing)
+ )
+ }
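+  // For illustration (fallback body hypothetical): pool.awaitTermination(wait) { onTimeout() }
+  // is None when the pool terminates within `wait`, and Some(onTimeout()) otherwise;
+  // DirectRunner above uses a throwing fallback so a timeout surfaces as a Failure.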
+
+ implicit def temporaryPath2File(x: Path): File = x.jfile
+ implicit def stringPathToJavaFile(path: String): File = new File(path)
implicit lazy val postfixOps = scala.language.postfixOps
implicit lazy val implicitConversions = scala.language.implicitConversions
+ def fileSeparator = java.io.File.separator
+ def pathSeparator = java.io.File.pathSeparator
+
+ def pathToTestIdent(path: Path) = path.jfile.testIdent
+
+ def canonicalizeSlashes(line: String) = line.replaceAll("""[/\\]+""", "/")
+
+ def words(s: String): List[String] = (s.trim split "\\s+").toList
+
def timed[T](body: => T): (T, Long) = {
val t1 = System.currentTimeMillis
val result = body
@@ -43,16 +132,40 @@ package object partest {
def callable[T](body: => T): Callable[T] = new Callable[T] { override def call() = body }
- def path2String(path: String) = file2String(new JFile(path))
- def file2String(f: JFile) =
- try SFile(f).slurp()
- catch { case _: FileNotFoundException => "" }
+ def file2String(f: File): String = f.fileContents
def basename(name: String): String = Path(name).stripExtension
- def resultsToStatistics(results: Iterable[(_, TestState)]): (Int, Int) = {
- val (files, failures) = results map (_._2 == TestState.Ok) partition (_ == true)
- (files.size, failures.size)
+  /** In order to allow for spaces in flags/options, this
+   *  parses .flags, .javaopts, .javacopts, etc. files as follows:
+   *  if the file is exactly one line, it is split (naively) on spaces;
+   *  if it contains more than one line, each line is its own
+   *  token, spaces and all.
+   */
+ def readOptionsFile(file: File): List[String] = {
+ file.fileLines match {
+ case x :: Nil => words(x)
+ case xs => xs map (_.trim)
+ }
+ }
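+  // For illustration, with hypothetical file contents: a single line
+  //   -deprecation -Xlint
+  // yields List("-deprecation", "-Xlint"), whereas a file with the two lines
+  //   -J-Xmx512m
+  //   -Dkey=some value
+  // yields each trimmed line as one token, internal spaces preserved.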
+
+ def findProgram(name: String): Option[File] = {
+ val pathDirs = sys.env("PATH") match {
+ case null => List("/usr/local/bin", "/usr/bin", "/bin")
+ case path => path split "[:;]" filterNot (_ == "") toList
+ }
+ pathDirs.iterator map (d => new File(d, name)) find (_.canExecute)
+ }
+
+ def now = (new java.util.Date).toString
+ def elapsedString(millis: Long): String = {
+ val elapsedSecs = millis/1000
+ val elapsedMins = elapsedSecs/60
+ val elapsedHrs = elapsedMins/60
+ val dispMins = elapsedMins - elapsedHrs * 60
+ val dispSecs = elapsedSecs - elapsedMins * 60
+
+ "%02d:%02d:%02d".format(elapsedHrs, dispMins, dispSecs)
}
def vmArgString = javaVmArguments.mkString(
@@ -67,14 +180,10 @@ package object partest {
}
def showAllJVMInfo() {
- NestUI.verbose(vmArgString)
- NestUI.verbose(allPropertiesString)
+ vlog(vmArgString)
+ vlog(allPropertiesString)
}
- def isPartestDebug: Boolean =
- propOrEmpty("partest.debug") == "true"
-
-
import scala.language.experimental.macros
/**
@@ -123,4 +232,10 @@ package object partest {
a.tree))))),
a.tree))
}
+
+ def isPartestTerse = NestUI.isTerse
+ def isPartestDebug = NestUI.isDebug
+ def isPartestVerbose = NestUI.isVerbose
+
+ def vlog(msg: => String) = if (isPartestVerbose) System.err.println(msg)
}
diff --git a/src/partest/scala/tools/partest/utils/PrintMgr.scala b/src/partest/scala/tools/partest/utils/PrintMgr.scala
deleted file mode 100644
index d25be87c1e..0000000000
--- a/src/partest/scala/tools/partest/utils/PrintMgr.scala
+++ /dev/null
@@ -1,52 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala Parallel Testing **
-** / __/ __// _ | / / / _ | (c) 2007-2013, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-// $Id$
-
-package scala.tools.partest
-package utils
-
-/**
- * @author Thomas Hofer
- */
-object PrintMgr {
-
- val NONE = 0
- val SOME = 1
- val MANY = 2
-
- var outline = ""
- var success = ""
- var failure = ""
- var warning = ""
- var default = ""
-
- def initialization(number: Int) = number match {
- case MANY =>
- outline = Console.BOLD + Console.BLACK
- success = Console.BOLD + Console.GREEN
- failure = Console.BOLD + Console.RED
- warning = Console.BOLD + Console.YELLOW
- default = Console.RESET
- case SOME =>
- outline = Console.BOLD + Console.BLACK
- success = Console.RESET
- failure = Console.BOLD + Console.BLACK
- warning = Console.BOLD + Console.BLACK
- default = Console.RESET
- case _ =>
- }
-
- def printOutline(msg: String) = print(outline + msg + default)
-
- def printSuccess(msg: String) = print(success + msg + default)
-
- def printFailure(msg: String) = print(failure + msg + default)
-
- def printWarning(msg: String) = print(warning + msg + default)
-}
diff --git a/src/reflect/scala/reflect/api/Annotations.scala b/src/reflect/scala/reflect/api/Annotations.scala
index 09eaf7afb4..e19e0cefad 100644
--- a/src/reflect/scala/reflect/api/Annotations.scala
+++ b/src/reflect/scala/reflect/api/Annotations.scala
@@ -1,4 +1,5 @@
-package scala.reflect
+package scala
+package reflect
package api
import scala.collection.immutable.ListMap
@@ -45,12 +46,6 @@ trait Annotations { self: Universe =>
*/
type Annotation >: Null <: AnyRef with AnnotationApi
- /** A tag that preserves the identity of the `Annotation` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val AnnotationTag: ClassTag[Annotation]
-
/** The constructor/extractor for `Annotation` instances.
* @group Extractors
*/
@@ -90,11 +85,6 @@ trait Annotations { self: Universe =>
*/
type JavaArgument >: Null <: AnyRef
- /** A tag that preserves the identity of the `JavaArgument` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val JavaArgumentTag: ClassTag[JavaArgument]
/** A literal argument to a Java annotation as `"Use X instead"` in `@Deprecated("Use X instead")`
* @template
@@ -102,12 +92,6 @@ trait Annotations { self: Universe =>
*/
type LiteralArgument >: Null <: AnyRef with JavaArgument with LiteralArgumentApi
- /** A tag that preserves the identity of the `LiteralArgument` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val LiteralArgumentTag: ClassTag[LiteralArgument]
-
/** The constructor/extractor for `LiteralArgument` instances.
* @group Extractors
*/
@@ -137,12 +121,6 @@ trait Annotations { self: Universe =>
*/
type ArrayArgument >: Null <: AnyRef with JavaArgument with ArrayArgumentApi
- /** A tag that preserves the identity of the `ArrayArgument` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val ArrayArgumentTag: ClassTag[ArrayArgument]
-
/** The constructor/extractor for `ArrayArgument` instances.
* @group Extractors
*/
@@ -172,12 +150,6 @@ trait Annotations { self: Universe =>
*/
type NestedArgument >: Null <: AnyRef with JavaArgument with NestedArgumentApi
- /** A tag that preserves the identity of the `NestedArgument` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val NestedArgumentTag: ClassTag[NestedArgument]
-
/** The constructor/extractor for `NestedArgument` instances.
* @group Extractors
*/
@@ -200,4 +172,4 @@ trait Annotations { self: Universe =>
/** The underlying nested annotation. */
def annotation: Annotation
}
-} \ No newline at end of file
+}
diff --git a/src/reflect/scala/reflect/api/BuildUtils.scala b/src/reflect/scala/reflect/api/BuildUtils.scala
index 0c8e81a220..2e95a01176 100644
--- a/src/reflect/scala/reflect/api/BuildUtils.scala
+++ b/src/reflect/scala/reflect/api/BuildUtils.scala
@@ -1,4 +1,5 @@
-package scala.reflect
+package scala
+package reflect
package api
/**
@@ -59,8 +60,6 @@ private[reflect] trait BuildUtils { self: Universe =>
def flagsFromBits(bits: Long): FlagSet
- def emptyValDef: ValDef
-
def This(sym: Symbol): Tree
def Select(qualifier: Tree, sym: Symbol): Select
diff --git a/src/reflect/scala/reflect/api/Constants.scala b/src/reflect/scala/reflect/api/Constants.scala
index f3d75c3c00..c654961f4a 100644
--- a/src/reflect/scala/reflect/api/Constants.scala
+++ b/src/reflect/scala/reflect/api/Constants.scala
@@ -3,7 +3,8 @@
* @author Martin Odersky
*/
-package scala.reflect
+package scala
+package reflect
package api
/**
@@ -183,12 +184,6 @@ trait Constants {
*/
type Constant >: Null <: AnyRef with ConstantApi
- /** A tag that preserves the identity of the `Constant` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val ConstantTag: ClassTag[Constant]
-
/** The constructor/extractor for `Constant` instances.
* @group Extractors
*/
diff --git a/src/reflect/scala/reflect/api/Exprs.scala b/src/reflect/scala/reflect/api/Exprs.scala
index 2ba18a8207..009d9dbfdb 100644
--- a/src/reflect/scala/reflect/api/Exprs.scala
+++ b/src/reflect/scala/reflect/api/Exprs.scala
@@ -3,7 +3,8 @@
* @author Martin Odersky
*/
-package scala.reflect
+package scala
+package reflect
package api
import scala.reflect.runtime.{universe => ru}
@@ -174,4 +175,4 @@ private[scala] class SerializedExpr(var treec: TreeCreator, var tag: ru.WeakType
import ru._
Expr(rootMirror, treec)(tag)
}
-} \ No newline at end of file
+}
diff --git a/src/reflect/scala/reflect/api/FlagSets.scala b/src/reflect/scala/reflect/api/FlagSets.scala
index 4357aec9c9..8a1d2f7f1d 100644
--- a/src/reflect/scala/reflect/api/FlagSets.scala
+++ b/src/reflect/scala/reflect/api/FlagSets.scala
@@ -1,4 +1,5 @@
-package scala.reflect
+package scala
+package reflect
package api
import scala.language.implicitConversions
@@ -61,12 +62,6 @@ trait FlagSets { self: Universe =>
*/
type FlagSet
- /** A tag that preserves the identity of the `FlagSet` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val FlagSetTag: ClassTag[FlagSet]
-
/** The API of `FlagSet` instances.
* The main source of information about flag sets is the [[scala.reflect.api.FlagSets]] page.
* @group Flags
diff --git a/src/reflect/scala/reflect/api/ImplicitTags.scala b/src/reflect/scala/reflect/api/ImplicitTags.scala
new file mode 100644
index 0000000000..1b654a4a8d
--- /dev/null
+++ b/src/reflect/scala/reflect/api/ImplicitTags.scala
@@ -0,0 +1,117 @@
+package scala
+package reflect
+package api
+
+/** Tags which preserve the identity of abstract types in the face of erasure.
+ * Can be used for pattern matching, instance tests, serialization and the like.
+ * @group Tags
+ */
+trait ImplicitTags {
+ self: Universe =>
+
+ // Tags for Types.
+ implicit val AnnotatedTypeTag: ClassTag[AnnotatedType]
+ implicit val BoundedWildcardTypeTag: ClassTag[BoundedWildcardType]
+ implicit val ClassInfoTypeTag: ClassTag[ClassInfoType]
+ implicit val CompoundTypeTag: ClassTag[CompoundType]
+ implicit val ConstantTypeTag: ClassTag[ConstantType]
+ implicit val ExistentialTypeTag: ClassTag[ExistentialType]
+ implicit val MethodTypeTag: ClassTag[MethodType]
+ implicit val NullaryMethodTypeTag: ClassTag[NullaryMethodType]
+ implicit val PolyTypeTag: ClassTag[PolyType]
+ implicit val RefinedTypeTag: ClassTag[RefinedType]
+ implicit val SingleTypeTag: ClassTag[SingleType]
+ implicit val SingletonTypeTag: ClassTag[SingletonType]
+ implicit val SuperTypeTag: ClassTag[SuperType]
+ implicit val ThisTypeTag: ClassTag[ThisType]
+ implicit val TypeBoundsTag: ClassTag[TypeBounds]
+ implicit val TypeRefTag: ClassTag[TypeRef]
+ implicit val TypeTagg: ClassTag[Type]
+
+ // Tags for Names.
+ implicit val NameTag: ClassTag[Name]
+ implicit val TermNameTag: ClassTag[TermName]
+ implicit val TypeNameTag: ClassTag[TypeName]
+
+ // Tags for Scopes.
+ implicit val ScopeTag: ClassTag[Scope]
+ implicit val MemberScopeTag: ClassTag[MemberScope]
+
+ // Tags for Annotations.
+ implicit val AnnotationTag: ClassTag[Annotation]
+ implicit val JavaArgumentTag: ClassTag[JavaArgument]
+ implicit val LiteralArgumentTag: ClassTag[LiteralArgument]
+ implicit val ArrayArgumentTag: ClassTag[ArrayArgument]
+ implicit val NestedArgumentTag: ClassTag[NestedArgument]
+
+ // Tags for Symbols.
+ implicit val TermSymbolTag: ClassTag[TermSymbol]
+ implicit val MethodSymbolTag: ClassTag[MethodSymbol]
+ implicit val SymbolTag: ClassTag[Symbol]
+ implicit val TypeSymbolTag: ClassTag[TypeSymbol]
+ implicit val ModuleSymbolTag: ClassTag[ModuleSymbol]
+ implicit val ClassSymbolTag: ClassTag[ClassSymbol]
+ implicit val FreeTermSymbolTag: ClassTag[FreeTermSymbol]
+ implicit val FreeTypeSymbolTag: ClassTag[FreeTypeSymbol]
+
+ // Tags for misc Tree relatives.
+ implicit val PositionTag: ClassTag[Position]
+ implicit val ConstantTag: ClassTag[Constant]
+ implicit val FlagSetTag: ClassTag[FlagSet]
+ implicit val ModifiersTag: ClassTag[Modifiers]
+
+ // Tags for Trees. WTF.
+ implicit val AlternativeTag: ClassTag[Alternative]
+ implicit val AnnotatedTag: ClassTag[Annotated]
+ implicit val AppliedTypeTreeTag: ClassTag[AppliedTypeTree]
+ implicit val ApplyTag: ClassTag[Apply]
+ implicit val AssignOrNamedArgTag: ClassTag[AssignOrNamedArg]
+ implicit val AssignTag: ClassTag[Assign]
+ implicit val BindTag: ClassTag[Bind]
+ implicit val BlockTag: ClassTag[Block]
+ implicit val CaseDefTag: ClassTag[CaseDef]
+ implicit val ClassDefTag: ClassTag[ClassDef]
+ implicit val CompoundTypeTreeTag: ClassTag[CompoundTypeTree]
+ implicit val DefDefTag: ClassTag[DefDef]
+ implicit val DefTreeTag: ClassTag[DefTree]
+ implicit val ExistentialTypeTreeTag: ClassTag[ExistentialTypeTree]
+ implicit val FunctionTag: ClassTag[Function]
+ implicit val GenericApplyTag: ClassTag[GenericApply]
+ implicit val IdentTag: ClassTag[Ident]
+ implicit val IfTag: ClassTag[If]
+ implicit val ImplDefTag: ClassTag[ImplDef]
+ implicit val ImportSelectorTag: ClassTag[ImportSelector]
+ implicit val ImportTag: ClassTag[Import]
+ implicit val LabelDefTag: ClassTag[LabelDef]
+ implicit val LiteralTag: ClassTag[Literal]
+ implicit val MatchTag: ClassTag[Match]
+ implicit val MemberDefTag: ClassTag[MemberDef]
+ implicit val ModuleDefTag: ClassTag[ModuleDef]
+ implicit val NameTreeTag: ClassTag[NameTree]
+ implicit val NewTag: ClassTag[New]
+ implicit val PackageDefTag: ClassTag[PackageDef]
+ implicit val RefTreeTag: ClassTag[RefTree]
+ implicit val ReferenceToBoxedTag: ClassTag[ReferenceToBoxed]
+ implicit val ReturnTag: ClassTag[Return]
+ implicit val SelectFromTypeTreeTag: ClassTag[SelectFromTypeTree]
+ implicit val SelectTag: ClassTag[Select]
+ implicit val SingletonTypeTreeTag: ClassTag[SingletonTypeTree]
+ implicit val StarTag: ClassTag[Star]
+ implicit val SuperTag: ClassTag[Super]
+ implicit val SymTreeTag: ClassTag[SymTree]
+ implicit val TemplateTag: ClassTag[Template]
+ implicit val TermTreeTag: ClassTag[TermTree]
+ implicit val ThisTag: ClassTag[This]
+ implicit val ThrowTag: ClassTag[Throw]
+ implicit val TreeTag: ClassTag[Tree]
+ implicit val TryTag: ClassTag[Try]
+ implicit val TypTreeTag: ClassTag[TypTree]
+ implicit val TypeApplyTag: ClassTag[TypeApply]
+ implicit val TypeBoundsTreeTag: ClassTag[TypeBoundsTree]
+ implicit val TypeDefTag: ClassTag[TypeDef]
+ implicit val TypeTreeTag: ClassTag[TypeTree]
+ implicit val TypedTag: ClassTag[Typed]
+ implicit val UnApplyTag: ClassTag[UnApply]
+ implicit val ValDefTag: ClassTag[ValDef]
+ implicit val ValOrDefDefTag: ClassTag[ValOrDefDef]
+}
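
The new ImplicitTags trait above gathers the per-type ClassTag members that the individual API files used to declare next to each abstract type (hence the matching removals throughout this patch). These tags are what keep instance tests and pattern matches on the universe's abstract types meaningful after erasure. A minimal sketch of the kind of match they support, assuming the stock runtime universe; the describe helper is hypothetical and not part of the patch:

    import scala.reflect.runtime.universe._

    // The implicit ClassTags supplied by ImplicitTags (ApplyTag, SelectTag, IdentTag, ...)
    // make these instance tests on abstract tree types checkable at runtime.
    def describe(tree: Tree): String = tree match {
      case _: Apply  => "an application"
      case _: Select => "a selection"
      case _: Ident  => "an identifier"
      case _         => "something else"
    }

    describe(reify(println("hi")).tree)   // "an application"
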
diff --git a/src/reflect/scala/reflect/api/Importers.scala b/src/reflect/scala/reflect/api/Importers.scala
index afc4f2f25d..e6f314b712 100644
--- a/src/reflect/scala/reflect/api/Importers.scala
+++ b/src/reflect/scala/reflect/api/Importers.scala
@@ -1,4 +1,5 @@
-package scala.reflect
+package scala
+package reflect
package api
/**
diff --git a/src/reflect/scala/reflect/api/JavaMirrors.scala b/src/reflect/scala/reflect/api/JavaMirrors.scala
index b678033e1a..23abc23eb9 100644
--- a/src/reflect/scala/reflect/api/JavaMirrors.scala
+++ b/src/reflect/scala/reflect/api/JavaMirrors.scala
@@ -1,4 +1,5 @@
-package scala.reflect
+package scala
+package reflect
package api
/**
diff --git a/src/reflect/scala/reflect/api/JavaUniverse.scala b/src/reflect/scala/reflect/api/JavaUniverse.scala
index 04d091ee9d..6fc76c9693 100644
--- a/src/reflect/scala/reflect/api/JavaUniverse.scala
+++ b/src/reflect/scala/reflect/api/JavaUniverse.scala
@@ -1,4 +1,5 @@
-package scala.reflect
+package scala
+package reflect
package api
/**
diff --git a/src/reflect/scala/reflect/api/Mirror.scala b/src/reflect/scala/reflect/api/Mirror.scala
index 1223326d7c..e0219c9074 100644
--- a/src/reflect/scala/reflect/api/Mirror.scala
+++ b/src/reflect/scala/reflect/api/Mirror.scala
@@ -1,4 +1,5 @@
-package scala.reflect
+package scala
+package reflect
package api
/**
@@ -49,16 +50,16 @@ abstract class Mirror[U <: Universe with Singleton] {
* If you need a symbol that corresponds to the type alias itself, load it directly from the package class:
*
* scala> cm.staticClass("scala.List")
- * res0: reflect.runtime.universe.ClassSymbol = class List
+ * res0: scala.reflect.runtime.universe.ClassSymbol = class List
*
* scala> res0.fullName
* res1: String = scala.collection.immutable.List
*
* scala> cm.staticPackage("scala")
- * res2: reflect.runtime.universe.ModuleSymbol = package scala
+ * res2: scala.reflect.runtime.universe.ModuleSymbol = package scala
*
* scala> res2.moduleClass.typeSignature member newTypeName("List")
- * res3: reflect.runtime.universe.Symbol = type List
+ * res3: scala.reflect.runtime.universe.Symbol = type List
*
* scala> res3.fullName
* res4: String = scala.List
diff --git a/src/reflect/scala/reflect/api/Mirrors.scala b/src/reflect/scala/reflect/api/Mirrors.scala
index 76a75940ff..d702555ba6 100644
--- a/src/reflect/scala/reflect/api/Mirrors.scala
+++ b/src/reflect/scala/reflect/api/Mirrors.scala
@@ -1,4 +1,5 @@
-package scala.reflect
+package scala
+package reflect
package api
/**
@@ -350,6 +351,11 @@ trait Mirrors { self: Universe =>
* the value of the base field. To achieve overriding behavior, use reflectMethod on an accessor.
*/
def set(value: Any): Unit
+
+ /** Creates a new mirror which uses the same symbol, but is bound to a different receiver.
+ * This is significantly faster than recreating the mirror from scratch.
+ */
+ def bind(newReceiver: Any): FieldMirror
}
/** A mirror that reflects a method.
@@ -371,6 +377,11 @@ trait Mirrors { self: Universe =>
* with invoking the corresponding method or constructor.
*/
def apply(args: Any*): Any
+
+ /** Creates a new mirror which uses the same symbol, but is bound to a different receiver.
+ * This is significantly faster than recreating the mirror from scratch.
+ */
+ def bind(newReceiver: Any): MethodMirror
}
/** A mirror that reflects the instance or static parts of a runtime class.
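
The bind methods added to FieldMirror and MethodMirror above let a mirror be re-pointed at another receiver without repeating the symbol lookup and mirror setup. A minimal sketch of the intended usage against the runtime mirror; the Box class is a hypothetical example, not part of the patch:

    import scala.reflect.runtime.{universe => ru}

    class Box(val value: Int)

    val m      = ru.runtimeMirror(getClass.getClassLoader)
    val getter = ru.typeOf[Box].member(ru.TermName("value")).asMethod

    val first = m.reflect(new Box(1)).reflectMethod(getter)
    first()                    // 1
    first.bind(new Box(2))()   // 2 -- same symbol, different receiver, no re-reflection
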
diff --git a/src/reflect/scala/reflect/api/Names.scala b/src/reflect/scala/reflect/api/Names.scala
index 7c12f180a8..f74e0ce014 100644
--- a/src/reflect/scala/reflect/api/Names.scala
+++ b/src/reflect/scala/reflect/api/Names.scala
@@ -1,6 +1,9 @@
-package scala.reflect
+package scala
+package reflect
package api
+import scala.language.implicitConversions
+
/**
* <span class="badge badge-red" style="float: right;">EXPERIMENTAL</span>
*
@@ -43,42 +46,24 @@ trait Names {
*/
type Name >: Null <: NameApi
- /** A tag that preserves the identity of the `Name` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val NameTag: ClassTag[Name]
-
/** The abstract type of names representing types.
* @group Names
*/
type TypeName >: Null <: Name
- /** A tag that preserves the identity of the `TypeName` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
-implicit val TypeNameTag: ClassTag[TypeName]
-
/** The abstract type of names representing terms.
* @group Names
*/
type TermName >: Null <: Name
- /** A tag that preserves the identity of the `TermName` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val TermNameTag: ClassTag[TermName]
-
/** The API of Name instances.
* @group API
*/
abstract class NameApi {
- /** Checks wether the name is a a term name */
+    /** Checks whether the name is a term name */
def isTermName: Boolean
- /** Checks wether the name is a a type name */
+    /** Checks whether the name is a type name */
def isTypeName: Boolean
/** Returns a term name that wraps the same string as `this` */
@@ -109,10 +94,38 @@ implicit val TypeNameTag: ClassTag[TypeName]
/** Create a new term name.
* @group Names
*/
+ @deprecated("Use TermName instead", "2.11.0")
def newTermName(s: String): TermName
/** Creates a new type name.
* @group Names
*/
+ @deprecated("Use TypeName instead", "2.11.0")
def newTypeName(s: String): TypeName
+
+ /** The constructor/extractor for `TermName` instances.
+ * @group Extractors
+ */
+ val TermName: TermNameExtractor
+
+ /** An extractor class to create and pattern match with syntax `TermName(s)`.
+ * @group Extractors
+ */
+ abstract class TermNameExtractor {
+ def apply(s: String): TermName
+ def unapply(name: TermName): Option[String]
+ }
+
+ /** The constructor/extractor for `TypeName` instances.
+ * @group Extractors
+ */
+ val TypeName: TypeNameExtractor
+
+ /** An extractor class to create and pattern match with syntax `TypeName(s)`.
+ * @group Extractors
+ */
+ abstract class TypeNameExtractor {
+ def apply(s: String): TypeName
+ def unapply(name: TypeName): Option[String]
+ }
}
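
With newTermName/newTypeName deprecated in favour of the TermName and TypeName extractors above, names can now be both constructed and pattern matched through the same objects. A small sketch, assuming the runtime universe:

    import scala.reflect.runtime.universe._

    val tn: TermName = TermName("map")          // replaces the deprecated newTermName("map")
    val TermName(s)  = tn                       // s == "map", via the new unapply

    typeOf[List[Int]].member(TermName("map"))   // member lookup by term name
    TypeName("Elem").toTermName                 // the usual conversions between name kinds still apply
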
diff --git a/src/reflect/scala/reflect/api/Position.scala b/src/reflect/scala/reflect/api/Position.scala
index 63c67627a3..4d5f1f6e67 100644
--- a/src/reflect/scala/reflect/api/Position.scala
+++ b/src/reflect/scala/reflect/api/Position.scala
@@ -1,4 +1,5 @@
-package scala.reflect
+package scala
+package reflect
package api
import scala.reflect.macros.Attachments
diff --git a/src/reflect/scala/reflect/api/Positions.scala b/src/reflect/scala/reflect/api/Positions.scala
index 87f00fdb88..8ad46418f8 100644
--- a/src/reflect/scala/reflect/api/Positions.scala
+++ b/src/reflect/scala/reflect/api/Positions.scala
@@ -1,4 +1,5 @@
-package scala.reflect
+package scala
+package reflect
package api
/**
@@ -19,13 +20,6 @@ trait Positions {
* @group Positions
*/
type Position >: Null <: scala.reflect.api.Position { type Pos = Position }
-
- /** A tag that preserves the identity of the `Position` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val PositionTag: ClassTag[Position]
-
/** A special "missing" position.
* @group Positions
*/
diff --git a/src/reflect/scala/reflect/api/Printers.scala b/src/reflect/scala/reflect/api/Printers.scala
index 85ddcc6523..81d30dec1e 100644
--- a/src/reflect/scala/reflect/api/Printers.scala
+++ b/src/reflect/scala/reflect/api/Printers.scala
@@ -1,4 +1,5 @@
-package scala.reflect
+package scala
+package reflect
package api
import java.io.{ PrintWriter, StringWriter }
@@ -143,6 +144,7 @@ trait Printers { self: Universe =>
protected var printIds = false
protected var printKinds = false
protected var printMirrors = false
+ protected var printPositions = false
def withTypes: this.type = { printTypes = true; this }
def withoutTypes: this.type = { printTypes = false; this }
def withIds: this.type = { printIds = true; this }
@@ -151,43 +153,48 @@ trait Printers { self: Universe =>
def withoutKinds: this.type = { printKinds = false; this }
def withMirrors: this.type = { printMirrors = true; this }
def withoutMirrors: this.type = { printMirrors = false; this }
+ def withPositions: this.type = { printPositions = true; this }
+ def withoutPositions: this.type = { printPositions = false; this }
}
/** @group Printers */
- case class BooleanFlag(val value: Option[Boolean])
+ case class BooleanFlag(value: Option[Boolean])
/** @group Printers */
object BooleanFlag {
import scala.language.implicitConversions
implicit def booleanToBooleanFlag(value: Boolean): BooleanFlag = BooleanFlag(Some(value))
implicit def optionToBooleanFlag(value: Option[Boolean]): BooleanFlag = BooleanFlag(value)
+ import scala.reflect.internal.settings.MutableSettings
+ implicit def settingToBooleanFlag(setting: MutableSettings#BooleanSetting): BooleanFlag = BooleanFlag(Some(setting.value))
}
/** @group Printers */
- protected def render(what: Any, mkPrinter: PrintWriter => TreePrinter, printTypes: BooleanFlag = None, printIds: BooleanFlag = None, printKinds: BooleanFlag = None, printMirrors: BooleanFlag = None): String = {
+ protected def render(what: Any, mkPrinter: PrintWriter => TreePrinter, printTypes: BooleanFlag = None, printIds: BooleanFlag = None, printKinds: BooleanFlag = None, printMirrors: BooleanFlag = None, printPositions: BooleanFlag = None): String = {
val buffer = new StringWriter()
val writer = new PrintWriter(buffer)
- var printer = mkPrinter(writer)
+ val printer = mkPrinter(writer)
printTypes.value.map(printTypes => if (printTypes) printer.withTypes else printer.withoutTypes)
printIds.value.map(printIds => if (printIds) printer.withIds else printer.withoutIds)
printKinds.value.map(printKinds => if (printKinds) printer.withKinds else printer.withoutKinds)
printMirrors.value.map(printMirrors => if (printMirrors) printer.withMirrors else printer.withoutMirrors)
+ printPositions.value.map(printPositions => if (printPositions) printer.withPositions else printer.withoutPositions)
printer.print(what)
writer.flush()
buffer.toString
}
/** By default trees are printed with `show`
- * @group Printers
+ * @group Printers
*/
override protected def treeToString(tree: Tree) = show(tree)
/** Renders a representation of a reflection artifact
- * as desugared Java code.
+ * as desugared Scala code.
*
- * @group Printers
+ * @group Printers
*/
- def show(any: Any, printTypes: BooleanFlag = None, printIds: BooleanFlag = None, printKinds: BooleanFlag = None, printMirrors: BooleanFlag = None): String =
- render(any, newTreePrinter(_), printTypes, printIds, printKinds, printMirrors)
+ def show(any: Any, printTypes: BooleanFlag = None, printIds: BooleanFlag = None, printKinds: BooleanFlag = None, printMirrors: BooleanFlag = None, printPositions: BooleanFlag = None): String =
+ render(any, newTreePrinter(_), printTypes, printIds, printKinds, printMirrors, printPositions)
/** Hook to define what `show(...)` means.
* @group Printers
@@ -195,12 +202,12 @@ trait Printers { self: Universe =>
protected def newTreePrinter(out: PrintWriter): TreePrinter
/** Renders internal structure of a reflection artifact as the
- * visualization of a Scala syntax tree.
+ * visualization of a Scala syntax tree.
*
- * @group Printers
+ * @group Printers
*/
- def showRaw(any: Any, printTypes: BooleanFlag = None, printIds: BooleanFlag = None, printKinds: BooleanFlag = None, printMirrors: BooleanFlag = None): String =
- render(any, newRawTreePrinter(_), printTypes, printIds, printKinds, printMirrors)
+ def showRaw(any: Any, printTypes: BooleanFlag = None, printIds: BooleanFlag = None, printKinds: BooleanFlag = None, printMirrors: BooleanFlag = None, printPositions: BooleanFlag = None): String =
+ render(any, newRawTreePrinter(_), printTypes, printIds, printKinds, printMirrors, printPositions)
/** Hook to define what `showRaw(...)` means.
* @group Printers
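
show and showRaw above gain a printPositions flag next to the existing ones, and BooleanFlag additionally converts from a compiler BooleanSetting. A sketch of the flags in use, assuming the runtime universe:

    import scala.reflect.runtime.universe._

    val tree = reify { final class C { def x = 2 } }.tree

    show(tree)                            // pretty-printed Scala code
    showRaw(tree, printIds = true)        // raw tree structure, with symbol ids
    showRaw(tree, printPositions = true)  // raw tree structure, annotated with positions
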
diff --git a/src/reflect/scala/reflect/api/Scopes.scala b/src/reflect/scala/reflect/api/Scopes.scala
index 7f9799393c..2eb477f652 100644
--- a/src/reflect/scala/reflect/api/Scopes.scala
+++ b/src/reflect/scala/reflect/api/Scopes.scala
@@ -1,4 +1,5 @@
-package scala.reflect
+package scala
+package reflect
package api
/**
@@ -33,12 +34,6 @@ trait Scopes { self: Universe =>
*/
trait ScopeApi extends Iterable[Symbol]
- /** A tag that preserves the identity of the `Scope` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val ScopeTag: ClassTag[Scope]
-
/** Create a new scope with the given initial elements.
* @group Scopes
*/
@@ -61,10 +56,4 @@ trait Scopes { self: Universe =>
*/
def sorted: List[Symbol]
}
-
- /** A tag that preserves the identity of the `MemberScope` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val MemberScopeTag: ClassTag[MemberScope]
-} \ No newline at end of file
+}
diff --git a/src/reflect/scala/reflect/api/StandardDefinitions.scala b/src/reflect/scala/reflect/api/StandardDefinitions.scala
index 721b0bc7f2..bbfebcb434 100644
--- a/src/reflect/scala/reflect/api/StandardDefinitions.scala
+++ b/src/reflect/scala/reflect/api/StandardDefinitions.scala
@@ -2,7 +2,8 @@
* Copyright 2005-2013 LAMP/EPFL
* @author Martin Odersky
*/
-package scala.reflect
+package scala
+package reflect
package api
/**
diff --git a/src/reflect/scala/reflect/api/StandardNames.scala b/src/reflect/scala/reflect/api/StandardNames.scala
index 4886e4f8f7..aec5f19fa0 100644
--- a/src/reflect/scala/reflect/api/StandardNames.scala
+++ b/src/reflect/scala/reflect/api/StandardNames.scala
@@ -2,7 +2,8 @@
* Copyright 2005-2013 LAMP/EPFL
* @author Martin Odersky
*/
-package scala.reflect
+package scala
+package reflect
package api
// Q: I have a pretty name. Can I put it here?
@@ -84,6 +85,11 @@ trait StandardNames {
*/
val ROOTPKG: NameType
+ /** The term name `<empty>`.
+ * Represents the empty package.
+ */
+ val EMPTY_PACKAGE_NAME: NameType
+
/** The string " " (a single whitespace).
* `LOCAL_SUFFIX_STRING` is appended to the names of local identifiers,
* when it's necessary to prevent a naming conflict. For example, underlying fields
diff --git a/src/reflect/scala/reflect/api/Symbols.scala b/src/reflect/scala/reflect/api/Symbols.scala
index c8e03f1d91..1250545497 100644
--- a/src/reflect/scala/reflect/api/Symbols.scala
+++ b/src/reflect/scala/reflect/api/Symbols.scala
@@ -1,4 +1,5 @@
-package scala.reflect
+package scala
+package reflect
package api
/**
@@ -61,12 +62,6 @@ trait Symbols { self: Universe =>
*/
type Symbol >: Null <: SymbolApi
- /** A tag that preserves the identity of the `Symbol` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val SymbolTag: ClassTag[Symbol]
-
/** The type of type symbols representing type, class, and trait declarations,
* as well as type parameters.
* @group Symbols
@@ -74,12 +69,6 @@ trait Symbols { self: Universe =>
*/
type TypeSymbol >: Null <: Symbol with TypeSymbolApi
- /** A tag that preserves the identity of the `TypeSymbol` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val TypeSymbolTag: ClassTag[TypeSymbol]
-
/** The type of term symbols representing val, var, def, and object declarations as
* well as packages and value parameters.
* @group Symbols
@@ -87,72 +76,36 @@ trait Symbols { self: Universe =>
*/
type TermSymbol >: Null <: Symbol with TermSymbolApi
- /** A tag that preserves the identity of the `TermSymbol` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val TermSymbolTag: ClassTag[TermSymbol]
-
/** The type of method symbols representing def declarations.
* @group Symbols
* @template
*/
type MethodSymbol >: Null <: TermSymbol with MethodSymbolApi
- /** A tag that preserves the identity of the `MethodSymbol` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val MethodSymbolTag: ClassTag[MethodSymbol]
-
/** The type of module symbols representing object declarations.
* @group Symbols
* @template
*/
type ModuleSymbol >: Null <: TermSymbol with ModuleSymbolApi
- /** A tag that preserves the identity of the `ModuleSymbol` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val ModuleSymbolTag: ClassTag[ModuleSymbol]
-
/** The type of class symbols representing class and trait definitions.
* @group Symbols
* @template
*/
type ClassSymbol >: Null <: TypeSymbol with ClassSymbolApi
- /** A tag that preserves the identity of the `ClassSymbol` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val ClassSymbolTag: ClassTag[ClassSymbol]
-
/** The type of free terms introduced by reification.
* @group Symbols
* @template
*/
type FreeTermSymbol >: Null <: TermSymbol with FreeTermSymbolApi
- /** A tag that preserves the identity of the `FreeTermSymbol` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val FreeTermSymbolTag: ClassTag[FreeTermSymbol]
-
/** The type of free types introduced by reification.
* @group Symbols
* @template
*/
type FreeTypeSymbol >: Null <: TypeSymbol with FreeTypeSymbolApi
- /** A tag that preserves the identity of the `FreeTypeSymbol` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val FreeTypeSymbolTag: ClassTag[FreeTypeSymbol]
-
/** A special "missing" symbol. Commonly used in the API to denote a default or empty value.
* @group Symbols
* @template
@@ -245,7 +198,7 @@ trait Symbols { self: Universe =>
/** Does this symbol represent the definition of a term?
* Note that every symbol is either a term or a type.
* So for every symbol `sym` (except for `NoSymbol`),
- * either `sym.isTerm` is true or `sym.isTerm` is true.
+ * either `sym.isTerm` is true or `sym.isType` is true.
*
* @group Tests
*/
diff --git a/src/reflect/scala/reflect/api/TagInterop.scala b/src/reflect/scala/reflect/api/TagInterop.scala
index 5de811578e..51b7c519c5 100644
--- a/src/reflect/scala/reflect/api/TagInterop.scala
+++ b/src/reflect/scala/reflect/api/TagInterop.scala
@@ -1,4 +1,5 @@
-package scala.reflect
+package scala
+package reflect
package api
/**
diff --git a/src/reflect/scala/reflect/api/TreeCreator.scala b/src/reflect/scala/reflect/api/TreeCreator.scala
index 6969418470..027c840955 100644
--- a/src/reflect/scala/reflect/api/TreeCreator.scala
+++ b/src/reflect/scala/reflect/api/TreeCreator.scala
@@ -1,4 +1,5 @@
-package scala.reflect
+package scala
+package reflect
package api
/** This is an internal implementation class.
diff --git a/src/reflect/scala/reflect/api/Trees.scala b/src/reflect/scala/reflect/api/Trees.scala
index 0937a93738..f7a6a68946 100644
--- a/src/reflect/scala/reflect/api/Trees.scala
+++ b/src/reflect/scala/reflect/api/Trees.scala
@@ -2,7 +2,8 @@
* Copyright 2005-2013 LAMP/EPFL
* @author Martin Odersky
*/
-package scala.reflect
+package scala
+package reflect
package api
/**
@@ -60,12 +61,6 @@ trait Trees { self: Universe =>
*/
type Tree >: Null <: TreeApi
- /** A tag that preserves the identity of the `Tree` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val TreeTag: ClassTag[Tree]
-
/** The API that all trees support.
* The main source of information about trees is the [[scala.reflect.api.Trees]] page.
* @group API
@@ -75,11 +70,32 @@ trait Trees { self: Universe =>
def isDef: Boolean
/** Is this tree one of the empty trees?
+ *
* Empty trees are: the `EmptyTree` null object, `TypeTree` instances that don't carry a type
* and the special `emptyValDef` singleton.
+ *
+ * In the compiler the `isEmpty` check and the derived `orElse` method are mostly used
+ * as a check for a tree being a null object (`EmptyTree` for term trees and empty TypeTree for type trees).
+ *
+ * Unfortunately `emptyValDef` is also considered to be `isEmpty`, but this is deemed to be
+ * a conceptual mistake pending a fix in https://issues.scala-lang.org/browse/SI-6762.
+ *
+ * @see `canHaveAttrs`
*/
def isEmpty: Boolean
+  /** Is this tree not one of the empty trees?
+ *
+ * @see `isEmpty`
+ */
+ def nonEmpty: Boolean
+
+ /** Can this tree carry attributes (i.e. symbols, types or positions)?
+ * Typically the answer is yes, except for the `EmptyTree` null object and
+ * two special singletons: `emptyValDef` and `pendingSuperCall`.
+ */
+ def canHaveAttrs: Boolean
+
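
The distinction drawn above between the attribute-less null objects and ordinary trees can be summarized in a short sketch, assuming the runtime universe:

    import scala.reflect.runtime.universe._

    EmptyTree.isEmpty          // true
    EmptyTree.canHaveAttrs     // false

    val id = Ident(TermName("x"))
    id.nonEmpty                // true
    id.canHaveAttrs            // true -- a regular tree can carry a symbol, a type and a position
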
/** The canonical way to test if a Tree represents a term.
*/
def isTerm: Boolean
@@ -209,12 +225,6 @@ trait Trees { self: Universe =>
*/
type TermTree >: Null <: AnyRef with Tree with TermTreeApi
- /** A tag that preserves the identity of the `TermTree` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val TermTreeTag: ClassTag[TermTree]
-
/** The API that all term trees support
* @group API
*/
@@ -228,12 +238,6 @@ trait Trees { self: Universe =>
*/
type TypTree >: Null <: AnyRef with Tree with TypTreeApi
- /** A tag that preserves the identity of the `TypTree` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val TypTreeTag: ClassTag[TypTree]
-
/** The API that all typ trees support
* @group API
*/
@@ -246,12 +250,6 @@ trait Trees { self: Universe =>
*/
type SymTree >: Null <: AnyRef with Tree with SymTreeApi
- /** A tag that preserves the identity of the `SymTree` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val SymTreeTag: ClassTag[SymTree]
-
/** The API that all sym trees support
* @group API
*/
@@ -266,12 +264,6 @@ trait Trees { self: Universe =>
*/
type NameTree >: Null <: AnyRef with Tree with NameTreeApi
- /** A tag that preserves the identity of the `NameTree` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val NameTreeTag: ClassTag[NameTree]
-
/** The API that all name trees support
* @group API
*/
@@ -290,12 +282,6 @@ trait Trees { self: Universe =>
*/
type RefTree >: Null <: SymTree with NameTree with RefTreeApi
- /** A tag that preserves the identity of the `RefTree` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val RefTreeTag: ClassTag[RefTree]
-
/** The API that all ref trees support
* @group API
*/
@@ -310,18 +296,26 @@ trait Trees { self: Universe =>
def name: Name
}
+ /** The constructor/extractor for `RefTree` instances.
+ * @group Extractors
+ */
+ val RefTree: RefTreeExtractor
+
+ /** An extractor class to create and pattern match with syntax `RefTree(qual, name)`.
+ * This AST node corresponds to either Ident, Select or SelectFromTypeTree.
+ * @group Extractors
+ */
+ abstract class RefTreeExtractor {
+ def apply(qualifier: Tree, name: Name): RefTree
+ def unapply(refTree: RefTree): Option[(Tree, Name)]
+ }
+
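
The new RefTree constructor/extractor above abstracts over Ident, Select and SelectFromTypeTree, so reference-like trees can be handled uniformly. A minimal sketch, assuming the runtime universe; refName is a hypothetical helper:

    import scala.reflect.runtime.universe._

    def refName(t: Tree): Option[String] = t match {
      case RefTree(_, name) => Some(name.toString)
      case _                => None
    }

    refName(Select(Ident(TermName("x")), TermName("y")))   // Some("y")
    refName(Literal(Constant(42)))                         // None
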
/** A tree which defines a symbol-carrying entity.
* @group Trees
* @template
*/
type DefTree >: Null <: SymTree with NameTree with DefTreeApi
- /** A tag that preserves the identity of the `DefTree` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val DefTreeTag: ClassTag[DefTree]
-
/** The API that all def trees support
* @group API
*/
@@ -337,12 +331,6 @@ trait Trees { self: Universe =>
*/
type MemberDef >: Null <: DefTree with MemberDefApi
- /** A tag that preserves the identity of the `MemberDef` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val MemberDefTag: ClassTag[MemberDef]
-
/** The API that all member defs support
* @group API
*/
@@ -357,12 +345,6 @@ trait Trees { self: Universe =>
*/
type PackageDef >: Null <: MemberDef with PackageDefApi
- /** A tag that preserves the identity of the `PackageDef` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val PackageDefTag: ClassTag[PackageDef]
-
/** The constructor/extractor for `PackageDef` instances.
* @group Extractors
*/
@@ -396,12 +378,6 @@ trait Trees { self: Universe =>
*/
type ImplDef >: Null <: MemberDef with ImplDefApi
- /** A tag that preserves the identity of the `ImplDef` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val ImplDefTag: ClassTag[ImplDef]
-
/** The API that all impl defs support
* @group API
*/
@@ -416,12 +392,6 @@ trait Trees { self: Universe =>
*/
type ClassDef >: Null <: ImplDef with ClassDefApi
- /** A tag that preserves the identity of the `ClassDef` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val ClassDefTag: ClassTag[ClassDef]
-
/** The constructor/extractor for `ClassDef` instances.
* @group Extractors
*/
@@ -467,12 +437,6 @@ trait Trees { self: Universe =>
*/
type ModuleDef >: Null <: ImplDef with ModuleDefApi
- /** A tag that preserves the identity of the `ModuleDef` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val ModuleDefTag: ClassTag[ModuleDef]
-
/** The constructor/extractor for `ModuleDef` instances.
* @group Extractors
*/
@@ -513,12 +477,6 @@ trait Trees { self: Universe =>
*/
type ValOrDefDef >: Null <: MemberDef with ValOrDefDefApi
- /** A tag that preserves the identity of the `ValOrDefDef` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val ValOrDefDefTag: ClassTag[ValOrDefDef]
-
/** The API that all val defs and def defs support
* @group API
*/
@@ -550,12 +508,6 @@ trait Trees { self: Universe =>
*/
type ValDef >: Null <: ValOrDefDef with ValDefApi
- /** A tag that preserves the identity of the `ValDef` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val ValDefTag: ClassTag[ValDef]
-
/** The constructor/extractor for `ValDef` instances.
* @group Extractors
*/
@@ -605,12 +557,6 @@ trait Trees { self: Universe =>
*/
type DefDef >: Null <: ValOrDefDef with DefDefApi
- /** A tag that preserves the identity of the `DefDef` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val DefDefTag: ClassTag[DefDef]
-
/** The constructor/extractor for `DefDef` instances.
* @group Extractors
*/
@@ -660,12 +606,6 @@ trait Trees { self: Universe =>
*/
type TypeDef >: Null <: MemberDef with TypeDefApi
- /** A tag that preserves the identity of the `TypeDef` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val TypeDefTag: ClassTag[TypeDef]
-
/** The constructor/extractor for `TypeDef` instances.
* @group Extractors
*/
@@ -725,12 +665,6 @@ trait Trees { self: Universe =>
*/
type LabelDef >: Null <: DefTree with TermTree with LabelDefApi
- /** A tag that preserves the identity of the `LabelDef` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val LabelDefTag: ClassTag[LabelDef]
-
/** The constructor/extractor for `LabelDef` instances.
* @group Extractors
*/
@@ -787,12 +721,6 @@ trait Trees { self: Universe =>
*/
type ImportSelector >: Null <: AnyRef with ImportSelectorApi
- /** A tag that preserves the identity of the `ImportSelector` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val ImportSelectorTag: ClassTag[ImportSelector]
-
/** The constructor/extractor for `ImportSelector` instances.
* @group Extractors
*/
@@ -839,12 +767,6 @@ trait Trees { self: Universe =>
*/
type Import >: Null <: SymTree with ImportApi
- /** A tag that preserves the identity of the `Import` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val ImportTag: ClassTag[Import]
-
/** The constructor/extractor for `Import` instances.
* @group Extractors
*/
@@ -897,12 +819,6 @@ trait Trees { self: Universe =>
*/
type Template >: Null <: SymTree with TemplateApi
- /** A tag that preserves the identity of the `Template` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val TemplateTag: ClassTag[Template]
-
/** The constructor/extractor for `Template` instances.
* @group Extractors
*/
@@ -955,12 +871,6 @@ trait Trees { self: Universe =>
*/
type Block >: Null <: TermTree with BlockApi
- /** A tag that preserves the identity of the `Block` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val BlockTag: ClassTag[Block]
-
/** The constructor/extractor for `Block` instances.
* @group Extractors
*/
@@ -1000,12 +910,6 @@ trait Trees { self: Universe =>
*/
type CaseDef >: Null <: AnyRef with Tree with CaseDefApi
- /** A tag that preserves the identity of the `CaseDef` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val CaseDefTag: ClassTag[CaseDef]
-
/** The constructor/extractor for `CaseDef` instances.
* @group Extractors
*/
@@ -1017,7 +921,7 @@ trait Trees { self: Universe =>
* `case` pat `if` guard => body
*
* If the guard is not present, the `guard` is set to `EmptyTree`.
- * If the body is not specified, the `body` is set to `Literal(Constant())`
+ * If the body is not specified, the `body` is set to `Literal(Constant(()))`
* @group Extractors
*/
abstract class CaseDefExtractor {
@@ -1038,7 +942,7 @@ trait Trees { self: Universe =>
def guard: Tree
/** The body of the pattern matching clause.
- * Is equal to `Literal(Constant())` if the body is not specified.
+ * Is equal to `Literal(Constant(()))` if the body is not specified.
*/
def body: Tree
}
@@ -1053,12 +957,6 @@ trait Trees { self: Universe =>
*/
type Alternative >: Null <: TermTree with AlternativeApi
- /** A tag that preserves the identity of the `Alternative` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val AlternativeTag: ClassTag[Alternative]
-
/** The constructor/extractor for `Alternative` instances.
* @group Extractors
*/
@@ -1091,12 +989,6 @@ trait Trees { self: Universe =>
*/
type Star >: Null <: TermTree with StarApi
- /** A tag that preserves the identity of the `Star` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val StarTag: ClassTag[Star]
-
/** The constructor/extractor for `Star` instances.
* @group Extractors
*/
@@ -1132,12 +1024,6 @@ trait Trees { self: Universe =>
*/
type Bind >: Null <: DefTree with BindApi
- /** A tag that preserves the identity of the `Bind` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val BindTag: ClassTag[Bind]
-
/** The constructor/extractor for `Bind` instances.
* @group Extractors
*/
@@ -1201,12 +1087,6 @@ trait Trees { self: Universe =>
*/
type UnApply >: Null <: TermTree with UnApplyApi
- /** A tag that preserves the identity of the `UnApply` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val UnApplyTag: ClassTag[UnApply]
-
/** The constructor/extractor for `UnApply` instances.
* @group Extractors
*/
@@ -1243,12 +1123,6 @@ trait Trees { self: Universe =>
*/
type Function >: Null <: TermTree with SymTree with FunctionApi
- /** A tag that preserves the identity of the `Function` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val FunctionTag: ClassTag[Function]
-
/** The constructor/extractor for `Function` instances.
* @group Extractors
*/
@@ -1287,12 +1161,6 @@ trait Trees { self: Universe =>
*/
type Assign >: Null <: TermTree with AssignApi
- /** A tag that preserves the identity of the `Assign` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val AssignTag: ClassTag[Assign]
-
/** The constructor/extractor for `Assign` instances.
* @group Extractors
*/
@@ -1329,12 +1197,6 @@ trait Trees { self: Universe =>
*/
type AssignOrNamedArg >: Null <: TermTree with AssignOrNamedArgApi
- /** A tag that preserves the identity of the `AssignOrNamedArg` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val AssignOrNamedArgTag: ClassTag[AssignOrNamedArg]
-
/** The constructor/extractor for `AssignOrNamedArg` instances.
* @group Extractors
*/
@@ -1376,12 +1238,6 @@ trait Trees { self: Universe =>
*/
type If >: Null <: TermTree with IfApi
- /** A tag that preserves the identity of the `If` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val IfTag: ClassTag[If]
-
/** The constructor/extractor for `If` instances.
* @group Extractors
*/
@@ -1433,12 +1289,6 @@ trait Trees { self: Universe =>
*/
type Match >: Null <: TermTree with MatchApi
- /** A tag that preserves the identity of the `Match` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val MatchTag: ClassTag[Match]
-
/** The constructor/extractor for `Match` instances.
* @group Extractors
*/
@@ -1474,12 +1324,6 @@ trait Trees { self: Universe =>
*/
type Return >: Null <: TermTree with SymTree with ReturnApi
- /** A tag that preserves the identity of the `Return` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val ReturnTag: ClassTag[Return]
-
/** The constructor/extractor for `Return` instances.
* @group Extractors
*/
@@ -1512,12 +1356,6 @@ trait Trees { self: Universe =>
*/
type Try >: Null <: TermTree with TryApi
- /** A tag that preserves the identity of the `Try` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val TryTag: ClassTag[Try]
-
/** The constructor/extractor for `Try` instances.
* @group Extractors
*/
@@ -1556,12 +1394,6 @@ trait Trees { self: Universe =>
*/
type Throw >: Null <: TermTree with ThrowApi
- /** A tag that preserves the identity of the `Throw` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val ThrowTag: ClassTag[Throw]
-
/** The constructor/extractor for `Throw` instances.
* @group Extractors
*/
@@ -1592,12 +1424,6 @@ trait Trees { self: Universe =>
*/
type New >: Null <: TermTree with NewApi
- /** A tag that preserves the identity of the `New` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val NewTag: ClassTag[New]
-
/** The constructor/extractor for `New` instances.
* @group Extractors
*/
@@ -1648,12 +1474,6 @@ trait Trees { self: Universe =>
*/
type Typed >: Null <: TermTree with TypedApi
- /** A tag that preserves the identity of the `Typed` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val TypedTag: ClassTag[Typed]
-
/** The constructor/extractor for `Typed` instances.
* @group Extractors
*/
@@ -1687,12 +1507,6 @@ trait Trees { self: Universe =>
*/
type GenericApply >: Null <: TermTree with GenericApplyApi
- /** A tag that preserves the identity of the `GenericApply` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val GenericApplyTag: ClassTag[GenericApply]
-
/** The API that all applies support
* @group API
*/
@@ -1714,12 +1528,6 @@ trait Trees { self: Universe =>
*/
type TypeApply >: Null <: GenericApply with TypeApplyApi
- /** A tag that preserves the identity of the `TypeApply` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val TypeApplyTag: ClassTag[TypeApply]
-
/** The constructor/extractor for `TypeApply` instances.
* @group Extractors
*/
@@ -1758,12 +1566,6 @@ trait Trees { self: Universe =>
*/
type Apply >: Null <: GenericApply with ApplyApi
- /** A tag that preserves the identity of the `Apply` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val ApplyTag: ClassTag[Apply]
-
/** The constructor/extractor for `Apply` instances.
* @group Extractors
*/
@@ -1801,12 +1603,6 @@ trait Trees { self: Universe =>
*/
type Super >: Null <: TermTree with SuperApi
- /** A tag that preserves the identity of the `Super` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val SuperTag: ClassTag[Super]
-
/** The constructor/extractor for `Super` instances.
* @group Extractors
*/
@@ -1853,12 +1649,6 @@ trait Trees { self: Universe =>
*/
type This >: Null <: TermTree with SymTree with ThisApi
- /** A tag that preserves the identity of the `This` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val ThisTag: ClassTag[This]
-
/** The constructor/extractor for `This` instances.
* @group Extractors
*/
@@ -1894,12 +1684,6 @@ trait Trees { self: Universe =>
*/
type Select >: Null <: RefTree with SelectApi
- /** A tag that preserves the identity of the `Select` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val SelectTag: ClassTag[Select]
-
/** The constructor/extractor for `Select` instances.
* @group Extractors
*/
@@ -1939,12 +1723,6 @@ trait Trees { self: Universe =>
*/
type Ident >: Null <: RefTree with IdentApi
- /** A tag that preserves the identity of the `Ident` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val IdentTag: ClassTag[Ident]
-
/** The constructor/extractor for `Ident` instances.
* @group Extractors
*/
@@ -1984,12 +1762,6 @@ trait Trees { self: Universe =>
*/
type ReferenceToBoxed >: Null <: TermTree with ReferenceToBoxedApi
- /** A tag that preserves the identity of the `ReferenceToBoxed` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val ReferenceToBoxedTag: ClassTag[ReferenceToBoxed]
-
/** The constructor/extractor for `ReferenceToBoxed` instances.
* @group Extractors
*/
@@ -2034,12 +1806,6 @@ trait Trees { self: Universe =>
*/
type Literal >: Null <: TermTree with LiteralApi
- /** A tag that preserves the identity of the `Literal` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val LiteralTag: ClassTag[Literal]
-
/** The constructor/extractor for `Literal` instances.
* @group Extractors
*/
@@ -2073,12 +1839,6 @@ trait Trees { self: Universe =>
*/
type Annotated >: Null <: AnyRef with Tree with AnnotatedApi
- /** A tag that preserves the identity of the `Annotated` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val AnnotatedTag: ClassTag[Annotated]
-
/** The constructor/extractor for `Annotated` instances.
* @group Extractors
*/
@@ -2113,12 +1873,6 @@ trait Trees { self: Universe =>
*/
type SingletonTypeTree >: Null <: TypTree with SingletonTypeTreeApi
- /** A tag that preserves the identity of the `SingletonTypeTree` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val SingletonTypeTreeTag: ClassTag[SingletonTypeTree]
-
/** The constructor/extractor for `SingletonTypeTree` instances.
* @group Extractors
*/
@@ -2149,12 +1903,6 @@ trait Trees { self: Universe =>
*/
type SelectFromTypeTree >: Null <: TypTree with RefTree with SelectFromTypeTreeApi
- /** A tag that preserves the identity of the `SelectFromTypeTree` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val SelectFromTypeTreeTag: ClassTag[SelectFromTypeTree]
-
/** The constructor/extractor for `SelectFromTypeTree` instances.
* @group Extractors
*/
@@ -2196,12 +1944,6 @@ trait Trees { self: Universe =>
*/
type CompoundTypeTree >: Null <: TypTree with CompoundTypeTreeApi
- /** A tag that preserves the identity of the `CompoundTypeTree` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val CompoundTypeTreeTag: ClassTag[CompoundTypeTree]
-
/** The constructor/extractor for `CompoundTypeTree` instances.
* @group Extractors
*/
@@ -2232,12 +1974,6 @@ trait Trees { self: Universe =>
*/
type AppliedTypeTree >: Null <: TypTree with AppliedTypeTreeApi
- /** A tag that preserves the identity of the `AppliedTypeTree` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val AppliedTypeTreeTag: ClassTag[AppliedTypeTree]
-
/** The constructor/extractor for `AppliedTypeTree` instances.
* @group Extractors
*/
@@ -2280,12 +2016,6 @@ trait Trees { self: Universe =>
*/
type TypeBoundsTree >: Null <: TypTree with TypeBoundsTreeApi
- /** A tag that preserves the identity of the `TypeBoundsTree` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val TypeBoundsTreeTag: ClassTag[TypeBoundsTree]
-
/** The constructor/extractor for `TypeBoundsTree` instances.
* @group Extractors
*/
@@ -2323,12 +2053,6 @@ trait Trees { self: Universe =>
*/
type ExistentialTypeTree >: Null <: TypTree with ExistentialTypeTreeApi
- /** A tag that preserves the identity of the `ExistentialTypeTree` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val ExistentialTypeTreeTag: ClassTag[ExistentialTypeTree]
-
/** The constructor/extractor for `ExistentialTypeTree` instances.
* @group Extractors
*/
@@ -2366,12 +2090,6 @@ trait Trees { self: Universe =>
*/
type TypeTree >: Null <: TypTree with TypeTreeApi
- /** A tag that preserves the identity of the `TypeTree` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val TypeTreeTag: ClassTag[TypeTree]
-
/** The constructor/extractor for `TypeTree` instances.
* @group Extractors
*/
@@ -2405,6 +2123,15 @@ trait Trees { self: Universe =>
*/
val emptyValDef: ValDef
+ /** An empty superclass constructor call corresponding to:
+ * super.<init>()
+ * This is used as a placeholder in the primary constructor body in class templates
+   * to denote the insertion point of a call to the superclass constructor after the typechecker
+ * figures out the superclass of a given template.
+ * @group Trees
+ */
+ val pendingSuperCall: Apply
+
// ---------------------- factories ----------------------------------------------
/** A factory method for `ClassDef` nodes.
@@ -2897,7 +2624,8 @@ trait Trees { self: Universe =>
def transform(tree: Tree): Tree = itransform(this, tree)
/** Transforms a list of trees. */
- def transformTrees(trees: List[Tree]): List[Tree] = trees mapConserve (transform(_))
+ def transformTrees(trees: List[Tree]): List[Tree] =
+ if (trees.isEmpty) Nil else trees mapConserve transform
/** Transforms a `Template`. */
def transformTemplate(tree: Template): Template =
@@ -2907,7 +2635,8 @@ trait Trees { self: Universe =>
trees mapConserve (tree => transform(tree).asInstanceOf[TypeDef])
/** Transforms a `ValDef`. */
def transformValDef(tree: ValDef): ValDef =
- if (tree.isEmpty) tree else transform(tree).asInstanceOf[ValDef]
+ if (tree eq emptyValDef) tree
+ else transform(tree).asInstanceOf[ValDef]
/** Transforms a list of `ValDef` nodes. */
def transformValDefs(trees: List[ValDef]): List[ValDef] =
trees mapConserve (transformValDef(_))
@@ -2926,8 +2655,10 @@ trait Trees { self: Universe =>
if (exprOwner != currentOwner && stat.isTerm) atOwner(exprOwner)(transform(stat))
else transform(stat)) filter (EmptyTree != _)
/** Transforms `Modifiers`. */
- def transformModifiers(mods: Modifiers): Modifiers =
- mods.mapAnnotations(transformTrees)
+ def transformModifiers(mods: Modifiers): Modifiers = {
+ if (mods.annotations.isEmpty) mods
+ else mods mapAnnotations transformTrees
+ }
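
The Transformer changes above are small short-circuits (empty annotation lists and emptyValDef are passed through untouched) and do not change the contract of transform. A small sketch of a custom Transformer, assuming the runtime universe; RenameX is a hypothetical example:

    import scala.reflect.runtime.universe._

    // Rewrites every reference to the identifier `x` into `y`.
    object RenameX extends Transformer {
      override def transform(tree: Tree): Tree = tree match {
        case Ident(TermName("x")) => Ident(TermName("y"))
        case _                    => super.transform(tree)
      }
    }

    val before = Apply(Select(Ident(TermName("x")), TermName("$plus")), List(Ident(TermName("x"))))
    val after  = RenameX.transform(before)   // y.$plus(y)
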
/** Transforms a tree with a given owner symbol. */
def atOwner[A](owner: Symbol)(trans: => A): A = {
@@ -2956,12 +2687,6 @@ trait Trees { self: Universe =>
*/
type Modifiers >: Null <: AnyRef with ModifiersApi
- /** A tag that preserves the identity of the `Modifiers` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Traversal
- */
- implicit val ModifiersTag: ClassTag[Modifiers]
-
/** The API that all Modifiers support
* @group API
*/
@@ -2993,15 +2718,19 @@ trait Trees { self: Universe =>
/** The constructor/extractor for `Modifiers` instances.
* @group Traversal
*/
- val Modifiers: ModifiersCreator
+ val Modifiers: ModifiersExtractor
+
+ @deprecated("Use ModifiersExtractor instead", "2.11.0")
+ type ModifiersCreator = ModifiersExtractor
/** An extractor class to create and pattern match with syntax `Modifiers(flags, privateWithin, annotations)`.
* Modifiers encapsulate flags, visibility annotations and Scala annotations for member definitions.
* @group Traversal
*/
- abstract class ModifiersCreator {
+ abstract class ModifiersExtractor {
def apply(): Modifiers = Modifiers(NoFlags, tpnme.EMPTY, List())
def apply(flags: FlagSet, privateWithin: Name, annotations: List[Tree]): Modifiers
+ def unapply(mods: Modifiers): Option[(FlagSet, Name, List[Tree])]
}
/** The factory for `Modifiers` instances.
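
ModifiersCreator is renamed to ModifiersExtractor above and gains an unapply, so Modifiers now supports pattern matching like the other extractors. A sketch, assuming the runtime universe:

    import scala.reflect.runtime.universe._

    val mods = Modifiers(Flag.PRIVATE | Flag.LAZY)

    val Modifiers(flags, privateWithin, annots) = mods   // via the new unapply
    mods.hasFlag(Flag.LAZY)                              // true
    annots.isEmpty                                       // true
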
diff --git a/src/reflect/scala/reflect/api/TypeCreator.scala b/src/reflect/scala/reflect/api/TypeCreator.scala
index 24271cb48d..37fff90b43 100644
--- a/src/reflect/scala/reflect/api/TypeCreator.scala
+++ b/src/reflect/scala/reflect/api/TypeCreator.scala
@@ -1,4 +1,5 @@
-package scala.reflect
+package scala
+package reflect
package api
/** A mirror-aware factory for types.
diff --git a/src/reflect/scala/reflect/api/Types.scala b/src/reflect/scala/reflect/api/Types.scala
index 72163ef0e9..9d2d06da69 100644
--- a/src/reflect/scala/reflect/api/Types.scala
+++ b/src/reflect/scala/reflect/api/Types.scala
@@ -1,4 +1,5 @@
-package scala.reflect
+package scala
+package reflect
package api
/**
@@ -50,7 +51,8 @@ package api
*
* @contentDiagram hideNodes "*Api"
*/
-trait Types { self: Universe =>
+trait Types {
+ self: Universe =>
/** The type of Scala types, and also Scala type signatures.
* (No difference is internally made between the two).
@@ -59,12 +61,6 @@ trait Types { self: Universe =>
*/
type Type >: Null <: TypeApi
- /** A tag that preserves the identity of the `Type` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val TypeTagg: ClassTag[Type]
-
/** This constant is used as a special value that indicates that no meaningful type exists.
* @group Types
*/
@@ -214,6 +210,12 @@ trait Types { self: Universe =>
/******************* helpers *******************/
+ /** Provides an alternate if type is NoType.
+ *
+ * @group Helpers
+ */
+ def orElse(alt: => Type): Type
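
The orElse helper above gives Type the same null-object fallback idiom that trees already have. A sketch, assuming the runtime universe:

    import scala.reflect.runtime.universe._

    // member returns NoSymbol for a missing name, and NoSymbol.typeSignature is NoType,
    // so orElse supplies the fallback.
    val t = typeOf[List[Int]].member(TypeName("NoSuchMember")).typeSignature orElse typeOf[Any]
    // t =:= typeOf[Any]
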
+
/** Substitute symbols in `to` for corresponding occurrences of references to
* symbols `from` in this type.
*/
@@ -256,12 +258,6 @@ trait Types { self: Universe =>
*/
type SingletonType >: Null <: Type
- /** A tag that preserves the identity of the `SingletonType` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val SingletonTypeTag: ClassTag[SingletonType]
-
/** A singleton type that describes types of the form on the left with the
* corresponding `ThisType` representation to the right:
* {{{
@@ -272,12 +268,6 @@ trait Types { self: Universe =>
*/
type ThisType >: Null <: AnyRef with SingletonType with ThisTypeApi
- /** A tag that preserves the identity of the `ThisType` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val ThisTypeTag: ClassTag[ThisType]
-
/** The constructor/extractor for `ThisType` instances.
* @group Extractors
*/
@@ -316,12 +306,6 @@ trait Types { self: Universe =>
*/
type SingleType >: Null <: AnyRef with SingletonType with SingleTypeApi
- /** A tag that preserves the identity of the `SingleType` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val SingleTypeTag: ClassTag[SingleType]
-
/** The constructor/extractor for `SingleType` instances.
* @group Extractors
*/
@@ -361,12 +345,6 @@ trait Types { self: Universe =>
*/
type SuperType >: Null <: AnyRef with SingletonType with SuperTypeApi
- /** A tag that preserves the identity of the `SuperType` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val SuperTypeTag: ClassTag[SuperType]
-
/** The constructor/extractor for `SuperType` instances.
* @group Extractors
*/
@@ -406,12 +384,6 @@ trait Types { self: Universe =>
*/
type ConstantType >: Null <: AnyRef with SingletonType with ConstantTypeApi
- /** A tag that preserves the identity of the `ConstantType` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val ConstantTypeTag: ClassTag[ConstantType]
-
/** The constructor/extractor for `ConstantType` instances.
* @group Extractors
*/
@@ -450,12 +422,6 @@ trait Types { self: Universe =>
*/
type TypeRef >: Null <: AnyRef with Type with TypeRefApi
- /** A tag that preserves the identity of the `TypeRef` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val TypeRefTag: ClassTag[TypeRef]
-
/** The constructor/extractor for `TypeRef` instances.
* @group Extractors
*/
@@ -497,12 +463,6 @@ trait Types { self: Universe =>
*/
type CompoundType >: Null <: AnyRef with Type
- /** A tag that preserves the identity of the `CompoundType` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val CompoundTypeTag: ClassTag[CompoundType]
-
/** The `RefinedType` type defines types of any of the forms on the left,
* with their RefinedType representations to the right.
* {{{
@@ -515,12 +475,6 @@ trait Types { self: Universe =>
*/
type RefinedType >: Null <: AnyRef with CompoundType with RefinedTypeApi
- /** A tag that preserves the identity of the `RefinedType` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val RefinedTypeTag: ClassTag[RefinedType]
-
/** The constructor/extractor for `RefinedType` instances.
* @group Extractors
*/
@@ -567,12 +521,6 @@ trait Types { self: Universe =>
*/
type ClassInfoType >: Null <: AnyRef with CompoundType with ClassInfoTypeApi
- /** A tag that preserves the identity of the `ClassInfoType` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val ClassInfoTypeTag: ClassTag[ClassInfoType]
-
/** The constructor/extractor for `ClassInfoType` instances.
* @group Extractors
*/
@@ -610,12 +558,6 @@ trait Types { self: Universe =>
*/
type MethodType >: Null <: AnyRef with Type with MethodTypeApi
- /** A tag that preserves the identity of the `MethodType` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val MethodTypeTag: ClassTag[MethodType]
-
/** The constructor/extractor for `MethodType` instances.
* @group Extractors
*/
@@ -660,12 +602,6 @@ trait Types { self: Universe =>
*/
type NullaryMethodType >: Null <: AnyRef with Type with NullaryMethodTypeApi
- /** A tag that preserves the identity of the `NullaryMethodType` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val NullaryMethodTypeTag: ClassTag[NullaryMethodType]
-
/** The constructor/extractor for `NullaryMethodType` instances.
* @group Extractors
*/
@@ -696,12 +632,6 @@ trait Types { self: Universe =>
*/
type PolyType >: Null <: AnyRef with Type with PolyTypeApi
- /** A tag that preserves the identity of the `PolyType` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val PolyTypeTag: ClassTag[PolyType]
-
/** The constructor/extractor for `PolyType` instances.
* @group Extractors
*/
@@ -736,12 +666,6 @@ trait Types { self: Universe =>
*/
type ExistentialType >: Null <: AnyRef with Type with ExistentialTypeApi
- /** A tag that preserves the identity of the `ExistentialType` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val ExistentialTypeTag: ClassTag[ExistentialType]
-
/** The constructor/extractor for `ExistentialType` instances.
* @group Extractors
*/
@@ -777,12 +701,6 @@ trait Types { self: Universe =>
*/
type AnnotatedType >: Null <: AnyRef with Type with AnnotatedTypeApi
- /** A tag that preserves the identity of the `AnnotatedType` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val AnnotatedTypeTag: ClassTag[AnnotatedType]
-
/** The constructor/extractor for `AnnotatedType` instances.
* @group Extractors
*/
@@ -828,12 +746,6 @@ trait Types { self: Universe =>
*/
type TypeBounds >: Null <: AnyRef with Type with TypeBoundsApi
- /** A tag that preserves the identity of the `TypeBounds` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val TypeBoundsTag: ClassTag[TypeBounds]
-
/** The constructor/extractor for `TypeBounds` instances.
* @group Extractors
*/
@@ -885,12 +797,6 @@ trait Types { self: Universe =>
*/
type BoundedWildcardType >: Null <: AnyRef with Type with BoundedWildcardTypeApi
- /** A tag that preserves the identity of the `BoundedWildcardType` abstract type from erasure.
- * Can be used for pattern matching, instance tests, serialization and likes.
- * @group Tags
- */
- implicit val BoundedWildcardTypeTag: ClassTag[BoundedWildcardType]
-
/** The constructor/extractor for `BoundedWildcardType` instances.
* @group Extractors
*/
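The implicit ClassTag values removed throughout this file are not dropped; they reappear via the ImplicitTags mix-in added to Universe in the next file, so erasure-checked pattern matches over these abstract types keep compiling. As a hedged illustration (runtime reflection, not part of this patch) of the representations and extractors documented above:

    import scala.reflect.runtime.universe._

    object TypeShapeDemo extends App {
      // TypeRef: an applied type such as List[Int]
      typeOf[List[Int]] match {
        case TypeRef(pre, sym, args) => println(s"prefix=$pre symbol=$sym args=$args")
        case other                   => println(s"unexpected shape: $other")
      }
      // SingleType: the type of a stable value such as Nil
      typeOf[Nil.type] match {
        case SingleType(pre, sym) => println(s"singleton type of $sym")
        case other                => println(s"unexpected shape: $other")
      }
      // both type tests against the abstract TypeRef/SingleType types rely on the
      // implicit tags that this commit consolidates in ImplicitTags
    }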
diff --git a/src/reflect/scala/reflect/api/Universe.scala b/src/reflect/scala/reflect/api/Universe.scala
index 4928b8bb38..cb629f9c5a 100644
--- a/src/reflect/scala/reflect/api/Universe.scala
+++ b/src/reflect/scala/reflect/api/Universe.scala
@@ -1,4 +1,5 @@
-package scala.reflect
+package scala
+package reflect
package api
/**
@@ -68,6 +69,7 @@ abstract class Universe extends Symbols
with Exprs
with TypeTags
with TagInterop
+ with ImplicitTags
with StandardDefinitions
with StandardNames
with BuildUtils
@@ -92,5 +94,5 @@ abstract class Universe extends Symbols
*/
// implementation is hardwired to `scala.reflect.reify.Taggers`
// using the mechanism implemented in `scala.tools.reflect.FastTrack`
- def reify[T](expr: T): Expr[T] = ??? // macro
-} \ No newline at end of file
+ def reify[T](expr: T): Expr[T] = macro ???
+}
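Two things happen in this hunk: the fast-track reify anchor switches to the `= macro ???` notation, and the package clause is split. The split is repeated across every file below and is not cosmetic: chained package clauses bring the members of each enclosing package into scope. A minimal two-file sketch (foo and Helper are hypothetical, used only for illustration):

    // a.scala
    package foo
    object Helper { def greet = "hi" }

    // b.scala
    package foo
    package bar
    object Demo {
      val g = Helper.greet   // resolves without an import; under `package foo.bar`
                             // this would need `import foo.Helper` or a full path
    }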
diff --git a/src/reflect/scala/reflect/api/package.scala b/src/reflect/scala/reflect/api/package.scala
index dbda84dd0e..a8f409e123 100644
--- a/src/reflect/scala/reflect/api/package.scala
+++ b/src/reflect/scala/reflect/api/package.scala
@@ -1,4 +1,5 @@
-package scala.reflect
+package scala
+package reflect
import scala.reflect.api.{Universe => ApiUniverse}
@@ -42,6 +43,6 @@ package object api {
// implementation is hardwired into `scala.reflect.reify.Taggers`
// using the mechanism implemented in `scala.tools.reflect.FastTrack`
// todo. once we have implicit macros for tag generation, we can remove these anchors
- private[scala] def materializeWeakTypeTag[T](u: ApiUniverse): u.WeakTypeTag[T] = ??? // macro
- private[scala] def materializeTypeTag[T](u: ApiUniverse): u.TypeTag[T] = ??? // macro
+ private[scala] def materializeWeakTypeTag[T](u: ApiUniverse): u.WeakTypeTag[T] = macro ???
+ private[scala] def materializeTypeTag[T](u: ApiUniverse): u.TypeTag[T] = macro ???
} \ No newline at end of file
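The two materialize* anchors are what the compiler invokes when user code requests a tag through a context bound; a hedged usage sketch against the runtime universe:

    import scala.reflect.runtime.universe._

    object TagDemo extends App {
      def tagOf[T: TypeTag]         = typeTag[T]       // TypeTag materialized by the compiler
      def weakTagOf[T: WeakTypeTag] = weakTypeTag[T]   // tolerates partially unknown types
      println(tagOf[List[Int]].tpe)                    // prints List[Int]
    }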
diff --git a/src/reflect/scala/reflect/internal/AnnotationCheckers.scala b/src/reflect/scala/reflect/internal/AnnotationCheckers.scala
index 1ab975b233..74310e1c34 100644
--- a/src/reflect/scala/reflect/internal/AnnotationCheckers.scala
+++ b/src/reflect/scala/reflect/internal/AnnotationCheckers.scala
@@ -3,7 +3,8 @@
* @author Martin Odersky
*/
-package scala.reflect
+package scala
+package reflect
package internal
/** Additions to the type checker that can be added at
@@ -52,7 +53,7 @@ trait AnnotationCheckers {
* given type tp, taking into account the given mode (see method adapt in trait Typers).
*/
@deprecated("Create an AnalyzerPlugin and use canAdaptAnnotations", "2.10.1")
- def canAdaptAnnotations(tree: Tree, mode: Int, pt: Type): Boolean = false
+ def canAdaptAnnotations(tree: Tree, mode: Mode, pt: Type): Boolean = false
/**
* Adapt a tree that has an annotated type to the given type tp, taking into account the given
@@ -62,7 +63,7 @@ trait AnnotationCheckers {
 * class cannot do the adapting, it should return the tree unchanged.
*/
@deprecated("Create an AnalyzerPlugin and use adaptAnnotations", "2.10.1")
- def adaptAnnotations(tree: Tree, mode: Int, pt: Type): Tree = tree
+ def adaptAnnotations(tree: Tree, mode: Mode, pt: Type): Tree = tree
/**
* Adapt the type of a return expression. The decision of a typer plugin whether the type
@@ -126,13 +127,13 @@ trait AnnotationCheckers {
else annotationCheckers.foldLeft(tpe)((tpe, checker) =>
if (!checker.isActive()) tpe else checker.addAnnotations(tree, tpe))
- def canAdaptAnnotations(tree: Tree, mode: Int, pt: Type): Boolean =
+ def canAdaptAnnotations(tree: Tree, mode: Mode, pt: Type): Boolean =
if (annotationCheckers.isEmpty) false
else annotationCheckers.exists(checker => {
checker.isActive() && checker.canAdaptAnnotations(tree, mode, pt)
})
- def adaptAnnotations(tree: Tree, mode: Int, pt: Type): Tree =
+ def adaptAnnotations(tree: Tree, mode: Mode, pt: Type): Tree =
if (annotationCheckers.isEmpty) tree
else annotationCheckers.foldLeft(tree)((tree, checker) =>
if (!checker.isActive()) tree else checker.adaptAnnotations(tree, mode, pt))
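With this change any third-party AnnotationChecker that overrides canAdaptAnnotations or adaptAnnotations must accept a Mode instead of an Int. A heavily hedged registration sketch (requires scala-compiler on the classpath; only the mandatory annotationsConform is implemented):

    import scala.tools.nsc.{Global, Settings}

    object CheckerDemo extends App {
      val global = new Global(new Settings)
      import global._
      addAnnotationChecker(new AnnotationChecker {
        def annotationsConform(tp1: Type, tp2: Type): Boolean = true   // accept everything
      })
    }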
diff --git a/src/reflect/scala/reflect/internal/AnnotationInfos.scala b/src/reflect/scala/reflect/internal/AnnotationInfos.scala
index 032b45316e..73ca74f08e 100644
--- a/src/reflect/scala/reflect/internal/AnnotationInfos.scala
+++ b/src/reflect/scala/reflect/internal/AnnotationInfos.scala
@@ -3,17 +3,17 @@
* @author Martin Odersky
*/
-package scala.reflect
+package scala
+package reflect
package internal
-import util._
import pickling.ByteCodecs
import scala.annotation.tailrec
import scala.collection.immutable.ListMap
/** AnnotationInfo and its helpers */
trait AnnotationInfos extends api.Annotations { self: SymbolTable =>
- import definitions.{ ThrowsClass, StaticAnnotationClass, isMetaAnnotation }
+ import definitions.{ ThrowsClass, ThrowableClass, StaticAnnotationClass, isMetaAnnotation }
// Common annotation code between Symbol and Type.
// For methods altering the annotation list, on Symbol it mutates
@@ -40,8 +40,7 @@ trait AnnotationInfos extends api.Annotations { self: SymbolTable =>
// monomorphic one by introducing existentials, see SI-7009 for details
existentialAbstraction(throwableSym.typeParams, throwableSym.tpe)
}
- val throwsAnn = AnnotationInfo(appliedType(definitions.ThrowsClass, throwableTpe), List(Literal(Constant(throwableTpe))), Nil)
- withAnnotations(List(throwsAnn))
+ this withAnnotation AnnotationInfo(appliedType(ThrowsClass, throwableTpe), List(Literal(Constant(throwableTpe))), Nil)
}
/** Tests for, get, or remove an annotation */
@@ -122,25 +121,32 @@ trait AnnotationInfos extends api.Annotations { self: SymbolTable =>
* must be `String`. This specialised class is used to encode Scala
 * signatures for reasons of efficiency, both in terms of class-file size
 * and in terms of compiler performance.
+ * Details about the storage format of pickles at the bytecode level (classfile annotations) can be found in SIP-10.
*/
case class ScalaSigBytes(bytes: Array[Byte]) extends ClassfileAnnotArg {
override def toString = (bytes map { byte => (byte & 0xff).toHexString }).mkString("[ ", " ", " ]")
- lazy val encodedBytes = ByteCodecs.encode(bytes) // TODO remove after migration to ASM-based GenJVM complete
- def isLong: Boolean = (encodedBytes.length > 65535) // TODO remove after migration to ASM-based GenJVM complete
lazy val sevenBitsMayBeZero: Array[Byte] = {
mapToNextModSevenBits(scala.reflect.internal.pickling.ByteCodecs.encode8to7(bytes))
}
+
+ /* In order to store a byte array (the pickle) using a bytecode-level annotation,
+ * the most compact representation is used (which happens to be a string constant, not a byte array as one might expect).
+ * However, a String constant in a classfile annotation is limited to a maximum of 65535 characters.
+ * Method `fitsInOneString` tells us whether the pickle can be held by a single classfile-annotation of string-type.
+ * Otherwise an array of strings will be used.
+ */
def fitsInOneString: Boolean = {
+ // due to escaping, a zero byte in a classfile annotation of string type actually takes two characters.
val numZeros = (sevenBitsMayBeZero count { b => b == 0 })
- val res = (sevenBitsMayBeZero.length + numZeros) <= 65535
- assert(this.isLong == !res, "As things stand, can't just swap in `fitsInOneString()` for `isLong()`")
- res
+
+ (sevenBitsMayBeZero.length + numZeros) <= 65535
}
+
def sigAnnot: Type =
- if (this.isLong)
- definitions.ScalaLongSignatureAnnotation.tpe
- else
+ if (fitsInOneString)
definitions.ScalaSignatureAnnotation.tpe
+ else
+ definitions.ScalaLongSignatureAnnotation.tpe
private def mapToNextModSevenBits(src: Array[Byte]): Array[Byte] = {
var i = 0
@@ -304,10 +310,6 @@ trait AnnotationInfos extends api.Annotations { self: SymbolTable =>
/** Check whether any of the arguments mention a symbol */
def refsSymbol(sym: Symbol) = hasArgWhich(_.symbol == sym)
- /** Change all ident's with Symbol "from" to instead use symbol "to" */
- def substIdentSyms(from: Symbol, to: Symbol) =
- AnnotationInfo(atp, args map (_ substituteSymbols (List(from), List(to))), assocs) setPos pos
-
def stringArg(index: Int) = constantAtIndex(index) map (_.stringValue)
def intArg(index: Int) = constantAtIndex(index) map (_.intValue)
def symbolArg(index: Int) = argAtIndex(index) collect {
@@ -342,13 +344,15 @@ trait AnnotationInfos extends api.Annotations { self: SymbolTable =>
object UnmappableAnnotation extends CompleteAnnotationInfo(NoType, Nil, Nil)
+ object ErroneousAnnotation extends CompleteAnnotationInfo(ErrorType, Nil, Nil)
+
/** Extracts symbol of thrown exception from AnnotationInfo.
*
* Supports both “old-style” `@throws(classOf[Exception])`
 * as well as “new-style” `@throws[Exception]("cause")` annotations.
*/
object ThrownException {
- def unapply(ann: AnnotationInfo): Option[Symbol] =
+ def unapply(ann: AnnotationInfo): Option[Symbol] = {
ann match {
case AnnotationInfo(tpe, _, _) if tpe.typeSymbol != ThrowsClass =>
None
@@ -356,8 +360,11 @@ trait AnnotationInfos extends api.Annotations { self: SymbolTable =>
case AnnotationInfo(_, List(Literal(Constant(tpe: Type))), _) =>
Some(tpe.typeSymbol)
// new-style: @throws[Exception], @throws[Exception]("cause")
- case AnnotationInfo(TypeRef(_, _, args), _, _) =>
- Some(args.head.typeSymbol)
+ case AnnotationInfo(TypeRef(_, _, arg :: _), _, _) =>
+ Some(arg.typeSymbol)
+ case AnnotationInfo(TypeRef(_, _, Nil), _, _) =>
+ Some(ThrowableClass)
}
+ }
}
}
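To make the size check above concrete: after the 8-to-7-bit encoding, every zero byte is escaped to two characters in the classfile string constant, so the effective length is the array length plus the number of zero bytes, and that sum must stay within the 65535-character limit on string constants. A standalone restatement of the arithmetic used by fitsInOneString:

    def fitsInOneString(sevenBitsMayBeZero: Array[Byte]): Boolean = {
      val numZeros = sevenBitsMayBeZero.count(_ == 0)   // each zero byte costs two characters
      sevenBitsMayBeZero.length + numZeros <= 65535     // classfile string-constant limit
    }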
diff --git a/src/reflect/scala/reflect/internal/BaseTypeSeqs.scala b/src/reflect/scala/reflect/internal/BaseTypeSeqs.scala
index 3c2b128c52..368e1cde30 100644
--- a/src/reflect/scala/reflect/internal/BaseTypeSeqs.scala
+++ b/src/reflect/scala/reflect/internal/BaseTypeSeqs.scala
@@ -2,7 +2,8 @@
* Copyright 2005-2013 LAMP/EPFL
* @author Martin Odersky
*/
-package scala.reflect
+package scala
+package reflect
package internal
// todo implement in terms of BitSet
@@ -64,7 +65,7 @@ trait BaseTypeSeqs {
//Console.println("compute closure of "+this+" => glb("+variants+")")
pending += i
try {
- mergePrefixAndArgs(variants, -1, lubDepth(variants)) match {
+ mergePrefixAndArgs(variants, Variance.Contravariant, lubDepth(variants)) match {
case Some(tp0) =>
pending(i) = false
elems(i) = tp0
@@ -115,7 +116,7 @@ trait BaseTypeSeqs {
def map(f: Type => Type): BaseTypeSeq = {
// inlined `elems map f` for performance
val len = length
- var arr = new Array[Type](len)
+ val arr = new Array[Type](len)
var i = 0
while (i < len) {
arr(i) = f(elems(i))
@@ -158,7 +159,7 @@ trait BaseTypeSeqs {
val parents = tp.parents
// Console.println("computing baseTypeSeq of " + tsym.tpe + " " + parents)//DEBUG
val buf = new mutable.ListBuffer[Type]
- buf += tsym.tpe
+ buf += tsym.tpe_*
var btsSize = 1
if (parents.nonEmpty) {
val nparents = parents.length
@@ -180,7 +181,7 @@ trait BaseTypeSeqs {
def nextRawElem(i: Int): Type = {
val j = index(i)
val pbts = pbtss(i)
- if (j < pbts.length) pbts.rawElem(j) else AnyClass.tpe
+ if (j < pbts.length) pbts.rawElem(j) else AnyTpe
}
var minSym: Symbol = NoSymbol
while (minSym != AnyClass) {
@@ -193,15 +194,23 @@ trait BaseTypeSeqs {
i += 1
}
var minTypes: List[Type] = List()
+ def alreadyInMinTypes(tp: Type): Boolean = {
+ @annotation.tailrec def loop(tps: List[Type]): Boolean = tps match {
+ case Nil => false
+ case x :: xs => (tp =:= x) || loop(xs)
+ }
+ loop(minTypes)
+ }
+
i = 0
while (i < nparents) {
if (nextTypeSymbol(i) == minSym) {
nextRawElem(i) match {
case RefinedType(variants, decls) =>
for (tp <- variants)
- if (!(minTypes exists (tp =:= _))) minTypes = tp :: minTypes
+ if (!alreadyInMinTypes(tp)) minTypes ::= tp
case tp =>
- if (!(minTypes exists (tp =:= _))) minTypes = tp :: minTypes
+ if (!alreadyInMinTypes(tp)) minTypes ::= tp
}
index(i) = index(i) + 1
}
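The new alreadyInMinTypes helper is the usual "linear membership under a custom equivalence" loop, written with @tailrec instead of a closure passed to exists. The same shape in isolation, as a self-contained sketch:

    import scala.annotation.tailrec

    object MembershipDemo extends App {
      def containsEquiv[A](xs: List[A], x: A)(equiv: (A, A) => Boolean): Boolean = {
        @tailrec def loop(rest: List[A]): Boolean = rest match {
          case Nil     => false
          case y :: ys => equiv(x, y) || loop(ys)   // || keeps the recursive call in tail position
        }
        loop(xs)
      }
      println(containsEquiv(List("a", "B"), "b")(_ equalsIgnoreCase _))   // true
    }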
diff --git a/src/reflect/scala/reflect/internal/BuildUtils.scala b/src/reflect/scala/reflect/internal/BuildUtils.scala
index 9f41f0336e..ece2d28be3 100644
--- a/src/reflect/scala/reflect/internal/BuildUtils.scala
+++ b/src/reflect/scala/reflect/internal/BuildUtils.scala
@@ -1,8 +1,7 @@
-package scala.reflect
+package scala
+package reflect
package internal
-import Flags._
-
trait BuildUtils { self: SymbolTable =>
class BuildImpl extends BuildApi {
@@ -47,8 +46,6 @@ trait BuildUtils { self: SymbolTable =>
def flagsFromBits(bits: Long): FlagSet = bits
- def emptyValDef: ValDef = self.emptyValDef
-
def This(sym: Symbol): Tree = self.This(sym)
def Select(qualifier: Tree, sym: Symbol): Select = self.Select(qualifier, sym)
diff --git a/src/reflect/scala/reflect/internal/CapturedVariables.scala b/src/reflect/scala/reflect/internal/CapturedVariables.scala
index 77909d9157..2c5e87b95d 100644
--- a/src/reflect/scala/reflect/internal/CapturedVariables.scala
+++ b/src/reflect/scala/reflect/internal/CapturedVariables.scala
@@ -1,4 +1,5 @@
-package scala.reflect
+package scala
+package reflect
package internal
import Flags._
@@ -19,7 +20,7 @@ trait CapturedVariables { self: SymbolTable =>
/** Convert type of a captured variable to *Ref type.
*/
def capturedVariableType(vble: Symbol): Type =
- capturedVariableType(vble, NoType, false)
+ capturedVariableType(vble, NoType, erasedTypes = false)
/** Convert type of a captured variable to *Ref type.
*/
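For context on what "captured variable" means here: a local var referenced from a nested function is boxed by the compiler into a scala.runtime.*Ref cell, and capturedVariableType computes that *Ref type. A conceptual sketch of the surface behaviour (not this patch's code):

    object CaptureDemo extends App {
      def run(): Int = {
        var counter = 0                    // local var captured by the closure below;
        val inc = () => { counter += 1 }   // the compiler rewrites it to a scala.runtime.IntRef
        inc(); inc()
        counter
      }
      println(run())   // 2
    }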
diff --git a/src/reflect/scala/reflect/internal/Chars.scala b/src/reflect/scala/reflect/internal/Chars.scala
index 2d07092862..74413fdaba 100644
--- a/src/reflect/scala/reflect/internal/Chars.scala
+++ b/src/reflect/scala/reflect/internal/Chars.scala
@@ -2,7 +2,8 @@
* Copyright 2006-2013 LAMP/EPFL
* @author Martin Odersky
*/
-package scala.reflect
+package scala
+package reflect
package internal
import scala.annotation.{ tailrec, switch }
diff --git a/src/reflect/scala/reflect/internal/ClassfileConstants.scala b/src/reflect/scala/reflect/internal/ClassfileConstants.scala
index c198271fb1..a7ce044780 100644
--- a/src/reflect/scala/reflect/internal/ClassfileConstants.scala
+++ b/src/reflect/scala/reflect/internal/ClassfileConstants.scala
@@ -3,13 +3,13 @@
* @author Martin Odersky
*/
-package scala.reflect
+package scala
+package reflect
package internal
import scala.annotation.switch
object ClassfileConstants {
-
final val JAVA_MAGIC = 0xCAFEBABE
final val JAVA_MAJOR_VERSION = 45
final val JAVA_MINOR_VERSION = 3
@@ -345,7 +345,7 @@ object ClassfileConstants {
case JAVA_ACC_PRIVATE => PRIVATE
case JAVA_ACC_PROTECTED => PROTECTED
case JAVA_ACC_FINAL => FINAL
- case JAVA_ACC_SYNTHETIC => SYNTHETIC
+ case JAVA_ACC_SYNTHETIC => SYNTHETIC | ARTIFACT // maybe should be just artifact?
case JAVA_ACC_STATIC => STATIC
case JAVA_ACC_ABSTRACT => if (isAnnotation) 0L else if (isClass) ABSTRACT else DEFERRED
case JAVA_ACC_INTERFACE => if (isAnnotation) 0L else TRAIT | INTERFACE | ABSTRACT
@@ -353,7 +353,7 @@ object ClassfileConstants {
}
private def translateFlags(jflags: Int, baseFlags: Long): Long = {
var res: Long = JAVA | baseFlags
- /** fast, elegant, maintainable, pick any two... */
+ /* fast, elegant, maintainable, pick any two... */
res |= translateFlag(jflags & JAVA_ACC_PRIVATE)
res |= translateFlag(jflags & JAVA_ACC_PROTECTED)
res |= translateFlag(jflags & JAVA_ACC_FINAL)
@@ -375,7 +375,7 @@ object ClassfileConstants {
}
def methodFlags(jflags: Int): Long = {
initFields(jflags)
- translateFlags(jflags, if ((jflags & JAVA_ACC_BRIDGE) != 0) BRIDGE else 0)
+ translateFlags(jflags, if ((jflags & JAVA_ACC_BRIDGE) != 0) BRIDGE | ARTIFACT else 0)
}
}
object FlagTranslation extends FlagTranslation { }
@@ -383,11 +383,4 @@ object ClassfileConstants {
def toScalaMethodFlags(flags: Int): Long = FlagTranslation methodFlags flags
def toScalaClassFlags(flags: Int): Long = FlagTranslation classFlags flags
def toScalaFieldFlags(flags: Int): Long = FlagTranslation fieldFlags flags
-
- @deprecated("Use another method in this object", "2.10.0")
- def toScalaFlags(flags: Int, isClass: Boolean = false, isField: Boolean = false): Long = (
- if (isClass) toScalaClassFlags(flags)
- else if (isField) toScalaFieldFlags(flags)
- else toScalaMethodFlags(flags)
- )
}
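The flag-translation tweaks above (JVM synthetic members and bridges now also carry ARTIFACT) all follow the same mask-and-map shape; a generic standalone sketch with access bits taken from the classfile specification:

    object FlagDemo extends App {
      // JVM access bits, per the classfile specification
      final val ACC_PUBLIC    = 0x0001
      final val ACC_FINAL     = 0x0010
      final val ACC_BRIDGE    = 0x0040
      final val ACC_SYNTHETIC = 0x1000

      def describe(jflags: Int): List[String] =
        List(ACC_PUBLIC -> "public", ACC_FINAL -> "final",
             ACC_BRIDGE -> "bridge", ACC_SYNTHETIC -> "synthetic")
          .collect { case (bit, name) if (jflags & bit) != 0 => name }

      println(describe(0x1011))   // List(public, final, synthetic)
    }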
diff --git a/src/reflect/scala/reflect/internal/Constants.scala b/src/reflect/scala/reflect/internal/Constants.scala
index 28bc3e1dd0..511b39b8c6 100644
--- a/src/reflect/scala/reflect/internal/Constants.scala
+++ b/src/reflect/scala/reflect/internal/Constants.scala
@@ -3,7 +3,8 @@
* @author Martin Odersky
*/
-package scala.reflect
+package scala
+package reflect
package internal
import java.lang.Integer.toOctalString
@@ -62,17 +63,17 @@ trait Constants extends api.Constants {
def isAnyVal = UnitTag <= tag && tag <= DoubleTag
def tpe: Type = tag match {
- case UnitTag => UnitClass.tpe
- case BooleanTag => BooleanClass.tpe
- case ByteTag => ByteClass.tpe
- case ShortTag => ShortClass.tpe
- case CharTag => CharClass.tpe
- case IntTag => IntClass.tpe
- case LongTag => LongClass.tpe
- case FloatTag => FloatClass.tpe
- case DoubleTag => DoubleClass.tpe
- case StringTag => StringClass.tpe
- case NullTag => NullClass.tpe
+ case UnitTag => UnitTpe
+ case BooleanTag => BooleanTpe
+ case ByteTag => ByteTpe
+ case ShortTag => ShortTpe
+ case CharTag => CharTpe
+ case IntTag => IntTpe
+ case LongTag => LongTpe
+ case FloatTag => FloatTpe
+ case DoubleTag => DoubleTpe
+ case StringTag => StringTpe
+ case NullTag => NullTpe
case ClazzTag => ClassType(typeValue)
case EnumTag => EnumType(symbolValue)
}
@@ -94,7 +95,7 @@ trait Constants extends api.Constants {
def booleanValue: Boolean =
if (tag == BooleanTag) value.asInstanceOf[Boolean]
- else throw new Error("value " + value + " is not a boolean");
+ else throw new Error("value " + value + " is not a boolean")
def byteValue: Byte = tag match {
case ByteTag => value.asInstanceOf[Byte]
@@ -211,7 +212,7 @@ trait Constants extends api.Constants {
case '"' => "\\\""
case '\'' => "\\\'"
case '\\' => "\\\\"
- case _ => if (ch.isControl) "\\0" + toOctalString(ch) else String.valueOf(ch)
+ case _ => if (ch.isControl) "\\0" + toOctalString(ch.toInt) else String.valueOf(ch)
}
def escapedStringValue: String = {
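The escaping rule touched above renders a control character as a backslash-zero followed by the octal value of its code point; a standalone restatement covering only a subset of the named escapes:

    import java.lang.Integer.toOctalString

    object EscapeDemo extends App {
      def escapeChar(ch: Char): String = ch match {
        case '\n'             => "\\n"
        case '\t'             => "\\t"
        case '\\'             => "\\\\"
        case c if c.isControl => "\\0" + toOctalString(c.toInt)   // e.g. BEL -> \07
        case c                => String.valueOf(c)
      }
      println(escapeChar(7.toChar))   // \07
    }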
diff --git a/src/reflect/scala/reflect/internal/Definitions.scala b/src/reflect/scala/reflect/internal/Definitions.scala
index 629d82d254..851fc98a32 100644
--- a/src/reflect/scala/reflect/internal/Definitions.scala
+++ b/src/reflect/scala/reflect/internal/Definitions.scala
@@ -3,19 +3,20 @@
* @author Martin Odersky
*/
-package scala.reflect
+package scala
+package reflect
package internal
+import scala.language.postfixOps
import scala.annotation.{ switch, meta }
import scala.collection.{ mutable, immutable }
import Flags._
-import PartialFunction._
import scala.reflect.api.{Universe => ApiUniverse}
trait Definitions extends api.StandardDefinitions {
self: SymbolTable =>
- import rootMirror.{getModule, getClassByName, getRequiredClass, getRequiredModule, getRequiredPackage, getClassIfDefined, getModuleIfDefined, getPackageObject, getPackageObjectIfDefined, requiredClass, requiredModule}
+ import rootMirror.{getModule, getPackage, getClassByName, getRequiredClass, getRequiredModule, getClassIfDefined, getModuleIfDefined, getPackageObject, getPackageObjectIfDefined, requiredClass, requiredModule}
object definitions extends DefinitionsClass
@@ -31,7 +32,7 @@ trait Definitions extends api.StandardDefinitions {
val clazz = owner.newClassSymbol(name, NoPosition, flags)
clazz setInfoAndEnter ClassInfoType(parents, newScope, clazz)
}
- private def newMethod(owner: Symbol, name: TermName, formals: List[Type], restpe: Type, flags: Long = 0L): MethodSymbol = {
+ private def newMethod(owner: Symbol, name: TermName, formals: List[Type], restpe: Type, flags: Long): MethodSymbol = {
val msym = owner.newMethod(name.encode, NoPosition, flags)
val params = msym.newSyntheticValueParams(formals)
msym setInfo MethodType(params, restpe)
@@ -128,15 +129,15 @@ trait Definitions extends api.StandardDefinitions {
lazy val Boolean_or = getMemberMethod(BooleanClass, nme.ZOR)
lazy val Boolean_not = getMemberMethod(BooleanClass, nme.UNARY_!)
- lazy val UnitTpe = UnitClass.toTypeConstructor
- lazy val ByteTpe = ByteClass.toTypeConstructor
- lazy val ShortTpe = ShortClass.toTypeConstructor
- lazy val CharTpe = CharClass.toTypeConstructor
- lazy val IntTpe = IntClass.toTypeConstructor
- lazy val LongTpe = LongClass.toTypeConstructor
- lazy val FloatTpe = FloatClass.toTypeConstructor
- lazy val DoubleTpe = DoubleClass.toTypeConstructor
- lazy val BooleanTpe = BooleanClass.toTypeConstructor
+ lazy val UnitTpe = UnitClass.tpe
+ lazy val ByteTpe = ByteClass.tpe
+ lazy val ShortTpe = ShortClass.tpe
+ lazy val CharTpe = CharClass.tpe
+ lazy val IntTpe = IntClass.tpe
+ lazy val LongTpe = LongClass.tpe
+ lazy val FloatTpe = FloatClass.tpe
+ lazy val DoubleTpe = DoubleClass.tpe
+ lazy val BooleanTpe = BooleanClass.tpe
lazy val ScalaNumericValueClasses = ScalaValueClasses filterNot Set[Symbol](UnitClass, BooleanClass)
lazy val ScalaValueClassesNoUnit = ScalaValueClasses filterNot (_ eq UnitClass)
@@ -151,7 +152,6 @@ trait Definitions extends api.StandardDefinitions {
FloatClass,
DoubleClass
)
- def ScalaValueClassCompanions: List[Symbol] = ScalaValueClasses map (_.companionSymbol)
def ScalaPrimitiveValueClasses: List[ClassSymbol] = ScalaValueClasses
}
@@ -159,9 +159,6 @@ trait Definitions extends api.StandardDefinitions {
private var isInitialized = false
def isDefinitionsInitialized = isInitialized
- // symbols related to packages
- var emptypackagescope: Scope = null //debug
-
@deprecated("Moved to rootMirror.RootPackage", "2.10.0")
val RootPackage: ModuleSymbol = rootMirror.RootPackage
@@ -176,25 +173,13 @@ trait Definitions extends api.StandardDefinitions {
// It becomes tricky to create dedicated objects for other symbols because
// of initialization order issues.
- lazy val JavaLangPackage = getRequiredPackage(sn.JavaLang)
+ lazy val JavaLangPackage = getPackage("java.lang")
lazy val JavaLangPackageClass = JavaLangPackage.moduleClass.asClass
- lazy val ScalaPackage = getRequiredPackage(nme.scala_)
+ lazy val ScalaPackage = getPackage("scala")
lazy val ScalaPackageClass = ScalaPackage.moduleClass.asClass
- lazy val RuntimePackage = getRequiredPackage("scala.runtime")
+ lazy val RuntimePackage = getPackage("scala.runtime")
lazy val RuntimePackageClass = RuntimePackage.moduleClass.asClass
- lazy val JavaLangEnumClass = requiredClass[java.lang.Enum[_]]
-
- // convenient one-argument parameter lists
- lazy val anyparam = List(AnyClass.tpe)
- lazy val anyvalparam = List(AnyValClass.typeConstructor)
- lazy val anyrefparam = List(AnyRefClass.typeConstructor)
-
- // private parameter conveniences
- private def booltype = BooleanClass.tpe
- private def inttype = IntClass.tpe
- private def stringtype = StringClass.tpe
-
def javaTypeToValueClass(jtype: Class[_]): Symbol = jtype match {
case java.lang.Void.TYPE => UnitClass
case java.lang.Byte.TYPE => ByteClass
@@ -224,47 +209,77 @@ trait Definitions extends api.StandardDefinitions {
*/
def fullyInitializeSymbol(sym: Symbol): Symbol = {
sym.initialize
+ // Watch out for those darn raw types on method parameters
+ if (sym.owner.initialize.isJavaDefined)
+ sym.cookJavaRawInfo()
+
fullyInitializeType(sym.info)
- fullyInitializeType(sym.tpe)
+ fullyInitializeType(sym.tpe_*)
sym
}
def fullyInitializeType(tp: Type): Type = {
tp.typeParams foreach fullyInitializeSymbol
- tp.paramss.flatten foreach fullyInitializeSymbol
+ mforeach(tp.paramss)(fullyInitializeSymbol)
tp
}
def fullyInitializeScope(scope: Scope): Scope = {
scope.sorted foreach fullyInitializeSymbol
scope
}
+ /** Is this symbol a member of Object or Any? */
+ def isUniversalMember(sym: Symbol) = (
+ (sym ne NoSymbol)
+ && (ObjectClass isSubClass sym.owner)
+ )
+
+ /** Is this symbol unimportable? Unimportable symbols include:
+ * - constructors, because <init> is not a real name
+ * - private[this] members, which cannot be referenced from anywhere else
+ * - members of Any or Object, because every instance will inherit a
+ * definition which supersedes the imported one
+ */
+ def isUnimportable(sym: Symbol) = (
+ (sym eq NoSymbol)
+ || sym.isConstructor
+ || sym.isPrivateLocal
+ )
+ def isUnimportableUnlessRenamed(sym: Symbol) = isUnimportable(sym) || isUniversalMember(sym)
+ def isImportable(sym: Symbol) = !isUnimportable(sym)
+
/** Is this type equivalent to Any, AnyVal, or AnyRef? */
def isTrivialTopType(tp: Type) = (
- tp =:= AnyClass.tpe
- || tp =:= AnyValClass.tpe
- || tp =:= AnyRefClass.tpe
+ tp =:= AnyTpe
+ || tp =:= AnyValTpe
+ || tp =:= AnyRefTpe
)
- /** Does this type have a parent which is none of Any, AnyVal, or AnyRef? */
- def hasNonTrivialParent(tp: Type) = tp.parents exists (t => !isTrivialTopType(tp))
private def fixupAsAnyTrait(tpe: Type): Type = tpe match {
case ClassInfoType(parents, decls, clazz) =>
if (parents.head.typeSymbol == AnyClass) tpe
else {
assert(parents.head.typeSymbol == ObjectClass, parents)
- ClassInfoType(AnyClass.tpe :: parents.tail, decls, clazz)
+ ClassInfoType(AnyTpe :: parents.tail, decls, clazz)
}
case PolyType(tparams, restpe) =>
PolyType(tparams, fixupAsAnyTrait(restpe))
-// case _ => tpe
}
// top types
lazy val AnyClass = enterNewClass(ScalaPackageClass, tpnme.Any, Nil, ABSTRACT)
- lazy val AnyRefClass = newAlias(ScalaPackageClass, tpnme.AnyRef, ObjectClass.tpe)
+ lazy val AnyRefClass = newAlias(ScalaPackageClass, tpnme.AnyRef, ObjectTpe)
lazy val ObjectClass = getRequiredClass(sn.Object.toString)
- lazy val AnyTpe = definitions.AnyClass.toTypeConstructor
- lazy val AnyRefTpe = definitions.AnyRefClass.toTypeConstructor
- lazy val ObjectTpe = definitions.ObjectClass.toTypeConstructor
+
+ // Cached types for core monomorphic classes
+ lazy val AnyRefTpe = AnyRefClass.tpe
+ lazy val AnyTpe = AnyClass.tpe
+ lazy val AnyValTpe = AnyValClass.tpe
+ lazy val BoxedUnitTpe = BoxedUnitClass.tpe
+ lazy val NothingTpe = NothingClass.tpe
+ lazy val NullTpe = NullClass.tpe
+ lazy val ObjectTpe = ObjectClass.tpe
+ lazy val SerializableTpe = SerializableClass.tpe
+ lazy val StringTpe = StringClass.tpe
+ lazy val ThrowableTpe = ThrowableClass.tpe
// Note: this is not the type alias AnyRef, it's a companion-like
// object used by the @specialize annotation.
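The isUnimportable documentation in the hunk above matches observable language behaviour; a small hedged illustration of the private[this] and constructor cases:

    object UnimportableDemo extends App {
      class Counter {
        private[this] var count = 0              // private[this]: only this instance sees it
        def increment(): Int = { count += 1; count }
      }
      val c = new Counter
      import c._                                 // ordinary members such as increment are importable
      println(increment())                       // 1
      // count cannot be imported (private[this]), and a constructor never can:
      // <init> is not a name an import selector can refer to.
    }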
@@ -273,12 +288,11 @@ trait Definitions extends api.StandardDefinitions {
def Predef_AnyRef = AnyRefModule
lazy val AnyValClass: ClassSymbol = (ScalaPackageClass.info member tpnme.AnyVal orElse {
- val anyval = enterNewClass(ScalaPackageClass, tpnme.AnyVal, List(AnyClass.tpe, NotNullClass.tpe), ABSTRACT)
+ val anyval = enterNewClass(ScalaPackageClass, tpnme.AnyVal, AnyTpe :: Nil, ABSTRACT)
val av_constr = anyval.newClassConstructor(NoPosition)
anyval.info.decls enter av_constr
anyval
}).asInstanceOf[ClassSymbol]
- lazy val AnyValTpe = definitions.AnyValClass.toTypeConstructor
def AnyVal_getClass = getMemberMethod(AnyValClass, nme.getClass_)
// bottom types
@@ -301,8 +315,6 @@ trait Definitions extends api.StandardDefinitions {
|| (that ne NothingClass) && (that isSubClass ObjectClass)
)
}
- lazy val NothingTpe = definitions.NothingClass.toTypeConstructor
- lazy val NullTpe = definitions.NullClass.toTypeConstructor
// exceptions and other throwables
lazy val ClassCastExceptionClass = requiredClass[ClassCastException]
@@ -314,6 +326,11 @@ trait Definitions extends api.StandardDefinitions {
lazy val ThrowableClass = getClassByName(sn.Throwable)
lazy val UninitializedErrorClass = requiredClass[UninitializedFieldError]
+ @deprecated("Same effect but more compact: `throw null`. Details in JVM spec, `athrow` instruction.", "2.11.0")
+ lazy val NPEConstructor = getMemberMethod(NullPointerExceptionClass, nme.CONSTRUCTOR) suchThat (_.paramss.flatten.isEmpty)
+
+ lazy val UninitializedFieldConstructor = UninitializedErrorClass.primaryConstructor
+
// fundamental reference classes
lazy val PartialFunctionClass = requiredClass[PartialFunction[_,_]]
lazy val AbstractPartialFunctionClass = requiredClass[scala.runtime.AbstractPartialFunction[_,_]]
@@ -337,15 +354,11 @@ trait Definitions extends api.StandardDefinitions {
lazy val UnqualifiedOwners = UnqualifiedModules.toSet ++ UnqualifiedModules.map(_.moduleClass)
lazy val PredefModule = requiredModule[scala.Predef.type]
- lazy val PredefModuleClass = PredefModule.moduleClass
-
def Predef_classOf = getMemberMethod(PredefModule, nme.classOf)
- def Predef_identity = getMemberMethod(PredefModule, nme.identity)
- def Predef_conforms = getMemberMethod(PredefModule, nme.conforms)
- def Predef_wrapRefArray = getMemberMethod(PredefModule, nme.wrapRefArray)
- def Predef_wrapArray(tp: Type) = getMemberMethod(PredefModule, wrapArrayMethodName(tp))
- def Predef_??? = getMemberMethod(PredefModule, nme.???)
- def Predef_implicitly = getMemberMethod(PredefModule, nme.implicitly)
+ def Predef_wrapRefArray = getMemberMethod(PredefModule, nme.wrapRefArray)
+ def Predef_wrapArray(tp: Type) = getMemberMethod(PredefModule, wrapArrayMethodName(tp))
+ def Predef_??? = getMemberMethod(PredefModule, nme.???)
+ def Predef_implicitly = getMemberMethod(PredefModule, nme.implicitly)
/** Is `sym` a member of Predef with the given name?
 * Note: DON'T replace this by sym == Predef_conforms/etc, as Predef_conforms is a `def`
@@ -361,7 +374,6 @@ trait Definitions extends api.StandardDefinitions {
lazy val SpecializableModule = requiredModule[Specializable]
lazy val GroupOfSpecializable = getMemberClass(SpecializableModule, tpnme.Group)
- lazy val ConsoleModule = requiredModule[scala.Console.type]
lazy val ScalaRunTimeModule = requiredModule[scala.runtime.ScalaRunTime.type]
lazy val SymbolModule = requiredModule[scala.Symbol.type]
lazy val Symbol_apply = getMemberMethod(SymbolModule, nme.apply)
@@ -371,27 +383,19 @@ trait Definitions extends api.StandardDefinitions {
def arrayLengthMethod = getMemberMethod(ScalaRunTimeModule, nme.array_length)
def arrayCloneMethod = getMemberMethod(ScalaRunTimeModule, nme.array_clone)
def ensureAccessibleMethod = getMemberMethod(ScalaRunTimeModule, nme.ensureAccessible)
- def scalaRuntimeSameElements = getMemberMethod(ScalaRunTimeModule, nme.sameElements)
def arrayClassMethod = getMemberMethod(ScalaRunTimeModule, nme.arrayClass)
- def arrayElementClassMethod = getMemberMethod(ScalaRunTimeModule, nme.arrayElementClass)
// classes with special meanings
lazy val StringAddClass = requiredClass[scala.runtime.StringAdd]
lazy val ArrowAssocClass = getRequiredClass("scala.Predef.ArrowAssoc") // SI-5731
lazy val StringAdd_+ = getMemberMethod(StringAddClass, nme.PLUS)
- lazy val NotNullClass = getRequiredClass("scala.NotNull")
lazy val ScalaNumberClass = requiredClass[scala.math.ScalaNumber]
lazy val TraitSetterAnnotationClass = requiredClass[scala.runtime.TraitSetter]
lazy val DelayedInitClass = requiredClass[scala.DelayedInit]
def delayedInitMethod = getMemberMethod(DelayedInitClass, nme.delayedInit)
- // a dummy value that communicates that a delayedInit call is compiler-generated
- // from phase UnCurry to phase Constructors
- // !!! This is not used anywhere (it was checked in that way.)
- // def delayedInitArgVal = EmptyPackageClass.newValue(NoPosition, nme.delayedInitArg)
- // .setInfo(UnitClass.tpe)
lazy val TypeConstraintClass = requiredClass[scala.annotation.TypeConstraint]
- lazy val SingletonClass = enterNewClass(ScalaPackageClass, tpnme.Singleton, anyparam, ABSTRACT | TRAIT | FINAL)
+ lazy val SingletonClass = enterNewClass(ScalaPackageClass, tpnme.Singleton, AnyTpe :: Nil, ABSTRACT | TRAIT | FINAL)
lazy val SerializableClass = requiredClass[scala.Serializable]
lazy val JavaSerializableClass = requiredClass[java.io.Serializable] modifyInfo fixupAsAnyTrait
lazy val ComparableClass = requiredClass[java.lang.Comparable[_]] modifyInfo fixupAsAnyTrait
@@ -401,16 +405,20 @@ trait Definitions extends api.StandardDefinitions {
lazy val RemoteInterfaceClass = requiredClass[java.rmi.Remote]
lazy val RemoteExceptionClass = requiredClass[java.rmi.RemoteException]
- lazy val ByNameParamClass = specialPolyClass(tpnme.BYNAME_PARAM_CLASS_NAME, COVARIANT)(_ => AnyClass.tpe)
- lazy val EqualsPatternClass = specialPolyClass(tpnme.EQUALS_PATTERN_NAME, 0L)(_ => AnyClass.tpe)
+ lazy val ByNameParamClass = specialPolyClass(tpnme.BYNAME_PARAM_CLASS_NAME, COVARIANT)(_ => AnyTpe)
lazy val JavaRepeatedParamClass = specialPolyClass(tpnme.JAVA_REPEATED_PARAM_CLASS_NAME, COVARIANT)(tparam => arrayType(tparam.tpe))
lazy val RepeatedParamClass = specialPolyClass(tpnme.REPEATED_PARAM_CLASS_NAME, COVARIANT)(tparam => seqType(tparam.tpe))
+ def dropByName(tp: Type): Type = tp match {
+ case TypeRef(_, ByNameParamClass, arg :: Nil) => arg
+ case _ => tp
+ }
def isByNameParamType(tp: Type) = tp.typeSymbol == ByNameParamClass
def isScalaRepeatedParamType(tp: Type) = tp.typeSymbol == RepeatedParamClass
def isJavaRepeatedParamType(tp: Type) = tp.typeSymbol == JavaRepeatedParamClass
def isRepeatedParamType(tp: Type) = isScalaRepeatedParamType(tp) || isJavaRepeatedParamType(tp)
- def isRepeated(param: Symbol) = isRepeatedParamType(param.tpe)
+ def isRepeated(param: Symbol) = isRepeatedParamType(param.tpe_*)
+ def isByName(param: Symbol) = isByNameParamType(param.tpe_*)
def isCastSymbol(sym: Symbol) = sym == Any_asInstanceOf || sym == Object_asInstanceOf
def isJavaVarArgsMethod(m: Symbol) = m.isMethod && isJavaVarArgs(m.info.params)
@@ -425,6 +433,11 @@ trait Definitions extends api.StandardDefinitions {
case _ => false
}
+ def repeatedToSingle(tp: Type): Type = tp match {
+ case TypeRef(_, RepeatedParamClass, arg :: Nil) => arg
+ case _ => tp
+ }
+
def repeatedToSeq(tp: Type): Type = (tp baseType RepeatedParamClass) match {
case TypeRef(_, RepeatedParamClass, arg :: Nil) => seqType(arg)
case _ => tp
@@ -435,12 +448,8 @@ trait Definitions extends api.StandardDefinitions {
case _ => tp
}
- def isPrimitiveArray(tp: Type) = tp match {
- case TypeRef(_, ArrayClass, arg :: Nil) => isPrimitiveValueClass(arg.typeSymbol)
- case _ => false
- }
def isReferenceArray(tp: Type) = tp match {
- case TypeRef(_, ArrayClass, arg :: Nil) => arg <:< AnyRefClass.tpe
+ case TypeRef(_, ArrayClass, arg :: Nil) => arg <:< AnyRefTpe
case _ => false
}
def isArrayOfSymbol(tp: Type, elem: Symbol) = tp match {
@@ -448,11 +457,8 @@ trait Definitions extends api.StandardDefinitions {
case _ => false
}
- lazy val MatchingStrategyClass = getRequiredClass("scala.MatchingStrategy")
-
// collections classes
lazy val ConsClass = requiredClass[scala.collection.immutable.::[_]]
- lazy val IterableClass = requiredClass[scala.collection.Iterable[_]]
lazy val IteratorClass = requiredClass[scala.collection.Iterator[_]]
lazy val ListClass = requiredClass[scala.collection.immutable.List[_]]
lazy val SeqClass = requiredClass[scala.collection.Seq[_]]
@@ -463,8 +469,6 @@ trait Definitions extends api.StandardDefinitions {
lazy val List_apply = getMemberMethod(ListModule, nme.apply)
lazy val NilModule = requiredModule[scala.collection.immutable.Nil.type]
lazy val SeqModule = requiredModule[scala.collection.Seq.type]
- lazy val IteratorModule = requiredModule[scala.collection.Iterator.type]
- lazy val Iterator_apply = getMemberMethod(IteratorModule, nme.apply)
// arrays and their members
lazy val ArrayModule = requiredModule[scala.Array.type]
@@ -479,9 +483,7 @@ trait Definitions extends api.StandardDefinitions {
// reflection / structural types
lazy val SoftReferenceClass = requiredClass[java.lang.ref.SoftReference[_]]
- lazy val WeakReferenceClass = requiredClass[java.lang.ref.WeakReference[_]]
lazy val MethodClass = getClassByName(sn.MethodAsObject)
- def methodClass_setAccessible = getMemberMethod(MethodClass, nme.setAccessible)
lazy val EmptyMethodCacheClass = requiredClass[scala.runtime.EmptyMethodCache]
lazy val MethodCacheClass = requiredClass[scala.runtime.MethodCache]
def methodCache_find = getMemberMethod(MethodCacheClass, nme.find_)
@@ -491,8 +493,8 @@ trait Definitions extends api.StandardDefinitions {
lazy val ReflectPackage = requiredModule[scala.reflect.`package`.type]
lazy val ReflectApiPackage = getPackageObjectIfDefined("scala.reflect.api") // defined in scala-reflect.jar, so we need to be careful
lazy val ReflectRuntimePackage = getPackageObjectIfDefined("scala.reflect.runtime") // defined in scala-reflect.jar, so we need to be careful
- def ReflectRuntimeUniverse = if (ReflectRuntimePackage != NoSymbol) getMemberValue(ReflectRuntimePackage, nme.universe) else NoSymbol
- def ReflectRuntimeCurrentMirror = if (ReflectRuntimePackage != NoSymbol) getMemberMethod(ReflectRuntimePackage, nme.currentMirror) else NoSymbol
+ def ReflectRuntimeUniverse = ReflectRuntimePackage.map(sym => getMemberValue(sym, nme.universe))
+ def ReflectRuntimeCurrentMirror = ReflectRuntimePackage.map(sym => getMemberMethod(sym, nme.currentMirror))
lazy val PartialManifestClass = getTypeMember(ReflectPackage, tpnme.ClassManifest)
lazy val PartialManifestModule = requiredModule[scala.reflect.ClassManifestFactory.type]
@@ -501,25 +503,38 @@ trait Definitions extends api.StandardDefinitions {
lazy val OptManifestClass = requiredClass[scala.reflect.OptManifest[_]]
lazy val NoManifest = requiredModule[scala.reflect.NoManifest.type]
+ lazy val TreesClass = getClassIfDefined("scala.reflect.api.Trees") // defined in scala-reflect.jar, so we need to be careful
+ lazy val TreesTreeType = TreesClass.map(sym => getTypeMember(sym, tpnme.Tree))
+ object TreeType {
+ def unapply(tpe: Type): Boolean = unapply(tpe.typeSymbol)
+ def unapply(sym: Symbol): Boolean = sym.overrideChain contains TreesTreeType
+ }
+
lazy val ExprsClass = getClassIfDefined("scala.reflect.api.Exprs") // defined in scala-reflect.jar, so we need to be careful
- lazy val ExprClass = if (ExprsClass != NoSymbol) getMemberClass(ExprsClass, tpnme.Expr) else NoSymbol
- def ExprSplice = if (ExprsClass != NoSymbol) getMemberMethod(ExprClass, nme.splice) else NoSymbol
- def ExprValue = if (ExprsClass != NoSymbol) getMemberMethod(ExprClass, nme.value) else NoSymbol
- lazy val ExprModule = if (ExprsClass != NoSymbol) getMemberModule(ExprsClass, nme.Expr) else NoSymbol
+ lazy val ExprClass = ExprsClass.map(sym => getMemberClass(sym, tpnme.Expr))
+ def ExprSplice = ExprClass.map(sym => getMemberMethod(sym, nme.splice))
+ def ExprValue = ExprClass.map(sym => getMemberMethod(sym, nme.value))
+ object ExprClassOf {
+ def unapply(tpe: Type): Option[Type] = tpe.dealias match {
+ case ExistentialType(_, underlying) => unapply(underlying)
+ case TypeRef(_, ExprClass, t :: Nil) => Some(t)
+ case _ => None
+ }
+ }
lazy val ClassTagModule = requiredModule[scala.reflect.ClassTag[_]]
lazy val ClassTagClass = requiredClass[scala.reflect.ClassTag[_]]
lazy val TypeTagsClass = getClassIfDefined("scala.reflect.api.TypeTags") // defined in scala-reflect.jar, so we need to be careful
- lazy val WeakTypeTagClass = if (TypeTagsClass != NoSymbol) getMemberClass(TypeTagsClass, tpnme.WeakTypeTag) else NoSymbol
- lazy val WeakTypeTagModule = if (TypeTagsClass != NoSymbol) getMemberModule(TypeTagsClass, nme.WeakTypeTag) else NoSymbol
- lazy val TypeTagClass = if (TypeTagsClass != NoSymbol) getMemberClass(TypeTagsClass, tpnme.TypeTag) else NoSymbol
- lazy val TypeTagModule = if (TypeTagsClass != NoSymbol) getMemberModule(TypeTagsClass, nme.TypeTag) else NoSymbol
+ lazy val WeakTypeTagClass = TypeTagsClass.map(sym => getMemberClass(sym, tpnme.WeakTypeTag))
+ lazy val WeakTypeTagModule = TypeTagsClass.map(sym => getMemberModule(sym, nme.WeakTypeTag))
+ lazy val TypeTagClass = TypeTagsClass.map(sym => getMemberClass(sym, tpnme.TypeTag))
+ lazy val TypeTagModule = TypeTagsClass.map(sym => getMemberModule(sym, nme.TypeTag))
def materializeClassTag = getMemberMethod(ReflectPackage, nme.materializeClassTag)
- def materializeWeakTypeTag = if (ReflectApiPackage != NoSymbol) getMemberMethod(ReflectApiPackage, nme.materializeWeakTypeTag) else NoSymbol
- def materializeTypeTag = if (ReflectApiPackage != NoSymbol) getMemberMethod(ReflectApiPackage, nme.materializeTypeTag) else NoSymbol
+ def materializeWeakTypeTag = ReflectApiPackage.map(sym => getMemberMethod(sym, nme.materializeWeakTypeTag))
+ def materializeTypeTag = ReflectApiPackage.map(sym => getMemberMethod(sym, nme.materializeTypeTag))
lazy val ApiUniverseClass = getClassIfDefined("scala.reflect.api.Universe") // defined in scala-reflect.jar, so we need to be careful
- def ApiUniverseReify = if (ApiUniverseClass != NoSymbol) getMemberMethod(ApiUniverseClass, nme.reify) else NoSymbol
+ def ApiUniverseReify = ApiUniverseClass.map(sym => getMemberMethod(sym, nme.reify))
lazy val JavaUniverseClass = getClassIfDefined("scala.reflect.api.JavaUniverse") // defined in scala-reflect.jar, so we need to be careful
lazy val MirrorClass = getClassIfDefined("scala.reflect.api.Mirror") // defined in scala-reflect.jar, so we need to be careful
@@ -527,15 +542,18 @@ trait Definitions extends api.StandardDefinitions {
lazy val TypeCreatorClass = getClassIfDefined("scala.reflect.api.TypeCreator") // defined in scala-reflect.jar, so we need to be careful
lazy val TreeCreatorClass = getClassIfDefined("scala.reflect.api.TreeCreator") // defined in scala-reflect.jar, so we need to be careful
- lazy val MacroContextClass = getClassIfDefined("scala.reflect.macros.Context") // defined in scala-reflect.jar, so we need to be careful
- def MacroContextPrefix = if (MacroContextClass != NoSymbol) getMemberMethod(MacroContextClass, nme.prefix) else NoSymbol
- def MacroContextPrefixType = if (MacroContextClass != NoSymbol) getTypeMember(MacroContextClass, tpnme.PrefixType) else NoSymbol
- def MacroContextUniverse = if (MacroContextClass != NoSymbol) getMemberMethod(MacroContextClass, nme.universe) else NoSymbol
- def MacroContextMirror = if (MacroContextClass != NoSymbol) getMemberMethod(MacroContextClass, nme.mirror) else NoSymbol
- lazy val MacroImplAnnotation = requiredClass[scala.reflect.macros.internal.macroImpl]
+ lazy val MacroClass = getClassIfDefined("scala.reflect.macros.Macro") // defined in scala-reflect.jar, so we need to be careful
+ lazy val MacroContextClass = getClassIfDefined("scala.reflect.macros.Context") // defined in scala-reflect.jar, so we need to be careful
+ def MacroContextPrefix = MacroContextClass.map(sym => getMemberMethod(sym, nme.prefix))
+ def MacroContextPrefixType = MacroContextClass.map(sym => getTypeMember(sym, tpnme.PrefixType))
+ def MacroContextUniverse = MacroContextClass.map(sym => getMemberMethod(sym, nme.universe))
+ def MacroContextExprClass = MacroContextClass.map(sym => getTypeMember(sym, tpnme.Expr))
+ def MacroContextWeakTypeTagClass = MacroContextClass.map(sym => getTypeMember(sym, tpnme.WeakTypeTag))
+ def MacroContextTreeType = MacroContextClass.map(sym => getTypeMember(sym, tpnme.Tree))
+ lazy val MacroImplAnnotation = requiredClass[scala.reflect.macros.internal.macroImpl]
- lazy val StringContextClass = requiredClass[scala.StringContext]
- def StringContext_f = getMemberMethod(StringContextClass, nme.f)
+ lazy val StringContextClass = requiredClass[scala.StringContext]
+ def StringContext_f = getMemberMethod(StringContextClass, nme.f)
lazy val ScalaSignatureAnnotation = requiredClass[scala.reflect.ScalaSignature]
lazy val ScalaLongSignatureAnnotation = requiredClass[scala.reflect.ScalaLongSignature]
@@ -553,7 +571,7 @@ trait Definitions extends api.StandardDefinitions {
// The given symbol represents either String.+ or StringAdd.+
def isStringAddition(sym: Symbol) = sym == String_+ || sym == StringAdd_+
- def isArrowAssoc(sym: Symbol) = ArrowAssocClass.tpe.decls.toList contains sym
+ def isArrowAssoc(sym: Symbol) = sym.owner == ArrowAssocClass
// The given symbol is a method with the right name and signature to be a runnable java program.
def isJavaMainMethod(sym: Symbol) = (sym.name == nme.main) && (sym.info match {
@@ -563,12 +581,6 @@ trait Definitions extends api.StandardDefinitions {
// The given class has a main method.
def hasJavaMainMethod(sym: Symbol): Boolean =
(sym.tpe member nme.main).alternatives exists isJavaMainMethod
- def hasJavaMainMethod(path: String): Boolean =
- hasJavaMainMethod(getModuleIfDefined(path))
-
- def isOptionType(tp: Type) = tp.typeSymbol isSubClass OptionClass
- def isSomeType(tp: Type) = tp.typeSymbol eq SomeClass
- def isNoneType(tp: Type) = tp.typeSymbol eq NoneModule
// Product, Tuple, Function, AbstractFunction
private def mkArityArray(name: String, arity: Int, countFrom: Int): Array[ClassSymbol] = {
@@ -591,7 +603,6 @@ trait Definitions extends api.StandardDefinitions {
/** Creators for TupleN, ProductN, FunctionN. */
def tupleType(elems: List[Type]) = aritySpecificType(TupleClass, elems)
- def productType(elems: List[Type]) = aritySpecificType(ProductClass, elems)
def functionType(formals: List[Type], restpe: Type) = aritySpecificType(FunctionClass, formals, restpe)
def abstractFunctionType(formals: List[Type], restpe: Type) = aritySpecificType(AbstractFunctionClass, formals, restpe)
@@ -606,14 +617,10 @@ trait Definitions extends api.StandardDefinitions {
case BooleanClass => nme.wrapBooleanArray
case UnitClass => nme.wrapUnitArray
case _ =>
- if ((elemtp <:< AnyRefClass.tpe) && !isPhantomClass(elemtp.typeSymbol)) nme.wrapRefArray
+ if ((elemtp <:< AnyRefTpe) && !isPhantomClass(elemtp.typeSymbol)) nme.wrapRefArray
else nme.genericWrapArray
}
- @deprecated("Use isTupleType", "2.10.0")
- def isTupleTypeOrSubtype(tp: Type) = isTupleType(tp)
-
- def tupleField(n: Int, j: Int) = getMemberValue(TupleClass(n), nme.productAccessorName(j))
 // NOTE: returns true for NoSymbol since it's included in the TupleClass array -- is this intentional?
def isTupleSymbol(sym: Symbol) = TupleClass contains unspecializedSymbol(sym)
def isProductNClass(sym: Symbol) = ProductClass contains sym
@@ -651,7 +658,13 @@ trait Definitions extends api.StandardDefinitions {
len <= MaxTupleArity && sym == TupleClass(len)
case _ => false
}
- def isTupleType(tp: Type) = isTupleTypeDirect(tp.normalize)
+ def isTupleType(tp: Type) = isTupleTypeDirect(tp.dealiasWiden)
+
+ def isMacroBundleType(tp: Type) = {
+ val isNonTrivial = tp != ErrorType && tp != NothingTpe && tp != NullTpe
+ val isMacroCompatible = MacroClass != NoSymbol && tp <:< MacroClass.tpe
+ isNonTrivial && isMacroCompatible
+ }
lazy val ProductRootClass: ClassSymbol = requiredClass[scala.Product]
def Product_productArity = getMemberMethod(ProductRootClass, nme.productArity)
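isTupleType now normalises with dealiasWiden, i.e. aliases are expanded and singleton types widened before the arity check. A hedged sketch using the public reflection API (dealias and widen applied separately, since dealiasWiden itself is internal):

    import scala.reflect.runtime.universe._

    object TupleTypeDemo extends App {
      type Pair = (Int, String)               // alias for Tuple2[Int, String]
      val p: Pair = (1, "a")
      println(typeOf[Pair].dealias)           // the underlying Tuple2 type
      println(typeOf[p.type].widen.dealias)   // widen the singleton type, then expand the alias
    }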
@@ -659,13 +672,8 @@ trait Definitions extends api.StandardDefinitions {
def Product_iterator = getMemberMethod(ProductRootClass, nme.productIterator)
def Product_productPrefix = getMemberMethod(ProductRootClass, nme.productPrefix)
def Product_canEqual = getMemberMethod(ProductRootClass, nme.canEqual_)
- // def Product_productElementName = getMemberMethod(ProductRootClass, nme.productElementName)
def productProj(z:Symbol, j: Int): TermSymbol = getMemberValue(z, nme.productAccessorName(j))
- def productProj(n: Int, j: Int): TermSymbol = productProj(ProductClass(n), j)
-
- /** returns true if this type is exactly ProductN[T1,...,Tn], not some subclass */
- def isExactProductType(tp: Type): Boolean = isProductNClass(tp.typeSymbol)
/** if tpe <: ProductN[T1,...,TN], returns List(T1,...,TN) else Nil */
def getProductArgs(tpe: Type): List[Type] = tpe.baseClasses find isProductNClass match {
@@ -678,19 +686,17 @@ trait Definitions extends api.StandardDefinitions {
case _ => tp
}
- def unapplyUnwrap(tpe:Type) = tpe.finalResultType.normalize match {
- case RefinedType(p :: _, _) => p.normalize
+ def unapplyUnwrap(tpe:Type) = tpe.finalResultType.dealiasWiden match {
+ case RefinedType(p :: _, _) => p.dealiasWiden
case tp => tp
}
- def functionApply(n: Int) = getMemberMethod(FunctionClass(n), nme.apply)
-
def abstractFunctionForFunctionType(tp: Type) = {
assert(isFunctionType(tp), tp)
abstractFunctionType(tp.typeArgs.init, tp.typeArgs.last)
}
- def isFunctionType(tp: Type): Boolean = tp.normalize match {
+ def isFunctionType(tp: Type): Boolean = tp.dealiasWiden match {
case TypeRef(_, sym, args) if args.nonEmpty =>
val arity = args.length - 1 // -1 is the return type
arity <= MaxFunctionArity && sym == FunctionClass(arity)
@@ -703,8 +709,6 @@ trait Definitions extends api.StandardDefinitions {
(sym eq PartialFunctionClass) || (sym eq AbstractPartialFunctionClass)
}
- def isSeqType(tp: Type) = elementType(SeqClass, tp.normalize) != NoType
-
def elementType(container: Symbol, tp: Type): Type = tp match {
case TypeRef(_, `container`, arg :: Nil) => arg
case _ => NoType
@@ -717,14 +721,23 @@ trait Definitions extends api.StandardDefinitions {
def optionType(tp: Type) = appliedType(OptionClass, tp)
def scalaRepeatedType(arg: Type) = appliedType(RepeatedParamClass, arg)
def seqType(arg: Type) = appliedType(SeqClass, arg)
- def someType(tp: Type) = appliedType(SomeClass, tp)
- def StringArray = arrayType(StringClass.tpe)
- lazy val ObjectArray = arrayType(ObjectClass.tpe)
+ def ClassType(arg: Type) = if (phase.erasedTypes) ClassClass.tpe else appliedType(ClassClass, arg)
- def ClassType(arg: Type) =
- if (phase.erasedTypes || forMSIL) ClassClass.tpe
- else appliedType(ClassClass, arg)
+ /** Can we tell by inspecting the symbol that it will never
+ * at any phase have type parameters?
+ */
+ def neverHasTypeParameters(sym: Symbol) = sym match {
+ case _: RefinementClassSymbol => true
+ case _: ModuleClassSymbol => true
+ case _: ImplClassSymbol => true
+ case _ =>
+ (
+ sym.isPrimitiveValueClass
+ || sym.isAnonymousClass
+ || sym.initialize.isMonomorphicType
+ )
+ }
def EnumType(sym: Symbol) =
// given (in java): "class A { enum E { VAL1 } }"
@@ -733,9 +746,6 @@ trait Definitions extends api.StandardDefinitions {
// - .linkedClassOfClass: the ClassSymbol of the enumeration (class E)
sym.owner.linkedClassOfClass.tpe
- def vmClassType(arg: Type): Type = ClassType(arg)
- def vmSignature(sym: Symbol, info: Type): String = signature(info) // !!!
-
/** Given a class symbol C with type parameters T1, T2, ... Tn
 * which have lower/upper bounds LB1/UB1, LB2/UB2, ..., LBn/UBn,
* returns an existential type of the form
@@ -743,56 +753,18 @@ trait Definitions extends api.StandardDefinitions {
 * C[E1, ..., En] forSome { E1 >: LB1 <: UB1 ... En >: LBn <: UBn }.
*/
def classExistentialType(clazz: Symbol): Type =
- newExistentialType(clazz.typeParams, clazz.tpe)
-
- /** Given type U, creates a Type representing Class[_ <: U].
- */
- def boundedClassType(upperBound: Type) =
- appliedTypeAsUpperBounds(ClassClass.typeConstructor, List(upperBound))
-
- /** To avoid unchecked warnings on polymorphic classes, translate
- * a Foo[T] into a Foo[_] for use in the pattern matcher.
- */
- @deprecated("Use classExistentialType", "2.10.0")
- def typeCaseType(clazz: Symbol): Type = classExistentialType(clazz)
+ existentialAbstraction(clazz.typeParams, clazz.tpe_*)
- //
- // .NET backend
- //
-
- lazy val ComparatorClass = getRequiredClass("scala.runtime.Comparator")
- // System.ValueType
- lazy val ValueTypeClass: ClassSymbol = getClassByName(sn.ValueType)
- // System.MulticastDelegate
- lazy val DelegateClass: ClassSymbol = getClassByName(sn.Delegate)
- var Delegate_scalaCallers: List[Symbol] = List() // Syncnote: No protection necessary yet as only for .NET where reflection is not supported.
- // Symbol -> (Symbol, Type): scalaCaller -> (scalaMethodSym, DelegateType)
- // var Delegate_scalaCallerInfos: HashMap[Symbol, (Symbol, Type)] = _
- lazy val Delegate_scalaCallerTargets: mutable.HashMap[Symbol, Symbol] = mutable.HashMap()
-
- def isCorrespondingDelegate(delegateType: Type, functionType: Type): Boolean = {
- isSubType(delegateType, DelegateClass.tpe) &&
- (delegateType.member(nme.apply).tpe match {
- case MethodType(delegateParams, delegateReturn) =>
- isFunctionType(functionType) &&
- (functionType.normalize match {
- case TypeRef(_, _, args) =>
- (delegateParams.map(pt => {
- if (pt.tpe == AnyClass.tpe) definitions.ObjectClass.tpe else pt})
- ::: List(delegateReturn)) == args
- case _ => false
- })
- case _ => false
- })
- }
+ def unsafeClassExistentialType(clazz: Symbol): Type =
+ existentialAbstraction(clazz.unsafeTypeParams, clazz.tpe_*)
// members of class scala.Any
- lazy val Any_== = enterNewMethod(AnyClass, nme.EQ, anyparam, booltype, FINAL)
- lazy val Any_!= = enterNewMethod(AnyClass, nme.NE, anyparam, booltype, FINAL)
- lazy val Any_equals = enterNewMethod(AnyClass, nme.equals_, anyparam, booltype)
- lazy val Any_hashCode = enterNewMethod(AnyClass, nme.hashCode_, Nil, inttype)
- lazy val Any_toString = enterNewMethod(AnyClass, nme.toString_, Nil, stringtype)
- lazy val Any_## = enterNewMethod(AnyClass, nme.HASHHASH, Nil, inttype, FINAL)
+ lazy val Any_== = enterNewMethod(AnyClass, nme.EQ, AnyTpe :: Nil, BooleanTpe, FINAL)
+ lazy val Any_!= = enterNewMethod(AnyClass, nme.NE, AnyTpe :: Nil, BooleanTpe, FINAL)
+ lazy val Any_equals = enterNewMethod(AnyClass, nme.equals_, AnyTpe :: Nil, BooleanTpe)
+ lazy val Any_hashCode = enterNewMethod(AnyClass, nme.hashCode_, Nil, IntTpe)
+ lazy val Any_toString = enterNewMethod(AnyClass, nme.toString_, Nil, StringTpe)
+ lazy val Any_## = enterNewMethod(AnyClass, nme.HASHHASH, Nil, IntTpe, FINAL)
// Any_getClass requires special handling. The return type is determined on
// a per-call-site basis as if the function being called were actually:
@@ -804,7 +776,7 @@ trait Definitions extends api.StandardDefinitions {
// participation. At the "Any" level, the return type is Class[_] as it is in
// java.lang.Object. Java also special cases the return type.
lazy val Any_getClass = enterNewMethod(AnyClass, nme.getClass_, Nil, getMemberMethod(ObjectClass, nme.getClass_).tpe.resultType, DEFERRED)
- lazy val Any_isInstanceOf = newT1NullaryMethod(AnyClass, nme.isInstanceOf_, FINAL)(_ => booltype)
+ lazy val Any_isInstanceOf = newT1NullaryMethod(AnyClass, nme.isInstanceOf_, FINAL)(_ => BooleanTpe)
lazy val Any_asInstanceOf = newT1NullaryMethod(AnyClass, nme.asInstanceOf_, FINAL)(_.typeConstructor)
lazy val primitiveGetClassMethods = Set[Symbol](Any_getClass, AnyVal_getClass) ++ (
@@ -834,7 +806,7 @@ trait Definitions extends api.StandardDefinitions {
else {
val eparams = typeParamsToExistentials(ClassClass, ClassClass.typeParams)
val upperBound = (
- if (isPhantomClass(sym)) AnyClass.tpe
+ if (isPhantomClass(sym)) AnyTpe
else if (sym.isLocalClass) erasure.intersectionDominator(tp.parents)
else tp.widen
)
@@ -860,12 +832,7 @@ trait Definitions extends api.StandardDefinitions {
else
x :: removeRedundantObjects(xs)
}
- /** Order a list of types with non-trait classes before others. */
- def classesFirst(tps: List[Type]): List[Type] = {
- val (classes, others) = tps partition (t => t.typeSymbol.isClass && !t.typeSymbol.isTrait)
- if (classes.isEmpty || others.isEmpty || (tps startsWith classes)) tps
- else classes ::: others
- }
+
/** The following transformations are applied to a list of parents.
* If any parent is a class/trait, all parents which normalize to
* Object are discarded. Otherwise, all parents which normalize
@@ -893,27 +860,23 @@ trait Definitions extends api.StandardDefinitions {
def parentsString(parents: List[Type]) =
normalizedParents(parents) mkString " with "
- def typeParamsString(tp: Type) = tp match {
- case PolyType(tparams, _) => tparams map (_.defString) mkString ("[", ",", "]")
- case _ => ""
- }
def valueParamsString(tp: Type) = tp match {
case MethodType(params, _) => params map (_.defString) mkString ("(", ",", ")")
case _ => ""
}
// members of class java.lang.{ Object, String }
- lazy val Object_## = enterNewMethod(ObjectClass, nme.HASHHASH, Nil, inttype, FINAL)
- lazy val Object_== = enterNewMethod(ObjectClass, nme.EQ, anyrefparam, booltype, FINAL)
- lazy val Object_!= = enterNewMethod(ObjectClass, nme.NE, anyrefparam, booltype, FINAL)
- lazy val Object_eq = enterNewMethod(ObjectClass, nme.eq, anyrefparam, booltype, FINAL)
- lazy val Object_ne = enterNewMethod(ObjectClass, nme.ne, anyrefparam, booltype, FINAL)
- lazy val Object_isInstanceOf = newT1NoParamsMethod(ObjectClass, nme.isInstanceOf_Ob, FINAL | SYNTHETIC)(_ => booltype)
- lazy val Object_asInstanceOf = newT1NoParamsMethod(ObjectClass, nme.asInstanceOf_Ob, FINAL | SYNTHETIC)(_.typeConstructor)
+ lazy val Object_## = enterNewMethod(ObjectClass, nme.HASHHASH, Nil, IntTpe, FINAL)
+ lazy val Object_== = enterNewMethod(ObjectClass, nme.EQ, AnyRefTpe :: Nil, BooleanTpe, FINAL)
+ lazy val Object_!= = enterNewMethod(ObjectClass, nme.NE, AnyRefTpe :: Nil, BooleanTpe, FINAL)
+ lazy val Object_eq = enterNewMethod(ObjectClass, nme.eq, AnyRefTpe :: Nil, BooleanTpe, FINAL)
+ lazy val Object_ne = enterNewMethod(ObjectClass, nme.ne, AnyRefTpe :: Nil, BooleanTpe, FINAL)
+ lazy val Object_isInstanceOf = newT1NoParamsMethod(ObjectClass, nme.isInstanceOf_Ob, FINAL | SYNTHETIC | ARTIFACT)(_ => BooleanTpe)
+ lazy val Object_asInstanceOf = newT1NoParamsMethod(ObjectClass, nme.asInstanceOf_Ob, FINAL | SYNTHETIC | ARTIFACT)(_.typeConstructor)
lazy val Object_synchronized = newPolyMethod(1, ObjectClass, nme.synchronized_, FINAL)(tps =>
(Some(List(tps.head.typeConstructor)), tps.head.typeConstructor)
)
- lazy val String_+ = enterNewMethod(StringClass, nme.raw.PLUS, anyparam, stringtype, FINAL)
+ lazy val String_+ = enterNewMethod(StringClass, nme.raw.PLUS, AnyTpe :: Nil, StringTpe, FINAL)
def Object_getClass = getMemberMethod(ObjectClass, nme.getClass_)
def Object_clone = getMemberMethod(ObjectClass, nme.clone_)
@@ -933,12 +896,6 @@ trait Definitions extends api.StandardDefinitions {
lazy val BoxedNumberClass = getClassByName(sn.BoxedNumber)
lazy val BoxedCharacterClass = getClassByName(sn.BoxedCharacter)
lazy val BoxedBooleanClass = getClassByName(sn.BoxedBoolean)
- lazy val BoxedByteClass = requiredClass[java.lang.Byte]
- lazy val BoxedShortClass = requiredClass[java.lang.Short]
- lazy val BoxedIntClass = requiredClass[java.lang.Integer]
- lazy val BoxedLongClass = requiredClass[java.lang.Long]
- lazy val BoxedFloatClass = requiredClass[java.lang.Float]
- lazy val BoxedDoubleClass = requiredClass[java.lang.Double]
lazy val Boxes_isNumberOrBool = getDecl(BoxesRunTimeClass, nme.isBoxedNumberOrBoolean)
lazy val Boxes_isNumber = getDecl(BoxesRunTimeClass, nme.isBoxedNumber)
@@ -959,7 +916,6 @@ trait Definitions extends api.StandardDefinitions {
lazy val ImplicitNotFoundClass = requiredClass[scala.annotation.implicitNotFound]
lazy val MigrationAnnotationClass = requiredClass[scala.annotation.migration]
lazy val ScalaStrictFPAttr = requiredClass[scala.annotation.strictfp]
- lazy val SerializableAttr = requiredClass[scala.annotation.serializable] // @serializable is deprecated
lazy val SwitchClass = requiredClass[scala.annotation.switch]
lazy val TailrecClass = requiredClass[scala.annotation.tailrec]
lazy val VarargsClass = requiredClass[scala.annotation.varargs]
@@ -968,7 +924,6 @@ trait Definitions extends api.StandardDefinitions {
lazy val BeanPropertyAttr = requiredClass[scala.beans.BeanProperty]
lazy val BooleanBeanPropertyAttr = requiredClass[scala.beans.BooleanBeanProperty]
- lazy val CloneableAttr = requiredClass[scala.annotation.cloneable]
lazy val CompileTimeOnlyAttr = getClassIfDefined("scala.reflect.internal.annotations.compileTimeOnly")
lazy val DeprecatedAttr = requiredClass[scala.deprecated]
lazy val DeprecatedNameAttr = requiredClass[scala.deprecatedName]
@@ -993,8 +948,8 @@ trait Definitions extends api.StandardDefinitions {
lazy val GetterTargetClass = requiredClass[meta.getter]
lazy val ParamTargetClass = requiredClass[meta.param]
lazy val SetterTargetClass = requiredClass[meta.setter]
- lazy val ClassTargetClass = requiredClass[meta.companionClass]
lazy val ObjectTargetClass = requiredClass[meta.companionObject]
+ lazy val ClassTargetClass = requiredClass[meta.companionClass]
lazy val MethodTargetClass = requiredClass[meta.companionMethod] // TODO: module, moduleClass? package, packageObject?
lazy val LanguageFeatureAnnot = requiredClass[meta.languageFeature]
@@ -1013,11 +968,21 @@ trait Definitions extends api.StandardDefinitions {
// Trying to allow for deprecated locations
sym.isAliasType && isMetaAnnotation(sym.info.typeSymbol)
)
- lazy val metaAnnotations = Set[Symbol](
- FieldTargetClass, ParamTargetClass,
- GetterTargetClass, SetterTargetClass,
- BeanGetterTargetClass, BeanSetterTargetClass
- )
+ lazy val metaAnnotations: Set[Symbol] = getPackage("scala.annotation.meta").info.members filter (_ isSubClass StaticAnnotationClass) toSet
+
+ // According to the scala.annotation.meta package object:
+ // * By default, annotations on (`val`-, `var`- or plain) constructor parameters
+ // * end up on the parameter, not on any other entity. Annotations on fields
+ // * by default only end up on the field.
+ def defaultAnnotationTarget(t: Tree): Symbol = t match {
+ case ClassDef(_, _, _, _) => ClassTargetClass
+ case ModuleDef(_, _, _) => ObjectTargetClass
+ case vd @ ValDef(_, _, _, _) if vd.symbol.isParamAccessor => ParamTargetClass
+ case vd @ ValDef(_, _, _, _) if vd.symbol.isValueParameter => ParamTargetClass
+ case ValDef(_, _, _, _) => FieldTargetClass
+ case DefDef(_, _, _, _, _, _) => MethodTargetClass
+ case _ => GetterTargetClass
+ }
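
A user-level illustration (not from this patch; Record is a made-up class) of the defaults quoted above and how a scala.annotation.meta target redirects them:

    import scala.annotation.meta.field

    // By default the annotation on the `val` constructor parameter would stay on
    // the parameter; @field redirects it to the generated field, which is what
    // @transient needs in order to affect serialization.
    class Record(@(transient @field) val payload: Array[Byte]) extends Serializable
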
lazy val AnnotationDefaultAttr: ClassSymbol = {
val attr = enterNewClass(RuntimePackageClass, tpnme.AnnotationDefaultATTR, List(AnnotationClass.tpe))
@@ -1039,7 +1004,6 @@ trait Definitions extends api.StandardDefinitions {
def getLanguageFeature(name: String, owner: Symbol = languageFeatureModule): Symbol = getMember(owner, newTypeName(name))
def termMember(owner: Symbol, name: String): Symbol = owner.info.member(newTermName(name))
- def typeMember(owner: Symbol, name: String): Symbol = owner.info.member(newTypeName(name))
def findNamedMember(fullName: Name, root: Symbol): Symbol = {
val segs = nme.segments(fullName.toString, fullName.isTermName)
@@ -1079,7 +1043,6 @@ trait Definitions extends api.StandardDefinitions {
}
}
def getMemberClass(owner: Symbol, name: Name): ClassSymbol = {
- val y = getMember(owner, name.toTypeName)
getMember(owner, name.toTypeName) match {
case x: ClassSymbol => x
case _ => fatalMissingSymbol(owner, name, "member class")
@@ -1107,16 +1070,13 @@ trait Definitions extends api.StandardDefinitions {
def getDeclIfDefined(owner: Symbol, name: Name): Symbol =
owner.info.nonPrivateDecl(name)
- def packageExists(packageName: String): Boolean =
- getModuleIfDefined(packageName).isPackage
-
private def newAlias(owner: Symbol, name: TypeName, alias: Type): AliasTypeSymbol =
owner.newAliasType(name) setInfoAndEnter alias
private def specialPolyClass(name: TypeName, flags: Long)(parentFn: Symbol => Type): ClassSymbol = {
val clazz = enterNewClass(ScalaPackageClass, name, Nil)
val tparam = clazz.newSyntheticTypeParam("T0", flags)
- val parents = List(AnyRefClass.tpe, parentFn(tparam))
+ val parents = List(AnyRefTpe, parentFn(tparam))
clazz setInfo GenPolyType(List(tparam), ClassInfoType(parents, newScope, clazz))
}
@@ -1141,7 +1101,6 @@ trait Definitions extends api.StandardDefinitions {
newPolyMethod(1, owner, name, flags)(tparams => (Some(Nil), createFn(tparams.head)))
}
- lazy val boxedClassValues = boxedClass.values.toSet[Symbol]
lazy val isUnbox = unboxMethod.values.toSet[Symbol]
lazy val isBox = boxMethod.values.toSet[Symbol]
@@ -1158,8 +1117,7 @@ trait Definitions extends api.StandardDefinitions {
AnyValClass,
NullClass,
NothingClass,
- SingletonClass,
- EqualsPatternClass
+ SingletonClass
)
/** Lists core methods that don't have underlying bytecode, but are synthesized on-the-fly in every reflection universe */
lazy val syntheticCoreMethods = List(
@@ -1201,8 +1159,6 @@ trait Definitions extends api.StandardDefinitions {
/** Is symbol a value class? */
def isPrimitiveValueClass(sym: Symbol) = ScalaValueClasses contains sym
- def isNonUnitValueClass(sym: Symbol) = isPrimitiveValueClass(sym) && (sym != UnitClass)
- def isSpecializableClass(sym: Symbol) = isPrimitiveValueClass(sym) || (sym == AnyRefClass)
def isPrimitiveValueType(tp: Type) = isPrimitiveValueClass(tp.typeSymbol)
/** Is symbol a boxed value class, e.g. java.lang.Integer? */
@@ -1219,7 +1175,7 @@ trait Definitions extends api.StandardDefinitions {
/** Is type's symbol a numeric value class? */
def isNumericValueType(tp: Type): Boolean = tp match {
case TypeRef(_, sym, _) => isNumericValueClass(sym)
- case _ => false
+ case _ => false
}
// todo: reconcile with javaSignature!!!
@@ -1231,10 +1187,10 @@ trait Definitions extends api.StandardDefinitions {
}
def flatNameString(sym: Symbol, separator: Char): String =
if (sym == NoSymbol) "" // be more resistant to error conditions, e.g. neg/t3222.scala
- else if (sym.owner.isPackageClass) sym.javaClassName
+ else if (sym.isTopLevel) sym.javaClassName
else flatNameString(sym.owner, separator) + nme.NAME_JOIN_STRING + sym.simpleName
def signature1(etp: Type): String = {
- if (etp.typeSymbol == ArrayClass) "[" + signature1(erasure(etp.normalize.typeArgs.head))
+ if (etp.typeSymbol == ArrayClass) "[" + signature1(erasure(etp.dealiasWiden.typeArgs.head))
else if (isPrimitiveValueClass(etp.typeSymbol)) abbrvTag(etp.typeSymbol).toString()
else "L" + flatNameString(etp.typeSymbol, '/') + ";"
}
@@ -1243,49 +1199,11 @@ trait Definitions extends api.StandardDefinitions {
else flatNameString(etp.typeSymbol, '.')
}
- /** Surgery on the value classes. Without this, AnyVals defined in source
- * files end up with an AnyRef parent. It is likely there is a better way
- * to evade that AnyRef.
- */
- private def setParents(sym: Symbol, parents: List[Type]): Symbol = sym.rawInfo match {
- case ClassInfoType(_, scope, clazz) =>
- sym setInfo ClassInfoType(parents, scope, clazz)
- case _ =>
- sym
- }
-
def init() {
if (isInitialized) return
// force initialization of every symbol that is synthesized or hijacked by the compiler
- val forced = symbolsNotPresentInBytecode
+ val _ = symbolsNotPresentInBytecode
isInitialized = true
} //init
-
- var nbScalaCallers: Int = 0
- def newScalaCaller(delegateType: Type): MethodSymbol = {
- assert(forMSIL, "scalaCallers can only be created if target is .NET")
- // object: reference to object on which to call (scala-)method
- val paramTypes: List[Type] = List(ObjectClass.tpe)
- val name = newTermName("$scalaCaller$$" + nbScalaCallers)
- // tparam => resultType, which is the resultType of PolyType, i.e. the result type after applying the
- // type parameter =-> a MethodType in this case
- // TODO: set type bounds manually (-> MulticastDelegate), see newTypeParam
- val newCaller = enterNewMethod(DelegateClass, name, paramTypes, delegateType, FINAL | STATIC)
- // val newCaller = newPolyMethod(DelegateClass, name,
- // tparam => MethodType(paramTypes, tparam.typeConstructor)) setFlag (FINAL | STATIC)
- Delegate_scalaCallers = Delegate_scalaCallers ::: List(newCaller)
- nbScalaCallers += 1
- newCaller
- }
-
- // def addScalaCallerInfo(scalaCaller: Symbol, methSym: Symbol, delType: Type) {
- // assert(Delegate_scalaCallers contains scalaCaller)
- // Delegate_scalaCallerInfos += (scalaCaller -> (methSym, delType))
- // }
-
- def addScalaCallerInfo(scalaCaller: Symbol, methSym: Symbol) {
- assert(Delegate_scalaCallers contains scalaCaller)
- Delegate_scalaCallerTargets += (scalaCaller -> methSym)
- }
}
}
diff --git a/src/reflect/scala/reflect/internal/ExistentialsAndSkolems.scala b/src/reflect/scala/reflect/internal/ExistentialsAndSkolems.scala
index 3bcb793926..6251849182 100644
--- a/src/reflect/scala/reflect/internal/ExistentialsAndSkolems.scala
+++ b/src/reflect/scala/reflect/internal/ExistentialsAndSkolems.scala
@@ -3,11 +3,11 @@
* @author Martin Odersky
*/
-package scala.reflect
+package scala
+package reflect
package internal
import scala.collection.{ mutable, immutable }
-import util._
/** The name of this trait defines the eventual intent better than
* it does the initial contents.
diff --git a/src/reflect/scala/reflect/internal/FatalError.scala b/src/reflect/scala/reflect/internal/FatalError.scala
index a084fc24f3..08a9a635af 100644
--- a/src/reflect/scala/reflect/internal/FatalError.scala
+++ b/src/reflect/scala/reflect/internal/FatalError.scala
@@ -2,5 +2,6 @@
* Copyright 2005-2013 LAMP/EPFL
* @author Martin Odersky
*/
-package scala.reflect.internal
+package scala
+package reflect.internal
case class FatalError(msg: String) extends Exception(msg)
diff --git a/src/reflect/scala/reflect/internal/FlagSets.scala b/src/reflect/scala/reflect/internal/FlagSets.scala
index 6a3b6870a0..961adb2c57 100644
--- a/src/reflect/scala/reflect/internal/FlagSets.scala
+++ b/src/reflect/scala/reflect/internal/FlagSets.scala
@@ -1,4 +1,5 @@
-package scala.reflect
+package scala
+package reflect
package internal
import scala.language.implicitConversions
diff --git a/src/reflect/scala/reflect/internal/Flags.scala b/src/reflect/scala/reflect/internal/Flags.scala
index 5ebe02d95d..b8e3407824 100644
--- a/src/reflect/scala/reflect/internal/Flags.scala
+++ b/src/reflect/scala/reflect/internal/Flags.scala
@@ -3,7 +3,8 @@
* @author Martin Odersky
*/
-package scala.reflect
+package scala
+package reflect
package internal
import scala.collection.{ mutable, immutable }
@@ -116,8 +117,21 @@ class ModifierFlags {
final val LAZY = 1L << 31 // symbol is a lazy val. can't have MUTABLE unless transformed by typer
final val PRESUPER = 1L << 37 // value is evaluated before super call
final val DEFAULTINIT = 1L << 41 // symbol is initialized to the default value: used by -Xcheckinit
- // ARTIFACT at #46 in 2.11+
- final val DEFAULTMETHOD = 1L << 47 // symbol is a java default method
+ final val ARTIFACT = 1L << 46 // symbol should be ignored when typechecking; will be marked ACC_SYNTHETIC in bytecode
+ final val DEFAULTMETHOD = 1L << 47 // symbol is a java default method
+
+ /** Symbols which are marked ARTIFACT. (Expand this list?)
+ *
+ * - $outer fields and accessors
+ * - super accessors
+ * - protected accessors
+ * - lazy local accessors
+ * - bridge methods
+ * - default argument getters
+ * - evaluation-order preserving locals for right-associative and out-of-order named arguments
+ * - catch-expression storing vals
+ * - anything else which receives a setFlag(ARTIFACT)
+ */
// Overridden.
def flagToString(flag: Long): String = ""
@@ -163,11 +177,10 @@ class Flags extends ModifierFlags {
final val VBRIDGE = 1L << 42 // symbol is a varargs bridge
final val VARARGS = 1L << 43 // symbol is a Java-style varargs method
- final val TRIEDCOOKING = 1L << 44 // ``Cooking'' has been tried on this symbol
- // A Java method's type is ``cooked'' by transforming raw types to existentials
+ final val TRIEDCOOKING = 1L << 44 // `Cooking` has been tried on this symbol
+ // A Java method's type is `cooked` by transforming raw types to existentials
final val SYNCHRONIZED = 1L << 45 // symbol is a method which should be marked ACC_SYNCHRONIZED
- final val ARTIFACT = 1L << 46 // symbol should be ignored when typechecking; will be marked ACC_SYNTHETIC in bytecode
// ------- shift definitions -------------------------------------------------------
@@ -250,7 +263,7 @@ class Flags extends ModifierFlags {
/** These modifiers appear in TreePrinter output. */
final val PrintableFlags =
ExplicitFlags | BridgeFlags | LOCAL | SYNTHETIC | STABLE | CASEACCESSOR | MACRO |
- ACCESSOR | SUPERACCESSOR | PARAMACCESSOR | STATIC | SPECIALIZED | SYNCHRONIZED
+ ACCESSOR | SUPERACCESSOR | PARAMACCESSOR | STATIC | SPECIALIZED | SYNCHRONIZED | ARTIFACT
/** When a symbol for a field is created, only these flags survive
* from Modifiers. Others which may be applied at creation time are:
@@ -296,7 +309,11 @@ class Flags extends ModifierFlags {
assert((OverloadedFlagsMask & FlagsNotPickled) == 0, flagsToString(OverloadedFlagsMask & FlagsNotPickled))
/** These flags are pickled */
- final val PickledFlags = InitialFlags & ~FlagsNotPickled
+ final val PickledFlags = (
+ (InitialFlags & ~FlagsNotPickled)
+ | notPRIVATE // for value class constructors (SI-6601), and private members referenced
+ // in @inline-marked methods publicized in SuperAccessors (see SI-6608, e6b4204604)
+ )
/** If we have a top-level class or module
* and someone asks us for a flag not in TopLevelPickledFlags,
@@ -422,8 +439,8 @@ class Flags extends ModifierFlags {
case VARARGS => "<varargs>" // (1L << 43)
case TRIEDCOOKING => "<triedcooking>" // (1L << 44)
case SYNCHRONIZED => "<synchronized>" // (1L << 45)
- case 0x400000000000L => "" // (1L << 46)
- case DEFAULTMETHOD => "<defaultmethod>" // (1L << 47)
+ case ARTIFACT => "<artifact>" // (1L << 46)
+ case DEFAULTMETHOD => "<defaultmethod>" // (1L << 47)
case 0x1000000000000L => "" // (1L << 48)
case 0x2000000000000L => "" // (1L << 49)
case 0x4000000000000L => "" // (1L << 50)
@@ -497,4 +514,4 @@ class Flags extends ModifierFlags {
final val rawFlagPickledOrder: Array[Long] = pickledListOrder.toArray
}
-object Flags extends Flags { }
+object Flags extends Flags
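
A small sketch (assumed scratch code against a scala-reflect build containing this change) of the ARTIFACT bit now exposed in ModifierFlags and rendered by flagToString:

    import scala.reflect.internal.Flags

    object ArtifactFlagDemo extends App {
      assert(Flags.ARTIFACT == (1L << 46))                        // bit position documented above
      println(Flags.flagToString(Flags.ARTIFACT))                 // "<artifact>" after this change
      println(Flags.flagsToString(Flags.FINAL | Flags.ARTIFACT))  // e.g. "final <artifact>"
    }
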
diff --git a/src/reflect/scala/reflect/internal/HasFlags.scala b/src/reflect/scala/reflect/internal/HasFlags.scala
index 4a3663b8ea..420b5fef81 100644
--- a/src/reflect/scala/reflect/internal/HasFlags.scala
+++ b/src/reflect/scala/reflect/internal/HasFlags.scala
@@ -1,4 +1,5 @@
-package scala.reflect
+package scala
+package reflect
package internal
import Flags._
@@ -158,13 +159,14 @@ trait HasFlags {
else nonAccess + " " + access
}
+ // Guess this can't be deprecated seeing as it's in the reflect API.
+ def isParameter = hasFlag(PARAM)
+
// Backward compat section
@deprecated( "Use isTrait", "2.10.0")
def hasTraitFlag = hasFlag(TRAIT)
@deprecated("Use hasDefault", "2.10.0")
def hasDefaultFlag = hasFlag(DEFAULTPARAM)
- @deprecated("Use isValueParameter or isTypeParameter", "2.10.0")
- def isParameter = hasFlag(PARAM)
@deprecated("Use flagString", "2.10.0")
def defaultFlagString = flagString
@deprecated("Use flagString(mask)", "2.10.0")
diff --git a/src/reflect/scala/reflect/internal/Importers.scala b/src/reflect/scala/reflect/internal/Importers.scala
index 43902c1930..f8584ac9b0 100644
--- a/src/reflect/scala/reflect/internal/Importers.scala
+++ b/src/reflect/scala/reflect/internal/Importers.scala
@@ -1,21 +1,22 @@
-package scala.reflect
+package scala
+package reflect
package internal
import scala.collection.mutable.WeakHashMap
import scala.ref.WeakReference
// SI-6241: move importers to a mirror
-trait Importers extends api.Importers { self: SymbolTable =>
+trait Importers extends api.Importers { to: SymbolTable =>
def mkImporter(from0: api.Universe): Importer { val from: from0.type } = (
- if (self eq from0) {
+ if (to eq from0) {
new Importer {
val from = from0
- val reverse = this.asInstanceOf[from.Importer{ val from: self.type }]
- def importSymbol(sym: from.Symbol) = sym.asInstanceOf[self.Symbol]
- def importType(tpe: from.Type) = tpe.asInstanceOf[self.Type]
- def importTree(tree: from.Tree) = tree.asInstanceOf[self.Tree]
- def importPosition(pos: from.Position) = pos.asInstanceOf[self.Position]
+ val reverse = this.asInstanceOf[from.Importer{ val from: to.type }]
+ def importSymbol(their: from.Symbol) = their.asInstanceOf[to.Symbol]
+ def importType(their: from.Type) = their.asInstanceOf[to.Type]
+ def importTree(their: from.Tree) = their.asInstanceOf[to.Tree]
+ def importPosition(their: from.Position) = their.asInstanceOf[to.Position]
}
} else {
// todo. fix this loophole
@@ -28,8 +29,8 @@ trait Importers extends api.Importers { self: SymbolTable =>
val from: SymbolTable
- protected lazy val symMap = new Cache[from.Symbol, Symbol]()
- protected lazy val tpeMap = new Cache[from.Type, Type]()
+ protected lazy val symMap = new Cache[from.Symbol, to.Symbol]()
+ protected lazy val tpeMap = new Cache[from.Type, to.Type]()
protected class Cache[K <: AnyRef, V <: AnyRef] extends WeakHashMap[K, WeakReference[V]] {
def weakGet(key: K): Option[V] = this get key flatMap WeakReference.unapply
def weakUpdate(key: K, value: V) = this.update(key, WeakReference(value))
@@ -49,158 +50,162 @@ trait Importers extends api.Importers { self: SymbolTable =>
}
object reverse extends from.StandardImporter {
- val from: self.type = self
+ val from: to.type = to
// FIXME this and reverse should be constantly kept in sync
// not just synced once upon the first usage of reverse
- for ((fromsym, WeakReference(mysym)) <- StandardImporter.this.symMap) symMap += ((mysym, WeakReference(fromsym)))
- for ((fromtpe, WeakReference(mytpe)) <- StandardImporter.this.tpeMap) tpeMap += ((mytpe, WeakReference(fromtpe)))
+ for ((theirsym, WeakReference(mysym)) <- StandardImporter.this.symMap) symMap += ((mysym, WeakReference(theirsym)))
+ for ((theirtpe, WeakReference(mytpe)) <- StandardImporter.this.tpeMap) tpeMap += ((mytpe, WeakReference(theirtpe)))
}
- // todo. careful import of positions
- def importPosition(pos: from.Position): Position =
- pos.asInstanceOf[Position]
+ // ============== SYMBOLS ==============
+
+ protected def recreatedSymbolCompleter(my: to.Symbol, their: from.Symbol) = {
+ // we lock the symbol that is imported for a very short period of time
+ // i.e. only while the type parameters of the symbol are being imported
+ // the lock is used to communicate to the recursive importSymbol calls
+ // that type parameters need to be created from scratch
+ // because otherwise type parameters are imported by looking into owner.typeParams
+ // which is obviously unavailable while the completer is being created
+ try {
+ my setFlag Flags.LOCKED
+ val mytypeParams = their.typeParams map importSymbol
+ new LazyPolyType(mytypeParams) with FlagAgnosticCompleter {
+ override def complete(my: to.Symbol): Unit = {
+ val theirCore = their.info match {
+ case from.PolyType(_, core) => core
+ case core => core
+ }
+ my setInfo GenPolyType(mytypeParams, importType(theirCore))
+ my setAnnotations (their.annotations map importAnnotationInfo)
+ }
+ }
+ } finally {
+ my resetFlag Flags.LOCKED
+ }
+ }
+
+ protected def recreateSymbol(their: from.Symbol): to.Symbol = {
+ val myowner = importSymbol(their.owner)
+ val mypos = importPosition(their.pos)
+ val myname = importName(their.name)
+ val myflags = their.flags
+ def linkReferenced(my: TermSymbol, their: from.TermSymbol, op: from.Symbol => Symbol): Symbol = {
+ symMap.weakUpdate(their, my)
+ my.referenced = op(their.referenced)
+ my
+ }
+ val my = their match {
+ case their: from.MethodSymbol =>
+ linkReferenced(myowner.newMethod(myname.toTermName, mypos, myflags), their, importSymbol)
+ case their: from.ModuleSymbol =>
+ val ret = linkReferenced(myowner.newModuleSymbol(myname.toTermName, mypos, myflags), their, importSymbol)
+ ret.associatedFile = their.associatedFile
+ ret
+ case their: from.FreeTermSymbol =>
+ newFreeTermSymbol(myname.toTermName, their.value, their.flags, their.origin) setInfo importType(their.info)
+ case their: from.FreeTypeSymbol =>
+ newFreeTypeSymbol(myname.toTypeName, their.flags, their.origin)
+ case their: from.TermSymbol =>
+ linkReferenced(myowner.newValue(myname.toTermName, mypos, myflags), their, importSymbol)
+ case their: from.TypeSkolem =>
+ val origin = their.unpackLocation match {
+ case null => null
+ case theirloc: from.Tree => importTree(theirloc)
+ case theirloc: from.Symbol => importSymbol(theirloc)
+ }
+ myowner.newTypeSkolemSymbol(myname.toTypeName, origin, mypos, myflags)
+ case their: from.ModuleClassSymbol =>
+ val my = myowner.newModuleClass(myname.toTypeName, mypos, myflags)
+ symMap.weakUpdate(their, my)
+ my.sourceModule = importSymbol(their.sourceModule)
+ my
+ case their: from.ClassSymbol =>
+ val my = myowner.newClassSymbol(myname.toTypeName, mypos, myflags)
+ symMap.weakUpdate(their, my)
+ if (their.thisSym != their) {
+ my.typeOfThis = importType(their.typeOfThis)
+ my.thisSym setName importName(their.thisSym.name)
+ }
+ my.associatedFile = their.associatedFile
+ my
+ case their: from.TypeSymbol =>
+ myowner.newTypeSymbol(myname.toTypeName, mypos, myflags)
+ }
+ symMap.weakUpdate(their, my)
+ my setInfo recreatedSymbolCompleter(my, their)
+ }
- def importSymbol(sym0: from.Symbol): Symbol = {
- def doImport(sym: from.Symbol): Symbol =
- symMap weakGet sym match {
+ def importSymbol(their0: from.Symbol): Symbol = {
+ def cachedRecreateSymbol(their: from.Symbol): Symbol =
+ symMap weakGet their match {
case Some(result) => result
- case _ =>
- val myowner = importSymbol(sym.owner)
- val mypos = importPosition(sym.pos)
- val myname = importName(sym.name).toTermName
- val myflags = sym.flags
- def linkReferenced(mysym: TermSymbol, x: from.TermSymbol, op: from.Symbol => Symbol): Symbol = {
- symMap.weakUpdate(x, mysym)
- mysym.referenced = op(x.referenced)
- mysym
- }
- val mysym = sym match {
- case x: from.MethodSymbol =>
- linkReferenced(myowner.newMethod(myname, mypos, myflags), x, importSymbol)
- case x: from.ModuleSymbol =>
- linkReferenced(myowner.newModuleSymbol(myname, mypos, myflags), x, importSymbol)
- case x: from.FreeTermSymbol =>
- newFreeTermSymbol(importName(x.name).toTermName, x.value, x.flags, x.origin) setInfo importType(x.info)
- case x: from.FreeTypeSymbol =>
- newFreeTypeSymbol(importName(x.name).toTypeName, x.flags, x.origin)
- case x: from.TermSymbol =>
- linkReferenced(myowner.newValue(myname, mypos, myflags), x, importSymbol)
- case x: from.TypeSkolem =>
- val origin = x.unpackLocation match {
- case null => null
- case y: from.Tree => importTree(y)
- case y: from.Symbol => importSymbol(y)
- }
- myowner.newTypeSkolemSymbol(myname.toTypeName, origin, mypos, myflags)
- case x: from.ModuleClassSymbol =>
- val mysym = myowner.newModuleClass(myname.toTypeName, mypos, myflags)
- symMap.weakUpdate(x, mysym)
- mysym.sourceModule = importSymbol(x.sourceModule)
- mysym
- case x: from.ClassSymbol =>
- val mysym = myowner.newClassSymbol(myname.toTypeName, mypos, myflags)
- symMap.weakUpdate(x, mysym)
- if (sym.thisSym != sym) {
- mysym.typeOfThis = importType(sym.typeOfThis)
- mysym.thisSym setName importName(sym.thisSym.name)
- }
- mysym
- case x: from.TypeSymbol =>
- myowner.newTypeSymbol(myname.toTypeName, mypos, myflags)
- }
- symMap.weakUpdate(sym, mysym)
- mysym setFlag Flags.LOCKED
- mysym setInfo {
- val mytypeParams = sym.typeParams map importSymbol
- new LazyPolyType(mytypeParams) with FlagAgnosticCompleter {
- override def complete(s: Symbol) {
- val result = sym.info match {
- case from.PolyType(_, res) => res
- case result => result
- }
- s setInfo GenPolyType(mytypeParams, importType(result))
- s setAnnotations (sym.annotations map importAnnotationInfo)
- }
- }
- }
- mysym resetFlag Flags.LOCKED
- } // end doImport
+ case _ => recreateSymbol(their)
+ }
- def importOrRelink: Symbol = {
- val sym = sym0 // makes sym visible in the debugger
- if (sym == null)
+ def recreateOrRelink: Symbol = {
+ val their = their0 // makes their visible in the debugger
+ if (their == null)
null
- else if (sym == from.NoSymbol)
+ else if (their == from.NoSymbol)
NoSymbol
- else if (sym.isRoot)
+ else if (their.isRoot)
rootMirror.RootClass // !!! replace with actual mirror when we move importers to the mirror
else {
- val name = sym.name
- val owner = sym.owner
- var scope = if (owner.isClass && !owner.isRefinementClass) owner.info else from.NoType
- var existing = scope.decl(name)
- if (sym.isModuleClass)
- existing = existing.moduleClass
-
- if (!existing.exists) scope = from.NoType
-
- val myname = importName(name)
- val myowner = importSymbol(owner)
- val myscope = if (scope != from.NoType && !(myowner hasFlag Flags.LOCKED)) myowner.info else NoType
- var myexisting = if (myscope != NoType) myowner.info.decl(myname) else NoSymbol // cannot load myexisting in general case, because it creates cycles for methods
- if (sym.isModuleClass)
- myexisting = importSymbol(sym.sourceModule).moduleClass
-
- if (!sym.isOverloaded && myexisting.isOverloaded) {
- myexisting =
- if (sym.isMethod) {
- val localCopy = doImport(sym)
- myexisting filter (_.tpe matches localCopy.tpe)
- } else {
- myexisting filter (!_.isMethod)
+ val isModuleClass = their.isModuleClass
+ val isTparam = their.isTypeParameter && their.paramPos >= 0
+ val isOverloaded = their.isOverloaded
+
+ var theirscope = if (their.owner.isClass && !their.owner.isRefinementClass) their.owner.info else from.NoType
+ val theirexisting = if (isModuleClass) theirscope.decl(their.name).moduleClass else theirscope.decl(their.name)
+ if (!theirexisting.exists) theirscope = from.NoType
+
+ val myname = importName(their.name)
+ val myowner = importSymbol(their.owner)
+ val myscope = if (theirscope != from.NoType && !(myowner hasFlag Flags.LOCKED)) myowner.info else NoType
+ val myexisting = {
+ if (isModuleClass) importSymbol(their.sourceModule).moduleClass
+ else if (isTparam) (if (myowner hasFlag Flags.LOCKED) NoSymbol else myowner.typeParams(their.paramPos))
+ else if (isOverloaded) myowner.newOverloaded(myowner.thisType, their.alternatives map importSymbol)
+ else {
+ def disambiguate(my: Symbol) = {
+ val result =
+ if (their.isMethod) {
+ val localCopy = cachedRecreateSymbol(their)
+ my filter (_.tpe matches localCopy.tpe)
+ } else {
+ my filter (!_.isMethod)
+ }
+ assert(!result.isOverloaded,
+ "import failure: cannot determine unique overloaded method alternative from\n "+
+ (result.alternatives map (_.defString) mkString "\n")+"\n that matches "+their+":"+their.tpe)
+ result
}
- assert(!myexisting.isOverloaded,
- "import failure: cannot determine unique overloaded method alternative from\n "+
- (myexisting.alternatives map (_.defString) mkString "\n")+"\n that matches "+sym+":"+sym.tpe)
- }
- val mysym = {
- if (sym.isOverloaded) {
- myowner.newOverloaded(myowner.thisType, sym.alternatives map importSymbol)
- } else if (sym.isTypeParameter && sym.paramPos >= 0 && !(myowner hasFlag Flags.LOCKED)) {
- assert(myowner.typeParams.length > sym.paramPos,
- "import failure: cannot determine parameter "+sym+" (#"+sym.paramPos+") in "+
- myowner+typeParamsString(myowner.rawInfo)+"\n original symbol was: "+
- sym.owner+from.typeParamsString(sym.owner.info))
- myowner.typeParams(sym.paramPos)
- } else {
- if (myexisting != NoSymbol) {
- myexisting
- } else {
- val mysym = doImport(sym)
-
- if (myscope != NoType) {
- assert(myowner.info.decls.lookup(myname) == NoSymbol, myname+" "+myowner.info.decl(myname)+" "+myexisting)
- myowner.info.decls enter mysym
- }
-
- mysym
- }
+ val myexisting = if (myscope != NoType) myscope.decl(myname) else NoSymbol
+ if (myexisting.isOverloaded) disambiguate(myexisting)
+ else myexisting
}
}
- mysym
+ myexisting.orElse {
+ val my = cachedRecreateSymbol(their)
+ if (myscope != NoType) {
+ assert(myscope.decls.lookup(myname) == NoSymbol, myname+" "+myscope.decl(myname)+" "+myexisting)
+ myscope.decls enter my
+ }
+ my
+ }
}
- } // end importOrRelink
+ } // end recreateOrRelink
- val sym = sym0
- symMap.weakGet(sym) match {
+ val their = their0
+ symMap.weakGet(their) match {
case Some(result) => result
case None =>
pendingSyms += 1
try {
- val result = importOrRelink
- symMap.weakUpdate(sym, result)
+ val result = recreateOrRelink
+ symMap.weakUpdate(their, result)
result
} finally {
pendingSyms -= 1
@@ -209,71 +214,70 @@ trait Importers extends api.Importers { self: SymbolTable =>
}
}
- def importType(tpe: from.Type): Type = {
- def doImport(tpe: from.Type): Type = tpe match {
- case from.TypeRef(pre, sym, args) =>
- TypeRef(importType(pre), importSymbol(sym), args map importType)
- case from.ThisType(clazz) =>
- ThisType(importSymbol(clazz))
- case from.SingleType(pre, sym) =>
- SingleType(importType(pre), importSymbol(sym))
- case from.MethodType(params, restpe) =>
- MethodType(params map importSymbol, importType(restpe))
- case from.PolyType(tparams, restpe) =>
- PolyType(tparams map importSymbol, importType(restpe))
- case from.NullaryMethodType(restpe) =>
- NullaryMethodType(importType(restpe))
- case from.ConstantType(constant @ from.Constant(_)) =>
- ConstantType(importConstant(constant))
- case from.SuperType(thistpe, supertpe) =>
- SuperType(importType(thistpe), importType(supertpe))
- case from.TypeBounds(lo, hi) =>
- TypeBounds(importType(lo), importType(hi))
- case from.BoundedWildcardType(bounds) =>
- BoundedWildcardType(importTypeBounds(bounds))
- case from.ClassInfoType(parents, decls, clazz) =>
- val myclazz = importSymbol(clazz)
- val myscope = if (myclazz.isPackageClass) newPackageScope(myclazz) else newScope
- val myclazzTpe = ClassInfoType(parents map importType, myscope, myclazz)
- myclazz setInfo GenPolyType(myclazz.typeParams, myclazzTpe) // needed so that newly created symbols find their scope
- decls foreach importSymbol // will enter itself into myclazz
- myclazzTpe
- case from.RefinedType(parents, decls) =>
- RefinedType(parents map importType, importScope(decls), importSymbol(tpe.typeSymbol))
- case from.ExistentialType(tparams, restpe) =>
- newExistentialType(tparams map importSymbol, importType(restpe))
- case from.OverloadedType(pre, alts) =>
- OverloadedType(importType(pre), alts map importSymbol)
- case from.AntiPolyType(pre, targs) =>
- AntiPolyType(importType(pre), targs map importType)
- case x: from.TypeVar =>
- TypeVar(importType(x.origin), importTypeConstraint(x.constr), x.typeArgs map importType, x.params map importSymbol)
- case from.NotNullType(tpe) =>
- NotNullType(importType(tpe))
- case from.AnnotatedType(annots, tpe, selfsym) =>
- AnnotatedType(annots map importAnnotationInfo, importType(tpe), importSymbol(selfsym))
- case from.ErrorType =>
- ErrorType
- case from.WildcardType =>
- WildcardType
- case from.NoType =>
- NoType
- case from.NoPrefix =>
- NoPrefix
- case null =>
- null
- } // end doImport
-
- def importOrRelink: Type =
- doImport(tpe)
+ // ============== TYPES ==============
+
+ def recreateType(their: from.Type): Type = their match {
+ case from.TypeRef(pre, sym, args) =>
+ TypeRef(importType(pre), importSymbol(sym), args map importType)
+ case from.ThisType(clazz) =>
+ ThisType(importSymbol(clazz))
+ case from.SingleType(pre, sym) =>
+ SingleType(importType(pre), importSymbol(sym))
+ case from.MethodType(params, result) =>
+ MethodType(params map importSymbol, importType(result))
+ case from.PolyType(tparams, result) =>
+ PolyType(tparams map importSymbol, importType(result))
+ case from.NullaryMethodType(result) =>
+ NullaryMethodType(importType(result))
+ case from.ConstantType(constant @ from.Constant(_)) =>
+ ConstantType(importConstant(constant))
+ case from.SuperType(thistpe, supertpe) =>
+ SuperType(importType(thistpe), importType(supertpe))
+ case from.TypeBounds(lo, hi) =>
+ TypeBounds(importType(lo), importType(hi))
+ case from.BoundedWildcardType(bounds) =>
+ BoundedWildcardType(importType(bounds).asInstanceOf[TypeBounds])
+ case from.ClassInfoType(parents, decls, clazz) =>
+ val myclazz = importSymbol(clazz)
+ val myscope = if (myclazz.isPackageClass) newPackageScope(myclazz) else newScope
+ val myclazzTpe = ClassInfoType(parents map importType, myscope, myclazz)
+ myclazz setInfo GenPolyType(myclazz.typeParams, myclazzTpe) // needed so that newly created symbols find their scope
+ decls foreach importSymbol // will enter itself into myclazz
+ myclazzTpe
+ case from.RefinedType(parents, decls) =>
+ RefinedType(parents map importType, importScope(decls), importSymbol(their.typeSymbol))
+ case from.ExistentialType(tparams, result) =>
+ newExistentialType(tparams map importSymbol, importType(result))
+ case from.OverloadedType(pre, alts) =>
+ OverloadedType(importType(pre), alts map importSymbol)
+ case from.AntiPolyType(pre, targs) =>
+ AntiPolyType(importType(pre), targs map importType)
+ case their: from.TypeVar =>
+ val myconstr = new TypeConstraint(their.constr.loBounds map importType, their.constr.hiBounds map importType)
+ myconstr.inst = importType(their.constr.inst)
+ TypeVar(importType(their.origin), myconstr, their.typeArgs map importType, their.params map importSymbol)
+ case from.AnnotatedType(annots, result, selfsym) =>
+ AnnotatedType(annots map importAnnotationInfo, importType(result), importSymbol(selfsym))
+ case from.ErrorType =>
+ ErrorType
+ case from.WildcardType =>
+ WildcardType
+ case from.NoType =>
+ NoType
+ case from.NoPrefix =>
+ NoPrefix
+ case null =>
+ null
+ }
- tpeMap.weakGet(tpe) match {
+ def importType(their: from.Type): Type = {
+ tpeMap.weakGet(their) match {
case Some(result) => result
case None =>
pendingTpes += 1
try {
- val result = importOrRelink
- tpeMap.weakUpdate(tpe, result)
+ val result = recreateType(their)
+ tpeMap.weakUpdate(their, result)
result
} finally {
pendingTpes -= 1
@@ -282,7 +286,136 @@ trait Importers extends api.Importers { self: SymbolTable =>
}
}
- def importTypeBounds(bounds: from.TypeBounds) = importType(bounds).asInstanceOf[TypeBounds]
+ // ============== TREES ==============
+
+ def recreatedTreeCompleter(their: from.Tree, my: to.Tree): Unit = {
+ if (their.canHaveAttrs) {
+ if (my.hasSymbolField) my.symbol = importSymbol(their.symbol)
+ my.pos = importPosition(their.pos)
+ (their, my) match {
+ case (their: from.TypeTree, my: to.TypeTree) =>
+ if (their.wasEmpty) my.defineType(importType(their.tpe)) else my.setType(importType(their.tpe))
+ case (_, _) =>
+ my.tpe = importType(their.tpe)
+ }
+ }
+ }
+
+ def recreateTree(their: from.Tree): to.Tree = their match {
+ case from.ClassDef(mods, name, tparams, impl) =>
+ new ClassDef(importModifiers(mods), importName(name).toTypeName, tparams map importTypeDef, importTemplate(impl))
+ case from.PackageDef(pid, stats) =>
+ new PackageDef(importRefTree(pid), stats map importTree)
+ case from.ModuleDef(mods, name, impl) =>
+ new ModuleDef(importModifiers(mods), importName(name).toTermName, importTemplate(impl))
+ case from.emptyValDef =>
+ emptyValDef
+ case from.pendingSuperCall =>
+ pendingSuperCall
+ case from.ValDef(mods, name, tpt, rhs) =>
+ new ValDef(importModifiers(mods), importName(name).toTermName, importTree(tpt), importTree(rhs))
+ case from.DefDef(mods, name, tparams, vparamss, tpt, rhs) =>
+ new DefDef(importModifiers(mods), importName(name).toTermName, tparams map importTypeDef, mmap(vparamss)(importValDef), importTree(tpt), importTree(rhs))
+ case from.TypeDef(mods, name, tparams, rhs) =>
+ new TypeDef(importModifiers(mods), importName(name).toTypeName, tparams map importTypeDef, importTree(rhs))
+ case from.LabelDef(name, params, rhs) =>
+ new LabelDef(importName(name).toTermName, params map importIdent, importTree(rhs))
+ case from.Import(expr, selectors) =>
+ new Import(importTree(expr), selectors map importImportSelector)
+ case from.Template(parents, self, body) =>
+ new Template(parents map importTree, importValDef(self), body map importTree)
+ case from.Block(stats, expr) =>
+ new Block(stats map importTree, importTree(expr))
+ case from.CaseDef(pat, guard, body) =>
+ new CaseDef(importTree(pat), importTree(guard), importTree(body))
+ case from.Alternative(trees) =>
+ new Alternative(trees map importTree)
+ case from.Star(elem) =>
+ new Star(importTree(elem))
+ case from.Bind(name, body) =>
+ new Bind(importName(name), importTree(body))
+ case from.UnApply(fun, args) =>
+ new UnApply(importTree(fun), args map importTree)
+ case from.ArrayValue(elemtpt ,elems) =>
+ new ArrayValue(importTree(elemtpt), elems map importTree)
+ case from.Function(vparams, body) =>
+ new Function(vparams map importValDef, importTree(body))
+ case from.Assign(lhs, rhs) =>
+ new Assign(importTree(lhs), importTree(rhs))
+ case from.AssignOrNamedArg(lhs, rhs) =>
+ new AssignOrNamedArg(importTree(lhs), importTree(rhs))
+ case from.If(cond, thenp, elsep) =>
+ new If(importTree(cond), importTree(thenp), importTree(elsep))
+ case from.Match(selector, cases) =>
+ new Match(importTree(selector), cases map importCaseDef)
+ case from.Return(expr) =>
+ new Return(importTree(expr))
+ case from.Try(block, catches, finalizer) =>
+ new Try(importTree(block), catches map importCaseDef, importTree(finalizer))
+ case from.Throw(expr) =>
+ new Throw(importTree(expr))
+ case from.New(tpt) =>
+ new New(importTree(tpt))
+ case from.Typed(expr, tpt) =>
+ new Typed(importTree(expr), importTree(tpt))
+ case from.TypeApply(fun, args) =>
+ new TypeApply(importTree(fun), args map importTree)
+ case from.Apply(fun, args) => their match {
+ case _: from.ApplyToImplicitArgs =>
+ new ApplyToImplicitArgs(importTree(fun), args map importTree)
+ case _: from.ApplyImplicitView =>
+ new ApplyImplicitView(importTree(fun), args map importTree)
+ case _ =>
+ new Apply(importTree(fun), args map importTree)
+ }
+ case from.ApplyDynamic(qual, args) =>
+ new ApplyDynamic(importTree(qual), args map importTree)
+ case from.Super(qual, mix) =>
+ new Super(importTree(qual), importName(mix).toTypeName)
+ case from.This(qual) =>
+ new This(importName(qual).toTypeName)
+ case from.Select(qual, name) =>
+ new Select(importTree(qual), importName(name))
+ case from.Ident(name) =>
+ new Ident(importName(name))
+ case from.ReferenceToBoxed(ident) =>
+ new ReferenceToBoxed(importTree(ident) match { case ident: Ident => ident })
+ case from.Literal(constant @ from.Constant(_)) =>
+ new Literal(importConstant(constant))
+ case theirtt @ from.TypeTree() =>
+ val mytt = TypeTree()
+ if (theirtt.original != null) mytt.setOriginal(importTree(theirtt.original))
+ mytt
+ case from.Annotated(annot, arg) =>
+ new Annotated(importTree(annot), importTree(arg))
+ case from.SingletonTypeTree(ref) =>
+ new SingletonTypeTree(importTree(ref))
+ case from.SelectFromTypeTree(qual, name) =>
+ new SelectFromTypeTree(importTree(qual), importName(name).toTypeName)
+ case from.CompoundTypeTree(templ) =>
+ new CompoundTypeTree(importTemplate(templ))
+ case from.AppliedTypeTree(tpt, args) =>
+ new AppliedTypeTree(importTree(tpt), args map importTree)
+ case from.TypeBoundsTree(lo, hi) =>
+ new TypeBoundsTree(importTree(lo), importTree(hi))
+ case from.ExistentialTypeTree(tpt, whereClauses) =>
+ new ExistentialTypeTree(importTree(tpt), whereClauses map importTree)
+ case from.EmptyTree =>
+ EmptyTree
+ case null =>
+ null
+ }
+
+ def importTree(their: from.Tree): Tree = {
+ val my = recreateTree(their)
+ if (my != null) {
+ addFixup(recreatedTreeCompleter(their, my))
+ tryFixup()
+ }
+ my
+ }
+
+ // ============== MISCELLANEOUS ==============
def importAnnotationInfo(ann: from.AnnotationInfo): AnnotationInfo = {
val atp1 = importType(ann.atp)
@@ -303,11 +436,9 @@ trait Importers extends api.Importers { self: SymbolTable =>
NestedAnnotArg(importAnnotationInfo(annInfo))
}
- def importTypeConstraint(constr: from.TypeConstraint): TypeConstraint = {
- val result = new TypeConstraint(constr.loBounds map importType, constr.hiBounds map importType)
- result.inst = importType(constr.inst)
- result
- }
+ // todo. careful import of positions
+ def importPosition(their: from.Position): to.Position =
+ their.asInstanceOf[Position]
// !!! todo: override to cater for PackageScopes
def importScope(decls: from.Scope): Scope =
@@ -315,137 +446,12 @@ trait Importers extends api.Importers { self: SymbolTable =>
def importName(name: from.Name): Name =
if (name.isTypeName) newTypeName(name.toString) else newTermName(name.toString)
- def importTypeName(name: from.TypeName): TypeName = importName(name).toTypeName
- def importTermName(name: from.TermName): TermName = importName(name).toTermName
def importModifiers(mods: from.Modifiers): Modifiers =
new Modifiers(mods.flags, importName(mods.privateWithin), mods.annotations map importTree)
def importImportSelector(sel: from.ImportSelector): ImportSelector =
new ImportSelector(importName(sel.name), sel.namePos, if (sel.rename != null) importName(sel.rename) else null, sel.renamePos)
-
- def importTree(tree: from.Tree): Tree = {
- val mytree = tree match {
- case from.ClassDef(mods, name, tparams, impl) =>
- new ClassDef(importModifiers(mods), importName(name).toTypeName, tparams map importTypeDef, importTemplate(impl))
- case from.PackageDef(pid, stats) =>
- new PackageDef(importRefTree(pid), stats map importTree)
- case from.ModuleDef(mods, name, impl) =>
- new ModuleDef(importModifiers(mods), importName(name).toTermName, importTemplate(impl))
- case from.emptyValDef =>
- emptyValDef
- case from.ValDef(mods, name, tpt, rhs) =>
- new ValDef(importModifiers(mods), importName(name).toTermName, importTree(tpt), importTree(rhs))
- case from.DefDef(mods, name, tparams, vparamss, tpt, rhs) =>
- new DefDef(importModifiers(mods), importName(name).toTermName, tparams map importTypeDef, mmap(vparamss)(importValDef), importTree(tpt), importTree(rhs))
- case from.TypeDef(mods, name, tparams, rhs) =>
- new TypeDef(importModifiers(mods), importName(name).toTypeName, tparams map importTypeDef, importTree(rhs))
- case from.LabelDef(name, params, rhs) =>
- new LabelDef(importName(name).toTermName, params map importIdent, importTree(rhs))
- case from.Import(expr, selectors) =>
- new Import(importTree(expr), selectors map importImportSelector)
- case from.Template(parents, self, body) =>
- new Template(parents map importTree, importValDef(self), body map importTree)
- case from.Block(stats, expr) =>
- new Block(stats map importTree, importTree(expr))
- case from.CaseDef(pat, guard, body) =>
- new CaseDef(importTree(pat), importTree(guard), importTree(body))
- case from.Alternative(trees) =>
- new Alternative(trees map importTree)
- case from.Star(elem) =>
- new Star(importTree(elem))
- case from.Bind(name, body) =>
- new Bind(importName(name), importTree(body))
- case from.UnApply(fun, args) =>
- new UnApply(importTree(fun), args map importTree)
- case from.ArrayValue(elemtpt ,elems) =>
- new ArrayValue(importTree(elemtpt), elems map importTree)
- case from.Function(vparams, body) =>
- new Function(vparams map importValDef, importTree(body))
- case from.Assign(lhs, rhs) =>
- new Assign(importTree(lhs), importTree(rhs))
- case from.AssignOrNamedArg(lhs, rhs) =>
- new AssignOrNamedArg(importTree(lhs), importTree(rhs))
- case from.If(cond, thenp, elsep) =>
- new If(importTree(cond), importTree(thenp), importTree(elsep))
- case from.Match(selector, cases) =>
- new Match(importTree(selector), cases map importCaseDef)
- case from.Return(expr) =>
- new Return(importTree(expr))
- case from.Try(block, catches, finalizer) =>
- new Try(importTree(block), catches map importCaseDef, importTree(finalizer))
- case from.Throw(expr) =>
- new Throw(importTree(expr))
- case from.New(tpt) =>
- new New(importTree(tpt))
- case from.Typed(expr, tpt) =>
- new Typed(importTree(expr), importTree(tpt))
- case from.TypeApply(fun, args) =>
- new TypeApply(importTree(fun), args map importTree)
- case from.Apply(fun, args) => tree match {
- case _: from.ApplyToImplicitArgs =>
- new ApplyToImplicitArgs(importTree(fun), args map importTree)
- case _: from.ApplyImplicitView =>
- new ApplyImplicitView(importTree(fun), args map importTree)
- case _ =>
- new Apply(importTree(fun), args map importTree)
- }
- case from.ApplyDynamic(qual, args) =>
- new ApplyDynamic(importTree(qual), args map importTree)
- case from.Super(qual, mix) =>
- new Super(importTree(qual), importTypeName(mix))
- case from.This(qual) =>
- new This(importName(qual).toTypeName)
- case from.Select(qual, name) =>
- new Select(importTree(qual), importName(name))
- case from.Ident(name) =>
- new Ident(importName(name))
- case from.ReferenceToBoxed(ident) =>
- new ReferenceToBoxed(importTree(ident) match { case ident: Ident => ident })
- case from.Literal(constant @ from.Constant(_)) =>
- new Literal(importConstant(constant))
- case from.TypeTree() =>
- new TypeTree()
- case from.Annotated(annot, arg) =>
- new Annotated(importTree(annot), importTree(arg))
- case from.SingletonTypeTree(ref) =>
- new SingletonTypeTree(importTree(ref))
- case from.SelectFromTypeTree(qual, name) =>
- new SelectFromTypeTree(importTree(qual), importName(name).toTypeName)
- case from.CompoundTypeTree(templ) =>
- new CompoundTypeTree(importTemplate(templ))
- case from.AppliedTypeTree(tpt, args) =>
- new AppliedTypeTree(importTree(tpt), args map importTree)
- case from.TypeBoundsTree(lo, hi) =>
- new TypeBoundsTree(importTree(lo), importTree(hi))
- case from.ExistentialTypeTree(tpt, whereClauses) =>
- new ExistentialTypeTree(importTree(tpt), whereClauses map importTree)
- case from.EmptyTree =>
- EmptyTree
- case null =>
- null
- }
- addFixup({
- if (mytree != null) {
- val mysym = if (tree.hasSymbol) importSymbol(tree.symbol) else NoSymbol
- val mytpe = importType(tree.tpe)
-
- mytree match {
- case mytt: TypeTree =>
- val tt = tree.asInstanceOf[from.TypeTree]
- if (mytree.hasSymbol) mytt.symbol = mysym
- if (tt.wasEmpty) mytt.defineType(mytpe) else mytt.setType(mytpe)
- if (tt.original != null) mytt.setOriginal(importTree(tt.original))
- case _ =>
- if (mytree.hasSymbol) mytree.symbol = importSymbol(tree.symbol)
- mytree.tpe = importType(tree.tpe)
- }
- }
- })
- tryFixup()
- mytree
- }
-
def importValDef(tree: from.ValDef): ValDef = importTree(tree).asInstanceOf[ValDef]
def importTypeDef(tree: from.TypeDef): TypeDef = importTree(tree).asInstanceOf[TypeDef]
def importTemplate(tree: from.Template): Template = importTree(tree).asInstanceOf[Template]
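
For orientation, a minimal sketch (assumed scratch code; mkImporter is the entry point declared in this trait, reachable here through the public reflection universe of this era) of driving the importer. With identical source and target universes it takes the cheap `to eq from0` identity path:

    import scala.reflect.runtime.{universe => ru}

    object ImporterDemo extends App {
      val importer = ru.mkImporter(ru)       // Importer { val from: ru.type }
      val copied: ru.Tree = importer.importTree(ru.reify(1 + 1).tree)
      println(copied)                        // prints the copied application of +
    }
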
diff --git a/src/reflect/scala/reflect/internal/InfoTransformers.scala b/src/reflect/scala/reflect/internal/InfoTransformers.scala
index 82904b0b68..3814259e22 100644
--- a/src/reflect/scala/reflect/internal/InfoTransformers.scala
+++ b/src/reflect/scala/reflect/internal/InfoTransformers.scala
@@ -3,7 +3,8 @@
* @author Martin Odersky
*/
-package scala.reflect
+package scala
+package reflect
package internal
trait InfoTransformers {
@@ -43,7 +44,7 @@ trait InfoTransformers {
if (from == this.pid) this
else if (from < this.pid)
if (prev.pid < from) this
- else prev.nextFrom(from);
+ else prev.nextFrom(from)
else if (next.pid == NoPhase.id) next
else next.nextFrom(from)
}
diff --git a/src/reflect/scala/reflect/internal/JMethodOrConstructor.scala b/src/reflect/scala/reflect/internal/JMethodOrConstructor.scala
new file mode 100644
index 0000000000..fb1cdb34e1
--- /dev/null
+++ b/src/reflect/scala/reflect/internal/JMethodOrConstructor.scala
@@ -0,0 +1,47 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author Paul Phillips
+ */
+package scala
+package reflect
+package internal
+
+import scala.language.implicitConversions
+import java.lang.{ Class => jClass }
+import java.lang.annotation.{ Annotation => jAnnotation }
+import java.lang.reflect.{
+ Member => jMember, Constructor => jConstructor, Method => jMethod,
+ AnnotatedElement => jAnnotatedElement, Type => jType,
+ TypeVariable => jTypeVariable
+}
+
+/** This class tries to abstract over some of the duplication
+ * in java.lang.reflect.{ Method, Constructor }.
+ */
+class JMethodOrConstructor(val member: jMember with jAnnotatedElement) {
+ def isVarArgs: Boolean = member match {
+ case m: jMethod => m.isVarArgs
+ case m: jConstructor[_] => m.isVarArgs
+ }
+ def typeParams: Array[_ <: jTypeVariable[_]] = member match {
+ case m: jMethod => m.getTypeParameters
+ case m: jConstructor[_] => m.getTypeParameters
+ }
+ def paramTypes: Array[jType] = member match {
+ case m: jMethod => m.getGenericParameterTypes
+ case m: jConstructor[_] => m.getGenericParameterTypes
+ }
+ def paramAnnotations: Array[Array[jAnnotation]] = member match {
+ case m: jMethod => m.getParameterAnnotations
+ case m: jConstructor[_] => m.getParameterAnnotations
+ }
+ def resultType: jType = member match {
+ case m: jMethod => m.getGenericReturnType
+ case m: jConstructor[_] => classOf[Unit]
+ }
+}
+
+object JMethodOrConstructor {
+ implicit def liftMethodToJmoc(m: jMethod): JMethodOrConstructor = new JMethodOrConstructor(m)
+ implicit def liftConstructorToJmoc(m: jConstructor[_]): JMethodOrConstructor = new JMethodOrConstructor(m)
+}
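
A usage sketch (assumed test code, not part of this patch) showing the uniform treatment the wrapper gives Method and Constructor via the implicit conversions above:

    import scala.reflect.internal.JMethodOrConstructor._

    object JmocDemo extends App {
      val meth = classOf[String].getMethod("substring", classOf[Int])
      val ctor = classOf[java.lang.StringBuilder].getConstructor(classOf[Int])
      // Both reflective members are lifted to JMethodOrConstructor, so the same
      // accessors apply to either.
      println(meth.paramTypes.toList)  // List(int)
      println(ctor.paramTypes.toList)  // List(int)
      println(meth.resultType)         // class java.lang.String
      println(ctor.resultType)         // void, i.e. classOf[Unit] per the definition above
    }
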
diff --git a/src/reflect/scala/reflect/internal/JavaAccFlags.scala b/src/reflect/scala/reflect/internal/JavaAccFlags.scala
new file mode 100644
index 0000000000..0a33b8cf0d
--- /dev/null
+++ b/src/reflect/scala/reflect/internal/JavaAccFlags.scala
@@ -0,0 +1,84 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author Paul Phillips
+ */
+package scala
+package reflect
+package internal
+
+import java.lang.{ Class => jClass }
+import java.lang.reflect.{ Member => jMember, Constructor => jConstructor, Field => jField, Method => jMethod }
+import JavaAccFlags._
+import ClassfileConstants._
+
+/** A value class which encodes the access_flags (JVMS 4.1)
+ * for a field, method, or class. The low 16 bits are the same
+ * as those returned by java.lang.reflect.Member#getModifiers
+ * and found in the bytecode.
+ *
+ * The high bits encode whether the access flags are directly
+ * associated with a class, constructor, field, or method.
+ */
+final class JavaAccFlags private (val coded: Int) extends AnyVal {
+ private def has(mask: Int) = (flags & mask) != 0
+ private def flagCarrierId = coded >>> 16
+ private def flags = coded & 0xFFFF
+
+ def isAbstract = has(JAVA_ACC_ABSTRACT)
+ def isAnnotation = has(JAVA_ACC_ANNOTATION)
+ def isBridge = has(JAVA_ACC_BRIDGE)
+ def isEnum = has(JAVA_ACC_ENUM)
+ def isFinal = has(JAVA_ACC_FINAL)
+ def isInterface = has(JAVA_ACC_INTERFACE)
+ def isNative = has(JAVA_ACC_NATIVE)
+ def isPrivate = has(JAVA_ACC_PRIVATE)
+ def isProtected = has(JAVA_ACC_PROTECTED)
+ def isPublic = has(JAVA_ACC_PUBLIC)
+ def isStatic = has(JAVA_ACC_STATIC)
+ def isStrictFp = has(JAVA_ACC_STRICT)
+ def isSuper = has(JAVA_ACC_SUPER)
+ def isSynchronized = has(JAVA_ACC_SYNCHRONIZED)
+ def isSynthetic = has(JAVA_ACC_SYNTHETIC)
+ def isTransient = has(JAVA_ACC_TRANSIENT)
+ def isVarargs = has(JAVA_ACC_VARARGS)
+ def isVolatile = has(JAVA_ACC_VOLATILE)
+
+ /** Do these flags describe a member which has either protected or package access?
+ * Such access in java is encoded in scala as protected[foo] or private[foo], where
+ * `foo` is the defining package.
+ */
+ def hasPackageAccessBoundary = !has(JAVA_ACC_PRIVATE | JAVA_ACC_PUBLIC) // equivalently, allows protected or package level access
+ def isPackageProtected = !has(JAVA_ACC_PRIVATE | JAVA_ACC_PROTECTED | JAVA_ACC_PUBLIC)
+
+ def toJavaFlags: Int = flags
+ def toScalaFlags: Long = flagCarrierId match {
+ case Method | Constructor => FlagTranslation methodFlags flags
+ case Class => FlagTranslation classFlags flags
+ case _ => FlagTranslation fieldFlags flags
+ }
+}
+
+object JavaAccFlags {
+ private val Unknown = 0
+ private val Class = 1
+ private val Field = 2
+ private val Method = 3
+ private val Constructor = 4
+
+ private def create(flagCarrier: Int, access_flags: Int): JavaAccFlags =
+ new JavaAccFlags((flagCarrier << 16) | (access_flags & 0xFFFF))
+
+ def classFlags(flags: Int): JavaAccFlags = create(Class, flags)
+ def methodFlags(flags: Int): JavaAccFlags = create(Method, flags)
+ def fieldFlags(flags: Int): JavaAccFlags = create(Field, flags)
+ def constructorFlags(flags: Int): JavaAccFlags = create(Constructor, flags)
+
+ def apply(access_flags: Int): JavaAccFlags = create(Unknown, access_flags)
+ def apply(clazz: jClass[_]): JavaAccFlags = classFlags(clazz.getModifiers)
+ def apply(member: jMember): JavaAccFlags = member match {
+ case x: jConstructor[_] => constructorFlags(x.getModifiers)
+ case x: jMethod => methodFlags(x.getModifiers)
+ case x: jField => fieldFlags(x.getModifiers)
+ case _ => apply(member.getModifiers)
+ }
+}
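
The comment above describes a two-part encoding: the low 16 bits hold the JVM access_flags and the upper bits record what kind of carrier (class, field, method, constructor) they came from. Below is a minimal standalone sketch of that packing, not the compiler's class itself; the ACC_* values are the standard JVMS constants and the carrier id mirrors the companion object above.

  object AccFlagsPackingSketch {
    // standard JVM access flag bits (JVMS 4.5/4.6)
    final val ACC_PUBLIC = 0x0001
    final val ACC_STATIC = 0x0008
    // carrier id for methods, as in the JavaAccFlags companion above
    final val Method = 3

    def pack(carrier: Int, accessFlags: Int): Int =
      (carrier << 16) | (accessFlags & 0xFFFF)      // high bits: carrier, low 16 bits: flags

    def main(args: Array[String]): Unit = {
      val coded   = pack(Method, ACC_PUBLIC | ACC_STATIC)
      val flags   = coded & 0xFFFF                  // recover the raw access_flags
      val carrier = coded >>> 16                    // recover the carrier id
      println(s"carrier=$carrier public=${(flags & ACC_PUBLIC) != 0} static=${(flags & ACC_STATIC) != 0}")
    }
  }
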
diff --git a/src/reflect/scala/reflect/internal/Kinds.scala b/src/reflect/scala/reflect/internal/Kinds.scala
index 08686832ef..46a95c7d26 100644
--- a/src/reflect/scala/reflect/internal/Kinds.scala
+++ b/src/reflect/scala/reflect/internal/Kinds.scala
@@ -3,7 +3,8 @@
* @author Martin Odersky
*/
-package scala.reflect
+package scala
+package reflect
package internal
import scala.collection.{ mutable, immutable }
@@ -36,7 +37,7 @@ trait Kinds {
private def varStr(s: Symbol): String =
if (s.isCovariant) "covariant"
else if (s.isContravariant) "contravariant"
- else "invariant";
+ else "invariant"
private def qualify(a0: Symbol, b0: Symbol): String = if (a0.toString != b0.toString) "" else {
if((a0 eq b0) || (a0.owner eq b0.owner)) ""
@@ -86,15 +87,15 @@ trait Kinds {
// plan: split into kind inference and subkinding
// every Type has a (cached) Kind
def kindsConform(tparams: List[Symbol], targs: List[Type], pre: Type, owner: Symbol): Boolean =
- checkKindBounds0(tparams, targs, pre, owner, false).isEmpty
+ checkKindBounds0(tparams, targs, pre, owner, explainErrors = false).isEmpty
/** Check whether `sym1`'s variance conforms to `sym2`'s variance.
*
* If `sym2` is invariant, `sym1`'s variance is irrelevant. Otherwise they must be equal.
*/
private def variancesMatch(sym1: Symbol, sym2: Symbol) = (
- sym2.variance==0
- || sym1.variance==sym2.variance
+ sym2.variance.isInvariant
+ || sym1.variance == sym2.variance
)
/** Check well-kindedness of type application (assumes arities are already checked) -- @M
@@ -145,7 +146,7 @@ trait Kinds {
kindErrors = f(kindErrors)
}
- if (settings.debug.value) {
+ if (settings.debug) {
log("checkKindBoundsHK expected: "+ param +" with params "+ hkparams +" by definition in "+ paramowner)
log("checkKindBoundsHK supplied: "+ arg +" with params "+ hkargs +" from "+ owner)
log("checkKindBoundsHK under params: "+ underHKParams +" with args "+ withHKArgs)
@@ -201,7 +202,7 @@ trait Kinds {
else NoKindErrors
}
- if (settings.debug.value && (tparams.nonEmpty || targs.nonEmpty)) log(
+ if (settings.debug && (tparams.nonEmpty || targs.nonEmpty)) log(
"checkKindBounds0(" + tparams + ", " + targs + ", " + pre + ", "
+ owner + ", " + explainErrors + ")"
)
@@ -229,4 +230,180 @@ trait Kinds {
}
}
}
-}
\ No newline at end of file
+
+ /**
+ * The data structure describing the kind of a given type.
+ *
+ * Proper types are represented using ProperTypeKind.
+ *
+ * Type constructors are represented using TypeConKind.
+ */
+ abstract class Kind {
+ import Kind.StringState
+ def description: String
+ def order: Int
+ def bounds: TypeBounds
+
+ /** Scala syntax notation of this kind.
+ * Proper types are expressed as A.
+ * Type constructors are expressed as F[k1 >: lo <: hi, k2, ...] where k1, k2, ... are parameter kinds.
+ * If bounds exist at any level, the notation preserves the type variable names. Otherwise,
+ * it uses prescribed letters for each level: A, F, X, Y, Z.
+ */
+ def scalaNotation: String
+
+ /** Kind notation used in http://adriaanm.github.com/files/higher.pdf.
+ * Proper types are expressed as *.
+ * Type constructors are expressed as * -> *(lo, hi) -(+)-> *.
+ */
+ def starNotation: String
+
+ /** Contains bounds either as part of itself or its arguments.
+ */
+ def hasBounds: Boolean = !bounds.isEmptyBounds
+
+ private[internal] def buildState(sym: Symbol, v: Variance)(s: StringState): StringState
+ }
+ object Kind {
+ private[internal] sealed trait ScalaNotation
+ private[internal] sealed case class Head(order: Int, n: Option[Int], alias: Option[String]) extends ScalaNotation {
+ override def toString: String = {
+ alias getOrElse {
+ typeAlias(order) + n.map(_.toString).getOrElse("")
+ }
+ }
+ private def typeAlias(x: Int): String =
+ x match {
+ case 0 => "A"
+ case 1 => "F"
+ case 2 => "X"
+ case 3 => "Y"
+ case 4 => "Z"
+ case n if n < 12 => ('O'.toInt - 5 + n).toChar.toString
+ case _ => "V"
+ }
+ }
+ private[internal] sealed case class Text(value: String) extends ScalaNotation {
+ override def toString: String = value
+ }
+ private[internal] case class StringState(tokens: Seq[ScalaNotation]) {
+ override def toString: String = tokens.mkString
+ def append(value: String): StringState = StringState(tokens :+ Text(value))
+ def appendHead(order: Int, sym: Symbol): StringState = {
+ val n = countByOrder(order) + 1
+ val alias = if (sym eq NoSymbol) None
+ else Some(sym.nameString)
+ StringState(tokens :+ Head(order, Some(n), alias))
+ }
+ def countByOrder(o: Int): Int = tokens count {
+ case Head(`o`, _, _) => true
+ case t => false
+ }
+ // Replace Head(o, Some(1), a) with Head(o, None, a) if countByOrder(o) <= 1, so F1[A] becomes F[A]
+ def removeOnes: StringState = {
+ val maxOrder = (tokens map {
+ case Head(o, _, _) => o
+ case _ => 0
+ }).max
+ StringState((tokens /: (0 to maxOrder)) { (ts: Seq[ScalaNotation], o: Int) =>
+ if (countByOrder(o) <= 1)
+ ts map {
+ case Head(`o`, _, a) => Head(o, None, a)
+ case t => t
+ }
+ else ts
+ })
+ }
+ // Replace Head(o, n, Some(_)) with Head(o, n, None), so F[F] becomes F[A].
+ def removeAlias: StringState = {
+ StringState(tokens map {
+ case Head(o, n, Some(_)) => Head(o, n, None)
+ case t => t
+ })
+ }
+ }
+ private[internal] object StringState {
+ def empty: StringState = StringState(Seq())
+ }
+ }
+ class ProperTypeKind(val bounds: TypeBounds) extends Kind {
+ import Kind.StringState
+ val description: String = "This is a proper type."
+ val order = 0
+ private[internal] def buildState(sym: Symbol, v: Variance)(s: StringState): StringState = {
+ s.append(v.symbolicString).appendHead(order, sym).append(bounds.scalaNotation(_.toString))
+ }
+ def scalaNotation: String = Kind.Head(order, None, None) + bounds.scalaNotation(_.toString)
+ def starNotation: String = "*" + bounds.starNotation(_.toString)
+ }
+ object ProperTypeKind {
+ def apply: ProperTypeKind = this(TypeBounds.empty)
+ def apply(bounds: TypeBounds): ProperTypeKind = new ProperTypeKind(bounds)
+ def unapply(ptk: ProperTypeKind): Some[TypeBounds] = Some(ptk.bounds)
+ }
+
+ class TypeConKind(val bounds: TypeBounds, val args: Seq[TypeConKind.Argument]) extends Kind {
+ import Kind.StringState
+ val order = (args map {_.kind.order} max) + 1
+ def description: String =
+ if (order == 1) "This is a type constructor: a 1st-order-kinded type."
+ else "This is a type constructor that takes type constructor(s): a higher-kinded type."
+ override def hasBounds: Boolean = super.hasBounds || args.exists(_.kind.hasBounds)
+ def scalaNotation: String = {
+ val s = buildState(NoSymbol, Variance.Invariant)(StringState.empty).removeOnes
+ val s2 = if (hasBounds) s
+ else s.removeAlias
+ s2.toString
+ }
+ private[internal] def buildState(sym: Symbol, v: Variance)(s0: StringState): StringState = {
+ var s: StringState = s0
+ s = s.append(v.symbolicString).appendHead(order, sym).append("[")
+ args.zipWithIndex foreach { case (arg, i) =>
+ s = arg.kind.buildState(arg.sym, arg.variance)(s)
+ if (i != args.size - 1) {
+ s = s.append(",")
+ }
+ }
+ s = s.append("]").append(bounds.scalaNotation(_.toString))
+ s
+ }
+ def starNotation: String = {
+ import Variance._
+ (args map { arg =>
+ (if (arg.kind.order == 0) arg.kind.starNotation
+ else "(" + arg.kind.starNotation + ")") +
+ (if (arg.variance == Invariant) " -> "
+ else " -(" + arg.variance.symbolicString + ")-> ")
+ }).mkString + "*" + bounds.starNotation(_.toString)
+ }
+ }
+ object TypeConKind {
+ def apply(args: Seq[TypeConKind.Argument]): TypeConKind = this(TypeBounds.empty, args)
+ def apply(bounds: TypeBounds, args: Seq[TypeConKind.Argument]): TypeConKind = new TypeConKind(bounds, args)
+ def unapply(tck: TypeConKind): Some[(TypeBounds, Seq[TypeConKind.Argument])] = Some(tck.bounds, tck.args)
+ case class Argument(variance: Variance, kind: Kind)(val sym: Symbol) {}
+ }
+
+ /**
+ * Starting from a Symbol (sym) or a Type (tpe), infer the kind that classifies it (sym.tpeHK/tpe).
+ */
+ object inferKind {
+ import TypeConKind.Argument
+
+ abstract class InferKind {
+ protected def infer(tpe: Type, owner: Symbol, topLevel: Boolean): Kind
+ protected def infer(sym: Symbol, topLevel: Boolean): Kind = infer(sym.tpeHK, sym.owner, topLevel)
+ def apply(sym: Symbol): Kind = infer(sym, true)
+ def apply(tpe: Type, owner: Symbol): Kind = infer(tpe, owner, true)
+ }
+
+ def apply(pre: Type): InferKind = new InferKind {
+ protected def infer(tpe: Type, owner: Symbol, topLevel: Boolean): Kind = {
+ val bounds = if (topLevel) TypeBounds.empty
+ else tpe.asSeenFrom(pre, owner).bounds
+ if(!tpe.isHigherKinded) ProperTypeKind(bounds)
+ else TypeConKind(bounds, tpe.typeParams map { p => Argument(p.variance, infer(p, false))(p) })
+ }
+ }
+ }
+}
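
To make the notations described in the Kind comments concrete, here is a self-contained toy model; it does not reuse the compiler's Kind, Symbol or TypeBounds machinery. A proper type prints as *, a type constructor prints as an arrow, and higher-order arguments are parenthesised, echoing the starNotation method above.

  sealed trait SimpleKind {
    def star: String = this match {
      case Proper => "*"
      case Arrow(from, to) =>
        val lhs = from match { case Proper => "*"; case k => s"(${k.star})" }
        s"$lhs -> ${to.star}"
    }
  }
  case object Proper extends SimpleKind
  final case class Arrow(from: SimpleKind, to: SimpleKind) extends SimpleKind

  object KindNotationSketch {
    def main(args: Array[String]): Unit = {
      val list    = Arrow(Proper, Proper)                 // like List[_]
      val functor = Arrow(Arrow(Proper, Proper), Proper)  // like Functor[F[_]]
      println(list.star)     // * -> *
      println(functor.star)  // (* -> *) -> *
    }
  }
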
diff --git a/src/reflect/scala/reflect/internal/Mirrors.scala b/src/reflect/scala/reflect/internal/Mirrors.scala
index 0beb8e368f..bf38c3bf1e 100644
--- a/src/reflect/scala/reflect/internal/Mirrors.scala
+++ b/src/reflect/scala/reflect/internal/Mirrors.scala
@@ -3,7 +3,8 @@
* @author Martin Odersky
*/
-package scala.reflect
+package scala
+package reflect
package internal
import Flags._
@@ -19,6 +20,8 @@ trait Mirrors extends api.Mirrors {
trait RootSymbol extends Symbol { def mirror: Mirror }
abstract class RootsBase(rootOwner: Symbol) extends scala.reflect.api.Mirror[Mirrors.this.type] { thisMirror =>
+ private[this] var initialized = false
+ def isMirrorInitialized = initialized
protected[scala] def rootLoader: LazyType
@@ -33,18 +36,18 @@ trait Mirrors extends api.Mirrors {
else definitions.findNamedMember(segs.tail, RootClass.info member segs.head)
}
- /** Todo: organize similar to mkStatic in reflect.Base */
+ /** Todo: organize similar to mkStatic in scala.reflect.Base */
private def getModuleOrClass(path: Name, len: Int): Symbol = {
val point = path lastPos('.', len - 1)
val owner =
if (point > 0) getModuleOrClass(path.toTermName, point)
else RootClass
val name = path subName (point + 1, len)
- var sym = owner.info member name
+ val sym = owner.info member name
val result = if (path.isTermName) sym.suchThat(_ hasFlag MODULE) else sym
if (result != NoSymbol) result
else {
- if (settings.debug.value) { log(sym.info); log(sym.info.members) }//debug
+ if (settings.debug) { log(sym.info); log(sym.info.members) }//debug
thisMirror.missingHook(owner, name) orElse {
MissingRequirementError.notFound((if (path.isTermName) "object " else "class ")+path+" in "+thisMirror)
}
@@ -76,7 +79,9 @@ trait Mirrors extends api.Mirrors {
protected def universeMissingHook(owner: Symbol, name: Name): Symbol = thisUniverse.missingHook(owner, name)
- private[scala] def missingHook(owner: Symbol, name: Name): Symbol = mirrorMissingHook(owner, name) orElse universeMissingHook(owner, name)
+ private[scala] def missingHook(owner: Symbol, name: Name): Symbol = logResult(s"missingHook($owner, $name)")(
+ mirrorMissingHook(owner, name) orElse universeMissingHook(owner, name)
+ )
// todo: get rid of most the methods here and keep just staticClass/Module/Package
@@ -168,14 +173,15 @@ trait Mirrors extends api.Mirrors {
case _ => MissingRequirementError.notFound("package " + fullname)
}
- def getPackage(fullname: Name): ModuleSymbol =
+ def getPackage(fullname: TermName): ModuleSymbol =
ensurePackageSymbol(fullname.toString, getModuleOrClass(fullname), allowModules = true)
- def getRequiredPackage(fullname: String): ModuleSymbol =
+ @deprecated("Use getPackage", "2.11.0") def getRequiredPackage(fullname: String): ModuleSymbol =
getPackage(newTermNameCached(fullname))
- def getPackageObject(fullname: String): ModuleSymbol =
- (getPackage(newTermName(fullname)).info member nme.PACKAGE) match {
+ def getPackageObject(fullname: String): ModuleSymbol = getPackageObject(newTermName(fullname))
+ def getPackageObject(fullname: TermName): ModuleSymbol =
+ (getPackage(fullname).info member nme.PACKAGE) match {
case x: ModuleSymbol => x
case _ => MissingRequirementError.notFound("package object " + fullname)
}
@@ -183,8 +189,8 @@ trait Mirrors extends api.Mirrors {
def getPackageObjectIfDefined(fullname: String): Symbol =
getPackageObjectIfDefined(newTermNameCached(fullname))
- def getPackageObjectIfDefined(fullname: Name): Symbol =
- wrapMissing(getPackageObject(fullname.toTermName))
+ def getPackageObjectIfDefined(fullname: TermName): Symbol =
+ wrapMissing(getPackageObject(fullname))
override def staticPackage(fullname: String): ModuleSymbol =
ensurePackageSymbol(fullname.toString, getModuleOrClass(newTermNameCached(fullname)), allowModules = false)
@@ -192,8 +198,8 @@ trait Mirrors extends api.Mirrors {
/************************ helpers ************************/
def erasureName[T: ClassTag] : String = {
- /** We'd like the String representation to be a valid
- * scala type, so we have to decode the jvm's secret language.
+ /* We'd like the String representation to be a valid
+ * scala type, so we have to decode the jvm's secret language.
*/
def erasureString(clazz: Class[_]): String = {
if (clazz.isArray) "Array[" + erasureString(clazz.getComponentType) + "]"
@@ -202,7 +208,7 @@ trait Mirrors extends api.Mirrors {
erasureString(classTag[T].runtimeClass)
}
- @inline private def wrapMissing(body: => Symbol): Symbol =
+ @inline final def wrapMissing(body: => Symbol): Symbol =
try body
catch { case _: MissingRequirementError => NoSymbol }
@@ -228,6 +234,7 @@ trait Mirrors extends api.Mirrors {
// }
def init() {
+ if (initialized) return
// Still fiddling with whether it's cleaner to do some of this setup here
// or from constructors. The latter approach tends to invite init order issues.
@@ -239,6 +246,8 @@ trait Mirrors extends api.Mirrors {
RootClass.info.decls enter EmptyPackage
RootClass.info.decls enter RootPackage
+
+ initialized = true
}
}
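
The initialized flag added to RootsBase above makes init() idempotent: a second call returns immediately. A small standalone sketch of that guard pattern follows; the class and method names here are illustrative only.

  class OneTimeInit {
    private[this] var initialized = false
    def isInitialized = initialized

    def init(): Unit = {
      if (initialized) return
      // one-time setup would go here (e.g. entering the root/empty packages)
      println("running setup")
      initialized = true
    }
  }

  object OneTimeInitDemo {
    def main(args: Array[String]): Unit = {
      val m = new OneTimeInit
      m.init(); m.init()            // "running setup" is printed only once
      println(m.isInitialized)      // true
    }
  }
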
diff --git a/src/reflect/scala/reflect/internal/MissingRequirementError.scala b/src/reflect/scala/reflect/internal/MissingRequirementError.scala
index 48203caa83..66dbf535d7 100644
--- a/src/reflect/scala/reflect/internal/MissingRequirementError.scala
+++ b/src/reflect/scala/reflect/internal/MissingRequirementError.scala
@@ -3,7 +3,8 @@
* @author Martin Odersky
*/
-package scala.reflect
+package scala
+package reflect
package internal
class MissingRequirementError private (msg: String) extends FatalError(msg) {
diff --git a/src/reflect/scala/reflect/internal/Mode.scala b/src/reflect/scala/reflect/internal/Mode.scala
new file mode 100644
index 0000000000..027e3a340a
--- /dev/null
+++ b/src/reflect/scala/reflect/internal/Mode.scala
@@ -0,0 +1,142 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author Martin Odersky
+ */
+
+package scala
+package reflect
+package internal
+
+import scala.language.implicitConversions
+
+object Mode {
+ private implicit def liftIntBitsToMode(bits: Int): Mode = apply(bits)
+ def apply(bits: Int): Mode = new Mode(bits)
+
+ /** NOmode, EXPRmode and PATTERNmode are mutually exclusive.
+ */
+ final val NOmode: Mode = 0x000
+ final val EXPRmode: Mode = 0x001
+ final val PATTERNmode: Mode = 0x002
+
+ /** TYPEmode needs a comment. <-- XXX.
+ */
+ final val TYPEmode: Mode = 0x004
+
+ /** SCCmode is orthogonal to above. When set we are
+ * in the `this` or `super` constructor call of a constructor.
+ */
+ final val SCCmode: Mode = 0x008
+
+ /** FUNmode is orthogonal to above.
+ * When set we are looking for a method or constructor.
+ */
+ final val FUNmode: Mode = 0x010
+
+ /** POLYmode is orthogonal to above.
+ * When set expression types can be polymorphic.
+ */
+ final val POLYmode: Mode = 0x020
+
+ /** QUALmode is orthogonal to above. When set
+ * expressions may be packages or Java static modules.
+ */
+ final val QUALmode: Mode = 0x040
+
+ /** TAPPmode is set for the function/type constructor
+ * part of a type application. When set we do not decompose PolyTypes.
+ */
+ final val TAPPmode: Mode = 0x080
+
+ /** LHSmode is set for the left-hand side of an assignment.
+ */
+ final val LHSmode: Mode = 0x400
+
+ /** BYVALmode is set when we are typing an expression
+ * that occurs in a by-value position. An expression e1 is in by-value
+ * position within expression e2 iff it will be reduced to a value at that
+ * position during the evaluation of e2. Examples are by-value function
+ * arguments or the conditional of an if-then-else clause.
+ * This mode has been added to support continuations.
+ */
+ final val BYVALmode: Mode = 0x8000
+
+ /** TYPEPATmode is set when we are typing a type in a pattern.
+ */
+ final val TYPEPATmode: Mode = 0x10000
+
+ private val StickyModes: Mode = EXPRmode | PATTERNmode | TYPEmode
+ private val StickyModesForFun: Mode = StickyModes | SCCmode
+ final val MonoQualifierModes: Mode = EXPRmode | QUALmode
+ final val PolyQualifierModes: Mode = EXPRmode | QUALmode | POLYmode
+ final val OperatorModes: Mode = EXPRmode | POLYmode | TAPPmode | FUNmode
+
+ /** Translates a mask of mode flags into something readable.
+ */
+ private val modeNameMap = Map[Int, String]( // TODO why duplicate the bitmasks here, rather than just referring to this.EXPRmode etc?
+ (1 << 0) -> "EXPRmode",
+ (1 << 1) -> "PATTERNmode",
+ (1 << 2) -> "TYPEmode",
+ (1 << 3) -> "SCCmode",
+ (1 << 4) -> "FUNmode",
+ (1 << 5) -> "POLYmode",
+ (1 << 6) -> "QUALmode",
+ (1 << 7) -> "TAPPmode",
+ (1 << 8) -> "<>", // formerly SUPERCONSTRmode
+ (1 << 9) -> "<>", // formerly SNDTRYmode
+ (1 << 10) -> "LHSmode",
+ (1 << 11) -> "<>",
+ (1 << 12) -> "<>", // formerly STARmode
+ (1 << 13) -> "<>", // formerly ALTmode
+ (1 << 14) -> "<>", // formerly HKmode
+ (1 << 15) -> "BYVALmode",
+ (1 << 16) -> "TYPEPATmode"
+ ).map({ case (k, v) => Mode(k) -> v })
+}
+import Mode._
+
+final class Mode private (val bits: Int) extends AnyVal {
+ def &(other: Mode): Mode = new Mode(bits & other.bits)
+ def |(other: Mode): Mode = new Mode(bits | other.bits)
+ def &~(other: Mode): Mode = new Mode(bits & ~(other.bits))
+
+ def onlyTypePat = this & TYPEPATmode
+ def onlySticky = this & Mode.StickyModes
+ def forFunMode = this & Mode.StickyModesForFun | FUNmode | POLYmode | BYVALmode
+ def forTypeMode = if (typingPatternOrTypePat) TYPEmode | TYPEPATmode else TYPEmode
+
+ def inAll(required: Mode) = (this & required) == required
+ def inAny(required: Mode) = (this & required) != NOmode
+ def inNone(prohibited: Mode) = (this & prohibited) == NOmode
+
+ /** True if this mode matches every mode in the 'all' Mode,
+ * and no modes in the 'none' Mode.
+ */
+ def in(all: Mode = NOmode, none: Mode = NOmode) = inAll(all) && inNone(none)
+
+ def inByValMode = inAll(BYVALmode)
+ def inExprMode = inAll(EXPRmode)
+ def inFunMode = inAll(FUNmode)
+ def inPatternMode = inAll(PATTERNmode)
+ def inPolyMode = inAll(POLYmode)
+ def inQualMode = inAll(QUALmode)
+ def inSccMode = inAll(SCCmode)
+ def inTappMode = inAll(TAPPmode)
+ def inTypeMode = inAll(TYPEmode)
+
+ def typingExprByValue = inAll(EXPRmode | BYVALmode)
+ def typingExprFun = inAll(EXPRmode | FUNmode)
+ def typingExprNotFun = in(all = EXPRmode, none = FUNmode)
+ def typingExprNotFunNotLhs = in(all = EXPRmode, none = FUNmode | LHSmode)
+ def typingExprNotLhs = in(all = EXPRmode, none = LHSmode)
+ def typingExprNotValue = in(all = EXPRmode, none = BYVALmode)
+ def typingMonoExprByValue = in(all = EXPRmode | BYVALmode, none = POLYmode)
+ def typingConstructorPattern = inAll(PATTERNmode | FUNmode)
+ def typingPatternNotConstructor = in(all = PATTERNmode, none = FUNmode)
+ def typingPatternOrTypePat = inAny(PATTERNmode | TYPEPATmode)
+ def typingTypeByValue = inAll(TYPEmode | BYVALmode)
+
+ override def toString =
+ if (this == NOmode) "NOmode"
+ else (modeNameMap filterKeys inAll).values.toList.sorted mkString "-"
+}
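
Mode is a bit set wrapped in a value class: each mode is a distinct power of two, | combines them, and inAll/inAny/inNone test the combined bits. The sketch below is a standalone rendition of the same pattern, not the compiler's Mode, with illustrative flag values.

  final class BitModes(val bits: Int) extends AnyVal {
    def |(other: BitModes): BitModes  = new BitModes(bits | other.bits)
    def inAll(required: BitModes)     = (bits & required.bits) == required.bits
    def inAny(required: BitModes)     = (bits & required.bits) != 0
    def inNone(prohibited: BitModes)  = (bits & prohibited.bits) == 0
  }

  object BitModesDemo {
    val EXPR    = new BitModes(0x001)
    val PATTERN = new BitModes(0x002)
    val FUN     = new BitModes(0x010)

    def main(args: Array[String]): Unit = {
      val mode = EXPR | FUN
      println(mode inAll EXPR)          // true
      println(mode.inAll(EXPR | FUN))   // true
      println(mode inNone PATTERN)      // true: typing an expression, not a pattern
    }
  }
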
diff --git a/src/reflect/scala/reflect/internal/Names.scala b/src/reflect/scala/reflect/internal/Names.scala
index c78ba72dfb..ed5b92565d 100644
--- a/src/reflect/scala/reflect/internal/Names.scala
+++ b/src/reflect/scala/reflect/internal/Names.scala
@@ -3,29 +3,15 @@
* @author Martin Odersky
*/
-package scala.reflect
+package scala
+package reflect
package internal
import scala.io.Codec
import java.security.MessageDigest
import scala.language.implicitConversions
-trait LowPriorityNames {
- self: Names =>
-
- implicit def nameToNameOps(name: Name): NameOps[Name] = new NameOps[Name](name)
-}
-
-/** The class Names ...
- *
- * @author Martin Odersky
- * @version 1.0, 05/02/2005
- */
-trait Names extends api.Names with LowPriorityNames {
- implicit def promoteTermNamesAsNecessary(name: Name): TermName = name.toTermName
-
-// Operations -------------------------------------------------------------
-
+trait Names extends api.Names {
private final val HASH_SIZE = 0x8000
private final val HASH_MASK = 0x7FFF
private final val NAME_SIZE = 0x20000
@@ -49,7 +35,7 @@ trait Names extends api.Names with LowPriorityNames {
cs(offset) * (41 * 41) +
cs(offset + len - 1) * 41 +
cs(offset + (len >> 1)))
- else 0;
+ else 0
/** Is (the ASCII representation of) name at given index equal to
* cs[offset..offset+len-1]?
@@ -57,7 +43,7 @@ trait Names extends api.Names with LowPriorityNames {
private def equals(index: Int, cs: Array[Char], offset: Int, len: Int): Boolean = {
var i = 0
while ((i < len) && (chrs(index + i) == cs(offset + i)))
- i += 1;
+ i += 1
i == len
}
@@ -135,9 +121,6 @@ trait Names extends api.Names with LowPriorityNames {
def newTypeName(bs: Array[Byte], offset: Int, len: Int): TypeName =
newTermName(bs, offset, len).toTypeName
- def nameChars: Array[Char] = chrs
- @deprecated("", "2.9.0") def view(s: String): TermName = newTermName(s)
-
// Classes ----------------------------------------------------------------------
/** The name class.
@@ -186,28 +169,26 @@ trait Names extends api.Names with LowPriorityNames {
scala.compat.Platform.arraycopy(chrs, index, cs, offset, len)
/** @return the ascii representation of this name */
- final def toChars: Array[Char] = {
+ final def toChars: Array[Char] = { // used by ide
val cs = new Array[Char](len)
copyChars(cs, 0)
cs
}
- /** Write to UTF8 representation of this name to given character array.
- * Start copying to index `to`. Return index of next free byte in array.
- * Array must have enough remaining space for all bytes
- * (i.e. maximally 3*length bytes).
- */
- final def copyUTF8(bs: Array[Byte], offset: Int): Int = {
- val bytes = Codec.toUTF8(chrs, index, len)
- scala.compat.Platform.arraycopy(bytes, 0, bs, offset, bytes.length)
- offset + bytes.length
- }
-
/** @return the hash value of this name */
final override def hashCode(): Int = index
- // Presently disabled.
- // override def equals(other: Any) = paranoidEquals(other)
+ /** @return true if the string value of this name is equal
+ * to the string value of the given name or String.
+ */
+ def string_==(that: Name): Boolean = (that ne null) && (toString == that.toString)
+ def string_==(that: String): Boolean = (that ne null) && (toString == that)
+
+ /****
+ * This has been quite useful to find places where people are comparing
+ * a TermName and a TypeName, or a Name and a String.
+
+ override def equals(other: Any) = paranoidEquals(other)
private def paranoidEquals(other: Any): Boolean = {
val cmp = this eq other.asInstanceOf[AnyRef]
if (cmp || !nameDebug)
@@ -215,7 +196,7 @@ trait Names extends api.Names with LowPriorityNames {
other match {
case x: String =>
- Console.println("Compared " + debugString + " and String '" + x + "'")
+ Console.println(s"Compared $debugString and String '$x'")
case x: Name =>
if (this.isTermName != x.isTermName) {
val panic = this.toTermName == x.toTermName
@@ -228,6 +209,7 @@ trait Names extends api.Names with LowPriorityNames {
}
false
}
+ ****/
/** @return the i'th Char of this name */
final def charAt(i: Int): Char = chrs(index + i)
@@ -235,14 +217,14 @@ trait Names extends api.Names with LowPriorityNames {
/** @return the index of first occurrence of char c in this name, length if not found */
final def pos(c: Char): Int = pos(c, 0)
- /** @return the index of first occurrence of char c in this name, length if not found */
+ /** @return the index of the first occurrence of s in this name, length if not found */
final def pos(s: String): Int = pos(s, 0)
/** Returns the index of the first occurrence of character c in
* this name from start, length if not found.
*
* @param c the character
- * @param start ...
+ * @param start the index from which to search
* @return the index of the first occurrence of c
*/
final def pos(c: Char, start: Int): Int = {
@@ -255,7 +237,7 @@ trait Names extends api.Names with LowPriorityNames {
* in this name from start, length if not found.
*
* @param s the string
- * @param start ...
+ * @param start the index from which to search
* @return the index of the first occurrence of s
*/
final def pos(s: String, start: Int): Int = {
@@ -279,13 +261,11 @@ trait Names extends api.Names with LowPriorityNames {
*/
final def lastPos(c: Char): Int = lastPos(c, len - 1)
- final def lastPos(s: String): Int = lastPos(s, len - s.length)
-
/** Returns the index of the last occurrence of char c in this
* name from start, -1 if not found.
*
* @param c the character
- * @param start ...
+ * @param start the index from which to search
* @return the index of the last occurrence of c
*/
final def lastPos(c: Char, start: Int): Int = {
@@ -294,26 +274,6 @@ trait Names extends api.Names with LowPriorityNames {
i
}
- /** Returns the index of the last occurrence of string s in this
- * name from start, -1 if not found.
- *
- * @param s the string
- * @param start ...
- * @return the index of the last occurrence of s
- */
- final def lastPos(s: String, start: Int): Int = {
- var i = lastPos(s.charAt(0), start)
- while (i >= 0) {
- var j = 1;
- while (s.charAt(j) == chrs(index + i + j)) {
- j += 1
- if (j == s.length()) return i;
- }
- i = lastPos(s.charAt(0), i - 1)
- }
- -s.length()
- }
-
/** Does this name start with prefix? */
final def startsWith(prefix: Name): Boolean = startsWith(prefix, 0)
@@ -322,7 +282,7 @@ trait Names extends api.Names with LowPriorityNames {
var i = 0
while (i < prefix.length && start + i < len &&
chrs(index + start + i) == chrs(prefix.start + i))
- i += 1;
+ i += 1
i == prefix.length
}
@@ -334,7 +294,7 @@ trait Names extends api.Names with LowPriorityNames {
var i = 1
while (i <= suffix.length && i <= end &&
chrs(index + end - i) == chrs(suffix.start + suffix.length - i))
- i += 1;
+ i += 1
i > suffix.length
}
@@ -366,16 +326,18 @@ trait Names extends api.Names with LowPriorityNames {
final def endsWith(char: Char): Boolean = len > 0 && endChar == char
final def endsWith(name: String): Boolean = endsWith(newTermName(name))
- def indexOf(ch: Char) = {
- val idx = pos(ch)
- if (idx == length) -1 else idx
- }
- def indexOf(ch: Char, fromIndex: Int) = {
- val idx = pos(ch, fromIndex)
- if (idx == length) -1 else idx
- }
- def lastIndexOf(ch: Char) = lastPos(ch)
- def lastIndexOf(ch: Char, fromIndex: Int) = lastPos(ch, fromIndex)
+ /** Rewrite the confusing failure indication via result == length to
+ * the normal failure indication via result == -1.
+ */
+ private def fixIndexOf(idx: Int): Int = if (idx == length) -1 else idx
+
+ def indexOf(ch: Char) = fixIndexOf(pos(ch))
+ def indexOf(ch: Char, fromIndex: Int) = fixIndexOf(pos(ch, fromIndex))
+ def indexOf(s: String) = fixIndexOf(pos(s))
+
+ /** The lastPos methods already return -1 on failure. */
+ def lastIndexOf(ch: Char): Int = lastPos(ch)
+ def lastIndexOf(s: String): Int = toString lastIndexOf s
/** Replace all occurrences of `from` by `to` in
* name; result is always a term name.
@@ -424,24 +386,40 @@ trait Names extends api.Names with LowPriorityNames {
def append(ch: Char) = newName("" + this + ch)
def append(suffix: String) = newName("" + this + suffix)
def append(suffix: Name) = newName("" + this + suffix)
- def prepend(ch: Char) = newName("" + ch + this)
def prepend(prefix: String) = newName("" + prefix + this)
- def prepend(prefix: Name) = newName("" + prefix + this)
def decodedName: ThisNameType = newName(decode)
- def isOperatorName: Boolean = decode != toString
+ def isOperatorName: Boolean = decode != toString // used by ide
def longString: String = nameKind + " " + decode
def debugString = { val s = decode ; if (isTypeName) s + "!" else s }
}
+ implicit def AnyNameOps(name: Name): NameOps[Name] = new NameOps(name)
implicit def TermNameOps(name: TermName): NameOps[TermName] = new NameOps(name)
implicit def TypeNameOps(name: TypeName): NameOps[TypeName] = new NameOps(name)
+ /** FIXME: This is a good example of something which is pure "value class" but cannot
+ * reap the benefits because of an (unused) $outer pointer, so it is not single-field.
+ */
final class NameOps[T <: Name](name: T) {
- def stripSuffix(suffix: Name): T = if (name endsWith suffix) dropRight(suffix.length) else name
- def dropRight(n: Int): T = name.subName(0, name.length - n).asInstanceOf[T]
- def drop(n: Int): T = name.subName(n, name.length).asInstanceOf[T]
- def nonEmpty: Boolean = name.length > 0
+ import NameTransformer._
+ def stripSuffix(suffix: String): T = stripSuffix(suffix: TermName)
+ def stripSuffix(suffix: Name): T = if (name endsWith suffix) dropRight(suffix.length) else name
+ def take(n: Int): T = name.subName(0, n).asInstanceOf[T]
+ def drop(n: Int): T = name.subName(n, name.length).asInstanceOf[T]
+ def dropRight(n: Int): T = name.subName(0, name.length - n).asInstanceOf[T]
+ def dropLocal: TermName = name.toTermName stripSuffix LOCAL_SUFFIX_STRING
+ def dropSetter: TermName = name.toTermName stripSuffix SETTER_SUFFIX_STRING
+ def dropModule: T = this stripSuffix MODULE_SUFFIX_STRING
+ def localName: TermName = getterName append LOCAL_SUFFIX_STRING
+ def setterName: TermName = getterName append SETTER_SUFFIX_STRING
+ def getterName: TermName = dropTraitSetterSeparator.dropSetter.dropLocal
+
+ private def dropTraitSetterSeparator: TermName =
+ name indexOf TRAIT_SETTER_SEPARATOR_STRING match {
+ case -1 => name.toTermName
+ case idx => name.toTermName drop idx drop TRAIT_SETTER_SEPARATOR_STRING.length
+ }
}
implicit val NameTag = ClassTag[Name](classOf[Name])
@@ -485,7 +463,7 @@ trait Names extends api.Names with LowPriorityNames {
type ThisNameType = TermName
protected[this] def thisName: TermName = this
- var next: TermName = termHashtable(hash)
+ val next: TermName = termHashtable(hash)
termHashtable(hash) = this
def isTermName: Boolean = true
def isTypeName: Boolean = false
@@ -510,11 +488,16 @@ trait Names extends api.Names with LowPriorityNames {
implicit val TermNameTag = ClassTag[TermName](classOf[TermName])
+ object TermName extends TermNameExtractor {
+ def apply(s: String) = newTermName(s)
+ def unapply(name: TermName): Option[String] = Some(name.toString)
+ }
+
sealed abstract class TypeName(index0: Int, len0: Int, hash: Int) extends Name(index0, len0) {
type ThisNameType = TypeName
protected[this] def thisName: TypeName = this
- var next: TypeName = typeHashtable(hash)
+ val next: TypeName = typeHashtable(hash)
typeHashtable(hash) = this
def isTermName: Boolean = false
def isTypeName: Boolean = true
@@ -539,4 +522,9 @@ trait Names extends api.Names with LowPriorityNames {
}
implicit val TypeNameTag = ClassTag[TypeName](classOf[TypeName])
+
+ object TypeName extends TypeNameExtractor {
+ def apply(s: String) = newTypeName(s)
+ def unapply(name: TypeName): Option[String] = Some(name.toString)
+ }
}
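
The fixIndexOf helper above adapts the name table's convention, where a failed pos search returns the name's length, to the usual -1 convention of indexOf. Below is a plain-String sketch of the same adjustment; the pos stand-in here is hypothetical, not the Name implementation.

  object IndexConventionSketch {
    // stand-in for Name.pos: index of c in s, or s.length if absent
    def pos(s: String, c: Char): Int = {
      var i = 0
      while (i < s.length && s.charAt(i) != c) i += 1
      i
    }

    // same rewrite as fixIndexOf above: length-on-failure becomes -1-on-failure
    def fixIndexOf(s: String, idx: Int): Int = if (idx == s.length) -1 else idx

    def main(args: Array[String]): Unit = {
      println(fixIndexOf("scala", pos("scala", 'a')))   // 2
      println(fixIndexOf("scala", pos("scala", 'z')))   // -1
    }
  }
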
diff --git a/src/reflect/scala/reflect/internal/Phase.scala b/src/reflect/scala/reflect/internal/Phase.scala
index c0f4232724..450c90ed56 100644
--- a/src/reflect/scala/reflect/internal/Phase.scala
+++ b/src/reflect/scala/reflect/internal/Phase.scala
@@ -3,7 +3,8 @@
* @author Martin Odersky
*/
-package scala.reflect
+package scala
+package reflect
package internal
abstract class Phase(val prev: Phase) {
diff --git a/src/reflect/scala/reflect/internal/Positions.scala b/src/reflect/scala/reflect/internal/Positions.scala
index faa161d6b1..eef8159ad4 100644
--- a/src/reflect/scala/reflect/internal/Positions.scala
+++ b/src/reflect/scala/reflect/internal/Positions.scala
@@ -1,34 +1,274 @@
-package scala.reflect
+package scala
+package reflect
package internal
+import util._
+import scala.collection.mutable.ListBuffer
+
+/** Handling range positions
+ * atPos, the main method in this trait, will add positions to a tree,
+ * and will ensure the following properties:
+ *
+ * 1. All nodes between the root of the tree and nodes that already have positions
+ * will be assigned positions.
+ * 2. No node which already has a position will be assigned a different range; however
+ * a RangePosition might become a TransparentPosition.
+ * 3. The position of each assigned node includes the positions of each of its children.
+ * 4. The positions of all solid descendants of children of an assigned node
+ * are mutually non-overlapping.
+ *
+ * Here, the solid descendants of a node are:
+ *
+ * If the node has a TransparentPosition, the solid descendants of all its children
+ * Otherwise, the singleton consisting of the node itself.
+ */
trait Positions extends api.Positions { self: SymbolTable =>
type Position = scala.reflect.internal.util.Position
val NoPosition = scala.reflect.internal.util.NoPosition
implicit val PositionTag = ClassTag[Position](classOf[Position])
+ def inform(msg: String): Unit
+
+ def useOffsetPositions: Boolean = true
+
/** A position that wraps a set of trees.
* The point of the wrapping position is the point of the default position.
* If some of the trees are ranges, returns a range position enclosing all ranges
* Otherwise returns default position that is either focused or not.
*/
- def wrappingPos(default: Position, trees: List[Tree]) = wrappingPos(default, trees, true)
- def wrappingPos(default: Position, trees: List[Tree], focus: Boolean): Position = default
+ def wrappingPos(default: Position, trees: List[Tree]): Position = wrappingPos(default, trees, focus = true)
+ def wrappingPos(default: Position, trees: List[Tree], focus: Boolean): Position = {
+ if (useOffsetPositions) default else {
+ val ranged = trees filter (_.pos.isRange)
+ if (ranged.isEmpty) if (focus) default.focus else default
+ else new RangePosition(default.source, (ranged map (_.pos.start)).min, default.point, (ranged map (_.pos.end)).max)
+ }
+ }
/** A position that wraps the non-empty set of trees.
* The point of the wrapping position is the point of the first trees' position.
* If some of the trees are non-synthetic, returns a range position enclosing the non-synthetic trees
* Otherwise returns a synthetic offset position to point.
*/
- def wrappingPos(trees: List[Tree]): Position = trees.head.pos
+ def wrappingPos(trees: List[Tree]): Position = {
+ val headpos = trees.head.pos
+ if (useOffsetPositions || !headpos.isDefined) headpos
+ else wrappingPos(headpos, trees)
+ }
/** Ensure that given tree has no positions that overlap with
* any of the positions of `others`. This is done by
* shortening the range, assigning TransparentPositions
* to some of the nodes in `tree` or focusing on the position.
*/
- def ensureNonOverlapping(tree: Tree, others: List[Tree]){ ensureNonOverlapping(tree, others, true) }
- def ensureNonOverlapping(tree: Tree, others: List[Tree], focus: Boolean) {}
+ def ensureNonOverlapping(tree: Tree, others: List[Tree]){ ensureNonOverlapping(tree, others, focus = true) }
+ def ensureNonOverlapping(tree: Tree, others: List[Tree], focus: Boolean) {
+ if (useOffsetPositions) return
+
+ def isOverlapping(pos: Position) =
+ pos.isRange && (others exists (pos overlaps _.pos))
+
+ if (isOverlapping(tree.pos)) {
+ val children = tree.children
+ children foreach (ensureNonOverlapping(_, others, focus))
+ if (tree.pos.isOpaqueRange) {
+ val wpos = wrappingPos(tree.pos, children, focus)
+ tree setPos (if (isOverlapping(wpos)) tree.pos.makeTransparent else wpos)
+ }
+ }
+ }
+
+ def rangePos(source: SourceFile, start: Int, point: Int, end: Int): Position =
+ if (useOffsetPositions) new OffsetPosition(source, point)
+ else new RangePosition(source, start, point, end)
+
+ def validatePositions(tree: Tree) {
+ if (useOffsetPositions) return
+
+ def reportTree(prefix : String, tree : Tree) {
+ val source = if (tree.pos.isDefined) tree.pos.source else ""
+ inform("== "+prefix+" tree ["+tree.id+"] of type "+tree.productPrefix+" at "+tree.pos.show+source)
+ inform("")
+ inform(treeStatus(tree))
+ inform("")
+ }
+
+ def positionError(msg: String)(body : => Unit) {
+ inform("======= Position error\n" + msg)
+ body
+ inform("\nWhile validating #" + tree.id)
+ inform(treeStatus(tree))
+ inform("\nChildren:")
+ tree.children map (t => " " + treeStatus(t, tree)) foreach inform
+ inform("=======")
+ throw new ValidateException(msg)
+ }
+
+ def validate(tree: Tree, encltree: Tree): Unit = {
+
+ if (!tree.isEmpty && tree.canHaveAttrs) {
+ if (settings.Yposdebug && (settings.verbose || settings.Yrangepos))
+ println("[%10s] %s".format("validate", treeStatus(tree, encltree)))
+
+ if (!tree.pos.isDefined)
+ positionError("Unpositioned tree #"+tree.id) {
+ inform("%15s %s".format("unpositioned", treeStatus(tree, encltree)))
+ inform("%15s %s".format("enclosing", treeStatus(encltree)))
+ encltree.children foreach (t => inform("%15s %s".format("sibling", treeStatus(t, encltree))))
+ }
+ if (tree.pos.isRange) {
+ if (!encltree.pos.isRange)
+ positionError("Synthetic tree ["+encltree.id+"] contains nonsynthetic tree ["+tree.id+"]") {
+ reportTree("Enclosing", encltree)
+ reportTree("Enclosed", tree)
+ }
+ if (!(encltree.pos includes tree.pos))
+ positionError("Enclosing tree ["+encltree.id+"] does not include tree ["+tree.id+"]") {
+ reportTree("Enclosing", encltree)
+ reportTree("Enclosed", tree)
+ }
+
+ findOverlapping(tree.children flatMap solidDescendants) match {
+ case List() => ;
+ case xs => {
+ positionError("Overlapping trees "+xs.map { case (x, y) => (x.id, y.id) }.mkString("", ", ", "")) {
+ reportTree("Ancestor", tree)
+ for((x, y) <- xs) {
+ reportTree("First overlapping", x)
+ reportTree("Second overlapping", y)
+ }
+ }
+ }
+ }
+ }
+ for (ct <- tree.children flatMap solidDescendants) validate(ct, tree)
+ }
+ }
+
+ if (!isPastTyper)
+ validate(tree, tree)
+ }
+
+ def solidDescendants(tree: Tree): List[Tree] =
+ if (tree.pos.isTransparent) tree.children flatMap solidDescendants
+ else List(tree)
+
+ /** A free range from `lo` to `hi` */
+ private def free(lo: Int, hi: Int): Range =
+ Range(new RangePosition(null, lo, lo, hi), EmptyTree)
+
+ /** The maximal free range */
+ private lazy val maxFree: Range = free(0, Int.MaxValue)
+
+ /** A singleton list of a non-empty range from `lo` to `hi`, or else the empty List */
+ private def maybeFree(lo: Int, hi: Int) =
+ if (lo < hi) List(free(lo, hi))
+ else List()
+
+ /** Insert `pos` into ranges `rs` if possible;
+ * otherwise add conflicting trees to `conflicting`.
+ */
+ private def insert(rs: List[Range], t: Tree, conflicting: ListBuffer[Tree]): List[Range] = rs match {
+ case List() =>
+ assert(conflicting.nonEmpty)
+ rs
+ case r :: rs1 =>
+ assert(!t.pos.isTransparent)
+ if (r.isFree && (r.pos includes t.pos)) {
+// println("subdividing "+r+"/"+t.pos)
+ maybeFree(t.pos.end, r.pos.end) ::: List(Range(t.pos, t)) ::: maybeFree(r.pos.start, t.pos.start) ::: rs1
+ } else {
+ if (!r.isFree && (r.pos overlaps t.pos)) conflicting += r.tree
+ r :: insert(rs1, t, conflicting)
+ }
+ }
+
+ /** Replace elem `t` of `ts` by `replacement` list. */
+ private def replace(ts: List[Tree], t: Tree, replacement: List[Tree]): List[Tree] =
+ if (ts.head == t) replacement ::: ts.tail
+ else ts.head :: replace(ts.tail, t, replacement)
+
+ /** Does given list of trees have mutually non-overlapping positions?
+ * pre: None of the trees is transparent
+ */
+ def findOverlapping(cts: List[Tree]): List[(Tree, Tree)] = {
+ var ranges = List(maxFree)
+ for (ct <- cts) {
+ if (ct.pos.isOpaqueRange) {
+ val conflicting = new ListBuffer[Tree]
+ ranges = insert(ranges, ct, conflicting)
+ if (conflicting.nonEmpty) return conflicting.toList map (t => (t, ct))
+ }
+ }
+ List()
+ }
+
+ /** Set position of all children of a node
+ * @param pos A target position.
+ * Uses the point of the position as the point of all positions it assigns.
+ * Uses the start of this position as an Offset position for unpositioned trees
+ * without children.
+ * @param trees The children to position. All children must be positionable.
+ */
+ private def setChildrenPos(pos: Position, trees: List[Tree]): Unit = try {
+ for (tree <- trees) {
+ if (!tree.isEmpty && tree.canHaveAttrs && tree.pos == NoPosition) {
+ val children = tree.children
+ if (children.isEmpty) {
+ tree setPos pos.focus
+ } else {
+ setChildrenPos(pos, children)
+ tree setPos wrappingPos(pos, children)
+ }
+ }
+ }
+ } catch {
+ case ex: Exception =>
+ println("error while set children pos "+pos+" of "+trees)
+ throw ex
+ }
+
+
+ class ValidateException(msg : String) extends Exception(msg)
+
+
+ /** A locator for trees with given positions.
+ * Given a position `pos`, locator.apply returns
+ * the smallest tree that encloses `pos`.
+ */
+ class Locator(pos: Position) extends Traverser {
+ var last: Tree = _
+ def locateIn(root: Tree): Tree = {
+ this.last = EmptyTree
+ traverse(root)
+ this.last
+ }
+ protected def isEligible(t: Tree) = !t.pos.isTransparent
+ override def traverse(t: Tree) {
+ t match {
+ case tt : TypeTree if tt.original != null && (tt.pos includes tt.original.pos) =>
+ traverse(tt.original)
+ case _ =>
+ if (t.pos includes pos) {
+ if (isEligible(t)) last = t
+ super.traverse(t)
+ } else t match {
+ case mdef: MemberDef =>
+ traverseTrees(mdef.mods.annotations)
+ case _ =>
+ }
+ }
+ }
+ }
+
+ case class Range(pos: Position, tree: Tree) {
+ def isFree = tree == EmptyTree
+ }
+
+ class TypedLocator(pos: Position) extends Locator(pos) {
+ override protected def isEligible(t: Tree) = super.isEligible(t) && t.tpe != null
+ }
trait PosAssigner extends Traverser {
var pos: Position
@@ -38,7 +278,7 @@ trait Positions extends api.Positions { self: SymbolTable =>
protected class DefaultPosAssigner extends PosAssigner {
var pos: Position = _
override def traverse(t: Tree) {
- if (t eq EmptyTree) ()
+ if (!t.canHaveAttrs) ()
else if (t.pos == NoPosition) {
t.setPos(pos)
super.traverse(t) // TODO: bug? shouldn't the traverse be outside of the if?
@@ -57,9 +297,25 @@ trait Positions extends api.Positions { self: SymbolTable =>
}
}
+ /** Position a tree.
+ * This means: Set position of a node and position all its unpositioned children.
+ */
def atPos[T <: Tree](pos: Position)(tree: T): T = {
- posAssigner.pos = pos
- posAssigner.traverse(tree)
- tree
+ if (useOffsetPositions || !pos.isOpaqueRange) {
+ posAssigner.pos = pos
+ posAssigner.traverse(tree)
+ tree
+ }
+ else {
+ if (!tree.isEmpty && tree.canHaveAttrs && tree.pos == NoPosition) {
+ tree.setPos(pos)
+ val children = tree.children
+ if (children.nonEmpty) {
+ if (children.tail.isEmpty) atPos(pos)(children.head)
+ else setChildrenPos(pos, children)
+ }
+ }
+ tree
+ }
}
-}
\ No newline at end of file
+}
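
Stripped of Trees and Positions, two of the operations above are plain interval arithmetic: wrappingPos encloses all ranged children between the minimum start and maximum end, and findOverlapping reports children whose opaque ranges intersect. The sketch below is a simplified quadratic version over bare spans; it skips the free-range bookkeeping and transparency handling of the real code.

  object RangeSketch {
    final case class Span(start: Int, end: Int) {
      def overlaps(o: Span): Boolean = start < o.end && o.start < end
    }

    // wrappingPos over plain spans: enclose every child span
    def wrapping(default: Span, children: List[Span]): Span =
      if (children.isEmpty) default
      else Span(children.map(_.start).min, children.map(_.end).max)

    // findOverlapping, simplified: every pair of children that intersects
    def findOverlapping(children: List[Span]): List[(Span, Span)] =
      for {
        (a, i) <- children.zipWithIndex
        b      <- children.drop(i + 1)
        if a overlaps b
      } yield (a, b)

    def main(args: Array[String]): Unit = {
      println(wrapping(Span(0, 0), List(Span(2, 5), Span(7, 9))))   // Span(2,9)
      println(findOverlapping(List(Span(2, 5), Span(7, 9))))        // List(): adjacent spans do not overlap
      println(findOverlapping(List(Span(2, 5), Span(4, 9))))        // List((Span(2,5),Span(4,9)))
    }
  }
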
diff --git a/src/reflect/scala/reflect/internal/Printers.scala b/src/reflect/scala/reflect/internal/Printers.scala
index 80d247c0ea..1603029340 100644
--- a/src/reflect/scala/reflect/internal/Printers.scala
+++ b/src/reflect/scala/reflect/internal/Printers.scala
@@ -5,7 +5,8 @@
// todo. we need to unify this prettyprinter with NodePrinters
-package scala.reflect
+package scala
+package reflect
package internal
import java.io.{ OutputStream, PrintWriter, StringWriter, Writer }
@@ -25,26 +26,27 @@ trait Printers extends api.Printers { self: SymbolTable =>
if (nme.keywords(term) && term != nme.USCOREkw) "`%s`" format s
else s
}
- def quotedName(name: Name): String = quotedName(name, false)
- def quotedName(name: String): String = quotedName(newTermName(name), false)
+ def quotedName(name: Name): String = quotedName(name, decode = false)
+ def quotedName(name: String): String = quotedName(newTermName(name), decode = false)
private def symNameInternal(tree: Tree, name: Name, decoded: Boolean): String = {
- val sym = tree.symbol
- if (sym.name.toString == nme.ERROR.toString) {
- "<" + quotedName(name, decoded) + ": error>"
- } else if (sym != null && sym != NoSymbol) {
- val prefix = if (sym.isMixinConstructor) "/*%s*/".format(quotedName(sym.owner.name, decoded)) else ""
- var suffix = ""
- if (settings.uniqid.value) suffix += ("#" + sym.id)
- if (settings.Yshowsymkinds.value) suffix += ("#" + sym.abbreviatedKindString)
- prefix + quotedName(tree.symbol.decodedName) + suffix
- } else {
- quotedName(name, decoded)
- }
+ val sym = tree.symbol
+ def qname = quotedName(name.dropLocal, decoded)
+ def qowner = quotedName(sym.owner.name.dropLocal, decoded)
+ def qsymbol = quotedName(sym.nameString)
+
+ if (sym.name.toTermName == nme.ERROR)
+ s"<$qname: error>"
+ else if (sym == null || sym == NoSymbol)
+ qname
+ else if (sym.isMixinConstructor)
+ s"/*$qowner*/$qsymbol"
+ else
+ qsymbol
}
- def decodedSymName(tree: Tree, name: Name) = symNameInternal(tree, name, true)
- def symName(tree: Tree, name: Name) = symNameInternal(tree, name, false)
+ def decodedSymName(tree: Tree, name: Name) = symNameInternal(tree, name, decoded = true)
+ def symName(tree: Tree, name: Name) = symNameInternal(tree, name, decoded = false)
/** Turns a path into a String, introducing backquotes
* as necessary.
@@ -67,12 +69,12 @@ trait Printers extends api.Printers { self: SymbolTable =>
printIds = settings.uniqid.value
printKinds = settings.Yshowsymkinds.value
printMirrors = false // typically there's no point to print mirrors inside the compiler, as there is only one mirror there
- protected def doPrintPositions = settings.Xprintpos.value
+ printPositions = settings.Xprintpos.value
def indent() = indentMargin += indentStep
def undent() = indentMargin -= indentStep
- def printPosition(tree: Tree) = if (doPrintPositions) print(tree.pos.show)
+ def printPosition(tree: Tree) = if (printPositions) print(tree.pos.show)
def println() {
out.println()
@@ -91,8 +93,8 @@ trait Printers extends api.Printers { self: SymbolTable =>
}
def printColumn(ts: List[Tree], start: String, sep: String, end: String) {
- print(start); indent; println()
- printSeq(ts){print(_)}{print(sep); println()}; undent; println(); print(end)
+ print(start); indent(); println()
+ printSeq(ts){print(_)}{print(sep); println()}; undent(); println(); print(end)
}
def printRow(ts: List[Tree], start: String, sep: String, end: String) {
@@ -168,7 +170,7 @@ trait Printers extends api.Printers { self: SymbolTable =>
)
def printFlags(flags: Long, privateWithin: String) {
- var mask: Long = if (settings.debug.value) -1L else PrintableFlags
+ val mask: Long = if (settings.debug) -1L else PrintableFlags
val s = flagsToString(flags & mask, privateWithin)
if (s != "") print(s + " ")
}
@@ -208,7 +210,7 @@ trait Printers extends api.Printers { self: SymbolTable =>
case ModuleDef(mods, name, impl) =>
printAnnotations(tree)
- printModifiers(tree, mods);
+ printModifiers(tree, mods)
print("object " + symName(tree, name), " extends ", impl)
case ValDef(mods, name, tp, rhs) =>
@@ -327,10 +329,10 @@ trait Printers extends api.Printers { self: SymbolTable =>
print(lhs, " = ", rhs)
case If(cond, thenp, elsep) =>
- print("if (", cond, ")"); indent; println()
- print(thenp); undent
+ print("if (", cond, ")"); indent(); println()
+ print(thenp); undent()
if (!elsep.isEmpty) {
- println(); print("else"); indent; println(); print(elsep); undent
+ println(); print("else"); indent(); println(); print(elsep); undent()
}
case Return(expr) =>
@@ -375,7 +377,7 @@ trait Printers extends api.Printers { self: SymbolTable =>
if (!qual.isEmpty) print(symName(tree, qual) + ".")
print("this")
- case Select(qual @ New(tpe), name) if (!settings.debug.value) =>
+ case Select(qual @ New(tpe), name) if !settings.debug =>
print(qual)
case Select(qualifier, name) =>
@@ -389,7 +391,7 @@ trait Printers extends api.Printers { self: SymbolTable =>
print(x.escapedStringValue)
case tt: TypeTree =>
- if ((tree.tpe eq null) || (doPrintPositions && tt.original != null)) {
+ if ((tree.tpe eq null) || (printPositions && tt.original != null)) {
if (tt.original != null) print("<type: ", tt.original, ">")
else print("<type ?>")
} else if ((tree.tpe.typeSymbol ne null) && tree.tpe.typeSymbol.isAnonymousClass) {
@@ -420,13 +422,20 @@ trait Printers extends api.Printers { self: SymbolTable =>
print(tp); printRow(args, "[", ", ", "]")
case TypeBoundsTree(lo, hi) =>
- printOpt(" >: ", lo); printOpt(" <: ", hi)
+ // Avoid printing noisy empty type bounds everywhere.
+ // Untyped empty bounds are not printed by printOpt,
+ // but after they are typed we have to exclude Nothing/Any.
+ if ((lo.tpe eq null) || !(lo.tpe =:= definitions.NothingTpe))
+ printOpt(" >: ", lo)
+
+ if ((hi.tpe eq null) || !(hi.tpe =:= definitions.AnyTpe))
+ printOpt(" <: ", hi)
case ExistentialTypeTree(tpt, whereClauses) =>
- print(tpt);
+ print(tpt)
printColumn(whereClauses, " forSome { ", ";", "}")
-// SelectFromArray is no longer visible in reflect.internal.
+// SelectFromArray is no longer visible in scala.reflect.internal.
// eliminated until we figure out what we will do with both Printers and
// SelectFromArray.
// case SelectFromArray(qualifier, name, _) =>
@@ -435,7 +444,7 @@ trait Printers extends api.Printers { self: SymbolTable =>
case tree =>
xprintTree(this, tree)
}
- if (printTypes && tree.isTerm && !tree.isEmpty) {
+ if (printTypes && tree.isTerm && tree.canHaveAttrs) {
print("{", if (tree.tpe eq null) "<null>" else tree.tpe.toString, "}")
}
}
@@ -475,8 +484,6 @@ trait Printers extends api.Printers { self: SymbolTable =>
}
def newRawTreePrinter(writer: PrintWriter): RawTreePrinter = new RawTreePrinter(writer)
- def newRawTreePrinter(stream: OutputStream): RawTreePrinter = newRawTreePrinter(new PrintWriter(stream))
- def newRawTreePrinter(): RawTreePrinter = newRawTreePrinter(new PrintWriter(ConsoleWriter))
// provides footnotes for types and mirrors
import scala.collection.mutable.{Map, WeakHashMap, SortedSet}
@@ -525,7 +532,7 @@ trait Printers extends api.Printers { self: SymbolTable =>
private var depth = 0
private var printTypesInFootnotes = true
private var printingFootnotes = false
- private var footnotes = footnoteIndex.mkFootnotes()
+ private val footnotes = footnoteIndex.mkFootnotes()
def print(args: Any*): Unit = {
// don't print type footnotes if the argument is a mere type
@@ -542,14 +549,17 @@ trait Printers extends api.Printers { self: SymbolTable =>
print(")")
case EmptyTree =>
print("EmptyTree")
- case emptyValDef: AnyRef if emptyValDef eq self.emptyValDef =>
+ case self.emptyValDef =>
print("emptyValDef")
+ case self.pendingSuperCall =>
+ print("pendingSuperCall")
case tree: Tree =>
- val hasSymbol = tree.hasSymbol && tree.symbol != NoSymbol
- val isError = hasSymbol && tree.symbol.name.toString == nme.ERROR.toString
+ val hasSymbolField = tree.hasSymbolField && tree.symbol != NoSymbol
+ val isError = hasSymbolField && (tree.symbol.name string_== nme.ERROR)
printProduct(
tree,
preamble = _ => {
+ if (printPositions) print(tree.pos.show)
print(tree.productPrefix)
if (printTypes && tree.tpe != null) print(tree.tpe)
},
@@ -559,7 +569,7 @@ trait Printers extends api.Printers { self: SymbolTable =>
if (isError) print("<")
print(name)
if (isError) print(": error>")
- } else if (hasSymbol) {
+ } else if (hasSymbolField) {
tree match {
case refTree: RefTree =>
if (tree.symbol.name != refTree.name) print("[", tree.symbol, " aka ", refTree.name, "]")
@@ -651,7 +661,7 @@ trait Printers extends api.Printers { self: SymbolTable =>
print("(")
val it = iterable.iterator
while (it.hasNext) {
- body(it.next)
+ body(it.next())
print(if (it.hasNext) ", " else "")
}
print(")")
@@ -672,7 +682,7 @@ trait Printers extends api.Printers { self: SymbolTable =>
case nme.CONSTRUCTOR => "nme.CONSTRUCTOR"
case nme.ROOTPKG => "nme.ROOTPKG"
case _ =>
- val prefix = if (name.isTermName) "newTermName(\"" else "newTypeName(\""
+ val prefix = if (name.isTermName) "TermName(\"" else "TypeName(\""
prefix + name.toString + "\")"
}
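
The TypeBoundsTree case above only prints bounds that carry information once typed: Nothing as a lower bound and Any as an upper bound are the defaults and are suppressed. Here is a string-level sketch of that rule, using plain type names rather than Types.

  object BoundsPrintingSketch {
    def boundsString(lo: Option[String], hi: Option[String]): String = {
      val loPart = lo.filterNot(_ == "Nothing").map(" >: " + _).getOrElse("")
      val hiPart = hi.filterNot(_ == "Any").map(" <: " + _).getOrElse("")
      loPart + hiPart
    }

    def main(args: Array[String]): Unit = {
      println("T" + boundsString(Some("Nothing"), Some("Any")))      // T
      println("T" + boundsString(Some("Nothing"), Some("AnyRef")))   // T <: AnyRef
      println("T" + boundsString(Some("Null"), Some("AnyRef")))      // T >: Null <: AnyRef
    }
  }
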
diff --git a/src/reflect/scala/reflect/internal/PrivateWithin.scala b/src/reflect/scala/reflect/internal/PrivateWithin.scala
new file mode 100644
index 0000000000..996f9c13bb
--- /dev/null
+++ b/src/reflect/scala/reflect/internal/PrivateWithin.scala
@@ -0,0 +1,27 @@
+package scala
+package reflect
+package internal
+
+import java.lang.{ Class => jClass }
+import java.lang.reflect.{ Member => jMember }
+
+trait PrivateWithin {
+ self: SymbolTable =>
+
+ def propagatePackageBoundary(c: jClass[_], syms: Symbol*): Unit =
+ propagatePackageBoundary(JavaAccFlags(c), syms: _*)
+ def propagatePackageBoundary(m: jMember, syms: Symbol*): Unit =
+ propagatePackageBoundary(JavaAccFlags(m), syms: _*)
+ def propagatePackageBoundary(jflags: JavaAccFlags, syms: Symbol*) {
+ if (jflags.hasPackageAccessBoundary)
+ syms foreach setPackageAccessBoundary
+ }
+
+ // protected in java means package protected. #3946
+ // See ticket #1687 for an example of when the enclosing top level class is NoSymbol;
+ // it apparently occurs when processing v45.3 bytecode.
+ def setPackageAccessBoundary(sym: Symbol): Symbol = (
+ if (sym.enclosingTopLevelClass eq NoSymbol) sym
+ else sym setPrivateWithin sym.enclosingTopLevelClass.owner
+ )
+}
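
propagatePackageBoundary relies on JavaAccFlags.hasPackageAccessBoundary: a member that is neither private nor public (so protected or package-private) has its Scala access boundary set to the enclosing package. Below is a hedged sketch of the same predicate applied directly to java.lang.reflect members, using raw modifier bits rather than the compiler's JavaAccFlags.

  import java.lang.reflect.{ Member => JMember }

  object PackageBoundarySketch {
    private final val ACC_PUBLIC  = 0x0001   // java.lang.reflect.Modifier.PUBLIC
    private final val ACC_PRIVATE = 0x0002   // java.lang.reflect.Modifier.PRIVATE

    // same predicate as JavaAccFlags.hasPackageAccessBoundary, over raw modifiers:
    // neither private nor public, i.e. protected or package-private
    def hasPackageAccessBoundary(m: JMember): Boolean =
      (m.getModifiers & (ACC_PRIVATE | ACC_PUBLIC)) == 0

    def main(args: Array[String]): Unit = {
      val cloneM = classOf[Object].getDeclaredMethod("clone")      // protected
      val toStr  = classOf[Object].getDeclaredMethod("toString")   // public
      println(hasPackageAccessBoundary(cloneM))   // true: the boundary should be propagated
      println(hasPackageAccessBoundary(toStr))    // false
    }
  }
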
diff --git a/src/reflect/scala/reflect/internal/Required.scala b/src/reflect/scala/reflect/internal/Required.scala
index 842491d56d..14db252a16 100644
--- a/src/reflect/scala/reflect/internal/Required.scala
+++ b/src/reflect/scala/reflect/internal/Required.scala
@@ -1,15 +1,13 @@
-package scala.reflect
+package scala
+package reflect
package internal
import settings.MutableSettings
trait Required { self: SymbolTable =>
-
def picklerPhase: Phase
-
def settings: MutableSettings
- def forInteractive: Boolean
-
- def forScaladoc: Boolean
+ @deprecated("Interactive is implemented with a custom Global; this flag is ignored", "2.11.0") def forInteractive = false
+ @deprecated("Scaladoc is implemented with a custom Global; this flag is ignored", "2.11.0") def forScaladoc = false
}
diff --git a/src/reflect/scala/reflect/internal/Scopes.scala b/src/reflect/scala/reflect/internal/Scopes.scala
index ab3b9b7ed7..8d20c8e546 100644
--- a/src/reflect/scala/reflect/internal/Scopes.scala
+++ b/src/reflect/scala/reflect/internal/Scopes.scala
@@ -3,11 +3,22 @@
* @author Martin Odersky
*/
-package scala.reflect
+package scala
+package reflect
package internal
+import scala.annotation.tailrec
+
trait Scopes extends api.Scopes { self: SymbolTable =>
+ /** An ADT to represent the results of symbol name lookups.
+ */
+ sealed trait NameLookup { def symbol: Symbol ; def isSuccess = false }
+ case class LookupSucceeded(qualifier: Tree, symbol: Symbol) extends NameLookup { override def isSuccess = true }
+ case class LookupAmbiguous(msg: String) extends NameLookup { def symbol = NoSymbol }
+ case class LookupInaccessible(symbol: Symbol, msg: String) extends NameLookup
+ case object LookupNotFound extends NameLookup { def symbol = NoSymbol }
+
class ScopeEntry(val sym: Symbol, val owner: Scope) {
/** the next entry in the hash bucket
*/
@@ -17,15 +28,11 @@ trait Scopes extends api.Scopes { self: SymbolTable =>
*/
var next: ScopeEntry = null
+ def depth = owner.nestingLevel
override def hashCode(): Int = sym.name.start
- override def toString(): String = sym.toString()
+ override def toString() = s"$sym (depth=$depth)"
}
- /**
- * @param sym ...
- * @param owner ...
- * @return ...
- */
private def newScopeEntry(sym: Symbol, owner: Scope): ScopeEntry = {
val e = new ScopeEntry(sym, owner)
e.next = owner.elems
@@ -61,6 +68,11 @@ trait Scopes extends api.Scopes { self: SymbolTable =>
/** a cache for all elements, to be used by symbol iterator.
*/
private var elemsCache: List[Symbol] = null
+ private var cachedSize = -1
+ private def flushElemsCache() {
+ elemsCache = null
+ cachedSize = -1
+ }
/** size and mask of hash tables
* todo: make hashtables grow?
@@ -82,6 +94,12 @@ trait Scopes extends api.Scopes { self: SymbolTable =>
/** the number of entries in this scope */
override def size: Int = {
+ if (cachedSize < 0)
+ cachedSize = directSize
+
+ cachedSize
+ }
+ private def directSize: Int = {
var s = 0
var e = elems
while (e ne null) {
@@ -92,11 +110,9 @@ trait Scopes extends api.Scopes { self: SymbolTable =>
}
/** enter a scope entry
- *
- * @param e ...
*/
protected def enterEntry(e: ScopeEntry) {
- elemsCache = null
+ flushElemsCache()
if (hashtable ne null)
enterInHash(e)
else if (size >= MIN_HASH)
@@ -110,8 +126,6 @@ trait Scopes extends api.Scopes { self: SymbolTable =>
}
/** enter a symbol
- *
- * @param sym ...
*/
def enter[T <: Symbol](sym: T): T = {
enterEntry(newScopeEntry(sym, this))
@@ -119,8 +133,6 @@ trait Scopes extends api.Scopes { self: SymbolTable =>
}
/** enter a symbol, asserting that no symbol with same name exists in scope
- *
- * @param sym ...
*/
def enterUnique(sym: Symbol) {
assert(lookup(sym.name) == NoSymbol, (sym.fullLocationString, lookup(sym.name).fullLocationString))
@@ -175,8 +187,6 @@ trait Scopes extends api.Scopes { self: SymbolTable =>
}
/** remove entry
- *
- * @param e ...
*/
def unlink(e: ScopeEntry) {
if (elems == e) {
@@ -192,30 +202,64 @@ trait Scopes extends api.Scopes { self: SymbolTable =>
if (e1 == e) {
hashtable(index) = e.tail
} else {
- while (e1.tail != e) e1 = e1.tail;
+ while (e1.tail != e) e1 = e1.tail
e1.tail = e.tail
}
}
- elemsCache = null
+ flushElemsCache()
}
/** remove symbol */
def unlink(sym: Symbol) {
var e = lookupEntry(sym.name)
while (e ne null) {
- if (e.sym == sym) unlink(e);
+ if (e.sym == sym) unlink(e)
e = lookupNextEntry(e)
}
}
- /** lookup a symbol
- *
- * @param name ...
- * @return ...
+ /** Lookup a module or a class, filtering out matching names in scope
+ * which do not match that requirement.
+ */
+ def lookupModule(name: Name): Symbol = lookupAll(name.toTermName) find (_.isModule) getOrElse NoSymbol
+ def lookupClass(name: Name): Symbol = lookupAll(name.toTypeName) find (_.isClass) getOrElse NoSymbol
+
+ /** True if the name exists in this scope, false otherwise. */
+ def containsName(name: Name) = lookupEntry(name) != null
+
+ /** Lookup a symbol.
*/
def lookup(name: Name): Symbol = {
val e = lookupEntry(name)
- if (e eq null) NoSymbol else e.sym
+ if (e eq null) NoSymbol
+ else if (lookupNextEntry(e) eq null) e.sym
+ else {
+ // We shouldn't get here: until now this method was picking a random
+ // symbol when there was more than one with the name, so this should
+ // only be called knowing that there are 0-1 symbols of interest. So, we
+ // can safely return an overloaded symbol rather than throwing away the
+ // rest of them. Most likely we still break, but at least we will break
+ // in an understandable fashion (unexpectedly overloaded symbol) rather
+ // than a non-deterministic bizarre one (see any bug involving overloads
+ // in package objects.)
+ val alts = lookupAll(name).toList
+ def alts_s = alts map (s => s.defString) mkString " <and> "
+ devWarning(s"scope lookup of $name found multiple symbols: $alts_s")
+ // FIXME - how is one supposed to create an overloaded symbol without
+ // knowing the correct owner? Using the symbol owner is not correct;
+ // say for instance this is List's scope and the symbols are its three
+ // mkString members. Those symbols are owned by TraversableLike, which
+ // is no more meaningful an owner than NoSymbol given that we're in
+ // List. Maybe it makes no difference who owns the overloaded symbol, in
+ // which case let's establish that and have a canonical creation method.
+ //
+ // FIXME - a similar question for prefix, although there are more
+ // clues from the symbols on that one, as implemented here. In general
+ // the distinct list is one type and lub becomes the identity.
+ // val prefix = lub(alts map (_.info.prefix) distinct)
+ // Now using NoSymbol and NoPrefix always to avoid forcing info (SI-6664)
+ NoSymbol.newOverloaded(NoPrefix, alts)
+ }
}
/** Returns an iterator yielding every symbol with given name in this scope.
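The long comment in lookup above motivates the behavioural change: when a name is bound more than once, collect all bindings into an explicit overloaded result instead of silently returning an arbitrary one. A toy version of that decision over a plain multimap, rather than the real Symbol/NoSymbol machinery:

    // Sketch: prefer an explicit "overloaded" result over picking one binding at random.
    sealed trait Resolved
    case object Missing extends Resolved
    case class Single(value: String) extends Resolved
    case class Overloaded(values: List[String]) extends Resolved

    def resolve(bindings: Map[String, List[String]], name: String): Resolved =
      bindings.getOrElse(name, Nil) match {
        case Nil          => Missing
        case value :: Nil => Single(value)
        case many         =>
          Console.err.println(s"lookup of $name found multiple bindings: ${many.mkString(" <and> ")}")
          Overloaded(many)
      }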
@@ -223,7 +267,20 @@ trait Scopes extends api.Scopes { self: SymbolTable =>
def lookupAll(name: Name): Iterator[Symbol] = new Iterator[Symbol] {
var e = lookupEntry(name)
def hasNext: Boolean = e ne null
- def next(): Symbol = { val r = e.sym; e = lookupNextEntry(e); r }
+ def next(): Symbol = try e.sym finally e = lookupNextEntry(e)
+ }
+
+ def lookupAllEntries(name: Name): Iterator[ScopeEntry] = new Iterator[ScopeEntry] {
+ var e = lookupEntry(name)
+ def hasNext: Boolean = e ne null
+ def next(): ScopeEntry = try e finally e = lookupNextEntry(e)
+ }
+
+ def lookupUnshadowedEntries(name: Name): Iterator[ScopeEntry] = {
+ lookupEntry(name) match {
+ case null => Iterator.empty
+ case e => lookupAllEntries(name) filter (e1 => (e eq e1) || (e.depth == e1.depth && e.sym != e1.sym))
+ }
}
/** lookup a symbol entry matching given name.
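The lookupAll and lookupAllEntries iterators above both rely on the `try current finally advance` idiom: return the element under the cursor and step to the next entry in one expression. The same idiom over an ordinary singly linked list, as a standalone sketch:

    // Sketch of the "return current, then advance" iterator idiom.
    final case class Node[A](value: A, next: Node[A] /* null marks the end of the chain */)

    def nodeIterator[A](start: Node[A]): Iterator[A] = new Iterator[A] {
      private var cursor = start
      def hasNext: Boolean = cursor ne null
      def next(): A = try cursor.value finally cursor = cursor.next
    }

    // nodeIterator(Node(1, Node(2, null))).toList == List(1, 2)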
@@ -257,20 +314,47 @@ trait Scopes extends api.Scopes { self: SymbolTable =>
if (hashtable ne null)
do { e = e.tail } while ((e ne null) && e.sym.name != entry.sym.name)
else
- do { e = e.next } while ((e ne null) && e.sym.name != entry.sym.name);
+ do { e = e.next } while ((e ne null) && e.sym.name != entry.sym.name)
e
}
+ /** TODO - we can test this more efficiently than checking isSubScope
+ * in both directions. However the size test might be enough to quickly
+ * rule out most failures.
+ */
+ def isSameScope(other: Scope) = (
+ (size == other.size) // optimization - size is cached
+ && (this isSubScope other)
+ && (other isSubScope this)
+ )
+
+ def isSubScope(other: Scope) = {
+ def scopeContainsSym(sym: Symbol): Boolean = {
+ @tailrec def entryContainsSym(e: ScopeEntry): Boolean = e match {
+ case null => false
+ case _ =>
+ val comparableInfo = sym.info.substThis(sym.owner, e.sym.owner)
+ (e.sym.info =:= comparableInfo) || entryContainsSym(lookupNextEntry(e))
+ }
+ entryContainsSym(this lookupEntry sym.name)
+ }
+ other.toList forall scopeContainsSym
+ }
+
/** Return all symbols as a list in the order they were entered in this scope.
*/
override def toList: List[Symbol] = {
if (elemsCache eq null) {
- elemsCache = Nil
+ var symbols: List[Symbol] = Nil
+ var count = 0
var e = elems
while ((e ne null) && e.owner == this) {
- elemsCache = e.sym :: elemsCache
+ count += 1
+ symbols ::= e.sym
e = e.next
}
+ elemsCache = symbols
+ cachedSize = count
}
elemsCache
}
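isSameScope and isSubScope above define scope equality as mutual containment, where membership of a symbol is tested against every same-named entry under a type-substitution-aware comparison. Stripped of those details, the shape of the check is a quantified subset test under a caller-supplied equivalence:

    // Sketch: a is a "sub-scope" of b if every element of a has an equivalent element in b.
    def isSubScopeOf[A](a: List[A], b: List[A])(equiv: (A, A) => Boolean): Boolean =
      a.forall(x => b.exists(y => equiv(x, y)))

    def isSameScopeAs[A](a: List[A], b: List[A])(equiv: (A, A) => Boolean): Boolean =
      a.size == b.size && isSubScopeOf(a, b)(equiv) && isSubScopeOf(b, a)(equiv)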
@@ -287,36 +371,16 @@ trait Scopes extends api.Scopes { self: SymbolTable =>
*/
def iterator: Iterator[Symbol] = toList.iterator
-/*
- /** Does this scope contain an entry for `sym`?
- */
- def contains(sym: Symbol): Boolean = lookupAll(sym.name) contains sym
-
- /** A scope that contains all symbols of this scope and that also contains `sym`.
- */
- def +(sym: Symbol): Scope =
- if (contains(sym)) this
- else {
- val result = cloneScope
- result enter sym
- result
- }
-
- /** A scope that contains all symbols of this scope except `sym`.
- */
- def -(sym: Symbol): Scope =
- if (!contains(sym)) this
- else {
- val result = cloneScope
- result unlink sym
- result
- }
-*/
override def foreach[U](p: Symbol => U): Unit = toList foreach p
- override def filter(p: Symbol => Boolean): Scope =
- if (!(toList forall p)) newScopeWith(toList filter p: _*) else this
-
+ override def filterNot(p: Symbol => Boolean): Scope = (
+ if (toList exists p) newScopeWith(toList filterNot p: _*)
+ else this
+ )
+ override def filter(p: Symbol => Boolean): Scope = (
+ if (toList forall p) this
+ else newScopeWith(toList filter p: _*)
+ )
@deprecated("Use `toList.reverse` instead", "2.10.0")
def reverse: List[Symbol] = toList.reverse
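The filter and filterNot overrides above avoid allocating a new scope when the predicate would not remove anything, returning `this` in that case. The same copy-avoidance applies to any immutable-style wrapper; as a short sketch:

    // Sketch: only build a new collection when filtering actually drops something.
    def filterKeepingIdentity[A](xs: Vector[A])(p: A => Boolean): Vector[A] =
      if (xs.forall(p)) xs        // nothing to drop: reuse the existing instance
      else xs.filter(p)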
diff --git a/src/reflect/scala/reflect/internal/StdAttachments.scala b/src/reflect/scala/reflect/internal/StdAttachments.scala
index b782353ed3..9eb66db01e 100644
--- a/src/reflect/scala/reflect/internal/StdAttachments.scala
+++ b/src/reflect/scala/reflect/internal/StdAttachments.scala
@@ -1,4 +1,5 @@
-package scala.reflect
+package scala
+package reflect
package internal
trait StdAttachments {
@@ -29,17 +30,4 @@ trait StdAttachments {
* Therefore we need this hack (see `Reshape.toPreTyperTypeTree` for a detailed explanation).
*/
case class CompoundTypeTreeOriginalAttachment(parents: List[Tree], stats: List[Tree])
-
- /** Is added by the macro engine to the results of macro expansions.
- * Stores the original expandee as it entered the `macroExpand` function.
- */
- case class MacroExpansionAttachment(original: Tree)
-
- /** When present, suppresses macro expansion for the host.
- * This is occasionally necessary, e.g. to prohibit eta-expansion of macros.
- *
- * Does not affect expandability of child nodes, there's context.withMacrosDisabled for that
- * (but think thrice before using that API - see the discussion at https://github.com/scala/scala/pull/1639).
- */
- case object SuppressMacroExpansionAttachment
}
diff --git a/src/reflect/scala/reflect/internal/StdCreators.scala b/src/reflect/scala/reflect/internal/StdCreators.scala
index 5e5e4f9043..a0084dc95c 100644
--- a/src/reflect/scala/reflect/internal/StdCreators.scala
+++ b/src/reflect/scala/reflect/internal/StdCreators.scala
@@ -1,4 +1,5 @@
-package scala.reflect
+package scala
+package reflect
package internal
import scala.reflect.api.{TreeCreator, TypeCreator}
diff --git a/src/reflect/scala/reflect/internal/StdNames.scala b/src/reflect/scala/reflect/internal/StdNames.scala
index de7af4340d..81fffc833c 100644
--- a/src/reflect/scala/reflect/internal/StdNames.scala
+++ b/src/reflect/scala/reflect/internal/StdNames.scala
@@ -3,10 +3,12 @@
* @author Martin Odersky
*/
-package scala.reflect
+package scala
+package reflect
package internal
import java.security.MessageDigest
+import java.util.UUID.randomUUID
import Chars.isOperatorPart
import scala.annotation.switch
import scala.language.implicitConversions
@@ -18,8 +20,6 @@ trait StdNames {
def encode(str: String): TermName = newTermNameCached(NameTransformer.encode(str))
- implicit def lowerTermNames(n: TermName): String = n.toString
-
/** Tensions: would like the keywords to be the very first names entered into the names
* storage so their ids count from 0, which simplifies the parser. Switched to abstract
* classes to avoid all the indirection which is generated with implementation-containing
@@ -37,11 +37,7 @@ trait StdNames {
kws = kws + result
result
}
- def result: Set[TermName] = {
- val result = kws
- kws = null
- result
- }
+ def result: Set[TermName] = try kws finally kws = null
}
private final object compactify extends (String => String) {
@@ -92,8 +88,12 @@ trait StdNames {
def flattenedName(segments: Name*): NameType =
compactify(segments mkString NAME_JOIN_STRING)
- val MODULE_SUFFIX_STRING: String = NameTransformer.MODULE_SUFFIX_STRING
- val NAME_JOIN_STRING: String = NameTransformer.NAME_JOIN_STRING
+ val NAME_JOIN_STRING: String = NameTransformer.NAME_JOIN_STRING
+ val MODULE_SUFFIX_STRING: String = NameTransformer.MODULE_SUFFIX_STRING
+ val SETTER_SUFFIX_STRING: String = NameTransformer.SETTER_SUFFIX_STRING
+ val LOCAL_SUFFIX_STRING: String = NameTransformer.LOCAL_SUFFIX_STRING
+ val TRAIT_SETTER_SEPARATOR_STRING: String = NameTransformer.TRAIT_SETTER_SEPARATOR_STRING
+
val SINGLETON_SUFFIX: String = ".type"
val ANON_CLASS_NAME: NameType = "$anon"
@@ -104,7 +104,6 @@ trait StdNames {
val IMPORT: NameType = "<import>"
val MODULE_SUFFIX_NAME: NameType = MODULE_SUFFIX_STRING
val MODULE_VAR_SUFFIX: NameType = "$module"
- val NAME_JOIN_NAME: NameType = NAME_JOIN_STRING
val PACKAGE: NameType = "package"
val ROOT: NameType = "<root>"
val SPECIALIZED_SUFFIX: NameType = "$sp"
@@ -121,16 +120,12 @@ trait StdNames {
final val Short: NameType = "Short"
final val Unit: NameType = "Unit"
- final val ScalaValueNames: scala.List[NameType] =
- scala.List(Byte, Char, Short, Int, Long, Float, Double, Boolean, Unit)
-
// some types whose companions we utilize
final val AnyRef: NameType = "AnyRef"
final val Array: NameType = "Array"
final val List: NameType = "List"
final val Seq: NameType = "Seq"
final val Symbol: NameType = "Symbol"
- final val ClassTag: NameType = "ClassTag"
final val WeakTypeTag: NameType = "WeakTypeTag"
final val TypeTag : NameType = "TypeTag"
final val Expr: NameType = "Expr"
@@ -206,10 +201,11 @@ trait StdNames {
}
abstract class TypeNames extends Keywords with TypeNamesApi {
+ override type NameType = TypeName
+
protected implicit def createNameType(name: String): TypeName = newTypeNameCached(name)
final val BYNAME_PARAM_CLASS_NAME: NameType = "<byname>"
- final val EQUALS_PATTERN_NAME: NameType = "<equals>"
final val JAVA_REPEATED_PARAM_CLASS_NAME: NameType = "<repeated...>"
final val LOCAL_CHILD: NameType = "<local child>"
final val REFINE_CLASS_NAME: NameType = "<refinement>"
@@ -220,12 +216,10 @@ trait StdNames {
final val Any: NameType = "Any"
final val AnyVal: NameType = "AnyVal"
- final val ExprApi: NameType = "ExprApi"
final val Mirror: NameType = "Mirror"
final val Nothing: NameType = "Nothing"
final val Null: NameType = "Null"
final val Object: NameType = "Object"
- final val PartialFunction: NameType = "PartialFunction"
final val PrefixType: NameType = "PrefixType"
final val Product: NameType = "Product"
final val Serializable: NameType = "Serializable"
@@ -239,7 +233,6 @@ trait StdNames {
final val Group: NameType = "Group"
final val Tree: NameType = "Tree"
final val Type : NameType = "Type"
- final val TypeTree: NameType = "TypeTree"
// Annotation simple names, used in Namer
final val BeanPropertyAnnot: NameType = "BeanProperty"
@@ -249,13 +242,11 @@ trait StdNames {
// Classfile Attributes
final val AnnotationDefaultATTR: NameType = "AnnotationDefault"
final val BridgeATTR: NameType = "Bridge"
- final val ClassfileAnnotationATTR: NameType = "RuntimeInvisibleAnnotations" // RetentionPolicy.CLASS. Currently not used (Apr 2009).
final val CodeATTR: NameType = "Code"
final val ConstantValueATTR: NameType = "ConstantValue"
final val DeprecatedATTR: NameType = "Deprecated"
final val ExceptionsATTR: NameType = "Exceptions"
final val InnerClassesATTR: NameType = "InnerClasses"
- final val LineNumberTableATTR: NameType = "LineNumberTable"
final val LocalVariableTableATTR: NameType = "LocalVariableTable"
final val RuntimeAnnotationATTR: NameType = "RuntimeVisibleAnnotations" // RetentionPolicy.RUNTIME
final val RuntimeParamAnnotationATTR: NameType = "RuntimeVisibleParameterAnnotations" // RetentionPolicy.RUNTIME (annotations on parameters)
@@ -272,26 +263,24 @@ trait StdNames {
}
abstract class TermNames extends Keywords with TermNamesApi {
+ override type NameType = TermName
+
protected implicit def createNameType(name: String): TermName = newTermNameCached(name)
/** Base strings from which synthetic names are derived. */
val BITMAP_PREFIX = "bitmap$"
val CHECK_IF_REFUTABLE_STRING = "check$ifrefutable$"
val DEFAULT_GETTER_STRING = "$default$"
- val DEFAULT_GETTER_INIT_STRING = "$lessinit$greater" // CONSTRUCTOR.encoded, less is more
+ val DEFAULT_GETTER_INIT_STRING = NameTransformer.encode("<init>") + DEFAULT_GETTER_STRING
val DO_WHILE_PREFIX = "doWhile$"
val EVIDENCE_PARAM_PREFIX = "evidence$"
val EXCEPTION_RESULT_PREFIX = "exceptionResult"
val EXPAND_SEPARATOR_STRING = "$$"
val INTERPRETER_IMPORT_WRAPPER = "$iw"
- val INTERPRETER_LINE_PREFIX = "line"
- val INTERPRETER_VAR_PREFIX = "res"
- val INTERPRETER_WRAPPER_SUFFIX = "$object"
val LOCALDUMMY_PREFIX = "<local " // owner of local blocks
val PROTECTED_PREFIX = "protected$"
val PROTECTED_SET_PREFIX = PROTECTED_PREFIX + "set"
val SUPER_PREFIX_STRING = "super$"
- val TRAIT_SETTER_SEPARATOR_STRING = "$_setter_$"
val WHILE_PREFIX = "while$"
// Compiler internal names
@@ -300,12 +289,12 @@ trait StdNames {
val DEFAULT_CASE: NameType = "defaultCase$"
val EQEQ_LOCAL_VAR: NameType = "eqEqTemp$"
val FAKE_LOCAL_THIS: NameType = "this$"
- val INITIALIZER: NameType = CONSTRUCTOR // Is this buying us something?
val LAZY_LOCAL: NameType = "$lzy"
val LAZY_SLOW_SUFFIX: NameType = "$lzycompute"
- val LOCAL_SUFFIX_STRING = " "
+ val MACRO_INVOKER_PACKAGE: NameType = "scala.reflect.macros.synthetic"
+ // TODO: if I use dollars in MACRO_INVOKER_SUFFIX, as in "$Invoker$", then Scala reflection fails to load implementations
+ val MACRO_INVOKER_SUFFIX: NameType = "Invoker"
val UNIVERSE_BUILD_PREFIX: NameType = "$u.build."
- val UNIVERSE_BUILD: NameType = "$u.build"
val UNIVERSE_PREFIX: NameType = "$u."
val UNIVERSE_SHORT: NameType = "$u"
val MIRROR_PREFIX: NameType = "$m."
@@ -318,21 +307,16 @@ trait StdNames {
val MIXIN_CONSTRUCTOR: NameType = "$init$"
val MODULE_INSTANCE_FIELD: NameType = NameTransformer.MODULE_INSTANCE_NAME // "MODULE$"
val OUTER: NameType = "$outer"
- val OUTER_LOCAL: NameType = OUTER + LOCAL_SUFFIX_STRING // "$outer ", note the space
+ val OUTER_LOCAL: NameType = OUTER.localName
val OUTER_SYNTH: NameType = "<outer>" // emitted by virtual pattern matcher, replaced by outer accessor in explicitouter
val ROOTPKG: NameType = "_root_"
val SELECTOR_DUMMY: NameType = "<unapply-selector>"
val SELF: NameType = "$this"
- val SETTER_SUFFIX: NameType = encode("_=")
+ val SETTER_SUFFIX: NameType = NameTransformer.SETTER_SUFFIX_STRING
val SPECIALIZED_INSTANCE: NameType = "specInstance$"
val STAR: NameType = "*"
val THIS: NameType = "_$this"
- @deprecated("Use SPECIALIZED_SUFFIX", "2.10.0")
- def SPECIALIZED_SUFFIX_STRING = SPECIALIZED_SUFFIX.toString
- @deprecated("Use SPECIALIZED_SUFFIX", "2.10.0")
- def SPECIALIZED_SUFFIX_NAME: TermName = SPECIALIZED_SUFFIX.toTermName
-
def isConstructorName(name: Name) = name == CONSTRUCTOR || name == MIXIN_CONSTRUCTOR
def isExceptionResultName(name: Name) = name startsWith EXCEPTION_RESULT_PREFIX
def isImplClassName(name: Name) = name endsWith IMPL_CLASS_SUFFIX
@@ -340,7 +324,6 @@ trait StdNames {
def isLocalName(name: Name) = name endsWith LOCAL_SUFFIX_STRING
def isLoopHeaderLabel(name: Name) = (name startsWith WHILE_PREFIX) || (name startsWith DO_WHILE_PREFIX)
def isProtectedAccessorName(name: Name) = name startsWith PROTECTED_PREFIX
- def isSuperAccessorName(name: Name) = name startsWith SUPER_PREFIX_STRING
def isReplWrapperName(name: Name) = name containsName INTERPRETER_IMPORT_WRAPPER
def isSetterName(name: Name) = name endsWith SETTER_SUFFIX
def isTraitSetterName(name: Name) = isSetterName(name) && (name containsName TRAIT_SETTER_SEPARATOR_STRING)
@@ -357,33 +340,32 @@ trait StdNames {
)
}
- def isDeprecatedIdentifierName(name: Name) = name.toTermName match {
- case nme.`then` | nme.`macro` => true
- case _ => false
- }
-
def isOpAssignmentName(name: Name) = name match {
case raw.NE | raw.LE | raw.GE | EMPTY => false
case _ =>
name.endChar == '=' && name.startChar != '=' && isOperatorPart(name.startChar)
}
- /** The expanded name of `name` relative to this class `base` with given `separator`
- */
- def expandedName(name: TermName, base: Symbol, separator: String = EXPAND_SEPARATOR_STRING): TermName =
+ private def expandedNameInternal(name: TermName, base: Symbol, separator: String): TermName =
newTermNameCached(base.fullName('$') + separator + name)
+ /** The expanded name of `name` relative to this class `base`
+ */
+ def expandedName(name: TermName, base: Symbol) = expandedNameInternal(name, base, EXPAND_SEPARATOR_STRING)
+
/** The expanded setter name of `name` relative to this class `base`
*/
- def expandedSetterName(name: TermName, base: Symbol): TermName =
- expandedName(name, base, separator = TRAIT_SETTER_SEPARATOR_STRING)
+ def expandedSetterName(name: TermName, base: Symbol) = expandedNameInternal(name, base, TRAIT_SETTER_SEPARATOR_STRING)
- /** If `name` is an expandedName name, the original name.
- * Otherwise `name` itself.
- */
- def originalName(name: Name): Name = name.toString lastIndexOf "$$" match {
- case -1 | 0 => name
- case idx0 =>
+ /** If `name` is an expandedName name, the original (unexpanded) name.
+ * Otherwise `name` itself.
+ * Look backward from the end of the string for "$$", and take the
+ * part of the string after that; but if the string is "$$$" or longer,
+ * be sure to retain the extra dollars.
+ */
+ def unexpandedName(name: Name): Name = name lastIndexOf "$$" match {
+ case 0 | -1 => name
+ case idx0 =>
// Sketchville - We've found $$ but if it's part of $$$ or $$$$
// or something we need to keep the bonus dollars, so e.g. foo$$$outer
// has an original name of $outer.
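The comment above spells out the unexpansion rule: find the last "$$", keep everything after it, but back up over any extra '$' characters so that e.g. foo$$$outer unexpands to $outer. A plain-String approximation of that rule (not the actual Name implementation):

    // Sketch of the unexpansion rule on ordinary Strings.
    def unexpanded(name: String): String = name.lastIndexOf("$$") match {
      case -1 | 0 => name
      case idx0   =>
        var idx = idx0
        while (idx > 0 && name.charAt(idx - 1) == '$') idx -= 1  // keep the "bonus" dollars
        name.substring(idx + 2)
    }

    // unexpanded("foo$$$outer") == "$outer"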
@@ -393,24 +375,26 @@ trait StdNames {
name drop idx + 2
}
+ @deprecated("Use SPECIALIZED_SUFFIX", "2.10.0")
+ def SPECIALIZED_SUFFIX_STRING = SPECIALIZED_SUFFIX.toString
+ @deprecated("Use SPECIALIZED_SUFFIX", "2.10.0")
+ def SPECIALIZED_SUFFIX_NAME: TermName = SPECIALIZED_SUFFIX.toTermName
+
+ @deprecated("Use unexpandedName", "2.11.0") def originalName(name: Name): Name = unexpandedName(name)
+ @deprecated("Use Name#dropModule", "2.11.0") def stripModuleSuffix(name: Name): Name = name.dropModule
+ @deprecated("Use Name#dropLocal", "2.11.0") def localToGetter(name: TermName): TermName = name.dropLocal
+ @deprecated("Use Name#dropLocal", "2.11.0") def dropLocalSuffix(name: Name): TermName = name.dropLocal
+ @deprecated("Use Name#localName", "2.11.0") def getterToLocal(name: TermName): TermName = name.localName
+ @deprecated("Use Name#setterName", "2.11.0") def getterToSetter(name: TermName): TermName = name.setterName
+ @deprecated("Use Name#getterName", "2.11.0") def getterName(name: TermName): TermName = name.getterName
+ @deprecated("Use Name#getterName", "2.11.0") def setterToGetter(name: TermName): TermName = name.getterName
+
def unspecializedName(name: Name): Name = (
if (name endsWith SPECIALIZED_SUFFIX)
- name.subName(0, name.lastIndexOf('m') - 1)
+ name.subName(0, name.lastIndexOf('m') - 1)
else name
)
- /*
- def anonNumberSuffix(name: Name): Name = {
- ("" + name) lastIndexOf '$' match {
- case -1 => nme.EMPTY
- case idx =>
- val s = name drop idx
- if (s.toString forall (_.isDigit)) s
- else nme.EMPTY
- }
- }
- */
-
/** Return the original name and the types on which this name
* is specialized. For example,
* {{{
@@ -432,51 +416,23 @@ trait StdNames {
} else
(name, "", "")
- def getterName(name: TermName): TermName = if (isLocalName(name)) localToGetter(name) else name
- def getterToLocal(name: TermName): TermName = name append LOCAL_SUFFIX_STRING
- def getterToSetter(name: TermName): TermName = name append SETTER_SUFFIX
- def localToGetter(name: TermName): TermName = name dropRight LOCAL_SUFFIX_STRING.length
-
- def dropLocalSuffix(name: Name): Name = if (name endsWith ' ') name dropRight 1 else name
-
- def setterToGetter(name: TermName): TermName = {
- val p = name.pos(TRAIT_SETTER_SEPARATOR_STRING)
- if (p < name.length)
- setterToGetter(name drop (p + TRAIT_SETTER_SEPARATOR_STRING.length))
- else
- name.subName(0, name.length - SETTER_SUFFIX.length)
- }
-
// Nominally, name$default$N, encoded for <init>
- def defaultGetterName(name: Name, pos: Int): TermName = {
- val prefix = if (isConstructorName(name)) DEFAULT_GETTER_INIT_STRING else name
- newTermName(prefix + DEFAULT_GETTER_STRING + pos)
- }
+ def defaultGetterName(name: Name, pos: Int): TermName = (
+ if (isConstructorName(name))
+ DEFAULT_GETTER_INIT_STRING + pos
+ else
+ name + DEFAULT_GETTER_STRING + pos
+ )
// Nominally, name from name$default$N, CONSTRUCTOR for <init>
- def defaultGetterToMethod(name: Name): TermName = {
- val p = name.pos(DEFAULT_GETTER_STRING)
- if (p < name.length) {
- val q = name.toTermName.subName(0, p)
- // i.e., if (q.decoded == CONSTRUCTOR.toString) CONSTRUCTOR else q
- if (q.toString == DEFAULT_GETTER_INIT_STRING) CONSTRUCTOR else q
- } else name.toTermName
- }
-
- // If the name ends with $nn where nn are
- // all digits, strip the $ and the digits.
- // Otherwise return the argument.
- def stripAnonNumberSuffix(name: Name): Name = {
- var pos = name.length
- while (pos > 0 && name.charAt(pos - 1).isDigit)
- pos -= 1
-
- if (pos <= 0 || pos == name.length || name.charAt(pos - 1) != '$') name
- else name.subName(0, pos - 1)
- }
-
- def stripModuleSuffix(name: Name): Name = (
- if (isModuleName(name)) name dropRight MODULE_SUFFIX_STRING.length else name
+ def defaultGetterToMethod(name: Name): TermName = (
+ if (name startsWith DEFAULT_GETTER_INIT_STRING)
+ nme.CONSTRUCTOR
+ else name indexOf DEFAULT_GETTER_STRING match {
+ case -1 => name.toTermName
+ case idx => name.toTermName take idx
+ }
)
+
def localDummyName(clazz: Symbol): TermName = newTermName(LOCALDUMMY_PREFIX + clazz.name + ">")
def superName(name: Name): TermName = newTermName(SUPER_PREFIX_STRING + name)
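defaultGetterName and defaultGetterToMethod above implement the name$default$N convention for default-argument getters, with the encoded <init> prefix standing in for constructors. A string-level sketch of the round trip, using hard-coded literals in place of DEFAULT_GETTER_STRING and the encoded constructor name:

    // Sketch: the name$default$N naming convention for default-argument getters.
    val DefaultGetter = "$default$"
    val EncodedInit   = "$lessinit$greater"      // NameTransformer.encode("<init>")

    def defaultGetter(method: String, pos: Int): String =
      (if (method == "<init>") EncodedInit else method) + DefaultGetter + pos

    def methodOfDefaultGetter(getter: String): String =
      if (getter.startsWith(EncodedInit + DefaultGetter)) "<init>"
      else getter.indexOf(DefaultGetter) match {
        case -1  => getter
        case idx => getter.substring(0, idx)
      }

    // defaultGetter("copy", 1) == "copy$default$1"; methodOfDefaultGetter("copy$default$1") == "copy"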
@@ -488,8 +444,6 @@ trait StdNames {
final val Nil: NameType = "Nil"
final val Predef: NameType = "Predef"
- final val ScalaRunTime: NameType = "ScalaRunTime"
- final val Some: NameType = "Some"
val _1 : NameType = "_1"
val _2 : NameType = "_2"
@@ -585,14 +539,10 @@ trait StdNames {
val Annotation: NameType = "Annotation"
val Any: NameType = "Any"
val AnyVal: NameType = "AnyVal"
- val AppliedTypeTree: NameType = "AppliedTypeTree"
- val Apply: NameType = "Apply"
val ArrayAnnotArg: NameType = "ArrayAnnotArg"
- val Constant: NameType = "Constant"
val ConstantType: NameType = "ConstantType"
val EmptyPackage: NameType = "EmptyPackage"
val EmptyPackageClass: NameType = "EmptyPackageClass"
- val ExistentialTypeTree: NameType = "ExistentialTypeTree"
val Flag : NameType = "Flag"
val Ident: NameType = "Ident"
val Import: NameType = "Import"
@@ -601,10 +551,8 @@ trait StdNames {
val Modifiers: NameType = "Modifiers"
val NestedAnnotArg: NameType = "NestedAnnotArg"
val NoFlags: NameType = "NoFlags"
- val NoPrefix: NameType = "NoPrefix"
val NoSymbol: NameType = "NoSymbol"
val Nothing: NameType = "Nothing"
- val NoType: NameType = "NoType"
val Null: NameType = "Null"
val Object: NameType = "Object"
val RootPackage: NameType = "RootPackage"
@@ -614,17 +562,14 @@ trait StdNames {
val StringContext: NameType = "StringContext"
val This: NameType = "This"
val ThisType: NameType = "ThisType"
- val Tree : NameType = "Tree"
val Tuple2: NameType = "Tuple2"
val TYPE_ : NameType = "TYPE"
- val TypeApply: NameType = "TypeApply"
val TypeRef: NameType = "TypeRef"
val TypeTree: NameType = "TypeTree"
val UNIT : NameType = "UNIT"
val add_ : NameType = "add"
val annotation: NameType = "annotation"
val anyValClass: NameType = "anyValClass"
- val append: NameType = "append"
val apply: NameType = "apply"
val applyDynamic: NameType = "applyDynamic"
val applyDynamicNamed: NameType = "applyDynamicNamed"
@@ -632,34 +577,26 @@ trait StdNames {
val args : NameType = "args"
val argv : NameType = "argv"
val arrayClass: NameType = "arrayClass"
- val arrayElementClass: NameType = "arrayElementClass"
- val arrayValue: NameType = "arrayValue"
val array_apply : NameType = "array_apply"
val array_clone : NameType = "array_clone"
val array_length : NameType = "array_length"
val array_update : NameType = "array_update"
- val arraycopy: NameType = "arraycopy"
- val asTerm: NameType = "asTerm"
val asModule: NameType = "asModule"
- val asMethod: NameType = "asMethod"
val asType: NameType = "asType"
- val asClass: NameType = "asClass"
val asInstanceOf_ : NameType = "asInstanceOf"
val asInstanceOf_Ob : NameType = "$asInstanceOf"
- val assert_ : NameType = "assert"
- val assume_ : NameType = "assume"
val box: NameType = "box"
val build : NameType = "build"
val bytes: NameType = "bytes"
+ val c: NameType = "c"
val canEqual_ : NameType = "canEqual"
val checkInitialized: NameType = "checkInitialized"
- val ClassManifestFactory: NameType = "ClassManifestFactory"
val classOf: NameType = "classOf"
- val clone_ : NameType = if (forMSIL) "MemberwiseClone" else "clone" // sn.OClone causes checkinit failure
+ val clone_ : NameType = "clone"
val conforms: NameType = "conforms"
val copy: NameType = "copy"
+ val create: NameType = "create"
val currentMirror: NameType = "currentMirror"
- val definitions: NameType = "definitions"
val delayedInit: NameType = "delayedInit"
val delayedInitArg: NameType = "delayedInit$body"
val drop: NameType = "drop"
@@ -670,30 +607,23 @@ trait StdNames {
val equalsNumChar : NameType = "equalsNumChar"
val equalsNumNum : NameType = "equalsNumNum"
val equalsNumObject : NameType = "equalsNumObject"
- val equals_ : NameType = if (forMSIL) "Equals" else "equals"
+ val equals_ : NameType = "equals"
val error: NameType = "error"
- val eval: NameType = "eval"
val ex: NameType = "ex"
val experimental: NameType = "experimental"
val f: NameType = "f"
val false_ : NameType = "false"
val filter: NameType = "filter"
- val finalize_ : NameType = if (forMSIL) "Finalize" else "finalize"
+ val finalize_ : NameType = "finalize"
val find_ : NameType = "find"
val flagsFromBits : NameType = "flagsFromBits"
val flatMap: NameType = "flatMap"
val foreach: NameType = "foreach"
- val genericArrayOps: NameType = "genericArrayOps"
val get: NameType = "get"
- val getOrElse: NameType = "getOrElse"
- val hasNext: NameType = "hasNext"
- val hashCode_ : NameType = if (forMSIL) "GetHashCode" else "hashCode"
+ val hashCode_ : NameType = "hashCode"
val hash_ : NameType = "hash"
- val head: NameType = "head"
- val identity: NameType = "identity"
val implicitly: NameType = "implicitly"
val in: NameType = "in"
- val info: NameType = "info"
val inlinedEquals: NameType = "inlinedEquals"
val isArray: NameType = "isArray"
val isDefinedAt: NameType = "isDefinedAt"
@@ -705,57 +635,41 @@ trait StdNames {
val lang: NameType = "lang"
val length: NameType = "length"
val lengthCompare: NameType = "lengthCompare"
- val liftedTree: NameType = "liftedTree"
- val `macro` : NameType = "macro"
- val macroThis : NameType = "_this"
val macroContext : NameType = "c"
val main: NameType = "main"
- val manifest: NameType = "manifest"
- val ManifestFactory: NameType = "ManifestFactory"
val manifestToTypeTag: NameType = "manifestToTypeTag"
val map: NameType = "map"
val materializeClassTag: NameType = "materializeClassTag"
val materializeWeakTypeTag: NameType = "materializeWeakTypeTag"
val materializeTypeTag: NameType = "materializeTypeTag"
- val mirror : NameType = "mirror"
val moduleClass : NameType = "moduleClass"
- val name: NameType = "name"
val ne: NameType = "ne"
val newArray: NameType = "newArray"
val newFreeTerm: NameType = "newFreeTerm"
val newFreeType: NameType = "newFreeType"
val newNestedSymbol: NameType = "newNestedSymbol"
val newScopeWith: NameType = "newScopeWith"
- val next: NameType = "next"
- val nmeNewTermName: NameType = "newTermName"
- val nmeNewTypeName: NameType = "newTypeName"
- val normalize: NameType = "normalize"
val notifyAll_ : NameType = "notifyAll"
val notify_ : NameType = "notify"
val null_ : NameType = "null"
- val ofDim: NameType = "ofDim"
- val origin: NameType = "origin"
+ val pendingSuperCall: NameType = "pendingSuperCall"
val prefix : NameType = "prefix"
val productArity: NameType = "productArity"
val productElement: NameType = "productElement"
val productIterator: NameType = "productIterator"
val productPrefix: NameType = "productPrefix"
val readResolve: NameType = "readResolve"
- val reflect : NameType = "reflect"
val reify : NameType = "reify"
val rootMirror : NameType = "rootMirror"
- val runOrElse: NameType = "runOrElse"
val runtime: NameType = "runtime"
val runtimeClass: NameType = "runtimeClass"
val runtimeMirror: NameType = "runtimeMirror"
- val sameElements: NameType = "sameElements"
val scala_ : NameType = "scala"
val selectDynamic: NameType = "selectDynamic"
val selectOverloadedMethod: NameType = "selectOverloadedMethod"
val selectTerm: NameType = "selectTerm"
val selectType: NameType = "selectType"
val self: NameType = "self"
- val setAccessible: NameType = "setAccessible"
val setAnnotations: NameType = "setAnnotations"
val setSymbol: NameType = "setSymbol"
val setType: NameType = "setType"
@@ -765,21 +679,18 @@ trait StdNames {
val staticModule : NameType = "staticModule"
val staticPackage : NameType = "staticPackage"
val synchronized_ : NameType = "synchronized"
- val tail: NameType = "tail"
- val `then` : NameType = "then"
+ val TermName: NameType = "TermName"
val this_ : NameType = "this"
val thisPrefix : NameType = "thisPrefix"
- val throw_ : NameType = "throw"
val toArray: NameType = "toArray"
- val toList: NameType = "toList"
val toObjectArray : NameType = "toObjectArray"
- val toSeq: NameType = "toSeq"
- val toString_ : NameType = if (forMSIL) "ToString" else "toString"
+ val toString_ : NameType = "toString"
val toTypeConstructor: NameType = "toTypeConstructor"
val tpe : NameType = "tpe"
val tree : NameType = "tree"
val true_ : NameType = "true"
val typedProductIterator: NameType = "typedProductIterator"
+ val TypeName: NameType = "TypeName"
val typeTagToManifest: NameType = "typeTagToManifest"
val unapply: NameType = "unapply"
val unapplySeq: NameType = "unapplySeq"
@@ -793,14 +704,10 @@ trait StdNames {
val view_ : NameType = "view"
val wait_ : NameType = "wait"
val withFilter: NameType = "withFilter"
- val wrap: NameType = "wrap"
- val zip: NameType = "zip"
-
- val synthSwitch: NameType = "$synthSwitch"
+ val zero: NameType = "zero"
// unencoded operators
object raw {
- final val AMP : NameType = "&"
final val BANG : NameType = "!"
final val BAR : NameType = "|"
final val DOLLAR: NameType = "$"
@@ -809,7 +716,6 @@ trait StdNames {
final val MINUS: NameType = "-"
final val NE: NameType = "!="
final val PLUS : NameType = "+"
- final val SLASH: NameType = "/"
final val STAR : NameType = "*"
final val TILDE: NameType = "~"
@@ -865,14 +771,7 @@ trait StdNames {
// Grouped here so Cleanup knows what tests to perform.
val CommonOpNames = Set[Name](OR, XOR, AND, EQ, NE)
- val ConversionNames = Set[Name](toByte, toChar, toDouble, toFloat, toInt, toLong, toShort)
val BooleanOpNames = Set[Name](ZOR, ZAND, UNARY_!) ++ CommonOpNames
- val NumberOpNames = (
- Set[Name](ADD, SUB, MUL, DIV, MOD, LSL, LSR, ASR, LT, LE, GE, GT)
- ++ Set(UNARY_+, UNARY_-, UNARY_!)
- ++ ConversionNames
- ++ CommonOpNames
- )
val add: NameType = "add"
val complement: NameType = "complement"
@@ -1004,7 +903,6 @@ trait StdNames {
object fulltpnme extends TypeNames {
val RuntimeNothing: NameType = "scala.runtime.Nothing$"
val RuntimeNull: NameType = "scala.runtime.Null$"
- val JavaLangEnum: NameType = "java.lang.Enum"
}
/** Java binary names, like scala/runtime/Nothing$.
@@ -1019,16 +917,11 @@ trait StdNames {
val javanme = nme.javaKeywords
object nme extends TermNames {
-
- def isModuleVarName(name: Name): Boolean =
- stripAnonNumberSuffix(name) endsWith MODULE_VAR_SUFFIX
-
def moduleVarName(name: TermName): TermName =
newTermNameCached("" + name + MODULE_VAR_SUFFIX)
def getCause = sn.GetCause
def getClass_ = sn.GetClass
- def getComponentType = sn.GetComponentType
def getMethod_ = sn.GetMethod
def invoke_ = sn.Invoke
@@ -1041,55 +934,12 @@ trait StdNames {
val reflMethodCacheName: NameType = "reflMethod$Cache"
val reflMethodName: NameType = "reflMethod$Method"
- private val reflectionCacheNames = Set[NameType](
- reflPolyCacheName,
- reflClassCacheName,
- reflParamsCacheName,
- reflMethodCacheName,
- reflMethodName
- )
- def isReflectionCacheName(name: Name) = reflectionCacheNames exists (name startsWith _)
-
@deprecated("Use a method in tpnme", "2.10.0") def dropSingletonName(name: Name): TypeName = tpnme.dropSingletonName(name)
@deprecated("Use a method in tpnme", "2.10.0") def singletonName(name: Name): TypeName = tpnme.singletonName(name)
@deprecated("Use a method in tpnme", "2.10.0") def implClassName(name: Name): TypeName = tpnme.implClassName(name)
@deprecated("Use a method in tpnme", "2.10.0") def interfaceName(implname: Name): TypeName = tpnme.interfaceName(implname)
}
- abstract class SymbolNames {
- protected val stringToTermName = null
- protected val stringToTypeName = null
- protected implicit def createNameType(s: String): TypeName = newTypeNameCached(s)
-
- val BeanProperty : TypeName
- val BooleanBeanProperty : TypeName
- val BoxedBoolean : TypeName
- val BoxedCharacter : TypeName
- val BoxedNumber : TypeName
- val Class : TypeName
- val Delegate : TypeName
- val IOOBException : TypeName // IndexOutOfBoundsException
- val InvTargetException : TypeName // InvocationTargetException
- val JavaSerializable : TypeName
- val MethodAsObject : TypeName
- val NPException : TypeName // NullPointerException
- val Object : TypeName
- val String : TypeName
- val Throwable : TypeName
- val ValueType : TypeName
-
- val ForName : TermName
- val GetCause : TermName
- val GetClass : TermName
- val GetClassLoader : TermName
- val GetComponentType : TermName
- val GetMethod : TermName
- val Invoke : TermName
- val JavaLang : TermName
-
- val Boxed: immutable.Map[TypeName, TypeName]
- }
-
class JavaKeywords {
private val kw = new KeywordSetBuilder
@@ -1147,7 +997,11 @@ trait StdNames {
final val keywords = kw.result
}
- private abstract class JavaNames extends SymbolNames {
+ sealed abstract class SymbolNames {
+ protected val stringToTermName = null
+ protected val stringToTypeName = null
+ protected implicit def createNameType(s: String): TypeName = newTypeNameCached(s)
+
final val BoxedBoolean: TypeName = "java.lang.Boolean"
final val BoxedByte: TypeName = "java.lang.Byte"
final val BoxedCharacter: TypeName = "java.lang.Character"
@@ -1157,22 +1011,16 @@ trait StdNames {
final val BoxedLong: TypeName = "java.lang.Long"
final val BoxedNumber: TypeName = "java.lang.Number"
final val BoxedShort: TypeName = "java.lang.Short"
- final val Class: TypeName = "java.lang.Class"
- final val Delegate: TypeName = tpnme.NO_NAME
final val IOOBException: TypeName = "java.lang.IndexOutOfBoundsException"
final val InvTargetException: TypeName = "java.lang.reflect.InvocationTargetException"
final val MethodAsObject: TypeName = "java.lang.reflect.Method"
final val NPException: TypeName = "java.lang.NullPointerException"
final val Object: TypeName = "java.lang.Object"
- final val String: TypeName = "java.lang.String"
final val Throwable: TypeName = "java.lang.Throwable"
- final val ValueType: TypeName = tpnme.NO_NAME
- final val ForName: TermName = newTermName("forName")
final val GetCause: TermName = newTermName("getCause")
final val GetClass: TermName = newTermName("getClass")
final val GetClassLoader: TermName = newTermName("getClassLoader")
- final val GetComponentType: TermName = newTermName("getComponentType")
final val GetMethod: TermName = newTermName("getMethod")
final val Invoke: TermName = newTermName("invoke")
final val JavaLang: TermName = newTermName("java.lang")
@@ -1189,52 +1037,5 @@ trait StdNames {
)
}
- private class MSILNames extends SymbolNames {
- final val BeanProperty: TypeName = tpnme.NO_NAME
- final val BooleanBeanProperty: TypeName = tpnme.NO_NAME
- final val BoxedBoolean: TypeName = "System.IConvertible"
- final val BoxedCharacter: TypeName = "System.IConvertible"
- final val BoxedNumber: TypeName = "System.IConvertible"
- final val Class: TypeName = "System.Type"
- final val Delegate: TypeName = "System.MulticastDelegate"
- final val IOOBException: TypeName = "System.IndexOutOfRangeException"
- final val InvTargetException: TypeName = "System.Reflection.TargetInvocationException"
- final val JavaSerializable: TypeName = tpnme.NO_NAME
- final val MethodAsObject: TypeName = "System.Reflection.MethodInfo"
- final val NPException: TypeName = "System.NullReferenceException"
- final val Object: TypeName = "System.Object"
- final val String: TypeName = "System.String"
- final val Throwable: TypeName = "System.Exception"
- final val ValueType: TypeName = "System.ValueType"
-
- final val ForName: TermName = newTermName("GetType")
- final val GetCause: TermName = newTermName("InnerException") /* System.Reflection.TargetInvocationException.InnerException */
- final val GetClass: TermName = newTermName("GetType")
- final lazy val GetClassLoader: TermName = throw new UnsupportedOperationException("Scala reflection is not supported on this platform");
- final val GetComponentType: TermName = newTermName("GetElementType")
- final val GetMethod: TermName = newTermName("GetMethod")
- final val Invoke: TermName = newTermName("Invoke")
- final val JavaLang: TermName = newTermName("System")
-
- val Boxed = immutable.Map[TypeName, TypeName](
- tpnme.Boolean -> "System.Boolean",
- tpnme.Byte -> "System.SByte", // a scala.Byte is signed and a System.SByte too (unlike a System.Byte)
- tpnme.Char -> "System.Char",
- tpnme.Short -> "System.Int16",
- tpnme.Int -> "System.Int32",
- tpnme.Long -> "System.Int64",
- tpnme.Float -> "System.Single",
- tpnme.Double -> "System.Double"
- )
- }
-
- private class J2SENames extends JavaNames {
- final val BeanProperty: TypeName = "scala.beans.BeanProperty"
- final val BooleanBeanProperty: TypeName = "scala.beans.BooleanBeanProperty"
- final val JavaSerializable: TypeName = "java.io.Serializable"
- }
-
- lazy val sn: SymbolNames =
- if (forMSIL) new MSILNames
- else new J2SENames
+ lazy val sn: SymbolNames = new SymbolNames { }
}
diff --git a/src/reflect/scala/reflect/internal/SymbolTable.scala b/src/reflect/scala/reflect/internal/SymbolTable.scala
index 5ccf81b4b5..7f9811220f 100644
--- a/src/reflect/scala/reflect/internal/SymbolTable.scala
+++ b/src/reflect/scala/reflect/internal/SymbolTable.scala
@@ -3,7 +3,8 @@
* @author Martin Odersky
*/
-package scala.reflect
+package scala
+package reflect
package internal
import scala.annotation.elidable
@@ -15,6 +16,7 @@ abstract class SymbolTable extends macros.Universe
with Names
with Symbols
with Types
+ with Variances
with Kinds
with ExistentialsAndSkolems
with FlagSets
@@ -38,6 +40,7 @@ abstract class SymbolTable extends macros.Universe
with StdAttachments
with StdCreators
with BuildUtils
+ with PrivateWithin
{
val gen = new TreeGen { val global: SymbolTable.this.type = SymbolTable.this }
@@ -45,28 +48,33 @@ abstract class SymbolTable extends macros.Universe
def log(msg: => AnyRef): Unit
def warning(msg: String): Unit = Console.err.println(msg)
+ def inform(msg: String): Unit = Console.err.println(msg)
def globalError(msg: String): Unit = abort(msg)
def abort(msg: String): Nothing = throw new FatalError(supplementErrorMessage(msg))
def shouldLogAtThisPhase = false
+ def isPastTyper = false
@deprecated("Give us a reason", "2.10.0")
def abort(): Nothing = abort("unknown error")
+ @deprecated("Use devWarning if this is really a warning; otherwise use log", "2.11.0")
+ def debugwarn(msg: => String): Unit = devWarning(msg)
+
/** Override with final implementation for inlining. */
- def debuglog(msg: => String): Unit = if (settings.debug.value) log(msg)
- def debugwarn(msg: => String): Unit = if (settings.debug.value) Console.err.println(msg)
+ def debuglog(msg: => String): Unit = if (settings.debug) log(msg)
+ def devWarning(msg: => String): Unit = if (settings.debug) Console.err.println(msg)
def throwableAsString(t: Throwable): String = "" + t
/** Prints a stack trace if -Ydebug or equivalent was given, otherwise does nothing. */
- def debugStack(t: Throwable): Unit = debugwarn(throwableAsString(t))
+ def debugStack(t: Throwable): Unit = devWarning(throwableAsString(t))
/** Overridden when we know more about what was happening during a failure. */
def supplementErrorMessage(msg: String): String = msg
private[scala] def printCaller[T](msg: String)(result: T) = {
Console.err.println("%s: %s\nCalled from: %s".format(msg, result,
- (new Throwable).getStackTrace.drop(2).take(15).mkString("\n")))
+ (new Throwable).getStackTrace.drop(2).take(50).mkString("\n")))
result
}
@@ -81,6 +89,16 @@ abstract class SymbolTable extends macros.Universe
result
}
@inline
+ final private[scala] def debuglogResult[T](msg: => String)(result: T): T = {
+ debuglog(msg + ": " + result)
+ result
+ }
+ @inline
+ final private[scala] def devWarningResult[T](msg: => String)(result: T): T = {
+ devWarning(msg + ": " + result)
+ result
+ }
+ @inline
final private[scala] def logResultIf[T](msg: => String, cond: T => Boolean)(result: T): T = {
if (cond(result))
log(msg + ": " + result)
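The debuglogResult and devWarningResult helpers added above share the logResult shape: emit a message built from the value, then hand the value back unchanged, so logging can be threaded through expression-oriented code. A standalone sketch of that shape:

    // Sketch: log an expression's value and return it unchanged.
    def logging[T](msg: => String)(result: T): T = {
      Console.err.println(msg + ": " + result)
      result
    }

    // Usage: prints "computed: 42" and binds answer to 42.
    val answer = logging("computed")(21 * 2)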
@@ -108,17 +126,13 @@ abstract class SymbolTable extends macros.Universe
val global: SymbolTable.this.type = SymbolTable.this
} with util.TraceSymbolActivity
+ val treeInfo: TreeInfo { val global: SymbolTable.this.type }
+
/** Check that the executing thread is the compiler thread. No-op here,
* overridden in interactive.Global. */
@elidable(elidable.WARNING)
def assertCorrectThread() {}
- /** Are we compiling for Java SE? */
- // def forJVM: Boolean
-
- /** Are we compiling for .NET? */
- def forMSIL: Boolean = false
-
/** A last effort if symbol in a select <owner>.<name> is not found.
* This is overridden by the reflection compiler to make up a package
* when it makes sense (i.e. <owner> is a package and <name> is a term name).
@@ -139,7 +153,7 @@ abstract class SymbolTable extends macros.Universe
type RunId = Int
final val NoRunId = 0
- // sigh, this has to be public or atPhase doesn't inline.
+ // sigh, this has to be public or enteringPhase doesn't inline.
var phStack: List[Phase] = Nil
private[this] var ph: Phase = NoPhase
private[this] var per = NoPeriod
@@ -182,9 +196,6 @@ abstract class SymbolTable extends macros.Universe
/** The phase identifier of the given period. */
final def phaseId(period: Period): Phase#Id = period & 0xFF
- /** The period at the start of run that includes `period`. */
- final def startRun(period: Period): Period = period & 0xFFFFFF00
-
/** The current period. */
final def currentPeriod: Period = {
//assert(per == (currentRunId << 8) + phase.id)
@@ -202,23 +213,17 @@ abstract class SymbolTable extends macros.Universe
p != NoPhase && phase.id > p.id
/** Perform given operation at given phase. */
- @inline final def atPhase[T](ph: Phase)(op: => T): T = {
+ @inline final def enteringPhase[T](ph: Phase)(op: => T): T = {
val saved = pushPhase(ph)
try op
finally popPhase(saved)
}
+ @inline final def exitingPhase[T](ph: Phase)(op: => T): T = enteringPhase(ph.next)(op)
+ @inline final def enteringPrevPhase[T](op: => T): T = enteringPhase(phase.prev)(op)
- /** Since when it is to be "at" a phase is inherently ambiguous,
- * a couple unambiguously named methods.
- */
- @inline final def beforePhase[T](ph: Phase)(op: => T): T = atPhase(ph)(op)
- @inline final def afterPhase[T](ph: Phase)(op: => T): T = atPhase(ph.next)(op)
- @inline final def afterCurrentPhase[T](op: => T): T = atPhase(phase.next)(op)
- @inline final def beforePrevPhase[T](op: => T): T = atPhase(phase.prev)(op)
-
- @inline final def atPhaseNotLaterThan[T](target: Phase)(op: => T): T =
- if (isAtPhaseAfter(target)) atPhase(target)(op) else op
+ @inline final def enteringPhaseNotLaterThan[T](target: Phase)(op: => T): T =
+ if (isAtPhaseAfter(target)) enteringPhase(target)(op) else op
final def isValid(period: Period): Boolean =
period != 0 && runId(period) == currentRunId && {
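enteringPhase above (with exitingPhase and enteringPrevPhase defined in terms of it) is a save/mutate/restore bracket: push the requested phase, run the thunk, and restore the previous phase in a finally block even if the thunk throws. A generic sketch of that bracket over a single mutable variable, rather than the compiler's phase stack:

    // Sketch: temporarily override a mutable setting for the duration of `op`.
    object ModeBracket {
      var currentMode: String = "normal"

      def enteringMode[T](mode: String)(op: => T): T = {
        val saved = currentMode
        currentMode = mode
        try op
        finally currentMode = saved    // restored even if op throws
      }
    }

    // ModeBracket.enteringMode("verbose") { s"running in ${ModeBracket.currentMode}" }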
@@ -231,7 +236,7 @@ abstract class SymbolTable extends macros.Universe
def noChangeInBaseClasses(it: InfoTransformer, limit: Phase#Id): Boolean = (
it.pid >= limit ||
!it.changesBaseClasses && noChangeInBaseClasses(it.next, limit)
- );
+ )
period != 0 && runId(period) == currentRunId && {
val pid = phaseId(period)
if (phase.id > pid) noChangeInBaseClasses(infoTransformers.nextFrom(pid), phase.id)
@@ -303,7 +308,6 @@ abstract class SymbolTable extends macros.Universe
object perRunCaches {
import java.lang.ref.WeakReference
- import scala.runtime.ScalaRunTime.stringOf
import scala.collection.generic.Clearable
// Weak references so the garbage collector will take care of
@@ -346,26 +350,15 @@ abstract class SymbolTable extends macros.Universe
*/
def isCompilerUniverse = false
+ @deprecated("Use enteringPhase", "2.10.0")
+ @inline final def atPhase[T](ph: Phase)(op: => T): T = enteringPhase(ph)(op)
+ @deprecated("Use enteringPhaseNotLaterThan", "2.10.0")
+ @inline final def atPhaseNotLaterThan[T](target: Phase)(op: => T): T = enteringPhaseNotLaterThan(target)(op)
+
/**
* Adds the `sm` String interpolator to a [[scala.StringContext]].
*/
implicit val StringContextStripMarginOps: StringContext => StringContextStripMarginOps = util.StringContextStripMarginOps
-
- def importPrivateWithinFromJavaFlags(sym: Symbol, jflags: Int): Symbol = {
- import ClassfileConstants._
- if ((jflags & (JAVA_ACC_PRIVATE | JAVA_ACC_PROTECTED | JAVA_ACC_PUBLIC)) == 0)
- // See ticket #1687 for an example of when topLevelClass is NoSymbol: it
- // apparently occurs when processing v45.3 bytecode.
- if (sym.enclosingTopLevelClass != NoSymbol)
- sym.privateWithin = sym.enclosingTopLevelClass.owner
-
- // protected in java means package protected. #3946
- if ((jflags & JAVA_ACC_PROTECTED) != 0)
- if (sym.enclosingTopLevelClass != NoSymbol)
- sym.privateWithin = sym.enclosingTopLevelClass.owner
-
- sym
- }
}
object SymbolTableStats {
diff --git a/src/reflect/scala/reflect/internal/Symbols.scala b/src/reflect/scala/reflect/internal/Symbols.scala
index 45c16b7302..f3cea1fd00 100644
--- a/src/reflect/scala/reflect/internal/Symbols.scala
+++ b/src/reflect/scala/reflect/internal/Symbols.scala
@@ -3,15 +3,17 @@
* @author Martin Odersky
*/
-package scala.reflect
+package scala
+package reflect
package internal
import scala.collection.{ mutable, immutable }
import scala.collection.mutable.ListBuffer
-import util.Statistics
+import util.{ Statistics, shortClassOfInstance }
import Flags._
import scala.annotation.tailrec
-import scala.reflect.io.AbstractFile
+import scala.reflect.io.{ AbstractFile, NoAbstractFile }
+import Variance._
trait Symbols extends api.Symbols { self: SymbolTable =>
import definitions._
@@ -19,8 +21,6 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
protected var ids = 0
- val emptySymbolArray = new Array[Symbol](0)
-
protected def nextId() = { ids += 1; ids }
/** Used for deciding in the IDE whether we can interrupt the compiler */
@@ -70,11 +70,30 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
*/
val originalOwner = perRunCaches.newMap[Symbol, Symbol]()
+ // TODO - don't allow the owner to be changed without checking invariants, at least
+ // when under some flag. Define per-phase invariants for owner/owned relationships,
+ // e.g. after flatten all classes are owned by package classes, there are lots and
+ // lots of these to be declared (or more realistically, discovered.)
+ protected def saveOriginalOwner(sym: Symbol) {
+ if (originalOwner contains sym) ()
+ else originalOwner(sym) = sym.rawowner
+ }
+ protected def originalEnclosingMethod(sym: Symbol): Symbol = {
+ if (sym.isMethod || sym == NoSymbol) sym
+ else {
+ val owner = originalOwner.getOrElse(sym, sym.rawowner)
+ if (sym.isLocalDummy) owner.enclClass.primaryConstructor
+ else originalEnclosingMethod(owner)
+ }
+ }
+
abstract class SymbolContextApiImpl extends SymbolContextApi {
this: Symbol =>
def isExistential: Boolean = this.isExistentiallyBound
def isParamWithDefault: Boolean = this.hasDefault
+ // `isByNameParam` is only true for a call-by-name parameter of a *method*;
+ // an argument of the primary constructor seen in the class body is excluded by `isValueParameter`.
def isByNameParam: Boolean = this.isValueParameter && (this hasFlag BYNAMEPARAM)
def isImplementationArtifact: Boolean = (this hasFlag BRIDGE) || (this hasFlag VBRIDGE) || (this hasFlag ARTIFACT)
def isJava: Boolean = isJavaDefined
@@ -171,13 +190,10 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
def debugFlagString: String = flagString(AllFlags)
/** String representation of symbol's variance */
- def varianceString: String =
- if (variance == 1) "+"
- else if (variance == -1) "-"
- else ""
+ def varianceString: String = variance.symbolicString
override def flagMask =
- if (settings.debug.value && !isAbstractType) AllFlags
+ if (settings.debug && !isAbstractType) AllFlags
else if (owner.isRefinementClass) ExplicitFlags & ~OVERRIDE
else ExplicitFlags
@@ -186,10 +202,10 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
if (isGADTSkolem) " (this is a GADT skolem)"
else ""
- def shortSymbolClass = getClass.getName.split('.').last.stripPrefix("Symbols$")
+ def shortSymbolClass = shortClassOfInstance(this)
def symbolCreationString: String = (
"%s%25s | %-40s | %s".format(
- if (settings.uniqid.value) "%06d | ".format(id) else "",
+ if (settings.uniqid) "%06d | ".format(id) else "",
shortSymbolClass,
name.decode + " in " + owner,
rawFlagString
@@ -221,7 +237,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
/** Static constructor with info set. */
def newStaticConstructor(pos: Position): MethodSymbol =
- newConstructor(pos, STATIC) setInfo UnitClass.tpe
+ newConstructor(pos, STATIC) setInfo UnitTpe
/** Instance constructor with info set. */
def newClassConstructor(pos: Position): MethodSymbol =
@@ -249,20 +265,29 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
final def newImport(pos: Position): TermSymbol =
newTermSymbol(nme.IMPORT, pos)
+ def newModuleVarSymbol(accessor: Symbol): TermSymbol = {
+ val newName = nme.moduleVarName(accessor.name.toTermName)
+ val newFlags = MODULEVAR | ( if (this.isClass) PrivateLocal | SYNTHETIC else 0 )
+ val newInfo = accessor.tpe.finalResultType
+ val mval = newVariable(newName, accessor.pos.focus, newFlags.toLong) addAnnotation VolatileAttr
+
+ if (this.isClass)
+ mval setInfoAndEnter newInfo
+ else
+ mval setInfo newInfo
+ }
+
final def newModuleSymbol(name: TermName, pos: Position = NoPosition, newFlags: Long = 0L): ModuleSymbol =
newTermSymbol(name, pos, newFlags).asInstanceOf[ModuleSymbol]
final def newModuleAndClassSymbol(name: Name, pos: Position, flags0: FlagSet): (ModuleSymbol, ClassSymbol) = {
val flags = flags0 | MODULE
- val m = newModuleSymbol(name, pos, flags)
+ val m = newModuleSymbol(name.toTermName, pos, flags)
val c = newModuleClass(name.toTypeName, pos, flags & ModuleToClassFlags)
connectModuleToClass(m, c)
(m, c)
}
- final def newPackageSymbol(name: TermName, pos: Position = NoPosition, newFlags: Long = 0L): ModuleSymbol =
- newTermSymbol(name, pos, newFlags).asInstanceOf[ModuleSymbol]
-
final def newModuleClassSymbol(name: TypeName, pos: Position = NoPosition, newFlags: Long = 0L): ModuleClassSymbol =
newClassSymbol(name, pos, newFlags).asInstanceOf[ModuleClassSymbol]
@@ -325,11 +350,6 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
() => { cnt += 1; nme.syntheticParamName(cnt) }
}
- /** Synthetic value parameters when parameter symbols are not available
- */
- final def newSyntheticValueParamss(argtypess: List[List[Type]]): List[List[TermSymbol]] =
- argtypess map (xs => newSyntheticValueParams(xs, freshNamer))
-
/** Synthetic value parameters when parameter symbols are not available.
* Calling this method multiple times will re-use the same parameter names.
*/
@@ -345,7 +365,6 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
final def newSyntheticValueParam(argtype: Type, name: TermName = nme.syntheticParamName(1)): TermSymbol =
newValueParameter(name, owner.pos.focus, SYNTHETIC) setInfo argtype
- def newSyntheticTypeParam(): TypeSymbol = newSyntheticTypeParam("T0", 0L)
def newSyntheticTypeParam(name: String, newFlags: Long): TypeSymbol = newTypeParameter(newTypeName(name), NoPosition, newFlags) setInfo TypeBounds.empty
def newSyntheticTypeParams(num: Int): List[TypeSymbol] = (0 until num).toList map (n => newSyntheticTypeParam("T" + n, 0L))
@@ -409,14 +428,6 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
final def newRefinementClass(pos: Position): RefinementClassSymbol =
createRefinementClassSymbol(pos, 0L)
- /** Create a new getter for current symbol (which must be a field)
- */
- final def newGetter: MethodSymbol = (
- owner.newMethod(nme.getterName(name.toTermName), NoPosition, getterFlags(flags))
- setPrivateWithin privateWithin
- setInfo MethodType(Nil, tpe)
- )
-
final def newErrorSymbol(name: Name): Symbol = name match {
case x: TypeName => newErrorClass(x)
case x: TermName => newErrorValue(x)
@@ -532,14 +543,12 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
*/
def isContravariant = false
def isCovariant = false
- def isExistentialQuantified = false
def isExistentialSkolem = false
def isExistentiallyBound = false
def isGADTSkolem = false
def isTypeParameter = false
def isTypeParameterOrSkolem = false
def isTypeSkolem = false
- def isTypeMacro = false
def isInvariant = !isCovariant && !isContravariant
/** Qualities of Terms, always false for TypeSymbols.
@@ -589,13 +598,24 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
*/
def isEffectiveRoot = false
+ /** Can this symbol only be subclassed by bottom classes? This is assessed
+ * to be the case if it is final, and any type parameters are invariant.
+ */
+ def hasOnlyBottomSubclasses = {
+ def loop(tparams: List[Symbol]): Boolean = tparams match {
+ case Nil => true
+ case x :: xs => x.variance.isInvariant && loop(xs)
+ }
+ isClass && isFinal && loop(typeParams)
+ }
+
final def isLazyAccessor = isLazy && lazyAccessor != NoSymbol
final def isOverridableMember = !(isClass || isEffectivelyFinal) && (this ne NoSymbol) && owner.isClass
/** Does this symbol denote a wrapper created by the repl? */
final def isInterpreterWrapper = (
(this hasFlag MODULE)
- && owner.isPackageClass
+ && isTopLevel
&& nme.isReplWrapperName(name)
)
final def getFlag(mask: Long): Long = {
@@ -607,6 +627,8 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
if (!isCompilerUniverse && needsInitialize(isFlagRelated = true, mask = mask)) initialize
(flags & mask) != 0
}
+ def hasFlag(mask: Int): Boolean = hasFlag(mask.toLong)
+
/** Does symbol have ALL the flags in `mask` set? */
final def hasAllFlags(mask: Long): Boolean = {
if (!isCompilerUniverse && needsInitialize(isFlagRelated = true, mask = mask)) initialize
@@ -642,11 +664,32 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
final def hasGetter = isTerm && nme.isLocalName(name)
+ /** A little explanation for this confusing situation.
+ * Nested modules which have no static owner when ModuleDefs
+ * are eliminated (refchecks) are given the lateMETHOD flag,
+ * which makes them appear as methods after refchecks.
+ * Here's an example where one can see all four of FF FT TF TT
+ * for (isStatic, isMethod) at various phases.
+ *
+ * trait A1 { case class Quux() }
+ * object A2 extends A1 { object Flax }
+ * // -- namer object Quux in trait A1
+ * // -M flatten object Quux in trait A1
+ * // S- flatten object Flax in object A2
+ * // -M posterasure object Quux in trait A1
+ * // -M jvm object Quux in trait A1
+ * // SM jvm object Quux in object A2
+ *
+ * So "isModuleNotMethod" exists not for its achievement in
+ * brevity, but to encapsulate the relevant condition.
+ */
+ def isModuleNotMethod = isModule && !isMethod
+ def isStaticModule = isModuleNotMethod && isStatic
+
final def isInitializedToDefault = !isType && hasAllFlags(DEFAULTINIT | ACCESSOR)
- final def isStaticModule = isModule && isStatic && !isMethod
final def isThisSym = isTerm && owner.thisSym == this
final def isError = hasFlag(IS_ERROR)
- final def isErroneous = isError || isInitialized && tpe.isErroneous
+ final def isErroneous = isError || isInitialized && tpe_*.isErroneous
def isHigherOrderTypeParameter = owner.isTypeParameterOrSkolem
@@ -658,7 +701,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
info.firstParent.typeSymbol == AnyValClass && !isPrimitiveValueClass
final def isMethodWithExtension =
- isMethod && owner.isDerivedValueClass && !isParamAccessor && !isConstructor && !hasFlag(SUPERACCESSOR) && !isTermMacro
+ isMethod && owner.isDerivedValueClass && !isParamAccessor && !isConstructor && !hasFlag(SUPERACCESSOR) && !isMacro
final def isAnonymousFunction = isSynthetic && (name containsName tpnme.ANON_FUN_NAME)
final def isDefinedInPackage = effectiveOwner.isPackageClass
@@ -713,10 +756,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
}
def isStrictFP = hasAnnotation(ScalaStrictFPAttr) || (enclClass hasAnnotation ScalaStrictFPAttr)
- def isSerializable = (
- info.baseClasses.exists(p => p == SerializableClass || p == JavaSerializableClass)
- || hasAnnotation(SerializableAttr) // last part can be removed, @serializable annotation is deprecated
- )
+ def isSerializable = info.baseClasses.exists(p => p == SerializableClass || p == JavaSerializableClass)
def hasBridgeAnnotation = hasAnnotation(BridgeClass)
def isDeprecated = hasAnnotation(DeprecatedAttr)
def deprecationMessage = getAnnotation(DeprecatedAttr) flatMap (_ stringArg 0)
@@ -726,14 +766,10 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
= hasAnnotation(DeprecatedInheritanceAttr)
def deprecatedInheritanceMessage
= getAnnotation(DeprecatedInheritanceAttr) flatMap (_ stringArg 0)
- def deprecatedInheritanceVersion
- = getAnnotation(DeprecatedInheritanceAttr) flatMap (_ stringArg 1)
def hasDeprecatedOverridingAnnotation
= hasAnnotation(DeprecatedOverridingAttr)
def deprecatedOverridingMessage
= getAnnotation(DeprecatedOverridingAttr) flatMap (_ stringArg 0)
- def deprecatedOverridingVersion
- = getAnnotation(DeprecatedOverridingAttr) flatMap (_ stringArg 1)
// !!! when annotation arguments are not literal strings, but any sort of
// assembly of strings, there is a fair chance they will turn up here not as
@@ -751,19 +787,17 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
def compileTimeOnlyMessage = getAnnotation(CompileTimeOnlyAttr) flatMap (_ stringArg 0)
/** Is this symbol an accessor method for outer? */
- final def isOuterAccessor = {
- hasFlag(STABLE | ARTIFACT) &&
- originalName == nme.OUTER
- }
+ final def isOuterAccessor = hasFlag(STABLE | ARTIFACT) && (unexpandedName == nme.OUTER)
/** Is this symbol the outer field? */
- final def isOuterField = {
- hasFlag(ARTIFACT) &&
- originalName == nme.OUTER_LOCAL
- }
+ final def isOuterField = isArtifact && (unexpandedName == nme.OUTER_LOCAL)
- /** Does this symbol denote a stable value? */
- def isStable = false
+ /** Does this symbol denote a stable value, ignoring volatility?
+ *
+ * Stability and volatility are checked separately to allow volatile paths in patterns that amount to equality checks (SI-6815).
+ */
+ final def isStable = isTerm && !isMutable && !(hasFlag(BYNAMEPARAM)) && (!isMethod || hasStableFlag)
+ final def hasVolatileType = tpe.isVolatile && !hasAnnotation(uncheckedStableClass)
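As a language-level sketch of what isStable tracks (Host is a hypothetical object, not part of this change): only stable paths may appear in singleton types.

    object Host { val a = "x"; var b = "y"; def c = "z" }
    val p: Host.a.type = Host.a      // ok: a is a val - neither mutable, by-name, nor a plain method
    // val q: Host.b.type = Host.b   // rejected: b is a var, hence not stable
    // val r: Host.c.type = Host.c   // rejected: c is a def without the stable flag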
/** Does this symbol denote the primary constructor of its enclosing class? */
final def isPrimaryConstructor =
@@ -813,19 +847,28 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
final def isStaticOwner: Boolean =
isPackageClass || isModuleClass && isStatic
- def isTopLevelModule = hasFlag(MODULE) && owner.isPackageClass
+ /** A helper function for isEffectivelyFinal. */
+ private def isNotOverridden = (
+ owner.isClass && (
+ owner.isEffectivelyFinal
+ || owner.isSealed && owner.children.forall(c => c.isEffectivelyFinal && (overridingSymbol(c) == NoSymbol))
+ )
+ )
/** Is this symbol effectively final? I.e, it cannot be overridden */
final def isEffectivelyFinal: Boolean = (
(this hasFlag FINAL | PACKAGE)
- || isModuleOrModuleClass && (owner.isPackageClass || !settings.overrideObjects.value)
+ || isModuleOrModuleClass && (isTopLevel || !settings.overrideObjects)
|| isTerm && (
isPrivate
|| isLocal
- || owner.isClass && owner.isEffectivelyFinal
- )
+ || isNotOverridden
+ )
)
+ /** Is this symbol owned by a package? */
+ final def isTopLevel = owner.isPackageClass
+
/** Is this symbol locally defined? I.e. not accessed from outside `this` instance */
final def isLocal: Boolean = owner.isTerm
@@ -843,41 +886,36 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
*/
def isLocalClass = false
- def isStableClass = false
-
-/* code for fixing nested objects
- override final def isModuleClass: Boolean =
- super.isModuleClass && !isExpandedModuleClass
-*/
/** Is this class or type defined as a structural refinement type?
*/
final def isStructuralRefinement: Boolean =
- (isClass || isType || isModule) && info.normalize/*.underlying*/.isStructuralRefinement
+ (isClass || isType || isModule) && info.dealiasWiden/*.underlying*/.isStructuralRefinement
/** Is this a term symbol only defined in a refinement (so that it needs
* to be accessed by reflection)?
*/
- def isOnlyRefinementMember: Boolean =
- isTerm && // type members are not affected
- owner.isRefinementClass && // owner must be a refinement class
- (owner.info decl name) == this && // symbol must be explicitly declared in the refinement (not synthesized from glb)
- allOverriddenSymbols.isEmpty && // symbol must not override a symbol in a base class
- !isConstant // symbol must not be a constant. Question: Can we exclude @inline methods as well?
+ def isOnlyRefinementMember = (
+ isTerm // Type members are unaffected
+ && owner.isRefinementClass // owner must be a refinement class
+ && isPossibleInRefinement // any overridden symbols must also have refinement class owners
+ && !isConstant // Must not be a constant. Question: Can we exclude @inline methods as well?
+ && isDeclaredByOwner // Must be explicitly declared in the refinement (not synthesized from glb)
+ )
+ // "(owner.info decl name) == this" is inadequate, because "name" might
+ // be overloaded in owner - and this might be an overloaded symbol.
+ // TODO - make this cheaper and see where else we should be doing something similar.
+ private def isDeclaredByOwner = (owner.info decl name).alternatives exists (alternatives contains _)
final def isStructuralRefinementMember = owner.isStructuralRefinement && isPossibleInRefinement && isPublic
- final def isPossibleInRefinement = !isConstructor && !isOverridingSymbol
-
- /** Is this symbol a member of class `clazz`? */
- def isMemberOf(clazz: Symbol) =
- clazz.info.member(name).alternatives contains this
+ final def isPossibleInRefinement = (
+ !isConstructor
+ && allOverriddenSymbols.forall(_.owner.isRefinementClass) // this includes allOverriddenSymbols.isEmpty
+ )
/** A member of class `base` is incomplete if
* (1) it is declared deferred or
* (2) it is abstract override and its super symbol in `base` is
* nonexistent or incomplete.
- *
- * @param base ...
- * @return ...
*/
final def isIncompleteIn(base: Symbol): Boolean =
this.isDeferred ||
@@ -886,9 +924,23 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
supersym == NoSymbol || supersym.isIncompleteIn(base)
}
- // Does not always work if the rawInfo is a SourcefileLoader, see comment
- // in "def coreClassesFirst" in Global.
- def exists = !owner.isPackageClass || { rawInfo.load(this); rawInfo != NoType }
+ def exists: Boolean = !isTopLevel || {
+ val isSourceLoader = rawInfo match {
+ case sl: SymLoader => sl.fromSource
+ case _ => false
+ }
+ def warnIfSourceLoader() {
+ if (isSourceLoader)
+ // Predef is completed early due to its autoimport; we used to get here when type checking its
+ // parent LowPriorityImplicits. See comment in c5441dc for more elaboration.
+ // Since the fix for SI-7335 Predef parents must be defined in Predef.scala, and we should not
+ // get here anymore.
+ devWarning(s"calling Symbol#exists with sourcefile based symbol loader may give incorrect results.");
+ }
+
+ rawInfo load this
+ rawInfo != NoType || { warnIfSourceLoader(); false }
+ }
final def isInitialized: Boolean =
validTo != NoPeriod
@@ -914,14 +966,14 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
if (isAliasType) return true
if (isType && isNonClassType) return false
if (isRefinementClass) return false
- return true
+ true
}
- /** The variance of this symbol as an integer */
- final def variance: Int =
- if (isCovariant) 1
- else if (isContravariant) -1
- else 0
+ /** The variance of this symbol. */
+ def variance: Variance =
+ if (isCovariant) Covariant
+ else if (isContravariant) Contravariant
+ else Invariant
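Call sites migrate from the old integer encoding to the Variance predicates, roughly (a sketch of schematic call sites):

    // before: if (sym.variance == 1) ...             (covariant as a magic number)
    // after:  if (sym.variance.isCovariant) ...
    // before: tparams forall (_.variance == 0)
    // after:  tparams forall (_.variance.isInvariant)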
/** The sequence number of this parameter symbol among all type
* and value parameters of symbol's owner. -1 if symbol does not
@@ -950,18 +1002,15 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
rawowner
}
+ // Like owner, but NoSymbol.owner == NoSymbol instead of throwing an exception.
+ final def safeOwner: Symbol = if (this eq NoSymbol) NoSymbol else owner
+
// TODO - don't allow the owner to be changed without checking invariants, at least
// when under some flag. Define per-phase invariants for owner/owned relationships,
// e.g. after flatten all classes are owned by package classes, there are lots and
// lots of these to be declared (or more realistically, discovered.)
def owner_=(owner: Symbol) {
- // don't keep the original owner in presentation compiler runs
- // (the map will grow indefinitely, and the only use case is the
- // backend).
- if (!forInteractive) {
- if (originalOwner contains this) ()
- else originalOwner(this) = rawowner
- }
+ saveOriginalOwner(this)
assert(isCompilerUniverse, "owner_= is not thread-safe; cannot be run in reflexive code")
if (traceSymbolActivity)
traceSymbols.recordNewSymbolOwner(this, owner)
@@ -991,10 +1040,12 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
// ------ name attribute --------------------------------------------------------------
- /** If this symbol has an expanded name, its original name, otherwise its name itself.
- * @see expandName
+ @deprecated("Use unexpandedName", "2.11.0") def originalName: Name = unexpandedName
+
+ /** If this symbol has an expanded name, its original (unexpanded) name,
+ * otherwise the name itself.
*/
- def originalName: Name = nme.originalName(nme.dropLocalSuffix(name))
+ def unexpandedName: Name = nme.unexpandedName(name)
/** The name of the symbol before decoding, e.g. `\$eq\$eq` instead of `==`.
*/
@@ -1002,7 +1053,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
/** The decoded name of the symbol, e.g. `==` instead of `\$eq\$eq`.
*/
- def decodedName: String = nme.dropLocalSuffix(name).decode
+ def decodedName: String = name.decode
private def addModuleSuffix(n: Name): Name =
if (needsModuleSuffix) n append nme.MODULE_SUFFIX_STRING else n
@@ -1021,7 +1072,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
)
/** These should be moved somewhere like JavaPlatform.
*/
- def javaSimpleName: Name = addModuleSuffix(nme.dropLocalSuffix(simpleName))
+ def javaSimpleName: Name = addModuleSuffix(simpleName.dropLocal)
def javaBinaryName: Name = addModuleSuffix(fullNameInternal('/'))
def javaClassName: String = addModuleSuffix(fullNameInternal('.')).toString
@@ -1042,7 +1093,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
else ((effectiveOwner.enclClass.fullNameAsName(separator) append separator): Name) append name
)
- def fullNameAsName(separator: Char): Name = nme.dropLocalSuffix(fullNameInternal(separator))
+ def fullNameAsName(separator: Char): Name = fullNameInternal(separator).dropLocal
/** The encoded full path name of this symbol, where outer names and inner names
* are separated by periods.
@@ -1080,9 +1131,6 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
protected def createImplClassSymbol(name: TypeName, pos: Position, newFlags: Long): ClassSymbol =
new ClassSymbol(this, pos, name) with ImplClassSymbol initFlags newFlags
- protected def createTermSymbol(name: TermName, pos: Position, newFlags: Long): TermSymbol =
- new TermSymbol(this, pos, name) initFlags newFlags
-
protected def createMethodSymbol(name: TermName, pos: Position, newFlags: Long): MethodSymbol =
new MethodSymbol(this, pos, name) initFlags newFlags
@@ -1099,12 +1147,13 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
new TermSymbol(this, pos, name) initFlags newFlags
final def newTermSymbol(name: TermName, pos: Position = NoPosition, newFlags: Long = 0L): TermSymbol = {
- if ((newFlags & METHOD) != 0)
- createMethodSymbol(name, pos, newFlags)
- else if ((newFlags & PACKAGE) != 0)
+ // Package before Module, Module before Method, or we might grab the wrong guy.
+ if ((newFlags & PACKAGE) != 0)
createPackageSymbol(name, pos, newFlags | PackageFlags)
else if ((newFlags & MODULE) != 0)
createModuleSymbol(name, pos, newFlags)
+ else if ((newFlags & METHOD) != 0)
+ createMethodSymbol(name, pos, newFlags)
else if ((newFlags & PARAM) != 0)
createValueParameterSymbol(name, pos, newFlags)
else
@@ -1185,20 +1234,61 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
}
}
- /** Get type. The type of a symbol is:
- * for a type symbol, the type corresponding to the symbol itself,
- * @M you should use tpeHK for a type symbol with type parameters if
- * the kind of the type need not be *, as tpe introduces dummy arguments
- * to generate a type of kind *
- * for a term symbol, its usual type.
- * See the tpe/tpeHK overrides in TypeSymbol for more.
+ /** The "type" of this symbol. The type of a term symbol is its usual
+ * type. A TypeSymbol is more complicated; see that class for elaboration.
+ * Since tpe forwards to tpe_*, if you call it on a type symbol with unapplied
+ * type parameters, the type returned will contain dummy types. These will
+ * hide legitimate errors or create spurious ones if used as normal types.
*
* For type symbols, `tpe` is different than `info`. `tpe` returns a typeRef
* to the type symbol, `info` returns the type information of the type symbol,
* e.g. a ClassInfoType for classes or a TypeBounds for abstract types.
*/
- def tpe: Type = info
- def tpeHK: Type = tpe
+ final def tpe: Type = tpe_*
+
+ /** typeConstructor throws an exception when called on term
+ * symbols; this is a more forgiving alternative. Calls
+ * typeConstructor on TypeSymbols, returns info otherwise.
+ */
+ def tpeHK: Type = info
+
+ /** Only applicable to TypeSymbols, it is the type corresponding
+ * to the symbol itself. For instance, the type of a List might
+ * be List[Int] - the same symbol's typeConstructor is simply List.
+ * One might be tempted to write that as List[_], and in some
+ * contexts this is possible, but it is discouraged because it is
+ * syntactically indistinguishable from and easily confused with the
+ * type List[T] forSome { type T; }, which can also be written List[_].
+ */
+ def typeConstructor: Type = (
+ // Avoiding a third override in NoSymbol to preserve bimorphism
+ if (this eq NoSymbol)
+ abort("no-symbol does not have a type constructor (this may indicate scalac cannot find fundamental classes)")
+ else
+ abort("typeConstructor inapplicable for " + this)
+ )
+
+ /** The type of this symbol, guaranteed to be of kind *.
+ * If there are unapplied type parameters, they will be
+ * substituted with dummy type arguments derived from the
+ * type parameters. Such types are not valid in a general
+ * sense and will cause difficult-to-find bugs if allowed
+ * to roam free.
+ *
+ * If you call tpe_* explicitly to obtain these types,
+ * you are responsible for them as if they were your own
+ * minor children.
+ */
+ def tpe_* : Type = info
+
+ // Alternate implementation of def tpe for warning about misuse,
+ // disabled to keep the method maximally hotspot-friendly:
+ // def tpe: Type = {
+ // val result = tpe_*
+ // if (settings.debug.value && result.typeArgs.nonEmpty)
+ // printCaller(s"""Call to ${this.tpe} created $result: call tpe_* or tpeHK""")("")
+ // result
+ // }
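Rough picture for a parameterized class symbol, using ListClass and StringClass only for illustration (a sketch, not part of this change):

    ListClass.typeConstructor    // List      - no args, unapplied type params
    ListClass.tpeHK              // List      - forwards to typeConstructor for type symbols
    ListClass.tpe_*              // List[A]   - dummy argument built from the type parameter
    StringClass.tpe_*            // String    - monomorphic, so tpe_* and typeConstructor coincide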
/** Get type info associated with symbol at current phase, after
* ensuring that symbol is initialized (i.e. type is completed).
@@ -1234,13 +1324,13 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
cnt += 1
// allow for two completions:
// one: sourceCompleter to LazyType, two: LazyType to completed type
- if (cnt == 3) abort("no progress in completing " + this + ":" + tp)
+ if (cnt == 3) abort(s"no progress in completing $this: $tp")
}
rawInfo
}
catch {
case ex: CyclicReference =>
- debugwarn("... hit cycle trying to complete " + this.fullLocationString)
+ devWarning("... hit cycle trying to complete " + this.fullLocationString)
throw ex
}
@@ -1252,9 +1342,9 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
}
/** Set initial info. */
- def setInfo(info: Type): this.type = { info_=(info); this }
+ def setInfo(info: Type): this.type = { info_=(info); this }
/** Modifies this symbol's info in place. */
- def modifyInfo(f: Type => Type): this.type = setInfo(f(info))
+ def modifyInfo(f: Type => Type): this.type = setInfo(f(info))
/** Substitute second list of symbols for first in current info. */
def substInfo(syms0: List[Symbol], syms1: List[Symbol]): this.type =
if (syms0.isEmpty) this
@@ -1365,6 +1455,10 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
if (!isInitialized) info
this
}
+ def maybeInitialize = {
+ try { initialize ; true }
+ catch { case _: CyclicReference => debuglog(s"Hit cycle in maybeInitialize of $this") ; false }
+ }
/** Called when the programmer requests information that might require initialization of the underlying symbol.
*
@@ -1407,14 +1501,6 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
!isInitialized && (flags & LOCKED) == 0 && shouldTriggerCompleter(this, if (infos ne null) infos.info else null, isFlagRelated, mask)
/** Was symbol's type updated during given phase? */
- final def isUpdatedAt(pid: Phase#Id): Boolean = {
- assert(isCompilerUniverse)
- var infos = this.infos
- while ((infos ne null) && phaseId(infos.validFrom) != pid + 1) infos = infos.prev
- infos ne null
- }
-
- /** Was symbol's type updated during given phase? */
final def hasTypeAt(pid: Phase#Id): Boolean = {
assert(isCompilerUniverse)
var infos = this.infos
@@ -1427,21 +1513,20 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
* This is done in checkAccessible and overriding checks in refchecks
* We can't do this on class loading because it would result in infinite cycles.
*/
- final def cookJavaRawInfo() {
- if (hasFlag(TRIEDCOOKING)) return else setFlag(TRIEDCOOKING) // only try once...
- val oldInfo = info
- doCookJavaRawInfo()
- }
+ def cookJavaRawInfo(): this.type = {
+ // only try once...
+ if (phase.erasedTypes || (this hasFlag TRIEDCOOKING))
+ return this
- protected def doCookJavaRawInfo(): Unit
+ this setFlag TRIEDCOOKING
+ info // force the current info
+ if (isJavaDefined || isType && owner.isJavaDefined)
+ this modifyInfo rawToExistential
+ else if (isOverloaded)
+ alternatives withFilter (_.isJavaDefined) foreach (_ modifyInfo rawToExistential)
- /** The type constructor of a symbol is:
- * For a type symbol, the type corresponding to the symbol itself,
- * excluding parameters.
- * Not applicable for term symbols.
- */
- def typeConstructor: Type =
- abort("typeConstructor inapplicable for " + this)
+ this
+ }
/** The logic approximately boils down to finding the most recent phase
* which immediately follows any of parser, namer, typer, or erasure.
@@ -1465,7 +1550,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
*/
def unsafeTypeParams: List[Symbol] =
if (isMonomorphicType) Nil
- else atPhase(unsafeTypeParamPhase)(rawInfo.typeParams)
+ else enteringPhase(unsafeTypeParamPhase)(rawInfo.typeParams)
/** The type parameters of this symbol.
* assumption: if a type starts out as monomorphic, it will not acquire
@@ -1477,9 +1562,9 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
// analogously to the "info" getter, here we allow for two completions:
// one: sourceCompleter to LazyType, two: LazyType to completed type
if (validTo == NoPeriod)
- atPhase(phaseOf(infos.validFrom))(rawInfo load this)
+ enteringPhase(phaseOf(infos.validFrom))(rawInfo load this)
if (validTo == NoPeriod)
- atPhase(phaseOf(infos.validFrom))(rawInfo load this)
+ enteringPhase(phaseOf(infos.validFrom))(rawInfo load this)
rawInfo.typeParams
}
@@ -1546,7 +1631,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
def makeSerializable() {
info match {
case ci @ ClassInfoType(_, _, _) =>
- setInfo(ci.copy(parents = ci.parents :+ SerializableClass.tpe))
+ setInfo(ci.copy(parents = ci.parents :+ SerializableTpe))
case i =>
abort("Only ClassInfoTypes can be made serializable: "+ i)
}
@@ -1636,6 +1721,19 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
* and is this class symbol also different from Null or Nothing? */
def isNonBottomSubClass(that: Symbol): Boolean = false
+ /** Is this class symbol Null or Nothing,
+ * and (if Null) is `that` inhabited by null?
+ * If this is Nothing, of course, it is a
+ * subclass of `that` by definition.
+ *
+ * TODO - what is implied by the fact that AnyVal now has
+ * infinitely many non-bottom subclasses, not only 9?
+ */
+ def isBottomSubClass(that: Symbol) = (
+ (this eq NothingClass)
+ || (this eq NullClass) && that.isClass && (that ne NothingClass) && !(that isNonBottomSubClass AnyValClass)
+ )
+
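Reading the definition above, with well-known class symbols for illustration (a sketch):

    NothingClass isBottomSubClass StringClass   // true: Nothing is a subclass of everything
    NullClass    isBottomSubClass StringClass   // true: String is a reference class inhabited by null
    NullClass    isBottomSubClass IntClass      // false: Int is a non-bottom subclass of AnyVal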
/** Overridden in NullClass and NothingClass for custom behavior.
*/
def isSubClass(that: Symbol) = isNonBottomSubClass(that)
@@ -1654,12 +1752,23 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
def filter(cond: Symbol => Boolean): Symbol =
if (isOverloaded) {
- val alts = alternatives
- val alts1 = alts filter cond
- if (alts1 eq alts) this
+ var changed = false
+ var alts0: List[Symbol] = alternatives
+ var alts1: List[Symbol] = Nil
+
+ while (alts0.nonEmpty) {
+ if (cond(alts0.head))
+ alts1 ::= alts0.head
+ else
+ changed = true
+
+ alts0 = alts0.tail
+ }
+
+ if (!changed) this
else if (alts1.isEmpty) NoSymbol
else if (alts1.tail.isEmpty) alts1.head
- else owner.newOverloaded(info.prefix, alts1)
+ else owner.newOverloaded(info.prefix, alts1.reverse)
}
else if (cond(this)) this
else NoSymbol
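A note on the rebuild above (a sketch of the trace, not part of the change): the while-loop prepends surviving alternatives with ::=, so they accumulate in reverse order.

    // alternatives == List(a, b, c), cond drops b   =>   alts1 == List(c, a)
    // owner.newOverloaded(info.prefix, alts1.reverse) restores declaration order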
@@ -1740,10 +1849,10 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
def thisSym: Symbol = this
/** The type of `this` in a class, or else the type of the symbol itself. */
- def typeOfThis = thisSym.tpe
+ def typeOfThis = thisSym.tpe_*
- /** If symbol is a class, the type <code>this.type</code> in this class,
- * otherwise <code>NoPrefix</code>.
+ /** If symbol is a class, the type `this.type` in this class,
+ * otherwise `NoPrefix`.
* We always have: thisType <:< typeOfThis
*/
def thisType: Type = NoPrefix
@@ -1764,7 +1873,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
//
// The slightly more principled approach of using the paramss of the
// primary constructor leads to cycles in, for example, pos/t5084.scala.
- val primaryNames = constrParamAccessors.map(acc => nme.dropLocalSuffix(acc.name))
+ val primaryNames = constrParamAccessors map (_.name.dropLocal)
caseFieldAccessorsUnsorted.sortBy { acc =>
primaryNames indexWhere { orig =>
(acc.name == orig) || (acc.name startsWith (orig append "$"))
@@ -1783,7 +1892,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
/** The symbol accessed by this accessor function, but with given owner type. */
final def accessed(ownerTp: Type): Symbol = {
assert(hasAccessorFlag, this)
- ownerTp decl nme.getterToLocal(getterName.toTermName)
+ ownerTp decl localName
}
/** The module corresponding to this module class (note that this
@@ -1877,15 +1986,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
* originalOwner map is not populated for memory considerations (the symbol
* may hang on to lazy types and in turn to whole (outdated) compilation units.
*/
- def originalEnclosingMethod: Symbol = {
- assert(!forInteractive, "originalOwner is not kept in presentation compiler runs.")
- if (isMethod) this
- else {
- val owner = originalOwner.getOrElse(this, rawowner)
- if (isLocalDummy) owner.enclClass.primaryConstructor
- else owner.originalEnclosingMethod
- }
- }
+ def originalEnclosingMethod: Symbol = Symbols.this.originalEnclosingMethod(this)
/** The method or class which logically encloses the current symbol.
* If the symbol is defined in the initialization part of a template
@@ -1922,7 +2023,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
/** The top-level class containing this symbol. */
def enclosingTopLevelClass: Symbol =
- if (owner.isPackageClass) {
+ if (isTopLevel) {
if (isClass) this else moduleClass
} else owner.enclosingTopLevelClass
@@ -1931,11 +2032,11 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
(this.rawInfo ne NoType)
&& (this.effectiveOwner == that.effectiveOwner)
&& ( !this.effectiveOwner.isPackageClass
- || (this.sourceFile eq null)
- || (that.sourceFile eq null)
- || (this.sourceFile.path == that.sourceFile.path) // Cheap possibly wrong check, then expensive normalization
- || (this.sourceFile.canonicalPath == that.sourceFile.canonicalPath)
- )
+ || (this.associatedFile eq NoAbstractFile)
+ || (that.associatedFile eq NoAbstractFile)
+ || (this.associatedFile.path == that.associatedFile.path) // Cheap possibly wrong check, then expensive normalization
+ || (this.associatedFile.canonicalPath == that.associatedFile.canonicalPath)
+ )
)
/** The internal representation of classes and objects:
@@ -2031,70 +2132,111 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
* @param ofclazz The class containing the symbol's definition
* @param site The base type from which member types are computed
*/
- final def matchingSymbol(ofclazz: Symbol, site: Type): Symbol = {
- //OPT cut down on #closures by special casing non-overloaded case
- // was: ofclazz.info.nonPrivateDecl(name) filter (sym =>
- // !sym.isTerm || (site.memberType(this) matches site.memberType(sym)))
- val result = ofclazz.info.nonPrivateDecl(name)
- def qualifies(sym: Symbol) = !sym.isTerm || (site.memberType(this) matches site.memberType(sym))
- if ((result eq NoSymbol) || !result.isOverloaded && qualifies(result)) result
- else result filter qualifies
- }
+ final def matchingSymbol(ofclazz: Symbol, site: Type): Symbol =
+ matchingSymbolInternal(site, ofclazz.info nonPrivateDecl name)
/** The non-private member of `site` whose type and name match the type of this symbol. */
final def matchingSymbol(site: Type, admit: Long = 0L): Symbol =
- site.nonPrivateMemberAdmitting(name, admit).filter(sym =>
- !sym.isTerm || (site.memberType(this) matches site.memberType(sym)))
+ matchingSymbolInternal(site, site.nonPrivateMemberAdmitting(name, admit))
- /** The symbol, in class `ofclazz`, that is overridden by this symbol.
+ private def matchingSymbolInternal(site: Type, candidate: Symbol): Symbol = {
+ def qualifies(sym: Symbol) = !sym.isTerm || ((site memberType this) matches (site memberType sym))
+ //OPT cut down on #closures by special casing non-overloaded case
+ if (candidate.isOverloaded) candidate filter qualifies
+ else if (qualifies(candidate)) candidate
+ else NoSymbol
+ }
+
+ /** The symbol, in class `baseClass`, that is overridden by this symbol.
*
- * @param ofclazz is a base class of this symbol's owner.
+ * @param baseClass is a base class of this symbol's owner.
*/
- final def overriddenSymbol(ofclazz: Symbol): Symbol =
- if (isClassConstructor) NoSymbol else matchingSymbol(ofclazz, owner.thisType)
+ final def overriddenSymbol(baseClass: Symbol): Symbol = (
+ // concrete always overrides abstract, so don't let an abstract definition
+ // claim to be overriding an inherited concrete one.
+ matchingInheritedSymbolIn(baseClass) filter (res => res.isDeferred || !this.isDeferred)
+ )
+
+ private def matchingInheritedSymbolIn(baseClass: Symbol): Symbol =
+ if (canMatchInheritedSymbols) matchingSymbol(baseClass, owner.thisType) else NoSymbol
/** The symbol overriding this symbol in given subclass `ofclazz`.
*
* @param ofclazz is a subclass of this symbol's owner
*/
- final def overridingSymbol(ofclazz: Symbol): Symbol =
- if (isClassConstructor) NoSymbol else matchingSymbol(ofclazz, ofclazz.thisType)
+ final def overridingSymbol(ofclazz: Symbol): Symbol = (
+ if (canMatchInheritedSymbols)
+ matchingSymbol(ofclazz, ofclazz.thisType)
+ else
+ NoSymbol
+ )
+
+ /** If false, this symbol cannot possibly participate in an override,
+ * either as overrider or overridee. For internal use; you should consult
+ * with isOverridingSymbol. This is used by isOverridingSymbol to escape
+ * the recursive knot.
+ */
+ private def canMatchInheritedSymbols = (
+ (this ne NoSymbol)
+ && owner.isClass
+ && !this.isClass
+ && !this.isConstructor
+ )
- /** Returns all symbols overriden by this symbol. */
- final def allOverriddenSymbols: List[Symbol] =
- if (!owner.isClass) Nil
- else owner.ancestors map overriddenSymbol filter (_ != NoSymbol)
+ // All the symbols overridden by this symbol and this symbol at the head,
+ // or Nil if this is NoSymbol.
+ def overrideChain = (
+ if (this eq NoSymbol) Nil
+ else if (isOverridingSymbol) this :: allOverriddenSymbols
+ else this :: Nil
+ )
+
+ /** Returns all symbols overridden by this symbol. */
+ final def allOverriddenSymbols: List[Symbol] = {
+ def loop(xs: List[Symbol]): List[Symbol] = xs match {
+ case Nil => Nil
+ case x :: xs =>
+ overriddenSymbol(x) match {
+ case NoSymbol => loop(xs)
+ case sym => sym :: loop(xs)
+ }
+ }
+ if (isOverridingSymbol) loop(owner.ancestors) else Nil
+ }
/** Equivalent to allOverriddenSymbols.nonEmpty, but more efficient. */
- // !!! When if ever will this answer differ from .isOverride?
- // How/where is the OVERRIDE flag managed, as compared to how checks
- // based on type membership will evaluate?
- def isOverridingSymbol = owner.isClass && (
- owner.ancestors exists (cls => matchingSymbol(cls, owner.thisType) != NoSymbol)
+ lazy val isOverridingSymbol = (
+ canMatchInheritedSymbols
+ && owner.ancestors.exists(base => overriddenSymbol(base) != NoSymbol)
)
+
/** Equivalent to allOverriddenSymbols.head (or NoSymbol if no overrides) but more efficient. */
def nextOverriddenSymbol: Symbol = {
- if (owner.isClass) owner.ancestors foreach { base =>
- val sym = overriddenSymbol(base)
- if (sym != NoSymbol)
- return sym
+ @tailrec def loop(bases: List[Symbol]): Symbol = bases match {
+ case Nil => NoSymbol
+ case base :: rest =>
+ val sym = overriddenSymbol(base)
+ if (sym == NoSymbol) loop(rest) else sym
}
- NoSymbol
+ if (isOverridingSymbol) loop(owner.ancestors) else NoSymbol
}
/** Returns all symbols overridden by this symbol, plus all matching symbols
* defined in parents of the selftype.
*/
- final def extendedOverriddenSymbols: List[Symbol] =
- if (!owner.isClass) Nil
- else owner.thisSym.ancestors map overriddenSymbol filter (_ != NoSymbol)
+ final def extendedOverriddenSymbols: List[Symbol] = (
+ if (canMatchInheritedSymbols)
+ owner.thisSym.ancestors map overriddenSymbol filter (_ != NoSymbol)
+ else
+ Nil
+ )
/** The symbol accessed by a super in the definition of this symbol when
* seen from class `base`. This symbol is always concrete.
* pre: `this.owner` is in the base class sequence of `base`.
*/
final def superSymbol(base: Symbol): Symbol = {
- var bcs = base.info.baseClasses.dropWhile(owner != _).tail
+ var bcs = base.info.baseClasses dropWhile (owner != _) drop 1
var sym: Symbol = NoSymbol
while (!bcs.isEmpty && sym == NoSymbol) {
if (!bcs.head.isImplClass)
@@ -2107,22 +2249,23 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
/** The getter of this value or setter definition in class `base`, or NoSymbol if
* none exists.
*/
- final def getter(base: Symbol): Symbol = base.info.decl(getterName) filter (_.hasAccessorFlag)
+ final def getter(base: Symbol): Symbol =
+ base.info decl getterName filter (_.hasAccessorFlag)
- def getterName: TermName = (
- if (isSetter) nme.setterToGetter(name.toTermName)
- else if (nme.isLocalName(name)) nme.localToGetter(name.toTermName)
- else name.toTermName
- )
+ def getterName: TermName = name.getterName
+ def setterName: TermName = name.setterName
+ def localName: TermName = name.localName
/** The setter of this value or getter definition, or NoSymbol if none exists */
- final def setter(base: Symbol): Symbol = setter(base, false)
+ final def setter(base: Symbol, hasExpandedName: Boolean = needsExpandedSetterName): Symbol =
+ base.info decl setterNameInBase(base, hasExpandedName) filter (_.hasAccessorFlag)
- final def setter(base: Symbol, hasExpandedName: Boolean): Symbol = {
- var sname = nme.getterToSetter(nme.getterName(name.toTermName))
- if (hasExpandedName) sname = nme.expandedSetterName(sname, base)
- base.info.decl(sname) filter (_.hasAccessorFlag)
- }
+ def needsExpandedSetterName = (
+ if (isMethod) hasStableFlag && !isLazy
+ else hasNoFlags(LAZY | MUTABLE)
+ )
+ def setterNameInBase(base: Symbol, expanded: Boolean): TermName =
+ if (expanded) nme.expandedSetterName(setterName, base) else setterName
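The name conventions these helpers rely on, sketched for a hypothetical field x:

    getterName   // x
    setterName   // x_=
    localName    // the getter name with the local suffix (a trailing space) appended, naming the underlying field

needsExpandedSetterName then decides whether the setter must be looked up in base under its expanded, owner-qualified name.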
/** If this is a derived value class, return its unbox method
* or NoSymbol if it does not exist.
@@ -2179,29 +2322,17 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
case p :: _ => p
case _ => NoSymbol
}
-/* code for fixing nested objects
- def expandModuleClassName() {
- name = newTypeName(name.toString + "$")
- }
- def isExpandedModuleClass: Boolean = name(name.length - 1) == '$'
-*/
+ // Desire to re-use the field in ClassSymbol which stores the source
+ // file to also store the classfile, but without changing the behavior
+ // of sourceFile (which is expected at least in the IDE only to
+ // return actual source code.) So sourceFile has classfiles filtered out.
+ final def sourceFile: AbstractFile =
+ if ((associatedFile eq NoAbstractFile) || (associatedFile.path endsWith ".class")) null else associatedFile
- /** Desire to re-use the field in ClassSymbol which stores the source
- * file to also store the classfile, but without changing the behavior
- * of sourceFile (which is expected at least in the IDE only to
- * return actual source code.) So sourceFile has classfiles filtered out.
+ /** Overridden in ModuleSymbols to delegate to the module class.
+ * Never null; if there is no associated file, returns NoAbstractFile.
*/
- private def sourceFileOnly(file: AbstractFile): AbstractFile =
- if ((file eq null) || (file.path endsWith ".class")) null else file
-
- private def binaryFileOnly(file: AbstractFile): AbstractFile =
- if ((file eq null) || !(file.path endsWith ".class")) null else file
-
- final def binaryFile: AbstractFile = binaryFileOnly(associatedFile)
- final def sourceFile: AbstractFile = sourceFileOnly(associatedFile)
-
- /** Overridden in ModuleSymbols to delegate to the module class. */
def associatedFile: AbstractFile = enclosingTopLevelClass.associatedFile
def associatedFile_=(f: AbstractFile) { abort("associatedFile_= inapplicable for " + this) }
@@ -2222,9 +2353,6 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
// ------ toString -------------------------------------------------------------------
- /** A tag which (in the ideal case) uniquely identifies class symbols */
- final def tag: Int = fullName.##
-
/** The simple name of this Symbol */
final def simpleName: Name = name
@@ -2251,7 +2379,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
private case class SymbolKind(accurate: String, sanitized: String, abbreviation: String)
private def symbolKind: SymbolKind = {
var kind =
- if (isTermMacro) ("macro method", "macro method", "MAC")
+ if (isTermMacro) ("term macro", "macro method", "MACM")
else if (isInstanceOf[FreeTermSymbol]) ("free term", "free term", "FTE")
else if (isInstanceOf[FreeTypeSymbol]) ("free type", "free type", "FTY")
else if (isPackage) ("package", "package", "PK")
@@ -2314,12 +2442,13 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
* If settings.uniqid, adds id.
* If settings.Yshowsymkinds, adds abbreviated symbol kind.
*/
- def nameString: String = (
- if (!settings.uniqid.value && !settings.Yshowsymkinds.value) "" + originalName.decode
- else if (settings.uniqid.value && !settings.Yshowsymkinds.value) originalName.decode + "#" + id
- else if (!settings.uniqid.value && settings.Yshowsymkinds.value) originalName.decode + "#" + abbreviatedKindString
- else originalName.decode + "#" + id + "#" + abbreviatedKindString
- )
+ def nameString: String = {
+ val name_s = if (settings.debug.value) "" + unexpandedName else unexpandedName.dropLocal.decode
+ val id_s = if (settings.uniqid.value) "#" + id else ""
+ val kind_s = if (settings.Yshowsymkinds.value) "#" + abbreviatedKindString else ""
+
+ name_s + id_s + kind_s
+ }
def fullNameString: String = {
def recur(sym: Symbol): String = {
@@ -2455,6 +2584,14 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
override def isMethod = this hasFlag METHOD
override def isModule = this hasFlag MODULE
override def isOverloaded = this hasFlag OVERLOADED
+ /*** !!! TODO: shouldn't we do something like the following:
+ override def isOverloaded = (
+ if (this.isInitialized)
+ this hasFlag OVERLOADED
+ else
+ (infos ne null) && infos.info.isInstanceOf[OverloadedType]
+ )
+ ***/
override def isPackage = this hasFlag PACKAGE
override def isValueParameter = this hasFlag PARAM
@@ -2467,13 +2604,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
override def isMixinConstructor = name == nme.MIXIN_CONSTRUCTOR
override def isConstructor = nme.isConstructorName(name)
- override def isPackageObject = isModule && (name == nme.PACKAGE)
- override def isStable = !isUnstable
- private def isUnstable = (
- isMutable
- || (hasFlag(METHOD | BYNAMEPARAM) && !hasFlag(STABLE))
- || (tpe.isVolatile && !hasAnnotation(uncheckedStableClass))
- )
+ override def isPackageObject = isModule && (name == nme.PACKAGE)
// The name in comments is what it is being disambiguated from.
// TODO - rescue CAPTURED from BYNAMEPARAM so we can see all the names.
@@ -2553,36 +2684,6 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
name = nme.expandedName(name.toTermName, base)
}
}
-
- protected def doCookJavaRawInfo() {
- def cook(sym: Symbol) {
- require(sym.isJavaDefined, sym)
- // @M: I think this is more desirable, but Martin prefers to leave raw-types as-is as much as possible
- // object rawToExistentialInJava extends TypeMap {
- // def apply(tp: Type): Type = tp match {
- // // any symbol that occurs in a java sig, not just java symbols
- // // see http://lampsvn.epfl.ch/trac/scala/ticket/2454#comment:14
- // case TypeRef(pre, sym, List()) if !sym.typeParams.isEmpty =>
- // val eparams = typeParamsToExistentials(sym, sym.typeParams)
- // existentialAbstraction(eparams, TypeRef(pre, sym, eparams map (_.tpe)))
- // case _ =>
- // mapOver(tp)
- // }
- // }
- val tpe1 = rawToExistential(sym.tpe)
- // println("cooking: "+ sym +": "+ sym.tpe +" to "+ tpe1)
- if (tpe1 ne sym.tpe) {
- sym.setInfo(tpe1)
- }
- }
-
- if (isJavaDefined)
- cook(this)
- else if (isOverloaded)
- for (sym2 <- alternatives)
- if (sym2.isJavaDefined)
- cook(sym2)
- }
}
implicit val TermSymbolTag = ClassTag[TermSymbol](classOf[TermSymbol])
@@ -2669,11 +2770,28 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
class AliasTypeSymbol protected[Symbols] (initOwner: Symbol, initPos: Position, initName: TypeName)
extends TypeSymbol(initOwner, initPos, initName) {
type TypeOfClonedSymbol = TypeSymbol
+ override def variance = if (hasLocalFlag) Bivariant else info.typeSymbol.variance
+ override def isContravariant = variance.isContravariant
+ override def isCovariant = variance.isCovariant
final override def isAliasType = true
override def cloneSymbolImpl(owner: Symbol, newFlags: Long): TypeSymbol =
owner.newNonClassSymbol(name, pos, newFlags)
}
+ /** Let's say you have a type definition
+ *
+ * {{{
+ * type T <: Number
+ * }}}
+ *
+ * and tsym is the symbol corresponding to T. Then
+ *
+ * {{{
+ * tsym is an instance of AbstractTypeSymbol
+ * tsym.info == TypeBounds(Nothing, Number)
+ * tsym.tpe == TypeRef(NoPrefix, T, List())
+ * }}}
+ */
class AbstractTypeSymbol protected[Symbols] (initOwner: Symbol, initPos: Position, initName: TypeName)
extends TypeSymbol(initOwner, initPos, initName) {
type TypeOfClonedSymbol = TypeSymbol
@@ -2702,7 +2820,6 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
final def asNameType(n: Name) = n.toTypeName
override def isNonClassType = true
- override def isTypeMacro = hasFlag(MACRO)
override def resolveOverloadedFlag(flag: Long) = flag match {
case TRAIT => "<trait>" // DEFAULTPARAM
@@ -2720,7 +2837,6 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
override def isAbstractType = this hasFlag DEFERRED
override def isContravariant = this hasFlag CONTRAVARIANT
override def isCovariant = this hasFlag COVARIANT
- override def isExistentialQuantified = isExistentiallyBound && !isSkolem
override def isExistentiallyBound = this hasFlag EXISTENTIAL
override def isTypeParameter = isTypeParameterOrSkolem && !isSkolem
override def isTypeParameterOrSkolem = this hasFlag PARAM
@@ -2742,63 +2858,63 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
private def newPrefix = if (this hasFlag EXISTENTIAL | PARAM) NoPrefix else owner.thisType
private def newTypeRef(targs: List[Type]) = typeRef(newPrefix, this, targs)
- /** Let's say you have a type definition
+ /** A polymorphic type symbol has two distinct "types":
*
- * {{{
- * type T <: Number
- * }}}
+ * tpe_* a TypeRef with: dummy type args, no unapplied type parameters, and kind *
+ * tpeHK a TypeRef with: no type args, unapplied type parameters, and
+ * kind (*,*,...,*) => * depending on the number of tparams.
*
- * and tsym is the symbol corresponding to T. Then
- *
- * {{{
- * tsym.info = TypeBounds(Nothing, Number)
- * tsym.tpe = TypeRef(NoPrefix, T, List())
- * }}}
- */
- override def tpe: Type = {
- if (tpeCache eq NoType) throw CyclicReference(this, typeConstructor)
+ * The dummy type args in tpe_* are created by wrapping a TypeRef
+ * around the type parameter symbols. Types containing dummies will
+ * hide errors or introduce spurious ones if they are passed around
+ * as if normal types. They should only be used in local operations
+ * where they will either be discarded immediately after, or will
+ * undergo substitution in which the dummies are replaced by actual
+ * type arguments.
+ */
+ override def tpe_* : Type = {
+ maybeUpdateTypeCache()
+ tpeCache
+ }
+ override def typeConstructor: Type = {
+ if (tyconCacheNeedsUpdate)
+ setTyconCache(newTypeRef(Nil))
+ tyconCache
+ }
+ override def tpeHK: Type = typeConstructor
+
+ private def tyconCacheNeedsUpdate = (tyconCache eq null) || tyconRunId != currentRunId
+ private def setTyconCache(tycon: Type) {
+ tyconCache = tycon
+ tyconRunId = currentRunId
+ assert(tyconCache ne null, this)
+ }
+
+ private def maybeUpdateTypeCache() {
if (tpePeriod != currentPeriod) {
- if (isValid(tpePeriod)) {
+ if (isValid(tpePeriod))
tpePeriod = currentPeriod
- } else {
- if (isInitialized) tpePeriod = currentPeriod
- tpeCache = NoType
- val targs =
- if (phase.erasedTypes && this != ArrayClass) List()
- else unsafeTypeParams map (_.typeConstructor)
- //@M! use typeConstructor to generate dummy type arguments,
- // sym.tpe should not be called on a symbol that's supposed to be a higher-kinded type
- // memberType should be used instead, that's why it uses tpeHK and not tpe
- tpeCache = newTypeRef(targs)
- }
+ else
+ updateTypeCache() // perform the actual update
}
- assert(tpeCache ne null/*, "" + this + " " + phase*/)//debug
- tpeCache
}
+ private def updateTypeCache() {
+ if (tpeCache eq NoType)
+ throw CyclicReference(this, typeConstructor)
- /** @M -- tpe vs tpeHK:
- *
- * tpe: creates a TypeRef with dummy type arguments and kind *
- * tpeHK: creates a TypeRef with no type arguments but with type parameters
- *
- * If typeParams is nonEmpty, calling tpe may hide errors or
- * introduce spurious ones. (For example, when deriving a type from
- * the symbol of a type argument that may be higher-kinded.) As far
- * as I can tell, it only makes sense to call tpe in conjunction
- * with a substitution that replaces the generated dummy type
- * arguments by their actual types.
- *
- * TODO: the above conditions desperately need to be enforced by code.
- */
- override def tpeHK = typeConstructor // @M! used in memberType
+ if (isInitialized)
+ tpePeriod = currentPeriod
- override def typeConstructor: Type = {
- if ((tyconCache eq null) || tyconRunId != currentRunId) {
- tyconCache = newTypeRef(Nil)
- tyconRunId = currentRunId
- }
- assert(tyconCache ne null)
- tyconCache
+ tpeCache = NoType // cycle marker
+ val noTypeParams = phase.erasedTypes && this != ArrayClass || unsafeTypeParams.isEmpty
+ tpeCache = newTypeRef(
+ if (noTypeParams) Nil
+ else unsafeTypeParams map (_.typeConstructor)
+ )
+ // Avoid carrying around different types in tyconCache and tpeCache
+ // for monomorphic types.
+ if (noTypeParams && tyconCacheNeedsUpdate)
+ setTyconCache(tpeCache)
}
override def info_=(tp: Type) {
@@ -2824,15 +2940,6 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
* public class Test1<T extends Test3> {}
* info for T in Test1 should be >: Nothing <: Test3[_]
*/
- protected def doCookJavaRawInfo() {
- if (isJavaDefined || owner.isJavaDefined) {
- val tpe1 = rawToExistential(info)
- // println("cooking type: "+ this +": "+ info +" to "+ tpe1)
- if (tpe1 ne info) {
- setInfo(tpe1)
- }
- }
- }
if (Statistics.hotEnabled) Statistics.incCounter(typeSymbolCount)
}
@@ -2862,11 +2969,10 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
// a type symbol bound by an existential type, for instance the T in
// List[T] forSome { type T }
override def isExistentialSkolem = this hasFlag EXISTENTIAL
- override def isGADTSkolem = this hasAllFlags GADT_SKOLEM_FLAGS
+ override def isGADTSkolem = this hasAllFlags GADT_SKOLEM_FLAGS.toLong
override def isTypeSkolem = this hasFlag PARAM
override def isAbstractType = this hasFlag DEFERRED
- override def isExistentialQuantified = false
override def existentialBound = if (isAbstractType) this.info else super.existentialBound
/** If typeskolem comes from a type parameter, that parameter, otherwise skolem itself */
@@ -2925,7 +3031,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
override def isAnonymousClass = name containsName tpnme.ANON_CLASS_NAME
override def isConcreteClass = !(this hasFlag ABSTRACT | TRAIT)
override def isJavaInterface = hasAllFlags(JAVA | TRAIT)
- override def isNestedClass = !owner.isPackageClass
+ override def isNestedClass = !isTopLevel
override def isNumericValueClass = definitions.isNumericValueClass(this)
override def isNumeric = isNumericValueClass
override def isPackageObjectClass = isModuleClass && (name == tpnme.PACKAGE)
@@ -2951,23 +3057,8 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
override def isLocalClass = (
isAnonOrRefinementClass
|| isLocal
- || !owner.isPackageClass && owner.isLocalClass
+ || !isTopLevel && owner.isLocalClass
)
- override def isStableClass = (this hasFlag STABLE) || checkStable()
-
- private def checkStable() = {
- def hasNoAbstractTypeMember(clazz: Symbol): Boolean =
- (clazz hasFlag STABLE) || {
- var e = clazz.info.decls.elems
- while ((e ne null) && !(e.sym.isAbstractType && info.member(e.sym.name) == e.sym))
- e = e.next
- e == null
- }
- (info.baseClasses forall hasNoAbstractTypeMember) && {
- setFlag(STABLE)
- true
- }
- }
override def enclClassChain = this :: owner.enclClassChain
@@ -2976,8 +3067,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
* returned, otherwise, `NoSymbol` is returned.
*/
protected final def companionModule0: Symbol =
- flatOwnerInfo.decl(name.toTermName).suchThat(
- sym => sym.isModule && (sym isCoDefinedWith this) && !sym.isMethod)
+ flatOwnerInfo.decl(name.toTermName).suchThat(sym => sym.isModuleNotMethod && (sym isCoDefinedWith this))
override def companionModule = companionModule0
override def companionSymbol = companionModule0
@@ -2994,7 +3084,11 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
if (c.isOverloaded) c.alternatives.head else c
}
- override def associatedFile = if (owner.isPackageClass) _associatedFile else super.associatedFile
+ override def associatedFile = (
+ if (!isTopLevel) super.associatedFile
+ else if (_associatedFile eq null) NoAbstractFile // guarantee not null, but save cost of initializing the var
+ else _associatedFile
+ )
override def associatedFile_=(f: AbstractFile) { _associatedFile = f }
override def reset(completer: Type): this.type = {
@@ -3043,9 +3137,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
clone.typeOfThis = typeOfThis
clone.thisSym setName thisSym.name
}
- if (_associatedFile ne null)
- clone.associatedFile = _associatedFile
-
+ clone.associatedFile = _associatedFile
clone
}
@@ -3153,6 +3245,8 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
)
}
trait StubSymbol extends Symbol {
+ devWarning("creating stub symbol to defer error: " + missingMessage)
+
protected def missingMessage: String
/** Fail the stub by throwing a [[scala.reflect.internal.MissingRequirementError]]. */
@@ -3180,8 +3274,6 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
override def info = fail(NoType)
override def rawInfo = fail(NoType)
override def companionSymbol = fail(NoSymbol)
-
- debugwarn("creating stub symbol to defer error: " + missingMessage)
}
class StubClassSymbol(owner0: Symbol, name0: TypeName, protected val missingMessage: String) extends ClassSymbol(owner0, owner0.pos, name0) with StubSymbol
class StubTermSymbol(owner0: Symbol, name0: TermName, protected val missingMessage: String) extends TermSymbol(owner0, owner0.pos, name0) with StubSymbol
@@ -3231,7 +3323,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
override def enclosingTopLevelClass: Symbol = this
override def enclosingPackageClass: Symbol = this
override def enclMethod: Symbol = this
- override def associatedFile = null
+ override def associatedFile = NoAbstractFile
override def ownerChain: List[Symbol] = List()
override def ownersIterator: Iterator[Symbol] = Iterator.empty
override def alternatives: List[Symbol] = List()
@@ -3239,15 +3331,12 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
override def info: Type = NoType
override def existentialBound: Type = NoType
override def rawInfo: Type = NoType
- protected def doCookJavaRawInfo() {}
override def accessBoundary(base: Symbol): Symbol = enclosingRootClass
def cloneSymbolImpl(owner: Symbol, newFlags: Long) = abort("NoSymbol.clone()")
override def originalEnclosingMethod = this
override def owner: Symbol =
abort("no-symbol does not have an owner")
- override def typeConstructor: Type =
- abort("no-symbol does not have a type constructor (this may indicate scalac cannot find fundamental classes)")
}
protected def makeNoSymbol: NoSymbol = new NoSymbol
@@ -3352,12 +3441,11 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
// ----- Hoisted closures and convenience methods, for compile time reductions -------
private[scala] final val symbolIsPossibleInRefinement = (sym: Symbol) => sym.isPossibleInRefinement
- private[scala] final val symbolIsNonVariant = (sym: Symbol) => sym.variance == 0
@tailrec private[scala] final
def allSymbolsHaveOwner(syms: List[Symbol], owner: Symbol): Boolean = syms match {
case sym :: rest => sym.owner == owner && allSymbolsHaveOwner(rest, owner)
- case _ => true
+ case _ => true
}
diff --git a/src/reflect/scala/reflect/internal/TreeGen.scala b/src/reflect/scala/reflect/internal/TreeGen.scala
index c1753fc5a1..6abf344adb 100644
--- a/src/reflect/scala/reflect/internal/TreeGen.scala
+++ b/src/reflect/scala/reflect/internal/TreeGen.scala
@@ -1,4 +1,5 @@
-package scala.reflect
+package scala
+package reflect
package internal
abstract class TreeGen extends macros.TreeBuilder {
@@ -11,10 +12,7 @@ abstract class TreeGen extends macros.TreeBuilder {
def rootScalaDot(name: Name) = Select(rootId(nme.scala_) setSymbol ScalaPackage, name)
def scalaDot(name: Name) = Select(Ident(nme.scala_) setSymbol ScalaPackage, name)
def scalaAnnotationDot(name: Name) = Select(scalaDot(nme.annotation), name)
- def scalaAnyRefConstr = scalaDot(tpnme.AnyRef) setSymbol AnyRefClass
- def scalaUnitConstr = scalaDot(tpnme.Unit) setSymbol UnitClass
- def productConstr = scalaDot(tpnme.Product) setSymbol ProductRootClass
- def serializableConstr = scalaDot(tpnme.Serializable) setSymbol SerializableClass
+ def scalaAnyRefConstr = scalaDot(tpnme.AnyRef) setSymbol AnyRefClass // used in ide
def scalaFunctionConstr(argtpes: List[Tree], restpe: Tree, abstractFun: Boolean = false): Tree = {
val cls = if (abstractFun)
@@ -116,7 +114,7 @@ abstract class TreeGen extends macros.TreeBuilder {
}
/** Builds a reference to given symbol with given stable prefix. */
- def mkAttributedRef(pre: Type, sym: Symbol): Tree = {
+ def mkAttributedRef(pre: Type, sym: Symbol): RefTree = {
val qual = mkAttributedQualifier(pre)
qual match {
case EmptyTree => mkAttributedIdent(sym)
@@ -126,33 +124,33 @@ abstract class TreeGen extends macros.TreeBuilder {
}
/** Builds a reference to given symbol. */
- def mkAttributedRef(sym: Symbol): Tree =
+ def mkAttributedRef(sym: Symbol): RefTree =
if (sym.owner.isClass) mkAttributedRef(sym.owner.thisType, sym)
else mkAttributedIdent(sym)
- /** Builds an untyped reference to given symbol. */
- def mkUnattributedRef(sym: Symbol): Tree =
- if (sym.owner.isClass) Select(This(sym.owner), sym)
- else Ident(sym)
+ def mkUnattributedRef(sym: Symbol): RefTree = mkUnattributedRef(sym.fullNameAsName('.'))
+
+ def mkUnattributedRef(fullName: Name): RefTree = {
+ val hd :: tl = nme.segments(fullName.toString, assumeTerm = fullName.isTermName)
+ tl.foldLeft(Ident(hd): RefTree)(Select(_,_))
+ }
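For example (a sketch; the path a.b.C is hypothetical):

    mkUnattributedRef(newTermName("a.b.C"))
    // => Select(Select(Ident(a), b), C)  - an untyped, symbol-less path left for the typer to resolve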
/** Replaces tree type with a stable type if possible */
- def stabilize(tree: Tree): Tree = {
- for(tp <- stableTypeFor(tree)) tree.tpe = tp
- tree
+ def stabilize(tree: Tree): Tree = stableTypeFor(tree) match {
+ case Some(tp) => tree setType tp
+ case _ => tree
}
/** Computes stable type for a tree if possible */
- def stableTypeFor(tree: Tree): Option[Type] = tree match {
- case This(_) if tree.symbol != null && !tree.symbol.isError =>
- Some(ThisType(tree.symbol))
- case Ident(_) if tree.symbol.isStable =>
- Some(singleType(tree.symbol.owner.thisType, tree.symbol))
- case Select(qual, _) if ((tree.symbol ne null) && (qual.tpe ne null)) && // turned assert into guard for #4064
- tree.symbol.isStable && qual.tpe.isStable =>
- Some(singleType(qual.tpe, tree.symbol))
- case _ =>
- None
- }
+ def stableTypeFor(tree: Tree): Option[Type] =
+ if (treeInfo.admitsTypeSelection(tree))
+ tree match {
+ case This(_) => Some(ThisType(tree.symbol))
+ case Ident(_) => Some(singleType(tree.symbol.owner.thisType, tree.symbol))
+ case Select(qual, _) => Some(singleType(qual.tpe, tree.symbol))
+ case _ => None
+ }
+ else None
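For example (a sketch): for Ident(x) where x refers to a stable val, admitsTypeSelection holds and the tree is given the singleton type x.type; otherwise stableTypeFor returns None and stabilize leaves the tree's type untouched.

    // stabilize(Ident(x))   where x is a stable val   =>  tree setType x.type
    // stabilize(unstable)                             =>  tree returned unchanged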
/** Builds a reference with stable type to given symbol */
def mkAttributedStableRef(pre: Type, sym: Symbol): Tree =
@@ -161,13 +159,13 @@ abstract class TreeGen extends macros.TreeBuilder {
def mkAttributedStableRef(sym: Symbol): Tree =
stabilize(mkAttributedRef(sym))
- def mkAttributedThis(sym: Symbol): Tree =
+ def mkAttributedThis(sym: Symbol): This =
This(sym.name.toTypeName) setSymbol sym setType sym.thisType
- def mkAttributedIdent(sym: Symbol): Tree =
- Ident(sym.name) setSymbol sym setType sym.tpe
+ def mkAttributedIdent(sym: Symbol): RefTree =
+ Ident(sym.name) setSymbol sym setType sym.tpeHK
- def mkAttributedSelect(qual: Tree, sym: Symbol): Tree = {
+ def mkAttributedSelect(qual: Tree, sym: Symbol): RefTree = {
// Tests involving the repl fail without the .isEmptyPackage condition.
if (qual.symbol != null && (qual.symbol.isEffectiveRoot || qual.symbol.isEmptyPackage))
mkAttributedIdent(sym)
@@ -213,7 +211,7 @@ abstract class TreeGen extends macros.TreeBuilder {
mkTypeApply(mkAttributedSelect(target, method), targs map TypeTree)
private def mkSingleTypeApply(value: Tree, tpe: Type, what: Symbol, wrapInApply: Boolean) = {
- val tapp = mkAttributedTypeApply(value, what, tpe.normalize :: Nil)
+ val tapp = mkAttributedTypeApply(value, what, tpe.dealias :: Nil)
if (wrapInApply) Apply(tapp, Nil) else tapp
}
private def typeTestSymbol(any: Boolean) = if (any) Any_isInstanceOf else Object_isInstanceOf
@@ -229,7 +227,7 @@ abstract class TreeGen extends macros.TreeBuilder {
/** Cast `tree` to `pt`, unless tpe is a subtype of pt, or pt is Unit. */
def maybeMkAsInstanceOf(tree: Tree, pt: Type, tpe: Type, beforeRefChecks: Boolean = false): Tree =
- if ((pt == UnitClass.tpe) || (tpe <:< pt)) tree
+ if ((pt == UnitTpe) || (tpe <:< pt)) tree
else atPos(tree.pos)(mkAsInstanceOf(tree, pt, any = true, wrapInApply = !beforeRefChecks))
/** Apparently we smuggle a Type around as a Literal(Constant(tp))
@@ -248,10 +246,6 @@ abstract class TreeGen extends macros.TreeBuilder {
Literal(Constant(tp)) setType ConstantType(Constant(tp))
/** Builds a list with given head and tail. */
- def mkNewCons(head: Tree, tail: Tree): Tree =
- New(Apply(mkAttributedRef(ConsClass), List(head, tail)))
-
- /** Builds a list with given head and tail. */
def mkNil: Tree = mkAttributedRef(NilModule)
/** Builds a tree representing an undefined local, as in
@@ -259,7 +253,7 @@ abstract class TreeGen extends macros.TreeBuilder {
* which is appropriate to the given Type.
*/
def mkZero(tp: Type): Tree = tp.typeSymbol match {
- case NothingClass => mkMethodCall(Predef_???, Nil) setType NothingClass.tpe
+ case NothingClass => mkMethodCall(Predef_???, Nil) setType NothingTpe
case _ => Literal(mkConstantZero(tp)) setType tp
}
@@ -276,9 +270,13 @@ abstract class TreeGen extends macros.TreeBuilder {
case _ => Constant(null)
}
+ /** Wrap an expression in a named argument. */
+ def mkNamedArg(name: Name, tree: Tree): Tree = mkNamedArg(Ident(name), tree)
+ def mkNamedArg(lhs: Tree, rhs: Tree): Tree = atPos(rhs.pos)(AssignOrNamedArg(lhs, rhs))
+
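What mkNamedArg denotes at the source level (hypothetical method, for illustration): a named argument such as `timeout = 30` below is carried as AssignOrNamedArg(Ident(timeout), Literal(Constant(30))) until the typer decides how to interpret it.

def connect(host: String, timeout: Int = 10): String = s"$host:$timeout"
connect("localhost", timeout = 30)   // the second argument is an AssignOrNamedArg node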
/** Builds a tuple */
def mkTuple(elems: List[Tree]): Tree =
- if (elems.isEmpty) Literal(Constant())
+ if (elems.isEmpty) Literal(Constant(()))
else Apply(
Select(mkAttributedRef(TupleClass(elems.length).caseModule), nme.apply),
elems)
@@ -295,4 +293,13 @@ abstract class TreeGen extends macros.TreeBuilder {
assert(ReflectRuntimeUniverse != NoSymbol)
mkAttributedRef(ReflectRuntimeUniverse) setType singleType(ReflectRuntimeUniverse.owner.thisPrefix, ReflectRuntimeUniverse)
}
+
+ def mkPackageDef(packageName: String, stats: List[Tree]): PackageDef = {
+ PackageDef(mkUnattributedRef(newTermName(packageName)), stats)
+ }
+
+ def mkSeqApply(arg: Tree): Apply = {
+ val factory = Select(gen.mkAttributedRef(SeqModule), nme.apply)
+ Apply(factory, List(arg))
+ }
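Note that mkSeqApply wraps a single argument tree; at the source level it corresponds to a one-argument Seq.apply call, not a splice of several elements:

val elems = List(1, 2, 3)
Seq.apply(elems)   // Seq(List(1, 2, 3)): Seq[List[Int]] -- one element, not three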
}
diff --git a/src/reflect/scala/reflect/internal/TreeInfo.scala b/src/reflect/scala/reflect/internal/TreeInfo.scala
index fa4441e513..d1e8a04553 100644
--- a/src/reflect/scala/reflect/internal/TreeInfo.scala
+++ b/src/reflect/scala/reflect/internal/TreeInfo.scala
@@ -3,7 +3,8 @@
* @author Martin Odersky
*/
-package scala.reflect
+package scala
+package reflect
package internal
import Flags._
@@ -17,7 +18,7 @@ abstract class TreeInfo {
val global: SymbolTable
import global._
- import definitions.{ isTupleSymbol, isVarArgsList, isCastSymbol, ThrowableClass, TupleClass, MacroContextClass, MacroContextPrefixType }
+ import definitions.{ isTupleSymbol, isVarArgsList, isCastSymbol, ThrowableClass, TupleClass, MacroContextClass, MacroContextPrefixType, uncheckedStableClass }
/* Does not seem to be used. Not sure what it does anyway.
def isOwnerDefinition(tree: Tree): Boolean = tree match {
@@ -65,6 +66,83 @@ abstract class TreeInfo {
false
}
+ /** Is `tree` a path, defined as follows? (Spec: 3.1 Paths)
+ *
+ * - The empty path ε (which cannot be written explicitly in user programs).
+ * - C.this, where C references a class.
+ * - p.x where p is a path and x is a stable member of p.
+ * - C.super.x or C.super[M].x where C references a class
+ * and x references a stable member of the super class or designated parent class M of C.
+ *
+ * NOTE: Trees with errors are (mostly) excluded.
+ *
+ * Path ::= StableId | [id ‘.’] this
+ *
+ */
+ def isPath(tree: Tree, allowVolatile: Boolean): Boolean =
+ tree match {
+ // Super is not technically a path.
+ // However, syntactically, it can only occur nested in a Select.
+ // This gives a nicer definition of isStableIdentifier that's equivalent to the spec's.
+ // must consider Literal(_) a path for typedSingletonTypeTree
+ case EmptyTree | Literal(_) => true
+ case This(_) | Super(_, _) => symOk(tree.symbol)
+ case _ => isStableIdentifier(tree, allowVolatile)
+ }
+
+ /** Is `tree` a stable identifier, a path which ends in an identifier?
+ *
+ * StableId ::= id
+ * | Path ‘.’ id
+ * | [id ’.’] ‘super’ [‘[’ id ‘]’] ‘.’ id
+ */
+ def isStableIdentifier(tree: Tree, allowVolatile: Boolean): Boolean =
+ tree match {
+ case Ident(_) => symOk(tree.symbol) && tree.symbol.isStable && !tree.symbol.hasVolatileType // TODO SPEC: not required by spec
+ case Select(qual, _) => isStableMemberOf(tree.symbol, qual, allowVolatile) && isPath(qual, allowVolatile)
+ case Apply(Select(free @ Ident(_), nme.apply), _) if free.symbol.name endsWith nme.REIFY_FREE_VALUE_SUFFIX =>
+ // see a detailed explanation of this trick in `GenSymbols.reifyFreeTerm`
+ free.symbol.hasStableFlag && isPath(free, allowVolatile)
+ case _ => false
+ }
+
+ private def symOk(sym: Symbol) = sym != null && !sym.isError && sym != NoSymbol
+ private def typeOk(tp: Type) = tp != null && ! tp.isError
+
+ /** Assuming `sym` is a member of `tree`, is it a "stable member"?
+ *
+ * Stable members are packages or members introduced
+ * by object definitions or by value definitions of non-volatile types (§3.6).
+ */
+ def isStableMemberOf(sym: Symbol, tree: Tree, allowVolatile: Boolean): Boolean = (
+ symOk(sym) && (!sym.isTerm || (sym.isStable && (allowVolatile || !sym.hasVolatileType))) &&
+ typeOk(tree.tpe) && (allowVolatile || !hasVolatileType(tree)) && !definitions.isByNameParamType(tree.tpe)
+ )
+
+ /** Is `tree`'s type volatile? (Ignored if its symbol has the @uncheckedStable annotation.)
+ */
+ def hasVolatileType(tree: Tree): Boolean =
+ symOk(tree.symbol) && tree.tpe.isVolatile && !tree.symbol.hasAnnotation(uncheckedStableClass)
+
+ /** Is `tree` either a non-volatile type,
+ * or a path that does not include any of:
+ * - a reference to a mutable variable/field
+ * - a reference to a by-name parameter
+ * - a member selection on a volatile type (Spec: 3.6 Volatile Types)?
+ *
+ * Such a tree is a suitable target for type selection.
+ */
+ def admitsTypeSelection(tree: Tree): Boolean = isPath(tree, allowVolatile = false)
+
+ /** Is `tree` admissible as a stable identifier pattern (8.1.5 Stable Identifier Patterns)?
+ *
+ * We disregard volatility, as it's irrelevant in patterns (SI-6815)
+ */
+ def isStableIdentifierPattern(tree: Tree): Boolean = isStableIdentifier(tree, allowVolatile = true)
+
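A source-level sketch of what these predicates classify (hypothetical declarations):

class C {
  object O { val x = 1 }   // C.this, O, O.x are paths / stable identifiers inside C
  def m = 2                // not a stable member: a def
  var v = 3                // not a stable member: a var
}
val c = new C
val a: c.O.x.type = c.O.x  // accepted: c.O.x is a stable path
// val b: c.m.type = c.m   // rejected: "stable identifier required"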
+ // TODO SI-5304 tighten this up so we don't elide side effect in module loads
+ def isQualifierSafeToElide(tree: Tree): Boolean = isExprSafeToInline(tree)
+
/** Is tree an expression which can be inlined without affecting program semantics?
*
* Note that this is not called "isExprPure" since purity (lack of side-effects)
@@ -108,80 +186,69 @@ abstract class TreeInfo {
false
}
- @deprecated("Use isExprSafeToInline instead", "2.10.0")
- def isPureExpr(tree: Tree) = isExprSafeToInline(tree)
+ /** As if the name of the method didn't give it away,
+ * this logic is designed around issuing helpful
+ * warnings and minimizing spurious ones. That means
+ * don't reuse it for important matters like inlining
+ * decisions.
+ */
+ def isPureExprForWarningPurposes(tree: Tree) = tree match {
+ case EmptyTree | Literal(Constant(())) => false
+ case _ =>
+ def isWarnableRefTree = tree match {
+ case t: RefTree => isExprSafeToInline(t.qualifier) && t.symbol != null && t.symbol.isAccessor
+ case _ => false
+ }
+ def isWarnableSymbol = {
+ val sym = tree.symbol
+ (sym == null) || !(sym.isModule || sym.isLazy) || {
+ debuglog("'Pure' but side-effecting expression in statement position: " + tree)
+ false
+ }
+ }
- def zipMethodParamsAndArgs(params: List[Symbol], args: List[Tree]): List[(Symbol, Tree)] =
- mapMethodParamsAndArgs(params, args)((param, arg) => ((param, arg)))
+ ( !tree.isErrorTyped
+ && (isExprSafeToInline(tree) || isWarnableRefTree)
+ && isWarnableSymbol
+ )
+ }
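The warning this feeds is the familiar one about discarded pure expressions, e.g.:

def f(): Int = {
  1 + 1       // warns (roughly): "a pure expression does nothing in statement position"
  println()   // side-effecting, not warned
  0
}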
def mapMethodParamsAndArgs[R](params: List[Symbol], args: List[Tree])(f: (Symbol, Tree) => R): List[R] = {
val b = List.newBuilder[R]
foreachMethodParamAndArg(params, args)((param, arg) => b += f(param, arg))
- b.result
+ b.result()
}
def foreachMethodParamAndArg(params: List[Symbol], args: List[Tree])(f: (Symbol, Tree) => Unit): Boolean = {
val plen = params.length
val alen = args.length
def fail() = {
- global.debugwarn(
- "Mismatch trying to zip method parameters and argument list:\n" +
- " params = " + params + "\n" +
- " args = " + args + "\n"
- )
+ global.devWarning(
+ s"""|Mismatch trying to zip method parameters and argument list:
+ | params = $params
+ | args = $args""".stripMargin)
false
}
if (plen == alen) foreach2(params, args)(f)
- else if (params.isEmpty) return fail
+ else if (params.isEmpty) return fail()
else if (isVarArgsList(params)) {
val plenInit = plen - 1
if (alen == plenInit) {
if (alen == 0) Nil // avoid calling mismatched zip
else foreach2(params.init, args)(f)
}
- else if (alen < plenInit) return fail
+ else if (alen < plenInit) return fail()
else {
foreach2(params.init, args take plenInit)(f)
val remainingArgs = args drop plenInit
foreach2(List.fill(remainingArgs.size)(params.last), remainingArgs)(f)
}
}
- else return fail
+ else return fail()
true
}
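A standalone sketch of the vararg zipping above, with plain strings standing in for Symbols and Trees (illustrative only, not the compiler's own data structures):

def zipParamsAndArgs(params: List[String], args: List[String]): List[(String, String)] =
  if (params.length == args.length) params zip args
  else if (params.nonEmpty && args.length >= params.length - 1)   // vararg case: repeat params.last
    (params.init zip args) ++ args.drop(params.length - 1).map(params.last -> _)
  else Nil                                                        // mismatch: nothing to zip

zipParamsAndArgs(List("fmt", "args"), List("%d %d", "1", "2"))
// => List((fmt,%d %d), (args,1), (args,2))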
- /**
- * Selects the correct parameter list when there are nested applications.
- * Given Apply(fn, args), args might correspond to any of fn.symbol's parameter
- * lists. To choose the correct one before uncurry, we have to unwrap any
- * applies: for instance Apply(fn @ Apply(Apply(_, _), _), args) implies args
- * correspond to the third parameter list.
- *
- * The argument fn is the function part of the apply node being considered.
- *
- * Also accounts for varargs.
- */
- private def applyMethodParameters(fn: Tree): List[Symbol] = {
- val depth = dissectApplied(fn).applyDepth
- // There could be applies which go beyond the parameter list(s),
- // being applied to the result of the method call.
- // !!! Note that this still doesn't seem correct, although it should
- // be closer than what it replaced.
- if (depth < fn.symbol.paramss.size) fn.symbol.paramss(depth)
- else if (fn.symbol.paramss.isEmpty) Nil
- else fn.symbol.paramss.last
- }
-
- def zipMethodParamsAndArgs(t: Tree): List[(Symbol, Tree)] = t match {
- case Apply(fn, args) => zipMethodParamsAndArgs(applyMethodParameters(fn), args)
- case _ => Nil
- }
- def foreachMethodParamAndArg(t: Tree)(f: (Symbol, Tree) => Unit): Unit = t match {
- case Apply(fn, args) => foreachMethodParamAndArg(applyMethodParameters(fn), args)(f)
- case _ =>
- }
-
/** Is symbol potentially a getter of a variable?
*/
def mayBeVarGetter(sym: Symbol): Boolean = sym.info match {
@@ -196,7 +263,7 @@ abstract class TreeInfo {
def isVariableOrGetter(tree: Tree) = {
def sym = tree.symbol
def isVar = sym.isVariable
- def isGetter = mayBeVarGetter(sym) && sym.owner.info.member(nme.getterToSetter(sym.name.toTermName)) != NoSymbol
+ def isGetter = mayBeVarGetter(sym) && sym.owner.info.member(sym.setterName) != NoSymbol
tree match {
case Ident(_) => isVar
@@ -210,16 +277,16 @@ abstract class TreeInfo {
* same object?
*/
def isSelfConstrCall(tree: Tree): Boolean = tree match {
- case Applied(Ident(nme.CONSTRUCTOR), _, _) => true
+ case Applied(Ident(nme.CONSTRUCTOR), _, _) => true
case Applied(Select(This(_), nme.CONSTRUCTOR), _, _) => true
- case _ => false
+ case _ => false
}
/** Is tree a super constructor call?
*/
def isSuperConstrCall(tree: Tree): Boolean = tree match {
case Applied(Select(Super(_, _), nme.CONSTRUCTOR), _, _) => true
- case _ => false
+ case _ => false
}
/**
@@ -325,10 +392,6 @@ abstract class TreeInfo {
case x: Ident => !x.isBackquoted && nme.isVariableName(x.name)
case _ => false
}
- def isDeprecatedIdentifier(tree: Tree): Boolean = tree match {
- case x: Ident => !x.isBackquoted && nme.isDeprecatedIdentifierName(x.name)
- case _ => false
- }
/** The first constructor definitions in `stats` */
def firstConstructor(stats: List[Tree]): Tree = stats find {
@@ -346,6 +409,9 @@ abstract class TreeInfo {
def preSuperFields(stats: List[Tree]): List[ValDef] =
stats collect { case vd: ValDef if isEarlyValDef(vd) => vd }
+ def hasUntypedPreSuperFields(stats: List[Tree]): Boolean =
+ preSuperFields(stats) exists (_.tpt.isEmpty)
+
def isEarlyDef(tree: Tree) = tree match {
case TypeDef(mods, _, _, _) => mods hasFlag PRESUPER
case ValDef(mods, _, _, _) => mods hasFlag PRESUPER
@@ -384,15 +450,17 @@ abstract class TreeInfo {
case _ => false
}
+ /** Translates an Assign(_, _) node to AssignOrNamedArg(_, _) if
+ * the lhs is a simple ident. Otherwise returns unchanged.
+ */
+ def assignmentToMaybeNamedArg(tree: Tree) = tree match {
+ case t @ Assign(id: Ident, rhs) => atPos(t.pos)(AssignOrNamedArg(id, rhs))
+ case t => t
+ }
+
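Source-level context (hypothetical names): inside an argument list, `limit = 5` could be either a named argument or an assignment to something in scope, so the tree is widened to AssignOrNamedArg and the typer decides.

def check(limit: Int, strict: Boolean): Boolean = strict && limit > 0
check(limit = 5, strict = true)   // each argument is carried as AssignOrNamedArg until the typer resolves it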
/** Is name a left-associative operator? */
def isLeftAssoc(operator: Name) = operator.nonEmpty && (operator.endChar != ':')
- /** Is tree a `this` node which belongs to `enclClass`? */
- def isSelf(tree: Tree, enclClass: Symbol): Boolean = tree match {
- case This(_) => tree.symbol == enclClass
- case _ => false
- }
-
/** a Match(Typed(_, tpt), _) must be translated into a switch if isSwitchAnnotation(tpt.tpe) */
def isSwitchAnnotation(tpe: Type) = tpe hasAnnotation definitions.SwitchClass
@@ -408,8 +476,15 @@ abstract class TreeInfo {
/** Is this argument node of the form <expr> : _* ?
*/
def isWildcardStarArg(tree: Tree): Boolean = tree match {
- case Typed(_, Ident(tpnme.WILDCARD_STAR)) => true
- case _ => false
+ case WildcardStarArg(_) => true
+ case _ => false
+ }
+
+ object WildcardStarArg {
+ def unapply(tree: Typed): Option[Tree] = tree match {
+ case Typed(expr, Ident(tpnme.WILDCARD_STAR)) => Some(expr)
+ case _ => None
+ }
}
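A wildcard-star argument at the source level:

def sum(xs: Int*): Int = xs.sum
val nums = List(1, 2, 3)
sum(nums: _*)   // `nums: _*` is Typed(Ident(nums), Ident(tpnme.WILDCARD_STAR)), i.e. WildcardStarArg(nums)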
/** If this tree has type parameters, those. Otherwise Nil.
@@ -445,6 +520,23 @@ abstract class TreeInfo {
case _ => false
}
+ private def hasNoSymbol(t: Tree) = t.symbol == null || t.symbol == NoSymbol
+
+ /** If this CaseDef assigns a name to its top-level pattern,
+ * in the form 'name @ pattern' or as a bare variable pattern 'name', returns
+ * the name. Otherwise, nme.NO_NAME.
+ *
+ * Note: in the case of Constant patterns such as 'case x @ "" =>',
+ * the pattern matcher eliminates the binding and inlines the constant,
+ * so as far as this method is likely to be able to determine,
+ * the name is NO_NAME.
+ */
+ def assignedNameOfPattern(cdef: CaseDef): Name = cdef.pat match {
+ case Bind(name, _) => name
+ case Ident(name) => name
+ case _ => nme.NO_NAME
+ }
+
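Two cases at the source level:

"hello" match {
  case s @ "hello" => s           // top-level Bind: assignedNameOfPattern yields `s`
  case ""          => "<empty>"   // constant pattern, no binder: assignedNameOfPattern yields nme.NO_NAME
}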
/** Is this pattern node a synthetic catch-all case, added during PartialFunction synthesis before we know
* whether the user-provided cases are exhaustive. */
def isSyntheticDefaultCase(cdef: CaseDef) = cdef match {
@@ -453,14 +545,13 @@ abstract class TreeInfo {
}
/** Does this CaseDef catch Throwable? */
- def catchesThrowable(cdef: CaseDef) = catchesAllOf(cdef, ThrowableClass.tpe)
-
- /** Does this CaseDef catch everything of a certain Type? */
- def catchesAllOf(cdef: CaseDef, threshold: Type) =
- isDefaultCase(cdef) || (cdef.guard.isEmpty && (unbind(cdef.pat) match {
- case Typed(Ident(nme.WILDCARD), tpt) => (tpt.tpe != null) && (threshold <:< tpt.tpe)
- case _ => false
- }))
+ def catchesThrowable(cdef: CaseDef) = (
+ cdef.guard.isEmpty && (unbind(cdef.pat) match {
+ case Ident(nme.WILDCARD) => true
+ case i@Ident(name) => hasNoSymbol(i)
+ case _ => false
+ })
+ )
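In a try/catch, the cases this now recognizes as catching Throwable are the untyped ones:

try sys.error("boom") catch {
  case _: RuntimeException => ()   // type-test pattern: catchesThrowable == false
  case e                   => ()   // bare lowercase binder, no type test: catchesThrowable == true
}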
/** Is this pattern node a catch-all or type-test pattern? */
def isCatchCase(cdef: CaseDef) = cdef match {
@@ -485,7 +576,7 @@ abstract class TreeInfo {
tp match {
case TypeRef(pre, sym, args) =>
- args.isEmpty && (sym.owner.isPackageClass || isSimple(pre))
+ args.isEmpty && (sym.isTopLevel || isSimple(pre))
case NoPrefix =>
true
case _ =>
@@ -538,6 +629,10 @@ abstract class TreeInfo {
def isSynthCaseSymbol(sym: Symbol) = sym hasAllFlags SYNTH_CASE_FLAGS
def hasSynthCaseSymbol(t: Tree) = t.symbol != null && isSynthCaseSymbol(t.symbol)
+ def isTraitRef(tree: Tree): Boolean = {
+ val sym = if (tree.tpe != null) tree.tpe.typeSymbol else null
+ ((sym ne null) && sym.initialize.isTrait)
+ }
/** Applications in Scala can have one of the following shapes:
*
@@ -633,6 +728,12 @@ abstract class TreeInfo {
}
loop(tree)
}
+
+ override def toString = {
+ val tstr = if (targs.isEmpty) "" else targs.mkString("[", ", ", "]")
+ val astr = argss map (args => args.mkString("(", ", ", ")")) mkString ""
+ s"$core$tstr$astr"
+ }
}
/** Returns a wrapper that knows how to destructure and analyze applications.
@@ -649,6 +750,8 @@ abstract class TreeInfo {
* For advanced use, call `dissectApplied` explicitly and use its methods instead of pattern matching.
*/
object Applied {
+ def apply(tree: Tree): Applied = new Applied(tree)
+
def unapply(applied: Applied): Option[(Tree, List[Tree], List[List[Tree]])] =
Some((applied.core, applied.targs, applied.argss))
@@ -656,20 +759,9 @@ abstract class TreeInfo {
unapply(dissectApplied(tree))
}
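The nesting that dissectApplied unwinds, shown on a concrete tree via quasiquotes (public runtime universe assumed on the classpath):

import scala.reflect.runtime.universe._

val tree = q"f[Int](1)(2, 3)"
// showRaw(tree) is (roughly):
//   Apply(Apply(TypeApply(Ident(TermName("f")), List(Ident(TypeName("Int")))),
//               List(Literal(Constant(1)))),
//         List(Literal(Constant(2)), Literal(Constant(3))))
// Applied(tree) reports core = f, targs = List(Int), argss = List(List(1), List(2, 3)),
// and the toString above reassembles "f[Int](1)(2, 3)".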
- /** Does list of trees start with a definition of
- * a class of module with given name (ignoring imports)
- */
- def firstDefinesClassOrObject(trees: List[Tree], name: Name): Boolean = trees match {
- case Import(_, _) :: xs => firstDefinesClassOrObject(xs, name)
- case Annotated(_, tree1) :: Nil => firstDefinesClassOrObject(List(tree1), name)
- case ModuleDef(_, `name`, _) :: Nil => true
- case ClassDef(_, `name`, _, _) :: Nil => true
- case _ => false
- }
-
-
/** Is this file the body of a compilation unit which should not
- * have Predef imported?
+ * have Predef imported? This is the case iff the first import in the
+ * unit explicitly refers to Predef.
*/
def noPredefImportForUnit(body: Tree) = {
// Top-level definition whose leading imports include Predef.
@@ -678,13 +770,7 @@ abstract class TreeInfo {
case Import(expr, _) => isReferenceToPredef(expr)
case _ => false
}
- // Compilation unit is class or object 'name' in package 'scala'
- def isUnitInScala(tree: Tree, name: Name) = tree match {
- case PackageDef(Ident(nme.scala_), defs) => firstDefinesClassOrObject(defs, name)
- case _ => false
- }
-
- isUnitInScala(body, nme.Predef) || isLeadingPredefImport(body)
+ isLeadingPredefImport(body)
}
def isAbsTypeDef(tree: Tree) = tree match {
@@ -750,8 +836,17 @@ abstract class TreeInfo {
}
def unapply(tree: Tree) = refPart(tree) match {
- case ref: RefTree => Some((ref.qualifier.symbol, ref.symbol, dissectApplied(tree).targs))
- case _ => None
+ case ref: RefTree => {
+ val isBundle = definitions.isMacroBundleType(ref.qualifier.tpe)
+ val owner =
+ if (isBundle) ref.qualifier.tpe.typeSymbol
+ else {
+ val sym = ref.qualifier.symbol
+ if (sym.isModule) sym.moduleClass else sym
+ }
+ Some((isBundle, owner, ref.symbol, dissectApplied(tree).targs))
+ }
+ case _ => None
}
}
@@ -761,4 +856,15 @@ abstract class TreeInfo {
case tree: RefTree => true
case _ => false
})
+
+ def isMacroApplication(tree: Tree): Boolean =
+ !tree.isDef && tree.symbol != null && tree.symbol.isMacro && !tree.symbol.isErroneous
+
+ def isMacroApplicationOrBlock(tree: Tree): Boolean = tree match {
+ case Block(_, expr) => isMacroApplicationOrBlock(expr)
+ case tree => isMacroApplication(tree)
+ }
+
+ def isNonTrivialMacroApplication(tree: Tree): Boolean =
+ isMacroApplication(tree) && dissectApplied(tree).core != tree
}
diff --git a/src/reflect/scala/reflect/internal/Trees.scala b/src/reflect/scala/reflect/internal/Trees.scala
index 2585b541ed..8781423a6d 100644
--- a/src/reflect/scala/reflect/internal/Trees.scala
+++ b/src/reflect/scala/reflect/internal/Trees.scala
@@ -3,7 +3,8 @@
* @author Martin Odersky
*/
-package scala.reflect
+package scala
+package reflect
package internal
import Flags._
@@ -14,6 +15,23 @@ trait Trees extends api.Trees { self: SymbolTable =>
private[scala] var nodeCount = 0
+ protected def treeLine(t: Tree): String =
+ if (t.pos.isDefined && t.pos.isRange) t.pos.lineContent.drop(t.pos.column - 1).take(t.pos.end - t.pos.start + 1)
+ else t.summaryString
+
+ protected def treeStatus(t: Tree, enclosingTree: Tree = null) = {
+ val parent = if (enclosingTree eq null) " " else " P#%5s".format(enclosingTree.id)
+
+ "[L%4s%8s] #%-6s %-15s %-10s // %s".format(t.pos.safeLine, parent, t.id, t.pos.show, t.shortClass, treeLine(t))
+ }
+ protected def treeSymStatus(t: Tree) = {
+ val line = if (t.pos.isDefined) "line %-4s".format(t.pos.safeLine) else " "
+ "#%-5s %s %-10s // %s".format(t.id, line, t.shortClass,
+ if (t.symbol ne NoSymbol) "(" + t.symbol.fullLocationString + ")"
+ else treeLine(t)
+ )
+ }
+
abstract class Tree extends TreeContextApiImpl with Attachable with Product {
val id = nodeCount // TODO: add to attachment?
nodeCount += 1
@@ -24,18 +42,24 @@ trait Trees extends api.Trees { self: SymbolTable =>
private[this] var rawtpe: Type = _
final def tpe = rawtpe
- def tpe_=(t: Type) = rawtpe = t
+ @deprecated("Use setType", "2.11.0") def tpe_=(t: Type): Unit = setType(t)
+
+ def clearType(): this.type = this setType null
def setType(tp: Type): this.type = { rawtpe = tp; this }
def defineType(tp: Type): this.type = setType(tp)
def symbol: Symbol = null //!!!OPT!!! symbol is about 3% of hot compile times -- megamorphic dispatch?
def symbol_=(sym: Symbol) { throw new UnsupportedOperationException("symbol_= inapplicable for " + this) }
def setSymbol(sym: Symbol): this.type = { symbol = sym; this }
- def hasSymbol = false
+ def hasSymbolField = false
+ @deprecated("Use hasSymbolField", "2.11.0") def hasSymbol = hasSymbolField
def isDef = false
def isEmpty = false
+ def nonEmpty = !isEmpty
+
+ def canHaveAttrs = true
/** The canonical way to test if a Tree represents a term.
*/
@@ -62,7 +86,7 @@ trait Trees extends api.Trees { self: SymbolTable =>
private[scala] def copyAttrs(tree: Tree): this.type = {
rawatt = tree.rawatt
tpe = tree.tpe
- if (hasSymbol) symbol = tree.symbol
+ if (hasSymbolField) symbol = tree.symbol
this
}
@@ -158,6 +182,9 @@ trait Trees extends api.Trees { self: SymbolTable =>
override def substituteThis(clazz: Symbol, to: Tree): Tree =
new ThisSubstituter(clazz, to) transform this
+ def replace(from: Tree, to: Tree): Tree =
+ new TreeReplacer(from, to, positionAware = false) transform this
+
def hasSymbolWhich(f: Symbol => Boolean) =
(symbol ne null) && (symbol ne NoSymbol) && f(symbol)
@@ -210,12 +237,15 @@ trait Trees extends api.Trees { self: SymbolTable =>
trait TypTree extends Tree with TypTreeApi
abstract class SymTree extends Tree with SymTreeContextApi {
- override def hasSymbol = true
+ override def hasSymbolField = true
override var symbol: Symbol = NoSymbol
}
trait NameTree extends Tree with NameTreeApi {
def name: Name
+ def getterName: TermName = name.getterName
+ def setterName: TermName = name.setterName
+ def localName: TermName = name.localName
}
trait RefTree extends SymTree with NameTree with RefTreeApi {
@@ -223,19 +253,24 @@ trait Trees extends api.Trees { self: SymbolTable =>
def name: Name
}
+ object RefTree extends RefTreeExtractor {
+ def apply(qualifier: Tree, name: Name): RefTree = qualifier match {
+ case EmptyTree =>
+ Ident(name)
+ case qual if qual.isTerm =>
+ Select(qual, name)
+ case qual if qual.isType =>
+ assert(name.isTypeName, s"qual = $qual, name = $name")
+ SelectFromTypeTree(qual, name.toTypeName)
+ }
+ def unapply(refTree: RefTree): Option[(Tree, Name)] = Some((refTree.qualifier, refTree.name))
+ }
+
abstract class DefTree extends SymTree with NameTree with DefTreeApi {
def name: Name
override def isDef = true
}
- case object EmptyTree extends TermTree {
- val asList = List(this)
- super.tpe_=(NoType)
- override def tpe_=(t: Type) =
- if (t != NoType) throw new UnsupportedOperationException("tpe_=("+t+") inapplicable for <empty>")
- override def isEmpty = true
- }
-
abstract class MemberDef extends DefTree with MemberDefApi {
def mods: Modifiers
def keyword: String = this match {
@@ -416,6 +451,16 @@ trait Trees extends api.Trees { self: SymbolTable =>
def ApplyConstructor(tpt: Tree, args: List[Tree]) = Apply(Select(New(tpt), nme.CONSTRUCTOR), args)
+ // Creates a constructor call from the constructor symbol. This is
+ // to avoid winding up with an OverloadedType for the constructor call.
+ def NewFromConstructor(constructor: Symbol, args: Tree*) = {
+ assert(constructor.isConstructor, constructor)
+ val instance = New(TypeTree(constructor.owner.tpe))
+ val init = Select(instance, nme.CONSTRUCTOR) setSymbol constructor
+
+ Apply(init, args.toList)
+ }
+
case class ApplyDynamic(qual: Tree, args: List[Tree]) extends SymTree with TermTree
case class Super(qual: Tree, mix: TypeName) extends TermTree with SuperApi {
@@ -511,7 +556,7 @@ trait Trees extends api.Trees { self: SymbolTable =>
case t => t
}
- orig = followOriginal(tree); setPos(tree.pos);
+ orig = followOriginal(tree); setPos(tree.pos)
this
}
@@ -603,6 +648,7 @@ trait Trees extends api.Trees { self: SymbolTable =>
case _: ApplyToImplicitArgs => new ApplyToImplicitArgs(fun, args)
case _: ApplyImplicitView => new ApplyImplicitView(fun, args)
// TODO: ApplyConstructor ???
+ case self.pendingSuperCall => self.pendingSuperCall
case _ => new Apply(fun, args)
}).copyAttrs(tree)
def ApplyDynamic(tree: Tree, qual: Tree, args: List[Tree]) =
@@ -866,7 +912,6 @@ trait Trees extends api.Trees { self: SymbolTable =>
/** Is the tree Predef, scala.Predef, or _root_.scala.Predef?
*/
def isReferenceToPredef(t: Tree) = isReferenceToScalaMember(t, nme.Predef)
- def isReferenceToAnyVal(t: Tree) = isReferenceToScalaMember(t, tpnme.AnyVal)
// --- modifiers implementation ---------------------------------------
@@ -912,6 +957,7 @@ trait Trees extends api.Trees { self: SymbolTable =>
if (flags1 == flags) this
else Modifiers(flags1, privateWithin, annotations) setPositions positions
}
+ def | (flag: Int): Modifiers = this | flag.toLong
def | (flag: Long): Modifiers = {
val flags1 = flags | flag
if (flags1 == flags) this
@@ -924,13 +970,16 @@ trait Trees extends api.Trees { self: SymbolTable =>
def withPosition(flag: Long, position: Position) =
copy() setPositions positions + (flag -> position)
- override def mapAnnotations(f: List[Tree] => List[Tree]): Modifiers =
- Modifiers(flags, privateWithin, f(annotations)) setPositions positions
+ override def mapAnnotations(f: List[Tree] => List[Tree]): Modifiers = {
+ val newAnns = f(annotations)
+ if (annotations == newAnns) this
+ else Modifiers(flags, privateWithin, newAnns) setPositions positions
+ }
override def toString = "Modifiers(%s, %s, %s)".format(flagString, annotations mkString ", ", positions)
}
- object Modifiers extends ModifiersCreator
+ object Modifiers extends ModifiersExtractor
implicit val ModifiersTag = ClassTag[Modifiers](classOf[Modifiers])
@@ -965,12 +1014,23 @@ trait Trees extends api.Trees { self: SymbolTable =>
def ValDef(sym: Symbol): ValDef = ValDef(sym, EmptyTree)
- object emptyValDef extends ValDef(Modifiers(PRIVATE), nme.WILDCARD, TypeTree(NoType), EmptyTree) {
- override def isEmpty = true
+ trait CannotHaveAttrs extends Tree {
+ override def canHaveAttrs = false
+
+ private def unsupported(what: String, args: Any*) =
+ throw new UnsupportedOperationException(s"$what($args) inapplicable for "+self.toString)
+
super.setPos(NoPosition)
- override def setPos(pos: Position) = { assert(false); this }
+ override def setPos(pos: Position) = unsupported("setPos", pos)
+
+ super.setType(NoType)
+ override def tpe_=(t: Type) = if (t != NoType) unsupported("tpe_=", t)
}
+ case object EmptyTree extends TermTree with CannotHaveAttrs { override def isEmpty = true; val asList = List(this) }
+ object emptyValDef extends ValDef(Modifiers(PRIVATE), nme.WILDCARD, TypeTree(NoType), EmptyTree) with CannotHaveAttrs
+ object pendingSuperCall extends Apply(Select(Super(This(tpnme.EMPTY), tpnme.EMPTY), nme.CONSTRUCTOR), List()) with CannotHaveAttrs
+
def DefDef(sym: Symbol, mods: Modifiers, vparamss: List[List[ValDef]], rhs: Tree): DefDef =
atPos(sym.pos) {
assert(sym != NoSymbol)
@@ -1050,6 +1110,9 @@ trait Trees extends api.Trees { self: SymbolTable =>
def New(tpe: Type, args: Tree*): Tree =
ApplyConstructor(TypeTree(tpe), args.toList)
+ def New(tpe: Type, argss: List[List[Tree]]): Tree =
+ New(TypeTree(tpe), argss)
+
def New(sym: Symbol, args: Tree*): Tree =
New(sym.tpe, args: _*)
@@ -1130,7 +1193,7 @@ trait Trees extends api.Trees { self: SymbolTable =>
traverse(annot); traverse(arg)
case Template(parents, self, body) =>
traverseTrees(parents)
- if (!self.isEmpty) traverse(self)
+ if (self ne emptyValDef) traverse(self)
traverseStats(body, tree.symbol)
case Block(stats, expr) =>
traverseTrees(stats); traverse(expr)
@@ -1372,6 +1435,16 @@ trait Trees extends api.Trees { self: SymbolTable =>
if (tree eq orig) super.transform(tree)
else tree
}
+
+ /** A transformer that replaces tree `from` with tree `to` in a given tree */
+ class TreeReplacer(from: Tree, to: Tree, positionAware: Boolean) extends Transformer {
+ override def transform(t: Tree): Tree = {
+ if (t == from) to
+ else if (!positionAware || (t.pos includes from.pos) || t.pos.isTransparent) super.transform(t)
+ else t
+ }
+ }
+
// Create a readable string describing a substitution.
private def substituterString(fromStr: String, toStr: String, from: List[Any], to: List[Any]): String = {
"subst[%s, %s](%s)".format(fromStr, toStr, (from, to).zipped map (_ + " -> " + _) mkString ", ")
@@ -1387,7 +1460,7 @@ trait Trees extends api.Trees { self: SymbolTable =>
def subst(from: List[Symbol], to: List[Tree]): Tree =
if (from.isEmpty) tree
else if (tree.symbol == from.head) to.head.shallowDuplicate // TODO: does it ever make sense *not* to perform a shallowDuplicate on `to.head`?
- else subst(from.tail, to.tail);
+ else subst(from.tail, to.tail)
subst(from, to)
case _ =>
super.transform(tree)
@@ -1400,7 +1473,7 @@ trait Trees extends api.Trees { self: SymbolTable =>
class ThisSubstituter(clazz: Symbol, to: => Tree) extends Transformer {
val newtpe = to.tpe
override def transform(tree: Tree) = {
- if (tree.tpe ne null) tree.tpe = tree.tpe.substThis(clazz, newtpe)
+ tree modifyType (_.substThis(clazz, newtpe))
tree match {
case This(_) if tree.symbol == clazz => to
case _ => super.transform(tree)
@@ -1410,8 +1483,7 @@ trait Trees extends api.Trees { self: SymbolTable =>
class TypeMapTreeSubstituter(val typeMap: TypeMap) extends Traverser {
override def traverse(tree: Tree) {
- if (tree.tpe ne null)
- tree.tpe = typeMap(tree.tpe)
+ tree modifyType typeMap
if (tree.isDef)
tree.symbol modifyInfo typeMap
@@ -1448,9 +1520,9 @@ trait Trees extends api.Trees { self: SymbolTable =>
if (tree.symbol == from.head) tree setSymbol to.head
else subst(from.tail, to.tail)
}
+ tree modifyType symSubst
- if (tree.tpe ne null) tree.tpe = symSubst(tree.tpe)
- if (tree.hasSymbol) {
+ if (tree.hasSymbolField) {
subst(from, to)
tree match {
case _: DefTree =>
@@ -1517,6 +1589,15 @@ trait Trees extends api.Trees { self: SymbolTable =>
}
}
+ private lazy val duplicator = new Duplicator(focusPositions = true)
+ private class Duplicator(focusPositions: Boolean) extends Transformer {
+ override val treeCopy = newStrictTreeCopier
+ override def transform(t: Tree) = {
+ val t1 = super.transform(t)
+ if ((t1 ne t) && t1.pos.isRange && focusPositions) t1 setPos t.pos.focus
+ t1
+ }
+ }
trait TreeStackTraverser extends Traverser {
import collection.mutable
val path: mutable.Stack[Tree] = mutable.Stack()
@@ -1526,14 +1607,7 @@ trait Trees extends api.Trees { self: SymbolTable =>
}
}
- private lazy val duplicator = new Transformer {
- override val treeCopy = newStrictTreeCopier
- override def transform(t: Tree) = {
- val t1 = super.transform(t)
- if ((t1 ne t) && t1.pos.isRange) t1 setPos t.pos.focus
- t1
- }
- }
+ def duplicateAndKeepPositions(tree: Tree) = new Duplicator(focusPositions = false) transform tree
// ------ copiers -------------------------------------------
@@ -1590,6 +1664,21 @@ trait Trees extends api.Trees { self: SymbolTable =>
sys.error("Not a ClassDef: " + t + "/" + t.getClass)
}
+ def copyModuleDef(tree: Tree)(
+ mods: Modifiers = null,
+ name: Name = null,
+ impl: Template = null
+ ): ModuleDef = tree match {
+ case ModuleDef(mods0, name0, impl0) =>
+ treeCopy.ModuleDef(tree,
+ if (mods eq null) mods0 else mods,
+ if (name eq null) name0 else name,
+ if (impl eq null) impl0 else impl
+ )
+ case t =>
+ sys.error("Not a ModuleDef: " + t + "/" + t.getClass)
+ }
+
def deriveDefDef(ddef: Tree)(applyToRhs: Tree => Tree): DefDef = ddef match {
case DefDef(mods0, name0, tparams0, vparamss0, tpt0, rhs0) =>
treeCopy.DefDef(ddef, mods0, name0, tparams0, vparamss0, tpt0, applyToRhs(rhs0))
diff --git a/src/reflect/scala/reflect/internal/TypeDebugging.scala b/src/reflect/scala/reflect/internal/TypeDebugging.scala
index 68b4fa69a1..71f84ab557 100644
--- a/src/reflect/scala/reflect/internal/TypeDebugging.scala
+++ b/src/reflect/scala/reflect/internal/TypeDebugging.scala
@@ -3,14 +3,13 @@
* @author Paul Phillips
*/
-package scala.reflect
+package scala
+package reflect
package internal
trait TypeDebugging {
self: SymbolTable =>
- import definitions._
-
// @M toString that is safe during debugging (does not normalize, ...)
object typeDebug {
private def to_s(x: Any): String = x match {
@@ -20,7 +19,6 @@ trait TypeDebugging {
case x: Product => x.productIterator mkString ("(", ", ", ")")
case _ => "" + x
}
- def ptIndent(x: Any) = ("" + x).replaceAll("\\n", " ")
def ptBlock(label: String, pairs: (String, Any)*): String = {
if (pairs.isEmpty) label + "{ }"
else {
diff --git a/src/reflect/scala/reflect/internal/Types.scala b/src/reflect/scala/reflect/internal/Types.scala
index 26d8234278..3725f570a8 100644
--- a/src/reflect/scala/reflect/internal/Types.scala
+++ b/src/reflect/scala/reflect/internal/Types.scala
@@ -3,7 +3,8 @@
* @author Martin Odersky
*/
-package scala.reflect
+package scala
+package reflect
package internal
import scala.collection.{ mutable, immutable, generic }
@@ -14,8 +15,8 @@ import Flags._
import scala.util.control.ControlThrowable
import scala.annotation.tailrec
import util.Statistics
-import scala.runtime.ObjectRef
import util.ThreeValues._
+import Variance._
/* A standard type pattern match:
case ErrorType =>
@@ -68,33 +69,37 @@ import util.ThreeValues._
// a type variable
// Replace occurrences of type parameters with type vars, where
// inst is the instantiation and constr is a list of bounds.
- case DeBruijnIndex(level, index, args)
- // for dependent method types: a type referring to a method parameter.
- case ErasedValueType(tref)
+ case ErasedValueType(clazz, underlying)
// only used during erasure of derived value classes.
*/
-trait Types extends api.Types { self: SymbolTable =>
+trait Types
+ extends api.Types
+ with tpe.TypeComparers
+ with tpe.TypeToStrings
+ with tpe.CommonOwners
+ with tpe.GlbLubs
+ with tpe.TypeMaps
+ with tpe.TypeConstraints { self: SymbolTable =>
+
import definitions._
import TypesStats._
private var explainSwitch = false
private final val emptySymbolSet = immutable.Set.empty[Symbol]
- private final val LogPendingSubTypesThreshold = 50
- private final val LogPendingBaseTypesThreshold = 50
- private final val LogVolatileThreshold = 50
+ protected[internal] final val DefaultLogThreshhold = 50
+ private final val LogPendingBaseTypesThreshold = DefaultLogThreshhold
+ private final val LogVolatileThreshold = DefaultLogThreshhold
/** A don't care value for the depth parameter in lubs/glbs and related operations. */
- private final val AnyDepth = -3
+ protected[internal] final val AnyDepth = -3
/** Decrement depth unless it is a don't care. */
- private final def decr(depth: Int) = if (depth == AnyDepth) AnyDepth else depth - 1
+ protected[internal] final def decr(depth: Int) = if (depth == AnyDepth) AnyDepth else depth - 1
- private final val printLubs = sys.props contains "scalac.debug.lub"
private final val traceTypeVars = sys.props contains "scalac.debug.tvar"
- /** In case anyone wants to turn off lub verification without reverting anything. */
- private final val verifyLubs = true
+ private final val breakCycles = settings.breakCycles.value
/** In case anyone wants to turn off type parameter bounds being used
* to seed type constraints.
*/
@@ -102,107 +107,11 @@ trait Types extends api.Types { self: SymbolTable =>
protected val enableTypeVarExperimentals = settings.Xexperimental.value
- /** Empty immutable maps to avoid allocations. */
- private val emptySymMap = immutable.Map[Symbol, Symbol]()
- private val emptySymCount = immutable.Map[Symbol, Int]()
-
/** The current skolemization level, needed for the algorithms
* in isSameType, isSubType that do constraint solving under a prefix.
*/
var skolemizationLevel = 0
- /** A log of type variable with their original constraints. Used in order
- * to undo constraints in the case of isSubType/isSameType failure.
- */
- lazy val undoLog = newUndoLog
-
- protected def newUndoLog = new UndoLog
-
- class UndoLog extends Clearable {
- private type UndoPairs = List[(TypeVar, TypeConstraint)]
- //OPT this method is public so we can do `manual inlining`
- var log: UndoPairs = List()
-
- /*
- * These two methods provide explicit locking mechanism that is overridden in SynchronizedUndoLog.
- *
- * The idea behind explicit locking mechanism is that all public methods that access mutable state
- * will have to obtain the lock for their entire execution so both reads and writes can be kept in
- * right order. Originally, that was achieved by overriding those public methods in
- * `SynchronizedUndoLog` which was fine but expensive. The reason is that those public methods take
- * thunk as argument and if we keep them non-final there's no way to make them inlined so thunks
- * can go away.
- *
- * By using explicit locking we can achieve inlining.
- *
- * NOTE: They are made public for now so we can apply 'manual inlining' (copy&pasting into hot
- * places implementation of `undo` or `undoUnless`). This should be changed back to protected
- * once inliner is fixed.
- */
- def lock(): Unit = ()
- def unlock(): Unit = ()
-
- // register with the auto-clearing cache manager
- perRunCaches.recordCache(this)
-
- /** Undo all changes to constraints to type variables upto `limit`. */
- //OPT this method is public so we can do `manual inlining`
- def undoTo(limit: UndoPairs) {
- assertCorrectThread()
- while ((log ne limit) && log.nonEmpty) {
- val (tv, constr) = log.head
- tv.constr = constr
- log = log.tail
- }
- }
-
- /** No sync necessary, because record should only
- * be called from within a undo or undoUnless block,
- * which is already synchronized.
- */
- private[reflect] def record(tv: TypeVar) = {
- log ::= ((tv, tv.constr.cloneInternal))
- }
-
- def clear() {
- lock()
- try {
- if (settings.debug.value)
- self.log("Clearing " + log.size + " entries from the undoLog.")
- log = Nil
- } finally unlock()
- }
- def size = {
- lock()
- try log.size finally unlock()
- }
-
- // `block` should not affect constraints on typevars
- def undo[T](block: => T): T = {
- lock()
- try {
- val before = log
-
- try block
- finally undoTo(before)
- } finally unlock()
- }
-
- // if `block` evaluates to false, it should not affect constraints on typevars
- def undoUnless(block: => Boolean): Boolean = {
- lock()
- try {
- val before = log
- var result = false
-
- try result = block
- finally if (!result) undoTo(before)
-
- result
- } finally unlock()
- }
- }
-
/** A map from lists to compound types that have the given list as parents.
* This is used to avoid duplication in the computation of base type sequences and baseClasses.
* It makes use of the fact that these two operations depend only on the parents,
@@ -223,7 +132,6 @@ trait Types extends api.Types { self: SymbolTable =>
override def isTrivial = underlying.isTrivial
override def isHigherKinded: Boolean = underlying.isHigherKinded
override def typeConstructor: Type = underlying.typeConstructor
- override def isNotNull = underlying.isNotNull
override def isError = underlying.isError
override def isErroneous = underlying.isErroneous
override def isStable: Boolean = underlying.isStable
@@ -256,7 +164,14 @@ trait Types extends api.Types { self: SymbolTable =>
* forwarded here. Some operations are rewrapped again.
*/
trait RewrappingTypeProxy extends SimpleTypeProxy {
- protected def maybeRewrap(newtp: Type) = if (newtp eq underlying) this else rewrap(newtp)
+ protected def maybeRewrap(newtp: Type) = (
+ if (newtp eq underlying) this
+ // BoundedWildcardTypes reach here during erroneous compilation: neg/t6258
+ // Higher-kinded exclusion is because [x]CC[x] compares =:= to CC: pos/t3800
+ // Otherwise, if newtp =:= underlying, don't rewrap it.
+ else if (!newtp.isWildcard && !newtp.isHigherKinded && (newtp =:= underlying)) this
+ else rewrap(newtp)
+ )
protected def rewrap(newtp: Type): Type
// the following are all operations in class Type that are overridden in some subclass
@@ -272,7 +187,6 @@ trait Types extends api.Types { self: SymbolTable =>
override def params: List[Symbol] = List()
override def paramTypes: List[Type] = List()
override def typeArgs = underlying.typeArgs
- override def notNull = maybeRewrap(underlying.notNull)
override def instantiateTypeParams(formals: List[Symbol], actuals: List[Type]) = underlying.instantiateTypeParams(formals, actuals)
override def skolemizeExistential(owner: Symbol, origin: AnyRef) = underlying.skolemizeExistential(owner, origin)
override def normalize = maybeRewrap(underlying.normalize)
@@ -297,7 +211,6 @@ trait Types extends api.Types { self: SymbolTable =>
abstract class TypeApiImpl extends TypeApi { this: Type =>
def declaration(name: Name): Symbol = decl(name)
- def nonPrivateDeclaration(name: Name): Symbol = nonPrivateDecl(name)
def declarations = decls
def typeArguments = typeArgs
def erasure = this match {
@@ -361,9 +274,6 @@ trait Types extends api.Types { self: SymbolTable =>
*/
def isVolatile: Boolean = false
- /** Is this type guaranteed not to have `null` as a value? */
- def isNotNull: Boolean = false
-
/** Is this type a structural refinement type (it ''refines'' members that have not been inherited) */
def isStructuralRefinement: Boolean = false
@@ -384,15 +294,11 @@ trait Types extends api.Types { self: SymbolTable =>
/** Is this type produced as a repair for an error? */
def isErroneous: Boolean = ErroneousCollector.collect(this)
- /** Does this type denote a reference type which can be null? */
- // def isNullable: Boolean = false
-
/** Can this type only be subtyped by bottom types?
* This is assessed to be the case if the class is final,
* and all type parameters (if any) are invariant.
*/
- def isFinalType: Boolean =
- typeSymbol.isFinal && (typeSymbol.typeParams forall symbolIsNonVariant) && prefix.isStable
+ def isFinalType = typeSymbol.hasOnlyBottomSubclasses && prefix.isStable
/** Is this type completed (i.e. not a lazy type)? */
def isComplete: Boolean = true
@@ -489,7 +395,7 @@ trait Types extends api.Types { self: SymbolTable =>
/** For a class with nonEmpty parents, the first parent.
* Otherwise some specific fixed top type.
*/
- def firstParent = if (parents.nonEmpty) parents.head else ObjectClass.tpe
+ def firstParent = if (parents.nonEmpty) parents.head else ObjectTpe
/** For a typeref or single-type, the prefix of the normalized type (@see normalize).
* NoType for all other types. */
@@ -524,11 +430,6 @@ trait Types extends api.Types { self: SymbolTable =>
/** Only used for dependent method types. */
def resultApprox: Type = ApproximateDependentMap(resultType)
- /** If this is a TypeRef `clazz`[`T`], return the argument `T`
- * otherwise return this type
- */
- def remove(clazz: Symbol): Type = this
-
/** For a curried/nullary method or poly type its non-method result type,
* the type itself for all other types */
def finalResultType: Type = this
@@ -557,13 +458,6 @@ trait Types extends api.Types { self: SymbolTable =>
* the empty list for all other types */
def boundSyms: immutable.Set[Symbol] = emptySymbolSet
- /** Mixin a NotNull trait unless type already has one
- * ...if the option is given, since it is causing typing bugs.
- */
- def notNull: Type =
- if (!settings.Ynotnull.value || isNotNull || phase.erasedTypes) this
- else NotNullType(this)
-
/** Replace formal type parameter symbols with actual type arguments.
*
* Amounts to substitution except for higher-kinded types. (See overridden method in TypeRef) -- @M
@@ -589,6 +483,26 @@ trait Types extends api.Types { self: SymbolTable =>
* Example: (in the below, `<List>` is the type constructor of List)
* TypeRef(pre, `<List>`, List()) is replaced by
* PolyType(X, TypeRef(pre, `<List>`, List(X)))
+ *
+ * Discussion: normalize is NOT usually what you want to be calling.
+ * The (very real) danger with normalize is that it will force types
+ * which would not otherwise have been forced, leading to mysterious
+ * behavioral differences, cycles, and other elements of mysteries.
+ * Under most conditions the method you should be calling is `dealiasWiden`
+ * (see that method for more info.)
+ *
+ * Here are a few of the side-effect-trail-leaving methods called
+ * by various implementations of normalize:
+ *
+ * - sym.info
+ * - tpe.etaExpand
+ * - tpe.betaReduce
+ * - tpe.memberType
+ * - sym.nextOverriddenSymbol
+ * - constraint.inst
+ *
+ * If you've been around the compiler a while that list must fill
+ * your heart with fear.
*/
def normalize = this // @MAT
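A small source-level contrast between the pieces dealiasWiden composes and what normalize risks forcing (hypothetical alias):

type Id = Long
val current: Id = 42L
// widen:        current.type -> Id     (drop the singleton)
// dealias:      Id           -> Long   (follow the alias)
// dealiasWiden: current.type -> Long   (iterate both to a fixpoint)
// normalize additionally eta-expands and forces symbol infos, which is rarely what callers need.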
@@ -598,6 +512,8 @@ trait Types extends api.Types { self: SymbolTable =>
/** Repeatedly apply widen and dealias until they have no effect.
* This compensates for the fact that type aliases can hide beneath
* singleton types and singleton types can hide inside type aliases.
+ * !!! - and yet it is still inadequate, because aliases and singletons
+ * might lurk in the upper bounds of an abstract type. See SI-7051.
*/
def dealiasWiden: Type = (
if (this ne widen) widen.dealiasWiden
@@ -686,16 +602,6 @@ trait Types extends api.Types { self: SymbolTable =>
def nonPrivateMember(name: Name): Symbol =
memberBasedOnName(name, BridgeAndPrivateFlags)
- /** All members with the given flags, excluding bridges.
- */
- def membersWithFlags(requiredFlags: Long): Scope =
- membersBasedOnFlags(BridgeFlags, requiredFlags)
-
- /** All non-private members with the given flags, excluding bridges.
- */
- def nonPrivateMembersWithFlags(requiredFlags: Long): Scope =
- membersBasedOnFlags(BridgeAndPrivateFlags, requiredFlags)
-
/** The non-private member with given name, admitting members with given flags `admit`.
* "Admitting" refers to the fact that members with a PRIVATE, BRIDGE, or VBRIDGE
* flag are usually excluded from findMember results, but supplying any of those flags
@@ -716,10 +622,9 @@ trait Types extends api.Types { self: SymbolTable =>
*/
def membersBasedOnFlags(excludedFlags: Long, requiredFlags: Long): Scope =
findMembers(excludedFlags, requiredFlags)
-// findMember(nme.ANYNAME, excludedFlags, requiredFlags, false).alternatives
def memberBasedOnName(name: Name, excludedFlags: Long): Symbol =
- findMember(name, excludedFlags, 0, false)
+ findMember(name, excludedFlags, 0, stableOnly = false)
/** The least type instance of given class which is a supertype
* of this type. Example:
@@ -750,7 +655,7 @@ trait Types extends api.Types { self: SymbolTable =>
)
if (trivial) this
else {
- val m = new AsSeenFromMap(pre.normalize, clazz)
+ val m = newAsSeenFromMap(pre.normalize, clazz)
val tp = m(this)
val tp1 = existentialAbstraction(m.capturedParams, tp)
@@ -770,6 +675,7 @@ trait Types extends api.Types { self: SymbolTable =>
* }}}
*/
def memberInfo(sym: Symbol): Type = {
+ require(sym ne NoSymbol, this)
sym.info.asSeenFrom(this, sym.owner)
}
@@ -830,7 +736,6 @@ trait Types extends api.Types { self: SymbolTable =>
else substThis(from, to).substSym(symsFrom, symsTo)
/** Returns all parts of this type which satisfy predicate `p` */
- def filter(p: Type => Boolean): List[Type] = new FilterTypeCollector(p) collect this
def withFilter(p: Type => Boolean) = new FilterMapForeach(p)
class FilterMapForeach(p: Type => Boolean) extends FilterTypeCollector(p){
@@ -838,6 +743,8 @@ trait Types extends api.Types { self: SymbolTable =>
def map[T](f: Type => T): List[T] = collect(Type.this) map f
}
+ @inline final def orElse(alt: => Type): Type = if (this ne NoType) this else alt
+
/** Returns optionally first type (in a preorder traversal) which satisfies predicate `p`,
* or None if none exists.
*/
@@ -860,9 +767,6 @@ trait Types extends api.Types { self: SymbolTable =>
/** Does this type contain a reference to this symbol? */
def contains(sym: Symbol): Boolean = new ContainsCollector(sym).collect(this)
- /** Does this type contain a reference to this type */
- def containsTp(tp: Type): Boolean = new ContainsTypeCollector(tp).collect(this)
-
/** Is this type a subtype of that type? */
def <:<(that: Type): Boolean = {
if (Statistics.canEnable) stat_<:<(that)
@@ -874,23 +778,27 @@ trait Types extends api.Types { self: SymbolTable =>
}
/** Is this type a subtype of that type in a pattern context?
- * Any type arguments on the right hand side are replaced with
+ * Dummy type arguments on the right hand side are replaced with
* fresh existentials, except for Arrays.
*
* See bug1434.scala for an example of code which would fail
* if only a <:< test were applied.
*/
- def matchesPattern(that: Type): Boolean = {
- (this <:< that) || ((this, that) match {
- case (TypeRef(_, ArrayClass, List(arg1)), TypeRef(_, ArrayClass, List(arg2))) if arg2.typeSymbol.typeParams.nonEmpty =>
- arg1 matchesPattern arg2
- case (_, TypeRef(_, _, args)) =>
- val newtp = existentialAbstraction(args map (_.typeSymbol), that)
- !(that =:= newtp) && (this <:< newtp)
- case _ =>
- false
- })
- }
+ def matchesPattern(that: Type): Boolean = (this <:< that) || (that match {
+ case ArrayTypeRef(elem2) if elem2.typeConstructor.isHigherKinded =>
+ this match {
+ case ArrayTypeRef(elem1) => elem1 matchesPattern elem2
+ case _ => false
+ }
+ case TypeRef(_, sym, args) =>
+ val that1 = existentialAbstraction(args map (_.typeSymbol), that)
+ (that ne that1) && (this <:< that1) && {
+ log(s"$this.matchesPattern($that) depended on discarding args and testing <:< $that1")
+ true
+ }
+ case _ =>
+ false
+ })
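A source-level illustration of the relationship this predicate is meant to admit (how and where the typer consults it is elided here): the scrutinee only has to conform once the unchecked type arguments are abstracted away.

def describe(x: Any): String = x match {
  case _: List[String] => "a list"   // unchecked: the element type is erased, only List-ness is tested
  case _               => "other"
}
describe(List(1, 2, 3))   // "a list"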
def stat_<:<(that: Type): Boolean = {
if (Statistics.canEnable) Statistics.incCounter(subtypeCount)
@@ -921,12 +829,7 @@ trait Types extends api.Types { self: SymbolTable =>
(this eq that) ||
(if (explainSwitch) explain("=", isSameType, this, that)
else isSameType(this, that))
- );
-
- /** Does this type implement symbol `sym` with same or stronger type? */
- def specializes(sym: Symbol): Boolean =
- if (explainSwitch) explain("specializes", specializesSym, this, sym)
- else specializesSym(this, sym)
+ )
/** Is this type close enough to that type so that members
* with the two type would override each other?
@@ -943,7 +846,7 @@ trait Types extends api.Types { self: SymbolTable =>
def matches(that: Type): Boolean = matchesType(this, that, !phase.erasedTypes)
/** Same as matches, except that non-method types are always assumed to match. */
- def looselyMatches(that: Type): Boolean = matchesType(this, that, true)
+ def looselyMatches(that: Type): Boolean = matchesType(this, that, alwaysMatchSimple = true)
/** The shortest sorted upwards closed array of types that contains
* this type as first element.
@@ -971,7 +874,7 @@ trait Types extends api.Types { self: SymbolTable =>
def baseTypeSeqDepth: Int = 1
/** The list of all baseclasses of this type (including its own typeSymbol)
- * in reverse linearization order, starting with the class itself and ending
+ * in linearization order, starting with the class itself and ending
* in class Any.
*/
def baseClasses: List[Symbol] = List()
@@ -1058,7 +961,7 @@ trait Types extends api.Types { self: SymbolTable =>
var sym: Symbol = NoSymbol
var e: ScopeEntry = decls.lookupEntry(name)
while (e ne null) {
- if (!e.sym.hasFlag(excludedFlags)) {
+ if (!e.sym.hasFlag(excludedFlags.toLong)) {
if (sym == NoSymbol) sym = e.sym
else {
if (alts.isEmpty) alts = sym :: Nil
@@ -1072,69 +975,66 @@ trait Types extends api.Types { self: SymbolTable =>
}
def findMembers(excludedFlags: Long, requiredFlags: Long): Scope = {
- // if this type contains type variables, put them to sleep for a while -- don't just wipe them out by
- // replacing them by the corresponding type parameter, as that messes up (e.g.) type variables in type refinements
- // without this, the matchesType call would lead to type variables on both sides
- // of a subtyping/equality judgement, which can lead to recursive types being constructed.
- // See (t0851) for a situation where this happens.
- val suspension: List[TypeVar] = if (this.isGround) null else suspendTypeVarsInType(this)
-
- if (Statistics.canEnable) Statistics.incCounter(findMembersCount)
- val start = if (Statistics.canEnable) Statistics.pushTimer(typeOpsStack, findMembersNanos) else null
-
- //Console.println("find member " + name.decode + " in " + this + ":" + this.baseClasses)//DEBUG
- var members: Scope = null
- var required = requiredFlags
- var excluded = excludedFlags | DEFERRED
- var continue = true
- var self: Type = null
- while (continue) {
- continue = false
- val bcs0 = baseClasses
- var bcs = bcs0
- while (!bcs.isEmpty) {
- val decls = bcs.head.info.decls
- var entry = decls.elems
- while (entry ne null) {
- val sym = entry.sym
- val flags = sym.flags
- if ((flags & required) == required) {
- val excl = flags & excluded
- if (excl == 0L &&
- (// omit PRIVATE LOCALS unless selector class is contained in class owning the def.
- (bcs eq bcs0) ||
- (flags & PrivateLocal) != PrivateLocal ||
- (bcs0.head.hasTransOwner(bcs.head)))) {
- if (members eq null) members = newFindMemberScope
- var others: ScopeEntry = members.lookupEntry(sym.name)
- var symtpe: Type = null
- while ((others ne null) && {
- val other = others.sym
- (other ne sym) &&
- ((other.owner eq sym.owner) ||
- (flags & PRIVATE) != 0 || {
- if (self eq null) self = narrowForFindMember(this)
- if (symtpe eq null) symtpe = self.memberType(sym)
- !(self.memberType(other) matches symtpe)
- })}) {
- others = members lookupNextEntry others
+ def findMembersInternal: Scope = {
+ var members: Scope = null
+ if (Statistics.canEnable) Statistics.incCounter(findMembersCount)
+ val start = if (Statistics.canEnable) Statistics.pushTimer(typeOpsStack, findMembersNanos) else null
+
+ //Console.println("find member " + name.decode + " in " + this + ":" + this.baseClasses)//DEBUG
+ var required = requiredFlags
+ var excluded = excludedFlags | DEFERRED
+ var continue = true
+ var self: Type = null
+ while (continue) {
+ continue = false
+ val bcs0 = baseClasses
+ var bcs = bcs0
+ while (!bcs.isEmpty) {
+ val decls = bcs.head.info.decls
+ var entry = decls.elems
+ while (entry ne null) {
+ val sym = entry.sym
+ val flags = sym.flags
+ if ((flags & required) == required) {
+ val excl = flags & excluded
+ if (excl == 0L &&
+ (// omit PRIVATE LOCALS unless selector class is contained in class owning the def.
+ (bcs eq bcs0) ||
+ (flags & PrivateLocal) != PrivateLocal ||
+ (bcs0.head.hasTransOwner(bcs.head)))) {
+ if (members eq null) members = newFindMemberScope
+ var others: ScopeEntry = members.lookupEntry(sym.name)
+ var symtpe: Type = null
+ while ((others ne null) && {
+ val other = others.sym
+ (other ne sym) &&
+ ((other.owner eq sym.owner) ||
+ (flags & PRIVATE) != 0 || {
+ if (self eq null) self = narrowForFindMember(this)
+ if (symtpe eq null) symtpe = self.memberType(sym)
+ !(self.memberType(other) matches symtpe)
+ })}) {
+ others = members lookupNextEntry others
+ }
+ if (others eq null) members enter sym
+ } else if (excl == DEFERRED) {
+ continue = true
}
- if (others eq null) members enter sym
- } else if (excl == DEFERRED) {
- continue = true
}
- }
- entry = entry.next
- } // while (entry ne null)
- // excluded = excluded | LOCAL
- bcs = bcs.tail
- } // while (!bcs.isEmpty)
- required |= DEFERRED
- excluded &= ~(DEFERRED.toLong)
- } // while (continue)
- if (Statistics.canEnable) Statistics.popTimer(typeOpsStack, start)
- if (suspension ne null) suspension foreach (_.suspended = false)
- if (members eq null) EmptyScope else members
+ entry = entry.next
+ } // while (entry ne null)
+ // excluded = excluded | LOCAL
+ bcs = bcs.tail
+ } // while (!bcs.isEmpty)
+ required |= DEFERRED
+ excluded &= ~(DEFERRED.toLong)
+ } // while (continue)
+ if (Statistics.canEnable) Statistics.popTimer(typeOpsStack, start)
+ if (members eq null) EmptyScope else members
+ }
+
+ if (this.isGround) findMembersInternal
+ else suspendingTypeVars(typeVarsInType(this))(findMembersInternal)
}
/**
@@ -1148,110 +1048,106 @@ trait Types extends api.Types { self: SymbolTable =>
*/
//TODO: use narrow only for modules? (correct? efficiency gain?)
def findMember(name: Name, excludedFlags: Long, requiredFlags: Long, stableOnly: Boolean): Symbol = {
- // if this type contains type variables, put them to sleep for a while -- don't just wipe them out by
- // replacing them by the corresponding type parameter, as that messes up (e.g.) type variables in type refinements
- // without this, the matchesType call would lead to type variables on both sides
- // of a subtyping/equality judgement, which can lead to recursive types being constructed.
- // See (t0851) for a situation where this happens.
- val suspension: List[TypeVar] = if (this.isGround) null else suspendTypeVarsInType(this)
-
- if (Statistics.canEnable) Statistics.incCounter(findMemberCount)
- val start = if (Statistics.canEnable) Statistics.pushTimer(typeOpsStack, findMemberNanos) else null
-
- //Console.println("find member " + name.decode + " in " + this + ":" + this.baseClasses)//DEBUG
- var member: Symbol = NoSymbol
- var members: List[Symbol] = null
- var lastM: ::[Symbol] = null
- var membertpe: Type = null
- var required = requiredFlags
- var excluded = excludedFlags | DEFERRED
- var continue = true
- var self: Type = null
-
- while (continue) {
- continue = false
- val bcs0 = baseClasses
- var bcs = bcs0
- // omit PRIVATE LOCALS unless selector class is contained in class owning the def.
- def admitPrivateLocal(owner: Symbol): Boolean = {
- val selectorClass = this match {
- case tt: ThisType => tt.sym // SI-7507 the first base class is not necessarily the selector class.
- case _ => bcs0.head
+ def findMemberInternal: Symbol = {
+ var member: Symbol = NoSymbol
+ var members: List[Symbol] = null
+ var lastM: ::[Symbol] = null
+ if (Statistics.canEnable) Statistics.incCounter(findMemberCount)
+ val start = if (Statistics.canEnable) Statistics.pushTimer(typeOpsStack, findMemberNanos) else null
+
+ //Console.println("find member " + name.decode + " in " + this + ":" + this.baseClasses)//DEBUG
+ var membertpe: Type = null
+ var required = requiredFlags
+ var excluded = excludedFlags | DEFERRED
+ var continue = true
+ var self: Type = null
+
+ while (continue) {
+ continue = false
+ val bcs0 = baseClasses
+ var bcs = bcs0
+ // omit PRIVATE LOCALS unless selector class is contained in class owning the def.
+ def admitPrivateLocal(owner: Symbol): Boolean = {
+ val selectorClass = this match {
+ case tt: ThisType => tt.sym // SI-7507 the first base class is not necessarily the selector class.
+ case _ => bcs0.head
+ }
+ selectorClass.hasTransOwner(owner)
}
- selectorClass.hasTransOwner(owner)
- }
- while (!bcs.isEmpty) {
- val decls = bcs.head.info.decls
- var entry = decls.lookupEntry(name)
- while (entry ne null) {
- val sym = entry.sym
- val flags = sym.flags
- if ((flags & required) == required) {
- val excl = flags & excluded
- if (excl == 0L &&
- (
- (bcs eq bcs0) ||
- (flags & PrivateLocal) != PrivateLocal ||
- admitPrivateLocal(bcs.head))) {
- if (name.isTypeName || stableOnly && sym.isStable) {
- if (Statistics.canEnable) Statistics.popTimer(typeOpsStack, start)
- if (suspension ne null) suspension foreach (_.suspended = false)
- return sym
- } else if (member eq NoSymbol) {
- member = sym
- } else if (members eq null) {
- if ((member ne sym) &&
- ((member.owner eq sym.owner) ||
- (flags & PRIVATE) != 0 || {
- if (self eq null) self = narrowForFindMember(this)
- if (membertpe eq null) membertpe = self.memberType(member)
- !(membertpe matches self.memberType(sym))
- })) {
- lastM = new ::(sym, null)
- members = member :: lastM
- }
- } else {
- var others: List[Symbol] = members
- var symtpe: Type = null
- while ((others ne null) && {
- val other = others.head
- (other ne sym) &&
- ((other.owner eq sym.owner) ||
+ while (!bcs.isEmpty) {
+ val decls = bcs.head.info.decls
+ var entry = decls.lookupEntry(name)
+ while (entry ne null) {
+ val sym = entry.sym
+ val flags = sym.flags
+ if ((flags & required) == required) {
+ val excl = flags & excluded
+ if (excl == 0L &&
+ (
+ (bcs eq bcs0) ||
+ (flags & PrivateLocal) != PrivateLocal ||
+ admitPrivateLocal(bcs.head))) {
+ if (name.isTypeName || (stableOnly && sym.isStable && !sym.hasVolatileType)) {
+ if (Statistics.canEnable) Statistics.popTimer(typeOpsStack, start)
+ return sym
+ } else if (member eq NoSymbol) {
+ member = sym
+ } else if (members eq null) {
+ if ((member ne sym) &&
+ ((member.owner eq sym.owner) ||
(flags & PRIVATE) != 0 || {
if (self eq null) self = narrowForFindMember(this)
- if (symtpe eq null) symtpe = self.memberType(sym)
- !(self.memberType(other) matches symtpe)
- })}) {
- others = others.tail
- }
- if (others eq null) {
- val lastM1 = new ::(sym, null)
- lastM.tl = lastM1
- lastM = lastM1
+ if (membertpe eq null) membertpe = self.memberType(member)
+ !(membertpe matches self.memberType(sym))
+ })) {
+ lastM = new ::(sym, null)
+ members = member :: lastM
+ }
+ } else {
+ var others: List[Symbol] = members
+ var symtpe: Type = null
+ while ((others ne null) && {
+ val other = others.head
+ (other ne sym) &&
+ ((other.owner eq sym.owner) ||
+ (flags & PRIVATE) != 0 || {
+ if (self eq null) self = narrowForFindMember(this)
+ if (symtpe eq null) symtpe = self.memberType(sym)
+ !(self.memberType(other) matches symtpe)
+ })}) {
+ others = others.tail
+ }
+ if (others eq null) {
+ val lastM1 = new ::(sym, null)
+ lastM.tl = lastM1
+ lastM = lastM1
+ }
}
+ } else if (excl == DEFERRED) {
+ continue = true
}
- } else if (excl == DEFERRED) {
- continue = true
}
- }
- entry = decls lookupNextEntry entry
- } // while (entry ne null)
- // excluded = excluded | LOCAL
- bcs = if (name == nme.CONSTRUCTOR) Nil else bcs.tail
- } // while (!bcs.isEmpty)
- required |= DEFERRED
- excluded &= ~(DEFERRED.toLong)
- } // while (continue)
- if (Statistics.canEnable) Statistics.popTimer(typeOpsStack, start)
- if (suspension ne null) suspension foreach (_.suspended = false)
- if (members eq null) {
- if (member == NoSymbol) if (Statistics.canEnable) Statistics.incCounter(noMemberCount)
- member
- } else {
- if (Statistics.canEnable) Statistics.incCounter(multMemberCount)
- lastM.tl = Nil
- baseClasses.head.newOverloaded(this, members)
+ entry = decls lookupNextEntry entry
+ } // while (entry ne null)
+ // excluded = excluded | LOCAL
+ bcs = if (name == nme.CONSTRUCTOR) Nil else bcs.tail
+ } // while (!bcs.isEmpty)
+ required |= DEFERRED
+ excluded &= ~(DEFERRED.toLong)
+ } // while (continue)
+ if (Statistics.canEnable) Statistics.popTimer(typeOpsStack, start)
+ if (members eq null) {
+ if (member == NoSymbol) if (Statistics.canEnable) Statistics.incCounter(noMemberCount)
+ member
+ } else {
+ if (Statistics.canEnable) Statistics.incCounter(multMemberCount)
+ lastM.tl = Nil
+ baseClasses.head.newOverloaded(this, members)
+ }
}
+
+ if (this.isGround) findMemberInternal
+ else suspendingTypeVars(typeVarsInType(this))(findMemberInternal)
}
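Reviewer note, not part of the patch: findMember and findMembers now share the same shape; the body moves into a nested def and, for non-ground types, runs with the type's type variables suspended instead of threading a `suspension` list through the loop and the early returns. A minimal standalone sketch of that suspend-around-body pattern (ToyTypeVar and suspendingToyVars are illustrative names, not compiler API):

object SuspendSketch {
  // Toy stand-in for a type variable with a mutable "suspended" flag.
  final class ToyTypeVar { var suspended = false }

  // Run `op` with all given variables suspended, restoring them afterwards
  // even if `op` returns early or throws.
  def suspendingToyVars[A](tvs: List[ToyTypeVar])(op: => A): A = {
    tvs foreach (_.suspended = true)
    try op
    finally tvs foreach (_.suspended = false)
  }
}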
/** The (existential or otherwise) skolems and existentially quantified variables which are free in this type */
@@ -1280,10 +1176,6 @@ trait Types extends api.Types { self: SymbolTable =>
def setAnnotations(annots: List[AnnotationInfo]): Type = annotatedType(annots, this)
def withAnnotations(annots: List[AnnotationInfo]): Type = annotatedType(annots, this)
- /** Remove any annotations from this type and from any
- * types embedded in this type. */
- def stripAnnotations = StripAnnotationsMap(this)
-
/** Set the self symbol of an annotated type, or do nothing
* otherwise. */
def withSelfsym(sym: Symbol) = this
@@ -1316,17 +1208,6 @@ trait Types extends api.Types { self: SymbolTable =>
override def baseTypeSeq: BaseTypeSeq = supertype.baseTypeSeq
override def baseTypeSeqDepth: Int = supertype.baseTypeSeqDepth
override def baseClasses: List[Symbol] = supertype.baseClasses
- override def isNotNull = supertype.isNotNull
- }
-
- case class NotNullType(override val underlying: Type) extends SubType with RewrappingTypeProxy {
- def supertype = underlying
- protected def rewrap(newtp: Type): Type = NotNullType(newtp)
- override def isNotNull: Boolean = true
- override def notNull = this
- override def deconst: Type = underlying //todo: needed?
- override def safeToString: String = underlying.toString + " with NotNull"
- override def kind = "NotNullType"
}
/** A base class for types that represent a single value
@@ -1376,7 +1257,6 @@ trait Types extends api.Types { self: SymbolTable =>
override def baseType(clazz: Symbol): Type = this
override def safeToString: String = "<error>"
override def narrow: Type = this
- // override def isNullable: Boolean = true
override def kind = "ErrorType"
}
@@ -1386,7 +1266,6 @@ trait Types extends api.Types { self: SymbolTable =>
case object WildcardType extends Type {
override def isWildcard = true
override def safeToString: String = "?"
- // override def isNullable: Boolean = true
override def kind = "WildcardType"
}
/** BoundedWildcardTypes, used only during type inference, are created in
@@ -1411,7 +1290,6 @@ trait Types extends api.Types { self: SymbolTable =>
case object NoType extends Type {
override def isTrivial: Boolean = true
override def safeToString: String = "<notype>"
- // override def isNullable: Boolean = true
override def kind = "NoType"
}
@@ -1421,7 +1299,6 @@ trait Types extends api.Types { self: SymbolTable =>
override def isStable: Boolean = true
override def prefixString = ""
override def safeToString: String = "<noprefix>"
- // override def isNullable: Boolean = true
override def kind = "NoPrefixType"
}
@@ -1434,15 +1311,13 @@ trait Types extends api.Types { self: SymbolTable =>
abort(s"ThisType($sym) for sym which is not a class")
}
- //assert(sym.isClass && !sym.isModuleClass || sym.isRoot, sym)
override def isTrivial: Boolean = sym.isPackageClass
- override def isNotNull = true
override def typeSymbol = sym
override def underlying: Type = sym.typeOfThis
override def isVolatile = false
override def isHigherKinded = sym.isRefinementClass && underlying.isHigherKinded
override def prefixString =
- if (settings.debug.value) sym.nameString + ".this."
+ if (settings.debug) sym.nameString + ".this."
else if (sym.isAnonOrRefinementClass) "this."
else if (sym.isOmittablePrefix) ""
else if (sym.isModuleClass) sym.fullNameString + "."
@@ -1460,7 +1335,7 @@ trait Types extends api.Types { self: SymbolTable =>
def apply(sym: Symbol): Type = (
if (!phase.erasedTypes) unique(new UniqueThisType(sym))
else if (sym.isImplClass) sym.typeOfThis
- else sym.tpe
+ else sym.tpe_*
)
}
@@ -1475,8 +1350,6 @@ trait Types extends api.Types { self: SymbolTable =>
}
override def isGround = sym.isPackageClass || pre.isGround
- // override def isNullable = underlying.isNullable
- override def isNotNull = underlying.isNotNull
private[reflect] var underlyingCache: Type = NoType
private[reflect] var underlyingPeriod = NoPeriod
override def underlying: Type = {
@@ -1491,7 +1364,7 @@ trait Types extends api.Types { self: SymbolTable =>
// more precise conceptually, but causes cyclic errors: (paramss exists (_ contains sym))
override def isImmediatelyDependent = (sym ne NoSymbol) && (sym.owner.isMethod && sym.isValueParameter)
- override def isVolatile : Boolean = underlying.isVolatile && !sym.isStable
+ override def isVolatile : Boolean = underlying.isVolatile && (sym.hasVolatileType || !sym.isStable)
/*
override def narrow: Type = {
if (phase.erasedTypes) this
@@ -1531,7 +1404,7 @@ trait Types extends api.Types { self: SymbolTable =>
tpe.underlyingPeriod = currentPeriod
if (!isValid(period)) {
// [Eugene to Paul] needs review
- tpe.underlyingCache = if (tpe.sym == NoSymbol) ThisType(rootMirror.RootClass) else tpe.pre.memberType(tpe.sym).resultType;
+ tpe.underlyingCache = if (tpe.sym == NoSymbol) ThisType(rootMirror.RootClass) else tpe.pre.memberType(tpe.sym).resultType
assert(tpe.underlyingCache ne tpe, tpe)
}
}
@@ -1543,7 +1416,6 @@ trait Types extends api.Types { self: SymbolTable =>
if (trivial == UNKNOWN) trivial = fromBoolean(thistpe.isTrivial && supertpe.isTrivial)
toBoolean(trivial)
}
- override def isNotNull = true;
override def typeSymbol = thistpe.typeSymbol
override def underlying = supertpe
override def prefix: Type = supertpe.prefix
@@ -1571,23 +1443,36 @@ trait Types extends api.Types { self: SymbolTable =>
case TypeBounds(_, _) => that <:< this
case _ => lo <:< that && that <:< hi
}
- private def lowerString = if (emptyLowerBound) "" else " >: " + lo
- private def upperString = if (emptyUpperBound) "" else " <: " + hi
- private def emptyLowerBound = typeIsNothing(lo)
- private def emptyUpperBound = typeIsAny(hi)
+ private def emptyLowerBound = typeIsNothing(lo) || lo.isWildcard
+ private def emptyUpperBound = typeIsAny(hi) || hi.isWildcard
def isEmptyBounds = emptyLowerBound && emptyUpperBound
- // override def isNullable: Boolean = NullClass.tpe <:< lo;
- override def safeToString = lowerString + upperString
+ override def safeToString = scalaNotation(_.toString)
+
+  /** Bounds notation used in Scala syntax.
+ * For example +This <: scala.collection.generic.Sorted[K,This].
+ */
+ private[internal] def scalaNotation(typeString: Type => String): String = {
+ (if (emptyLowerBound) "" else " >: " + typeString(lo)) +
+ (if (emptyUpperBound) "" else " <: " + typeString(hi))
+ }
+ /** Bounds notation used in http://adriaanm.github.com/files/higher.pdf.
+ * For example *(scala.collection.generic.Sorted[K,This]).
+ */
+ private[internal] def starNotation(typeString: Type => String): String = {
+ if (emptyLowerBound && emptyUpperBound) ""
+ else if (emptyLowerBound) "(" + typeString(hi) + ")"
+ else "(%s, %s)" format (typeString(lo), typeString(hi))
+ }
override def kind = "TypeBoundsType"
}
final class UniqueTypeBounds(lo: Type, hi: Type) extends TypeBounds(lo, hi)
object TypeBounds extends TypeBoundsExtractor {
- def empty: TypeBounds = apply(NothingClass.tpe, AnyClass.tpe)
- def upper(hi: Type): TypeBounds = apply(NothingClass.tpe, hi)
- def lower(lo: Type): TypeBounds = apply(lo, AnyClass.tpe)
+ def empty: TypeBounds = apply(NothingTpe, AnyTpe)
+ def upper(hi: Type): TypeBounds = apply(NothingTpe, hi)
+ def lower(lo: Type): TypeBounds = apply(lo, AnyTpe)
def apply(lo: Type, hi: Type): TypeBounds = {
unique(new UniqueTypeBounds(lo, hi)).asInstanceOf[TypeBounds]
}
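Reviewer note: the two notations added to TypeBounds differ only in how empty bounds are elided. A self-contained sketch, with plain strings standing in for Type and literal "Nothing"/"Any" checks standing in for typeIsNothing/typeIsAny (names here are illustrative):

object BoundsNotationSketch {
  final case class ToyBounds(lo: String, hi: String) {
    private def emptyLo = lo == "Nothing"
    private def emptyHi = hi == "Any"
    // Mirrors scalaNotation: omit whichever bound is trivial.
    def scalaNotation: String =
      (if (emptyLo) "" else " >: " + lo) + (if (emptyHi) "" else " <: " + hi)
    // Mirrors starNotation: parenthesised upper bound, or a (lo, hi) pair.
    def starNotation: String =
      if (emptyLo && emptyHi) ""
      else if (emptyLo) "(" + hi + ")"
      else "(%s, %s)".format(lo, hi)
  }
  // ToyBounds("Nothing", "Ordered[T]").scalaNotation == " <: Ordered[T]"
  // ToyBounds("Nothing", "Ordered[T]").starNotation  == "(Ordered[T])"
}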
@@ -1657,20 +1542,48 @@ trait Types extends api.Types { self: SymbolTable =>
}
override def narrow: Type = typeSymbol.thisType
- override def isNotNull: Boolean = parents exists typeIsNotNull
override def isStructuralRefinement: Boolean =
typeSymbol.isAnonOrRefinementClass && (decls exists symbolIsPossibleInRefinement)
- // override def isNullable: Boolean =
- // parents forall (p => p.isNullable && !p.typeSymbol.isAbstractType);
-
override def safeToString: String = parentsString(parents) + (
- (if (settings.debug.value || parents.isEmpty || (decls.elems ne null))
+ (if (settings.debug || parents.isEmpty || (decls.elems ne null))
fullyInitializeScope(decls).mkString("{", "; ", "}") else "")
)
}
+ protected def computeBaseClasses(tpe: Type): List[Symbol] = {
+ val parents = tpe.parents // adriaan says tpe.parents does work sometimes, so call it only once
+ val baseTail = (
+ if (parents.isEmpty || parents.head.isInstanceOf[PackageTypeRef]) Nil
+ else {
+ //Console.println("computing base classes of " + typeSymbol + " at phase " + phase);//DEBUG
+ // optimized, since this seems to be performance critical
+ val superclazz = parents.head // parents.isEmpty was already excluded
+ var mixins = parents.tail
+ val sbcs = superclazz.baseClasses
+ var bcs = sbcs
+ def isNew(clazz: Symbol): Boolean = (
+ superclazz.baseTypeIndex(clazz) < 0 &&
+ { var p = bcs
+ while ((p ne sbcs) && (p.head != clazz)) p = p.tail
+ p eq sbcs
+ }
+ )
+ while (!mixins.isEmpty) {
+ def addMixinBaseClasses(mbcs: List[Symbol]): List[Symbol] =
+ if (mbcs.isEmpty) bcs
+ else if (isNew(mbcs.head)) mbcs.head :: addMixinBaseClasses(mbcs.tail)
+ else addMixinBaseClasses(mbcs.tail)
+ bcs = addMixinBaseClasses(mixins.head.baseClasses)
+ mixins = mixins.tail
+ }
+ bcs
+ }
+ )
+ tpe.typeSymbol :: baseTail
+ }
+
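Reviewer note: computeBaseClasses above merges each mixin's base classes in front of the superclass's linearization, keeping only classes not already present; the result is the class itself consed onto that list. A toy version over strings (a simple containment check plays the role of baseTypeIndex plus the pointer-equality walk):

object BaseClassesSketch {
  def toyBaseClasses(clazz: String, superBases: List[String], mixins: List[List[String]]): List[String] = {
    var bcs = superBases
    for (mbcs <- mixins)
      bcs = mbcs.filterNot(c => bcs.contains(c)) ::: bcs  // prepend only the not-yet-seen classes
    clazz :: bcs
  }
  // toyBaseClasses("C", List("B", "Any"), List(List("M", "Any"))) == List("C", "M", "B", "Any")
}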
protected def defineBaseTypeSeqOfCompoundType(tpe: CompoundType) = {
val period = tpe.baseTypeSeqPeriod
if (period != currentPeriod) {
@@ -1690,7 +1603,7 @@ trait Types extends api.Types { self: SymbolTable =>
val paramToVarMap = varToParamMap map (_.swap)
val varToParam = new TypeMap {
def apply(tp: Type) = varToParamMap get tp match {
- case Some(sym) => sym.tpe
+ case Some(sym) => sym.tpe_*
case _ => mapOver(tp)
}
}
@@ -1709,7 +1622,7 @@ trait Types extends api.Types { self: SymbolTable =>
tpe.baseTypeSeqCache = undetBaseTypeSeq
tpe.baseTypeSeqCache =
if (tpe.typeSymbol.isRefinementClass)
- tpe.memo(compoundBaseTypeSeq(tpe))(_.baseTypeSeq updateHead tpe.typeSymbol.tpe)
+ tpe.memo(compoundBaseTypeSeq(tpe))(_.baseTypeSeq updateHead tpe.typeSymbol.tpe_*)
else
compoundBaseTypeSeq(tpe)
} finally {
@@ -1731,41 +1644,61 @@ trait Types extends api.Types { self: SymbolTable =>
throw new TypeError("illegal cyclic inheritance involving " + tpe.typeSymbol)
}
- protected def defineBaseClassesOfCompoundType(tpe: CompoundType) = {
- def computeBaseClasses: List[Symbol] =
- if (tpe.parents.isEmpty) List(tpe.typeSymbol)
- else {
- //Console.println("computing base classes of " + typeSymbol + " at phase " + phase);//DEBUG
- // optimized, since this seems to be performance critical
- val superclazz = tpe.firstParent
- var mixins = tpe.parents.tail
- val sbcs = superclazz.baseClasses
- var bcs = sbcs
- def isNew(clazz: Symbol): Boolean =
- superclazz.baseTypeIndex(clazz) < 0 &&
- { var p = bcs;
- while ((p ne sbcs) && (p.head != clazz)) p = p.tail;
- p eq sbcs
- }
- while (!mixins.isEmpty) {
- def addMixinBaseClasses(mbcs: List[Symbol]): List[Symbol] =
- if (mbcs.isEmpty) bcs
- else if (isNew(mbcs.head)) mbcs.head :: addMixinBaseClasses(mbcs.tail)
- else addMixinBaseClasses(mbcs.tail)
- bcs = addMixinBaseClasses(mixins.head.baseClasses)
- mixins = mixins.tail
+ object baseClassesCycleMonitor {
+ private var open: List[Symbol] = Nil
+ @inline private def cycleLog(msg: => String) {
+ if (settings.debug)
+ Console.err.println(msg)
+ }
+ def size = open.size
+ def push(clazz: Symbol) {
+ cycleLog("+ " + (" " * size) + clazz.fullNameString)
+ open ::= clazz
+ }
+ def pop(clazz: Symbol) {
+ assert(open.head eq clazz, (clazz, open))
+ open = open.tail
+ }
+ def isOpen(clazz: Symbol) = open contains clazz
+ }
+
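Reviewer note: baseClassesCycleMonitor is a plain stack of the classes whose base-class computation is currently in flight; a class that shows up while already open indicates a cycle, which the caller below breaks with a forced recomputation. A stripped-down sketch of the same bookkeeping (illustrative names, Strings instead of Symbols):

object CycleMonitorSketch {
  private var open: List[String] = Nil
  def push(clazz: String): Unit = open ::= clazz
  def pop(clazz: String): Unit = { assert(open.head == clazz, (clazz, open)); open = open.tail }
  def isOpen(clazz: String): Boolean = open contains clazz
}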
+ protected def defineBaseClassesOfCompoundType(tpe: CompoundType) {
+ def define() = defineBaseClassesOfCompoundType(tpe, force = false)
+ if (!breakCycles || isPastTyper) define()
+ else tpe match {
+ // non-empty parents helpfully excludes all package classes
+ case tpe @ ClassInfoType(_ :: _, _, clazz) if !clazz.isAnonOrRefinementClass =>
+ // Cycle: force update
+ if (baseClassesCycleMonitor isOpen clazz)
+ defineBaseClassesOfCompoundType(tpe, force = true)
+ else {
+ baseClassesCycleMonitor push clazz
+ try define()
+ finally baseClassesCycleMonitor pop clazz
}
- tpe.typeSymbol :: bcs
- }
+ case _ =>
+ define()
+ }
+ }
+ private def defineBaseClassesOfCompoundType(tpe: CompoundType, force: Boolean) {
val period = tpe.baseClassesPeriod
- if (period != currentPeriod) {
+ if (period == currentPeriod) {
+ if (force && breakCycles) {
+ def what = tpe.typeSymbol + " in " + tpe.typeSymbol.owner.fullNameString
+ val bcs = computeBaseClasses(tpe)
+ tpe.baseClassesCache = bcs
+ warning(s"Breaking cycle in base class computation of $what ($bcs)")
+ }
+ }
+ else {
tpe.baseClassesPeriod = currentPeriod
if (!isValidForBaseClasses(period)) {
val start = if (Statistics.canEnable) Statistics.pushTimer(typeOpsStack, baseClassesNanos) else null
try {
tpe.baseClassesCache = null
- tpe.baseClassesCache = tpe.memo(computeBaseClasses)(tpe.typeSymbol :: _.baseClasses.tail)
- } finally {
+ tpe.baseClassesCache = tpe.memo(computeBaseClasses(tpe))(tpe.typeSymbol :: _.baseClasses.tail)
+ }
+ finally {
if (Statistics.canEnable) Statistics.popTimer(typeOpsStack, start)
}
}
@@ -1960,8 +1893,8 @@ trait Types extends api.Types { self: SymbolTable =>
tp match {
case tr @ TypeRef(_, sym, args) if args.nonEmpty =>
val tparams = tr.initializedTypeParams
- if (settings.debug.value && !sameLength(tparams, args))
- debugwarn("Mismatched zip in computeRefs(): " + sym.info.typeParams + ", " + args)
+ if (settings.debug && !sameLength(tparams, args))
+ devWarning(s"Mismatched zip in computeRefs(): ${sym.info.typeParams}, $args")
foreach2(tparams, args) { (tparam1, arg) =>
if (arg contains tparam) {
@@ -2004,7 +1937,7 @@ trait Types extends api.Types { self: SymbolTable =>
var change = false
for ((from, targets) <- refs(NonExpansive).iterator)
for (target <- targets) {
- var thatInfo = classInfo(target)
+ val thatInfo = classInfo(target)
if (thatInfo.state != Initialized)
change = change | thatInfo.propagate()
addRefs(NonExpansive, from, thatInfo.getRefs(NonExpansive, target))
@@ -2012,7 +1945,7 @@ trait Types extends api.Types { self: SymbolTable =>
}
for ((from, targets) <- refs(Expansive).iterator)
for (target <- targets) {
- var thatInfo = classInfo(target)
+ val thatInfo = classInfo(target)
if (thatInfo.state != Initialized)
change = change | thatInfo.propagate()
addRefs(Expansive, from, thatInfo.getRefs(NonExpansive, target))
@@ -2023,15 +1956,10 @@ trait Types extends api.Types { self: SymbolTable =>
change
}
- // override def isNullable: Boolean =
- // symbol == AnyClass ||
- // symbol != NothingClass && (symbol isSubClass ObjectClass) && !(symbol isSubClass NonNullClass);
-
- // override def isNonNull: Boolean = symbol == NonNullClass || super.isNonNull;
override def kind = "ClassInfoType"
override def safeToString =
- if (settings.debug.value || decls.size > 1)
+ if (settings.debug || decls.size > 1)
formattedToString
else
super.safeToString
@@ -2040,7 +1968,7 @@ trait Types extends api.Types { self: SymbolTable =>
*/
def formattedToString: String =
parents.mkString("\n with ") + (
- if (settings.debug.value || parents.isEmpty || (decls.elems ne null))
+ if (settings.debug || parents.isEmpty || (decls.elems ne null))
fullyInitializeScope(decls).mkString(" {\n ", "\n ", "\n}")
else ""
)
@@ -2052,19 +1980,14 @@ trait Types extends api.Types { self: SymbolTable =>
extends ClassInfoType(List(), decls, clazz)
/** A class representing a constant type.
- *
- * @param value ...
*/
abstract case class ConstantType(value: Constant) extends SingletonType with ConstantTypeApi {
override def underlying: Type = value.tpe
assert(underlying.typeSymbol != UnitClass)
override def isTrivial: Boolean = true
- override def isNotNull = value.value != null
- override def deconst: Type = underlying
+ override def deconst: Type = underlying.deconst
override def safeToString: String =
underlying.toString + "(" + value.escapedStringValue + ")"
- // override def isNullable: Boolean = value.value eq null
- // override def isNonNull: Boolean = value.value ne null
override def kind = "ConstantType"
}
@@ -2094,7 +2017,7 @@ trait Types extends api.Types { self: SymbolTable =>
// it later turns out not to have kind *. See SI-4070. Only
// logging it for now.
if (sym.typeParams.size != args.size)
- log("!!! %s.transform(%s), but tparams.isEmpty and args=".format(this, tp, args))
+ devWarning(s"$this.transform($tp), but tparams.isEmpty and args=$args")
asSeenFromOwner(tp).instantiateTypeParams(sym.typeParams, args)
}
@@ -2108,14 +2031,13 @@ trait Types extends api.Types { self: SymbolTable =>
class ModuleTypeRef(pre0: Type, sym0: Symbol) extends NoArgsTypeRef(pre0, sym0) with ClassTypeRef {
require(sym.isModuleClass, sym)
private[this] var narrowedCache: Type = _
- override def isStable = true
+ override def isStable = pre.isStable
override def narrow = {
if (narrowedCache eq null)
narrowedCache = singleType(pre, sym.sourceModule)
narrowedCache
}
- final override def isNotNull = true
override protected def finishPrefix(rest: String) = objectPrefix + rest
override def directObjectString = super.safeToString
override def toLongString = toString
@@ -2124,6 +2046,7 @@ trait Types extends api.Types { self: SymbolTable =>
}
class PackageTypeRef(pre0: Type, sym0: Symbol) extends ModuleTypeRef(pre0, sym0) {
require(sym.isPackageClass, sym)
+ override def isStable = true
override protected def finishPrefix(rest: String) = packagePrefix + rest
}
class RefinementTypeRef(pre0: Type, sym0: Symbol) extends NoArgsTypeRef(pre0, sym0) with ClassTypeRef {
@@ -2252,7 +2175,7 @@ trait Types extends api.Types { self: SymbolTable =>
else ErrorType
}
- // isHKSubType0 introduces synthetic type params so that
+ // isHKSubType introduces synthetic type params so that
// betaReduce can first apply sym.info to typeArgs before calling
// asSeenFrom. asSeenFrom then skips synthetic type params, which
// are used to reduce HO subtyping to first-order subtyping, but
@@ -2326,7 +2249,6 @@ trait Types extends api.Types { self: SymbolTable =>
}
override def isStable = bounds.hi.typeSymbol isSubClass SingletonClass
override def bounds = thisInfo.bounds
- // def transformInfo(tp: Type): Type = appliedType(tp.asSeenFrom(pre, sym.owner), typeArgsOrDummies)
override protected[Types] def baseTypeSeqImpl: BaseTypeSeq = transform(bounds.hi).baseTypeSeq prepend this
override def kind = "AbstractTypeRef"
}
@@ -2359,7 +2281,7 @@ trait Types extends api.Types { self: SymbolTable =>
h = mix(h, pre.hashCode)
h = mix(h, sym.hashCode)
if (hasArgs)
- finalizeHash(mix(h, args.hashCode), 3)
+ finalizeHash(mix(h, args.hashCode()), 3)
else
finalizeHash(h, 2)
}
@@ -2420,9 +2342,6 @@ trait Types extends api.Types { self: SymbolTable =>
override def typeSymbol = sym
override def typeSymbolDirect = sym
- override def isNotNull =
- sym.isModuleClass || sym == NothingClass || (sym isNonBottomSubClass NotNullClass) || super.isNotNull
-
override def parents: List[Type] = {
val cache = parentsCache
if (parentsPeriod == currentPeriod && cache != null) cache
@@ -2458,7 +2377,7 @@ trait Types extends api.Types { self: SymbolTable =>
// ensure that symbol is not a local copy with a name coincidence
private def needsPreString = (
- settings.debug.value
+ settings.debug
|| !shorthands(sym.fullName)
|| (sym.ownersIterator exists (s => !s.isClass))
)
@@ -2483,7 +2402,7 @@ trait Types extends api.Types { self: SymbolTable =>
case RepeatedParamClass => args.head + "*"
case ByNameParamClass => "=> " + args.head
case _ =>
- def targs = normalize.typeArgs
+ def targs = dealiasWiden.typeArgs
if (isFunctionType(this)) {
// Aesthetics: printing Function1 as T => R rather than (T) => R
@@ -2491,7 +2410,7 @@ trait Types extends api.Types { self: SymbolTable =>
// from (T1, T2) => R.
targs match {
case in :: out :: Nil if !isTupleType(in) =>
- // A => B => C should be (A => B) => C or A => (B => C).
+ // A => B => C should be (A => B) => C or A => (B => C)
// Also if A is byname, then we want (=> A) => B because => is right associative and => A => B
// would mean => (A => B) which is a different type
val in_s = if (isFunctionType(in) || isByNameParamType(in)) "(" + in + ")" else "" + in
@@ -2509,12 +2428,12 @@ trait Types extends api.Types { self: SymbolTable =>
""
}
override def safeToString = {
- val custom = if (settings.debug.value) "" else customToString
+ val custom = if (settings.debug) "" else customToString
if (custom != "") custom
else finishPrefix(preString + sym.nameString + argsString)
}
override def prefixString = "" + (
- if (settings.debug.value)
+ if (settings.debug)
super.prefixString
else if (sym.isOmittablePrefix)
""
@@ -2528,20 +2447,28 @@ trait Types extends api.Types { self: SymbolTable =>
override def kind = "TypeRef"
}
+ // No longer defined as anonymous classes in `object TypeRef` to avoid an unnecessary outer pointer.
+ private final class AliasArgsTypeRef(pre: Type, sym: Symbol, args: List[Type]) extends ArgsTypeRef(pre, sym, args) with AliasTypeRef
+ private final class AbstractArgsTypeRef(pre: Type, sym: Symbol, args: List[Type]) extends ArgsTypeRef(pre, sym, args) with AbstractTypeRef
+ private final class ClassArgsTypeRef(pre: Type, sym: Symbol, args: List[Type]) extends ArgsTypeRef(pre, sym, args) with ClassTypeRef
+ private final class AliasNoArgsTypeRef(pre: Type, sym: Symbol) extends NoArgsTypeRef(pre, sym) with AliasTypeRef
+ private final class AbstractNoArgsTypeRef(pre: Type, sym: Symbol) extends NoArgsTypeRef(pre, sym) with AbstractTypeRef
+ private final class ClassNoArgsTypeRef(pre: Type, sym: Symbol) extends NoArgsTypeRef(pre, sym) with ClassTypeRef
+
object TypeRef extends TypeRefExtractor {
def apply(pre: Type, sym: Symbol, args: List[Type]): Type = unique({
if (args.nonEmpty) {
- if (sym.isAliasType) new ArgsTypeRef(pre, sym, args) with AliasTypeRef
- else if (sym.isAbstractType) new ArgsTypeRef(pre, sym, args) with AbstractTypeRef
- else new ArgsTypeRef(pre, sym, args) with ClassTypeRef
+ if (sym.isAliasType) new AliasArgsTypeRef(pre, sym, args)
+ else if (sym.isAbstractType) new AbstractArgsTypeRef(pre, sym, args)
+ else new ClassArgsTypeRef(pre, sym, args)
}
else {
- if (sym.isAliasType) new NoArgsTypeRef(pre, sym) with AliasTypeRef
- else if (sym.isAbstractType) new NoArgsTypeRef(pre, sym) with AbstractTypeRef
+ if (sym.isAliasType) new AliasNoArgsTypeRef(pre, sym)
+ else if (sym.isAbstractType) new AbstractNoArgsTypeRef(pre, sym)
else if (sym.isRefinementClass) new RefinementTypeRef(pre, sym)
else if (sym.isPackageClass) new PackageTypeRef(pre, sym)
else if (sym.isModuleClass) new ModuleTypeRef(pre, sym)
- else new NoArgsTypeRef(pre, sym) with ClassTypeRef
+ else new ClassNoArgsTypeRef(pre, sym)
}
})
}
@@ -2553,7 +2480,7 @@ trait Types extends api.Types { self: SymbolTable =>
if (!isValidForBaseClasses(period)) {
tpe.parentsCache = tpe.thisInfo.parents map tpe.transform
} else if (tpe.parentsCache == null) { // seems this can happen if things are corrupted enough, see #2641
- tpe.parentsCache = List(AnyClass.tpe)
+ tpe.parentsCache = List(AnyTpe)
}
}
}
@@ -2820,10 +2747,10 @@ trait Types extends api.Types { self: SymbolTable =>
override def safeToString: String = {
def clauses = {
val str = quantified map (_.existentialToString) mkString (" forSome { ", "; ", " }")
- if (settings.explaintypes.value) "(" + str + ")" else str
+ if (settings.explaintypes) "(" + str + ")" else str
}
underlying match {
- case TypeRef(pre, sym, args) if !settings.debug.value && isRepresentableWithWildcards =>
+ case TypeRef(pre, sym, args) if !settings.debug && isRepresentableWithWildcards =>
"" + TypeRef(pre, sym, Nil) + wildcardArgsString(quantified.toSet, args).mkString("[", ", ", "]")
case MethodType(_, _) | NullaryMethodType(_) | PolyType(_, _) =>
"(" + underlying + ")" + clauses
@@ -2847,7 +2774,7 @@ trait Types extends api.Types { self: SymbolTable =>
val tvars = quantifiedFresh map (tparam => TypeVar(tparam))
val underlying1 = underlying.instantiateTypeParams(quantified, tvars) // fuse subst quantified -> quantifiedFresh -> tvars
op(underlying1) && {
- solve(tvars, quantifiedFresh, quantifiedFresh map (x => 0), false, depth) &&
+ solve(tvars, quantifiedFresh, quantifiedFresh map (_ => Invariant), upper = false, depth) &&
isWithinBounds(NoPrefix, NoSymbol, quantifiedFresh, tvars map (_.constr.inst))
}
}
@@ -2865,9 +2792,13 @@ trait Types extends api.Types { self: SymbolTable =>
override def kind = "OverloadedType"
}
- def overloadedType(pre: Type, alternatives: List[Symbol]): Type =
- if (alternatives.tail.isEmpty) pre memberType alternatives.head
- else OverloadedType(pre, alternatives)
+ /** The canonical creator for OverloadedTypes.
+ */
+ def overloadedType(pre: Type, alternatives: List[Symbol]): Type = alternatives match {
+ case Nil => NoType
+ case alt :: Nil => pre memberType alt
+ case _ => OverloadedType(pre, alternatives)
+ }
   /** A class remembering a type instantiation for some set of overloaded
* polymorphic symbols.
@@ -2875,23 +2806,12 @@ trait Types extends api.Types { self: SymbolTable =>
*/
case class AntiPolyType(pre: Type, targs: List[Type]) extends Type {
override def safeToString =
- pre.toString + targs.mkString("(with type arguments ", ", ", ")");
+ pre.toString + targs.mkString("(with type arguments ", ", ", ")")
+
override def memberType(sym: Symbol) = appliedType(pre.memberType(sym), targs)
-// override def memberType(sym: Symbol) = pre.memberType(sym) match {
-// case PolyType(tparams, restp) =>
-// restp.subst(tparams, targs)
-// /* I don't think this is needed, as existential types close only over value types
-// case ExistentialType(tparams, qtpe) =>
-// existentialAbstraction(tparams, qtpe.memberType(sym))
-// */
-// case ErrorType =>
-// ErrorType
-// }
override def kind = "AntiPolyType"
}
- //private var tidCount = 0 //DEBUG
-
object HasTypeMember {
def apply(name: TypeName, tp: Type): Type = {
val bound = refinedType(List(WildcardType), NoSymbol)
@@ -2906,13 +2826,10 @@ trait Types extends api.Types { self: SymbolTable =>
}
}
- // Not used yet.
- object HasTypeParams {
- def unapply(tp: Type): Option[(List[Symbol], Type)] = tp match {
- case AnnotatedType(_, tp, _) => unapply(tp)
- case ExistentialType(tparams, qtpe) => Some((tparams, qtpe))
- case PolyType(tparams, restpe) => Some((tparams, restpe))
- case _ => None
+ object ArrayTypeRef {
+ def unapply(tp: Type) = tp match {
+ case TypeRef(_, ArrayClass, arg :: Nil) => Some(arg)
+ case _ => None
}
}
@@ -2941,10 +2858,10 @@ trait Types extends api.Types { self: SymbolTable =>
* See SI-5359.
*/
val bounds = tparam.info.bounds
- /** We can seed the type constraint with the type parameter
- * bounds as long as the types are concrete. This should lower
- * the complexity of the search even if it doesn't improve
- * any results.
+ /* We can seed the type constraint with the type parameter
+ * bounds as long as the types are concrete. This should lower
+ * the complexity of the search even if it doesn't improve
+ * any results.
*/
if (propagateParameterBoundsToTypeVars) {
val exclude = bounds.isEmptyBounds || (bounds exists typeIsNonClassType)
@@ -2985,20 +2902,6 @@ trait Types extends api.Types { self: SymbolTable =>
createTypeVar(tparam.tpeHK, deriveConstraint(tparam), Nil, tparam.typeParams, untouchable)
}
- /** Repack existential types, otherwise they sometimes get unpacked in the
- * wrong location (type inference comes up with an unexpected skolem)
- */
- def repackExistential(tp: Type): Type = (
- if (tp == NoType) tp
- else existentialAbstraction(existentialsInType(tp), tp)
- )
-
- def containsExistential(tpe: Type) =
- tpe exists typeIsExistentiallyBound
-
- def existentialsInType(tpe: Type) =
- tpe withFilter typeIsExistentiallyBound map (_.typeSymbol)
-
/** Precondition: params.nonEmpty. (args.nonEmpty enforced structurally.)
*/
class HKTypeVar(
@@ -3009,7 +2912,6 @@ trait Types extends api.Types { self: SymbolTable =>
require(params.nonEmpty, this)
override def isHigherKinded = true
- override protected def typeVarString = params.map(_.name).mkString("[", ", ", "]=>" + originName)
}
/** Precondition: zipped params/args nonEmpty. (Size equivalence enforced structurally.)
@@ -3024,10 +2926,6 @@ trait Types extends api.Types { self: SymbolTable =>
override def params: List[Symbol] = zippedArgs map (_._1)
override def typeArgs: List[Type] = zippedArgs map (_._2)
-
- override protected def typeVarString = (
- zippedArgs map { case (p, a) => p.name + "=" + a } mkString (origin + "[", ", ", "]")
- )
}
trait UntouchableTypeVar extends TypeVar {
@@ -3057,7 +2955,7 @@ trait Types extends api.Types { self: SymbolTable =>
* Precondition for this class, enforced structurally: args.isEmpty && params.isEmpty.
*/
abstract case class TypeVar(
- val origin: Type,
+ origin: Type,
var constr: TypeConstraint
) extends Type {
@@ -3077,7 +2975,6 @@ trait Types extends api.Types { self: SymbolTable =>
* in operations that are exposed from types. Hence, no syncing of `constr`
* or `encounteredHigherLevel` or `suspended` accesses should be necessary.
*/
-// var constr = constr0
def instValid = constr.instValid
override def isGround = instValid && constr.inst.isGround
@@ -3132,7 +3029,10 @@ trait Types extends api.Types { self: SymbolTable =>
// invariant: before mutating constr, save old state in undoLog
// (undoLog is used to reset constraints to avoid piling up unrelated ones)
def setInst(tp: Type): this.type = {
-// assert(!(tp containsTp this), this)
+ if (tp eq this) {
+ log(s"TypeVar cycle: called setInst passing $this to itself.")
+ return this
+ }
undoLog record this
// if we were compared against later typeskolems, repack the existential,
// because skolems are only compatible if they were created at the same level
@@ -3187,7 +3087,7 @@ trait Types extends api.Types { self: SymbolTable =>
else lhs <:< rhs
}
- /** Simple case: type arguments can be ignored, because either this typevar has
+ /* Simple case: type arguments can be ignored, because either this typevar has
* no type parameters, or we are comparing to Any/Nothing.
*
* The latter condition is needed because HK unification is limited to constraints of the shape
@@ -3214,7 +3114,7 @@ trait Types extends api.Types { self: SymbolTable =>
} else false
}
- /** Full case: involving a check of the form
+ /* Full case: involving a check of the form
* {{{
* TC1[T1,..., TN] <: TC2[T'1,...,T'N]
* }}}
@@ -3281,16 +3181,19 @@ trait Types extends api.Types { self: SymbolTable =>
def registerTypeEquality(tp: Type, typeVarLHS: Boolean): Boolean = {
// println("regTypeEq: "+(safeToString, debugString(tp), tp.getClass, if (typeVarLHS) "in LHS" else "in RHS", if (suspended) "ZZ" else if (constr.instValid) "IV" else "")) //@MDEBUG
-// println("constr: "+ constr)
- def checkIsSameType(tp: Type) =
- if(typeVarLHS) constr.inst =:= tp
- else tp =:= constr.inst
+ def checkIsSameType(tp: Type) = (
+ if (typeVarLHS) constr.inst =:= tp
+ else tp =:= constr.inst
+ )
if (suspended) tp =:= origin
else if (constr.instValid) checkIsSameType(tp)
else isRelatable(tp) && {
val newInst = wildcardToTypeVarMap(tp)
- (constr isWithinBounds newInst) && { setInst(tp); true }
+ (constr isWithinBounds newInst) && {
+ setInst(newInst)
+ true
+ }
}
}
@@ -3301,7 +3204,7 @@ trait Types extends api.Types { self: SymbolTable =>
* (`T` corresponds to @param sym)
*/
def registerTypeSelection(sym: Symbol, tp: Type): Boolean = {
- registerBound(HasTypeMember(sym.name.toTypeName, tp), false)
+ registerBound(HasTypeMember(sym.name.toTypeName, tp), isLowerBound = false)
}
private def isSkolemAboveLevel(tp: Type) = tp.typeSymbol match {
@@ -3352,8 +3255,7 @@ trait Types extends api.Types { self: SymbolTable =>
if (sym.owner.isTerm && (sym.owner != encl)) Some(sym.owner) else None
).flatten map (s => s.decodedName + tparamsOfSym(s)) mkString "#"
}
- private def levelString = if (settings.explaintypes.value) level else ""
- protected def typeVarString = originName
+ private def levelString = if (settings.explaintypes) level else ""
override def safeToString = (
if ((constr eq null) || (constr.inst eq null)) "TVar<" + originName + "=null>"
else if (constr.inst ne NoType) "=?" + constr.inst
@@ -3464,22 +3366,12 @@ trait Types extends api.Types { self: SymbolTable =>
case class NamedType(name: Name, tp: Type) extends Type {
override def safeToString: String = name.toString +": "+ tp
}
-
- /** A De Bruijn index referring to a previous type argument. Only used
- * as a serialization format.
+ /** As with NamedType, used only when calling isApplicable.
+ * Records that the application has a wildcard star (aka _*)
+ * at the end of it.
*/
- case class DeBruijnIndex(level: Int, idx: Int, args: List[Type]) extends Type {
- override def safeToString: String = "De Bruijn index("+level+","+idx+")"
- }
-
- /** A binder defining data associated with De Bruijn indices. Only used
- * as a serialization format.
- */
- case class DeBruijnBinder(pnames: List[Name], ptypes: List[Type], restpe: Type) extends Type {
- override def safeToString = {
- val kind = if (pnames.head.isTypeName) "poly" else "method"
- "De Bruijn "+kind+"("+(pnames mkString ",")+";"+(ptypes mkString ",")+";"+restpe+")"
- }
+ case class RepeatedType(tp: Type) extends Type {
+ override def safeToString: String = tp + ": _*"
}
/** A temporary type representing the erasure of a user-defined value type.
@@ -3524,17 +3416,12 @@ trait Types extends api.Types { self: SymbolTable =>
(if (typeParams.isEmpty) "" else typeParamsString(this)) + super.safeToString
}
- // def mkLazyType(tparams: Symbol*)(f: Symbol => Unit): LazyType = (
- // if (tparams.isEmpty) new LazyType { override def complete(sym: Symbol) = f(sym) }
- // else new LazyPolyType(tparams.toList) { override def complete(sym: Symbol) = f(sym) }
- // )
-
// Creators ---------------------------------------------------------------
/** Rebind symbol `sym` to an overriding member in type `pre`. */
private def rebind(pre: Type, sym: Symbol): Symbol = {
if (!sym.isOverridableMember || sym.owner == pre.typeSymbol) sym
- else pre.nonPrivateMember(sym.name).suchThat(sym => sym.isType || sym.isStable) orElse sym
+ else pre.nonPrivateMember(sym.name).suchThat(sym => sym.isType || (sym.isStable && !sym.hasVolatileType)) orElse sym
}
/** Convert a `super` prefix to a this-type if `sym` is abstract or final. */
@@ -3563,7 +3450,7 @@ trait Types extends api.Types { self: SymbolTable =>
/** the canonical creator for a refined type with a given scope */
def refinedType(parents: List[Type], owner: Symbol, decls: Scope, pos: Position): Type = {
if (phase.erasedTypes)
- if (parents.isEmpty) ObjectClass.tpe else parents.head
+ if (parents.isEmpty) ObjectTpe else parents.head
else {
val clazz = owner.newRefinementClass(pos)
val result = RefinedType(parents, decls, clazz)
@@ -3573,10 +3460,6 @@ trait Types extends api.Types { self: SymbolTable =>
}
/** The canonical creator for a refined type with an initially empty scope.
- *
- * @param parents ...
- * @param owner ...
- * @return ...
*/
def refinedType(parents: List[Type], owner: Symbol): Type =
refinedType(parents, owner, newScope, owner.pos)
@@ -3685,7 +3568,7 @@ trait Types extends api.Types { self: SymbolTable =>
if (args.isEmpty)
return tycon //@M! `if (args.isEmpty) tycon' is crucial (otherwise we create new types in phases after typer and then they don't get adapted (??))
- /** Disabled - causes cycles in tcpoly tests. */
+ /* Disabled - causes cycles in tcpoly tests. */
if (false && isDefinitionsInitialized) {
assert(isUseableAsTypeArgs(args), {
val tapp_s = s"""$tycon[${args mkString ", "}]"""
@@ -3696,16 +3579,16 @@ trait Types extends api.Types { self: SymbolTable =>
tycon match {
case TypeRef(pre, sym @ (NothingClass|AnyClass), _) => copyTypeRef(tycon, pre, sym, Nil) //@M drop type args to Any/Nothing
- case TypeRef(pre, sym, _) => copyTypeRef(tycon, pre, sym, args)
+ case TypeRef(pre, sym, Nil) => copyTypeRef(tycon, pre, sym, args)
+ case TypeRef(pre, sym, bogons) => devWarning(s"Dropping $bogons from $tycon in appliedType.") ; copyTypeRef(tycon, pre, sym, args)
case PolyType(tparams, restpe) => restpe.instantiateTypeParams(tparams, args)
case ExistentialType(tparams, restpe) => newExistentialType(tparams, appliedType(restpe, args))
case st: SingletonType => appliedType(st.widen, args) // @M TODO: what to do? see bug1
- case RefinedType(parents, decls) => RefinedType(parents map (appliedType(_, args)), decls) // MO to AM: please check
- case TypeBounds(lo, hi) => TypeBounds(appliedType(lo, args), appliedType(hi, args))
+ case RefinedType(parents, decls) => RefinedType(parents map (appliedType(_, args)), decls) // @PP: Can this be right?
+ case TypeBounds(lo, hi) => TypeBounds(appliedType(lo, args), appliedType(hi, args)) // @PP: Can this be right?
case tv@TypeVar(_, _) => tv.applyArgs(args)
case AnnotatedType(annots, underlying, self) => AnnotatedType(annots, appliedType(underlying, args), self)
- case ErrorType => tycon
- case WildcardType => tycon // needed for neg/t0226
+ case ErrorType | WildcardType => tycon
case _ => abort(debugString(tycon))
}
}
@@ -3714,25 +3597,6 @@ trait Types extends api.Types { self: SymbolTable =>
def appliedType(tyconSym: Symbol, args: Type*): Type =
appliedType(tyconSym.typeConstructor, args.toList)
- /** A creator for existential types where the type arguments,
- * rather than being applied directly, are interpreted as the
- * upper bounds of unknown types. For instance if the type argument
- * list given is List(AnyRefClass), the resulting type would be
- * e.g. Set[_ <: AnyRef] rather than Set[AnyRef] .
- */
- def appliedTypeAsUpperBounds(tycon: Type, args: List[Type]): Type = {
- tycon match {
- case TypeRef(pre, sym, _) if sameLength(sym.typeParams, args) =>
- val eparams = typeParamsToExistentials(sym)
- val bounds = args map (TypeBounds upper _)
- foreach2(eparams, bounds)(_ setInfo _)
-
- newExistentialType(eparams, typeRef(pre, sym, eparams map (_.tpe)))
- case _ =>
- appliedType(tycon, args)
- }
- }
-
/** A creator and extractor for type parameterizations that strips empty type parameter lists.
* Use this factory method to indicate the type has kind * (it's a polymorphic value)
* until we start tracking explicit kinds equivalent to typeFun (except that the latter requires tparams nonEmpty).
@@ -3806,131 +3670,7 @@ trait Types extends api.Types { self: SymbolTable =>
newExistentialType(tparams1, tpe1)
}
- /** Normalize any type aliases within this type (@see Type#normalize).
- * Note that this depends very much on the call to "normalize", not "dealias",
-   * so it no longer carries the too-stealthy name "deAlias".
- */
- object normalizeAliases extends TypeMap {
- def apply(tp: Type): Type = tp match {
- case TypeRef(_, sym, _) if sym.isAliasType =>
- def msg = if (tp.isHigherKinded) s"Normalizing type alias function $tp" else s"Dealiasing type alias $tp"
- mapOver(logResult(msg)(tp.normalize))
- case _ => mapOver(tp)
- }
- }
-
- /** Remove any occurrence of type <singleton> from this type and its parents */
- object dropSingletonType extends TypeMap {
- def apply(tp: Type): Type = {
- tp match {
- case TypeRef(_, SingletonClass, _) =>
- AnyClass.tpe
- case tp1 @ RefinedType(parents, decls) =>
- parents filter (_.typeSymbol != SingletonClass) match {
- case Nil => AnyClass.tpe
- case p :: Nil if decls.isEmpty => mapOver(p)
- case ps => mapOver(copyRefinedType(tp1, ps, decls))
- }
- case tp1 =>
- mapOver(tp1)
- }
- }
- }
-
- /** Substitutes the empty scope for any non-empty decls in the type. */
- object dropAllRefinements extends TypeMap {
- def apply(tp: Type): Type = tp match {
- case rt @ RefinedType(parents, decls) if !decls.isEmpty =>
- mapOver(copyRefinedType(rt, parents, EmptyScope))
- case ClassInfoType(parents, decls, clazz) if !decls.isEmpty =>
- mapOver(ClassInfoType(parents, EmptyScope, clazz))
- case _ =>
- mapOver(tp)
- }
- }
-
- /** Type with all top-level occurrences of abstract types replaced by their bounds */
- def abstractTypesToBounds(tp: Type): Type = tp match { // @M don't normalize here (compiler loops on pos/bug1090.scala )
- case TypeRef(_, sym, _) if sym.isAbstractType =>
- abstractTypesToBounds(tp.bounds.hi)
- case TypeRef(_, sym, _) if sym.isAliasType =>
- abstractTypesToBounds(tp.normalize)
- case rtp @ RefinedType(parents, decls) =>
- copyRefinedType(rtp, parents mapConserve abstractTypesToBounds, decls)
- case AnnotatedType(_, underlying, _) =>
- abstractTypesToBounds(underlying)
- case _ =>
- tp
- }
-
- // Set to true for A* => Seq[A]
- // (And it will only rewrite A* in method result types.)
- // This is the pre-existing behavior.
- // Or false for Seq[A] => Seq[A]
- // (It will rewrite A* everywhere but method parameters.)
- // This is the specified behavior.
- protected def etaExpandKeepsStar = false
-
- /** Turn any T* types into Seq[T] except when
- * in method parameter position.
- */
- object dropRepeatedParamType extends TypeMap {
- def apply(tp: Type): Type = tp match {
- case MethodType(params, restpe) =>
- // Not mapping over params
- val restpe1 = apply(restpe)
- if (restpe eq restpe1) tp
- else MethodType(params, restpe1)
- case TypeRef(_, RepeatedParamClass, arg :: Nil) =>
- seqType(arg)
- case _ =>
- if (etaExpandKeepsStar) tp else mapOver(tp)
- }
- }
-
- object toDeBruijn extends TypeMap {
- private var paramStack: List[List[Symbol]] = Nil
- def mkDebruijnBinder(params: List[Symbol], restpe: Type) = {
- paramStack = params :: paramStack
- try {
- DeBruijnBinder(params map (_.name), params map (p => this(p.info)), this(restpe))
- } finally paramStack = paramStack.tail
- }
- def apply(tp: Type): Type = tp match {
- case PolyType(tparams, restpe) =>
- mkDebruijnBinder(tparams, restpe)
- case MethodType(params, restpe) =>
- mkDebruijnBinder(params, restpe)
- case TypeRef(NoPrefix, sym, args) =>
- val level = paramStack indexWhere (_ contains sym)
- if (level < 0) mapOver(tp)
- else DeBruijnIndex(level, paramStack(level) indexOf sym, args mapConserve this)
- case _ =>
- mapOver(tp)
- }
- }
- def fromDeBruijn(owner: Symbol) = new TypeMap {
- private var paramStack: List[List[Symbol]] = Nil
- def apply(tp: Type): Type = tp match {
- case DeBruijnBinder(pnames, ptypes, restpe) =>
- val isType = pnames.head.isTypeName
- val newParams = for (name <- pnames) yield
- if (isType) owner.newTypeParameter(name.toTypeName)
- else owner.newValueParameter(name.toTermName)
- paramStack = newParams :: paramStack
- try {
- foreach2(newParams, ptypes)((p, t) => p setInfo this(t))
- val restpe1 = this(restpe)
- if (isType) PolyType(newParams, restpe1)
- else MethodType(newParams, restpe1)
- } finally paramStack = paramStack.tail
- case DeBruijnIndex(level, idx, args) =>
- TypeRef(NoPrefix, paramStack(level)(idx), args map this)
- case _ =>
- mapOver(tp)
- }
- }
// Hash consing --------------------------------------------------------------
@@ -3950,121 +3690,6 @@ trait Types extends api.Types { self: SymbolTable =>
// Helper Classes ---------------------------------------------------------
- /** @PP: Unable to see why these apparently constant types should need vals
- * in every TypeConstraint, I lifted them out.
- */
- private lazy val numericLoBound = IntClass.tpe
- private lazy val numericHiBound = intersectionType(List(ByteClass.tpe, CharClass.tpe), ScalaPackageClass)
-
- /** A class expressing upper and lower bounds constraints of type variables,
- * as well as their instantiations.
- */
- class TypeConstraint(lo0: List[Type], hi0: List[Type], numlo0: Type, numhi0: Type, avoidWidening0: Boolean = false) {
- def this(lo0: List[Type], hi0: List[Type]) = this(lo0, hi0, NoType, NoType)
- def this(bounds: TypeBounds) = this(List(bounds.lo), List(bounds.hi))
- def this() = this(List(), List())
-
- /* Syncnote: Type constraints are assumed to be used from only one
- * thread. They are not exposed in api.Types and are used only locally
- * in operations that are exposed from types. Hence, no syncing of any
-   * variables should be necessary.
- */
-
- /** Guard these lists against AnyClass and NothingClass appearing,
- * else loBounds.isEmpty will have different results for an empty
- * constraint and one with Nothing as a lower bound. [Actually
- * guarding addLoBound/addHiBound somehow broke raw types so it
- * only guards against being created with them.]
- */
- private var lobounds = lo0 filterNot typeIsNothing
- private var hibounds = hi0 filterNot typeIsAny
- private var numlo = numlo0
- private var numhi = numhi0
- private var avoidWidening = avoidWidening0
-
- def loBounds: List[Type] = if (numlo == NoType) lobounds else numlo :: lobounds
- def hiBounds: List[Type] = if (numhi == NoType) hibounds else numhi :: hibounds
- def avoidWiden: Boolean = avoidWidening
-
- def addLoBound(tp: Type, isNumericBound: Boolean = false) {
- // For some reason which is still a bit fuzzy, we must let Nothing through as
- // a lower bound despite the fact that Nothing is always a lower bound. My current
- // supposition is that the side-effecting type constraint accumulation mechanism
- // depends on these subtype tests being performed to make forward progress when
-     // there are mutually recursive type vars.
- // See pos/t6367 and pos/t6499 for the competing test cases.
- val mustConsider = tp.typeSymbol match {
- case NothingClass => true
- case _ => !(lobounds contains tp)
- }
- if (mustConsider) {
- if (isNumericBound && isNumericValueType(tp)) {
- if (numlo == NoType || isNumericSubType(numlo, tp))
- numlo = tp
- else if (!isNumericSubType(tp, numlo))
- numlo = numericLoBound
- }
- else lobounds ::= tp
- }
- }
-
- def checkWidening(tp: Type) {
- if(tp.isStable) avoidWidening = true
- else tp match {
- case HasTypeMember(_, _) => avoidWidening = true
- case _ =>
- }
- }
-
- def addHiBound(tp: Type, isNumericBound: Boolean = false) {
- // My current test case only demonstrates the need to let Nothing through as
- // a lower bound, but I suspect the situation is symmetrical.
- val mustConsider = tp.typeSymbol match {
- case AnyClass => true
- case _ => !(hibounds contains tp)
- }
- if (mustConsider) {
- checkWidening(tp)
- if (isNumericBound && isNumericValueType(tp)) {
- if (numhi == NoType || isNumericSubType(tp, numhi))
- numhi = tp
- else if (!isNumericSubType(numhi, tp))
- numhi = numericHiBound
- }
- else hibounds ::= tp
- }
- }
-
- def isWithinBounds(tp: Type): Boolean =
- lobounds.forall(_ <:< tp) &&
- hibounds.forall(tp <:< _) &&
- (numlo == NoType || (numlo weak_<:< tp)) &&
- (numhi == NoType || (tp weak_<:< numhi))
-
- var inst: Type = NoType // @M reduce visibility?
-
- def instValid = (inst ne null) && (inst ne NoType)
-
- def cloneInternal = {
- val tc = new TypeConstraint(lobounds, hibounds, numlo, numhi, avoidWidening)
- tc.inst = inst
- tc
- }
-
- override def toString = {
- val boundsStr = {
- val lo = loBounds filterNot typeIsNothing
- val hi = hiBounds filterNot typeIsAny
- val lostr = if (lo.isEmpty) Nil else List(lo.mkString(" >: (", ", ", ")"))
- val histr = if (hi.isEmpty) Nil else List(hi.mkString(" <: (", ", ", ")"))
-
- lostr ++ histr mkString ("[", " | ", "]")
- }
- if (inst eq NoType) boundsStr
- else boundsStr + " _= " + inst.safeToString
- }
- }
-
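Reviewer note on the TypeConstraint block removed above: it accumulates lower and upper bounds (plus optional numeric bounds), and isWithinBounds tests a candidate against all of them. A toy model of just the bound bookkeeping, with Ints standing in for types and <= standing in for <:< (ToyConstraint is an illustrative name):

final class ToyConstraint {
  private var los: List[Int] = Nil
  private var his: List[Int] = Nil
  def addLoBound(n: Int): Unit = if (!los.contains(n)) los ::= n
  def addHiBound(n: Int): Unit = if (!his.contains(n)) his ::= n
  // A candidate must sit above every lower bound and below every upper bound.
  def isWithinBounds(n: Int): Boolean = los.forall(_ <= n) && his.forall(n <= _)
}
// val c = new ToyConstraint; c.addLoBound(1); c.addHiBound(10); c.isWithinBounds(5)  // true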
class TypeUnwrapper(poly: Boolean, existential: Boolean, annotated: Boolean, nullary: Boolean) extends (Type => Type) {
def apply(tp: Type): Type = tp match {
case AnnotatedType(_, underlying, _) if annotated => apply(underlying)
@@ -4075,310 +3700,33 @@ trait Types extends api.Types { self: SymbolTable =>
}
}
class ClassUnwrapper(existential: Boolean) extends TypeUnwrapper(poly = true, existential, annotated = true, nullary = false) {
- override def apply(tp: Type) = super.apply(tp.normalize)
+ override def apply(tp: Type) = super.apply(tp.normalize) // normalize is required here
}
object unwrapToClass extends ClassUnwrapper(existential = true) { }
object unwrapToStableClass extends ClassUnwrapper(existential = false) { }
object unwrapWrapperTypes extends TypeUnwrapper(true, true, true, true) { }
- trait AnnotationFilter extends TypeMap {
- def keepAnnotation(annot: AnnotationInfo): Boolean
-
- override def mapOver(annot: AnnotationInfo) =
- if (keepAnnotation(annot)) super.mapOver(annot)
- else UnmappableAnnotation
- }
-
- trait KeepOnlyTypeConstraints extends AnnotationFilter {
- // filter keeps only type constraint annotations
- def keepAnnotation(annot: AnnotationInfo) = annot matches TypeConstraintClass
- }
-
- trait VariantTypeMap extends TypeMap {
- private[this] var _variance = 1
-
- override def variance = _variance
- def variance_=(x: Int) = _variance = x
-
- override protected def noChangeToSymbols(origSyms: List[Symbol]) =
- //OPT inline from forall to save on #closures
- origSyms match {
- case sym :: rest =>
- val v = variance
- if (sym.isAliasType) variance = 0
- val result = this(sym.info)
- variance = v
- (result eq sym.info) && noChangeToSymbols(rest)
- case _ =>
- true
- }
-
- override protected def mapOverArgs(args: List[Type], tparams: List[Symbol]): List[Type] =
- map2Conserve(args, tparams) { (arg, tparam) =>
- val v = variance
- if (tparam.isContravariant) variance = -variance
- else if (!tparam.isCovariant) variance = 0
- val arg1 = this(arg)
- variance = v
- arg1
- }
-
- /** Map this function over given type */
- override def mapOver(tp: Type): Type = tp match {
- case MethodType(params, result) =>
- variance = -variance
- val params1 = mapOver(params)
- variance = -variance
- val result1 = this(result)
- if ((params1 eq params) && (result1 eq result)) tp
- else copyMethodType(tp, params1, result1.substSym(params, params1))
- case PolyType(tparams, result) =>
- variance = -variance
- val tparams1 = mapOver(tparams)
- variance = -variance
- var result1 = this(result)
- if ((tparams1 eq tparams) && (result1 eq result)) tp
- else PolyType(tparams1, result1.substSym(tparams, tparams1))
- case TypeBounds(lo, hi) =>
- variance = -variance
- val lo1 = this(lo)
- variance = -variance
- val hi1 = this(hi)
- if ((lo1 eq lo) && (hi1 eq hi)) tp
- else TypeBounds(lo1, hi1)
- case tr @ TypeRef(pre, sym, args) =>
- val pre1 = this(pre)
- val args1 =
- if (args.isEmpty)
- args
- else if (variance == 0) // fast & safe path: don't need to look at typeparams
- args mapConserve this
- else {
- val tparams = sym.typeParams
- if (tparams.isEmpty) args
- else mapOverArgs(args, tparams)
- }
- if ((pre1 eq pre) && (args1 eq args)) tp
- else copyTypeRef(tp, pre1, tr.coevolveSym(pre1), args1)
- case _ =>
- super.mapOver(tp)
- }
- }
-
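The VariantTypeMap removed above keeps track of the variance of the position it is currently mapping: descending into a method parameter list or the lower side of a bound negates it, and mapOverArgs negates or zeroes it per type parameter. A small standalone sketch of that bookkeeping, with made-up names (STpe, SRef, SFun, occurrences) rather than compiler API:

    object VarianceSketch {
      sealed trait STpe
      case class SRef(name: String) extends STpe
      case class SFun(param: STpe, result: STpe) extends STpe

      // record each reference together with the variance of the position it occupies,
      // negating the variance when descending into a (contravariant) parameter position
      def occurrences(tp: STpe, variance: Int = 1): List[(String, Int)] = tp match {
        case SRef(n)    => List(n -> variance)
        case SFun(p, r) => occurrences(p, -variance) ++ occurrences(r, variance)
      }

      def main(args: Array[String]): Unit = {
        // A => (B => C): A and B sit in contravariant positions, C in a covariant one
        println(occurrences(SFun(SRef("A"), SFun(SRef("B"), SRef("C")))))
        // List((A,-1), (B,-1), (C,1))
      }
    }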
- // todo. move these into scala.reflect.api
-
- /** A prototype for mapping a function over all possible types
+  /** Repack existential types; otherwise they sometimes get unpacked in the
+ * wrong location (type inference comes up with an unexpected skolem)
*/
- abstract class TypeMap extends (Type => Type) {
- def apply(tp: Type): Type
-
- /** Mix in VariantTypeMap if you want variances to be significant.
- */
- def variance = 0
-
- /** Map this function over given type */
- def mapOver(tp: Type): Type = tp match {
- case tr @ TypeRef(pre, sym, args) =>
- val pre1 = this(pre)
- val args1 = args mapConserve this
- if ((pre1 eq pre) && (args1 eq args)) tp
- else copyTypeRef(tp, pre1, tr.coevolveSym(pre1), args1)
- case ThisType(_) => tp
- case SingleType(pre, sym) =>
- if (sym.isPackageClass) tp // short path
- else {
- val pre1 = this(pre)
- if (pre1 eq pre) tp
- else singleType(pre1, sym)
- }
- case MethodType(params, result) =>
- val params1 = mapOver(params)
- val result1 = this(result)
- if ((params1 eq params) && (result1 eq result)) tp
- else copyMethodType(tp, params1, result1.substSym(params, params1))
- case PolyType(tparams, result) =>
- val tparams1 = mapOver(tparams)
- var result1 = this(result)
- if ((tparams1 eq tparams) && (result1 eq result)) tp
- else PolyType(tparams1, result1.substSym(tparams, tparams1))
- case NullaryMethodType(result) =>
- val result1 = this(result)
- if (result1 eq result) tp
- else NullaryMethodType(result1)
- case ConstantType(_) => tp
- case SuperType(thistp, supertp) =>
- val thistp1 = this(thistp)
- val supertp1 = this(supertp)
- if ((thistp1 eq thistp) && (supertp1 eq supertp)) tp
- else SuperType(thistp1, supertp1)
- case TypeBounds(lo, hi) =>
- val lo1 = this(lo)
- val hi1 = this(hi)
- if ((lo1 eq lo) && (hi1 eq hi)) tp
- else TypeBounds(lo1, hi1)
- case BoundedWildcardType(bounds) =>
- val bounds1 = this(bounds)
- if (bounds1 eq bounds) tp
- else BoundedWildcardType(bounds1.asInstanceOf[TypeBounds])
- case rtp @ RefinedType(parents, decls) =>
- val parents1 = parents mapConserve this
- val decls1 = mapOver(decls)
- //if ((parents1 eq parents) && (decls1 eq decls)) tp
- //else refinementOfClass(tp.typeSymbol, parents1, decls1)
- copyRefinedType(rtp, parents1, decls1)
- case ExistentialType(tparams, result) =>
- val tparams1 = mapOver(tparams)
- var result1 = this(result)
- if ((tparams1 eq tparams) && (result1 eq result)) tp
- else newExistentialType(tparams1, result1.substSym(tparams, tparams1))
- case OverloadedType(pre, alts) =>
- val pre1 = if (pre.isInstanceOf[ClassInfoType]) pre else this(pre)
- if (pre1 eq pre) tp
- else OverloadedType(pre1, alts)
- case AntiPolyType(pre, args) =>
- val pre1 = this(pre)
- val args1 = args mapConserve (this)
- if ((pre1 eq pre) && (args1 eq args)) tp
- else AntiPolyType(pre1, args1)
- case tv@TypeVar(_, constr) =>
- if (constr.instValid) this(constr.inst)
- else tv.applyArgs(mapOverArgs(tv.typeArgs, tv.params)) //@M !args.isEmpty implies !typeParams.isEmpty
- case NotNullType(tp) =>
- val tp1 = this(tp)
- if (tp1 eq tp) tp
- else NotNullType(tp1)
- case AnnotatedType(annots, atp, selfsym) =>
- val annots1 = mapOverAnnotations(annots)
- val atp1 = this(atp)
- if ((annots1 eq annots) && (atp1 eq atp)) tp
- else if (annots1.isEmpty) atp1
- else AnnotatedType(annots1, atp1, selfsym)
- case DeBruijnIndex(shift, idx, args) =>
- val args1 = args mapConserve this
- if (args1 eq args) tp
- else DeBruijnIndex(shift, idx, args1)
-/*
- case ErrorType => tp
- case WildcardType => tp
- case NoType => tp
- case NoPrefix => tp
- case ErasedSingleType(sym) => tp
-*/
- case _ =>
- tp
- // throw new Error("mapOver inapplicable for " + tp);
- }
-
- protected def mapOverArgs(args: List[Type], tparams: List[Symbol]): List[Type] =
- args mapConserve this
-
- /** Called by mapOver to determine whether the original symbols can
- * be returned, or whether they must be cloned. Overridden in VariantTypeMap.
- */
- protected def noChangeToSymbols(origSyms: List[Symbol]) =
- origSyms forall (sym => sym.info eq this(sym.info))
-
- /** Map this function over given scope */
- def mapOver(scope: Scope): Scope = {
- val elems = scope.toList
- val elems1 = mapOver(elems)
- if (elems1 eq elems) scope
- else newScopeWith(elems1: _*)
- }
-
- /** Map this function over given list of symbols */
- def mapOver(origSyms: List[Symbol]): List[Symbol] = {
- // fast path in case nothing changes due to map
- if (noChangeToSymbols(origSyms)) origSyms
- // map is not the identity --> do cloning properly
- else cloneSymbolsAndModify(origSyms, TypeMap.this)
- }
-
- def mapOver(annot: AnnotationInfo): AnnotationInfo = {
- val AnnotationInfo(atp, args, assocs) = annot
- val atp1 = mapOver(atp)
- val args1 = mapOverAnnotArgs(args)
- // there is no need to rewrite assocs, as they are constants
-
- if ((args eq args1) && (atp eq atp1)) annot
- else if (args1.isEmpty && args.nonEmpty) UnmappableAnnotation // some annotation arg was unmappable
- else AnnotationInfo(atp1, args1, assocs) setPos annot.pos
- }
-
- def mapOverAnnotations(annots: List[AnnotationInfo]): List[AnnotationInfo] = {
- val annots1 = annots mapConserve mapOver
- if (annots1 eq annots) annots
- else annots1 filterNot (_ eq UnmappableAnnotation)
- }
-
- /** Map over a set of annotation arguments. If any
- * of the arguments cannot be mapped, then return Nil. */
- def mapOverAnnotArgs(args: List[Tree]): List[Tree] = {
- val args1 = args mapConserve mapOver
- if (args1 contains UnmappableTree) Nil
- else args1
- }
-
- def mapOver(tree: Tree): Tree =
- mapOver(tree, () => return UnmappableTree)
-
- /** Map a tree that is part of an annotation argument.
- * If the tree cannot be mapped, then invoke giveup().
- * The default is to transform the tree with
- * TypeMapTransformer.
- */
- def mapOver(tree: Tree, giveup: ()=>Nothing): Tree =
- (new TypeMapTransformer).transform(tree)
-
- /** This transformer leaves the tree alone except to remap
- * its types. */
- class TypeMapTransformer extends Transformer {
- override def transform(tree: Tree) = {
- val tree1 = super.transform(tree)
- val tpe1 = TypeMap.this(tree1.tpe)
- if ((tree eq tree1) && (tree.tpe eq tpe1))
- tree
- else
- tree1.shallowDuplicate.setType(tpe1)
- }
- }
- }
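Every case of the mapOver above follows the same conserve discipline: map the components and rebuild the node only when a component actually changed by reference, otherwise return the original so unchanged types stay shared. A minimal self-contained sketch of that pattern (Node, Leaf, Branch, mapNode are illustrative names, not compiler code):

    object ConserveSketch {
      sealed trait Node
      case class Leaf(n: Int) extends Node
      case class Branch(children: List[Node]) extends Node

      def mapNode(f: Int => Int)(node: Node): Node = node match {
        case Leaf(n) =>
          val n1 = f(n)
          if (n1 == n) node else Leaf(n1)
        case Branch(cs) =>
          val cs1 = cs mapConserve (c => mapNode(f)(c))  // mapConserve returns `cs` itself if nothing changed
          if (cs1 eq cs) node else Branch(cs1)
      }

      def main(args: Array[String]): Unit = {
        val t = Branch(List(Leaf(1), Leaf(2)))
        println(mapNode(identity)(t) eq t)  // true: an unchanged tree is returned as-is, not copied
      }
    }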
+ def repackExistential(tp: Type): Type = (
+ if (tp == NoType) tp
+ else existentialAbstraction(existentialsInType(tp), tp)
+ )
- abstract class TypeTraverser extends TypeMap {
- def traverse(tp: Type): Unit
- def apply(tp: Type): Type = { traverse(tp); tp }
- }
+ def containsExistential(tpe: Type) = tpe exists typeIsExistentiallyBound
+ def existentialsInType(tpe: Type) = tpe withFilter typeIsExistentiallyBound map (_.typeSymbol)
- abstract class TypeTraverserWithResult[T] extends TypeTraverser {
- def result: T
- def clear(): Unit
+ private def isDummyOf(tpe: Type)(targ: Type) = {
+ val sym = targ.typeSymbol
+ sym.isTypeParameter && sym.owner == tpe.typeSymbol
}
-
- abstract class TypeCollector[T](initial: T) extends TypeTraverser {
- var result: T = _
- def collect(tp: Type) = {
- result = initial
- traverse(tp)
- result
- }
+ def isDummyAppliedType(tp: Type) = tp.dealias match {
+ case tr @ TypeRef(_, _, args) => args exists isDummyOf(tr)
+ case _ => false
}
- /** A collector that tests for existential types appearing at given variance in a type
- * @PP: Commenting out due to not being used anywhere.
- */
- // class ContainsVariantExistentialCollector(v: Int) extends TypeCollector(false) with VariantTypeMap {
- // variance = v
- //
- // def traverse(tp: Type) = tp match {
- // case ExistentialType(_, _) if (variance == v) => result = true
- // case _ => mapOver(tp)
- // }
- // }
- //
- // val containsCovariantExistentialCollector = new ContainsVariantExistentialCollector(1)
- // val containsContravariantExistentialCollector = new ContainsVariantExistentialCollector(-1)
-
def typeParamsToExistentials(clazz: Symbol, tparams: List[Symbol]): List[Symbol] = {
val eparams = mapWithIndex(tparams)((tparam, i) =>
clazz.newExistential(newTypeName("?"+i), clazz.pos) setInfo tparam.info.bounds)
@@ -4388,630 +3736,23 @@ trait Types extends api.Types { self: SymbolTable =>
def typeParamsToExistentials(clazz: Symbol): List[Symbol] =
typeParamsToExistentials(clazz, clazz.typeParams)
+ def isRawIfWithoutArgs(sym: Symbol) = sym.isClass && sym.typeParams.nonEmpty && sym.isJavaDefined
+ /** Is type tp a ''raw type''? */
// note: it's important to write the two tests in this order,
// as only typeParams forces the classfile to be read. See #400
- private def isRawIfWithoutArgs(sym: Symbol) =
- sym.isClass && sym.typeParams.nonEmpty && sym.isJavaDefined
-
- def isRaw(sym: Symbol, args: List[Type]) =
- !phase.erasedTypes && isRawIfWithoutArgs(sym) && args.isEmpty
-
- /** Is type tp a ''raw type''? */
- def isRawType(tp: Type) = tp match {
- case TypeRef(_, sym, args) => isRaw(sym, args)
- case _ => false
- }
-
- /** The raw to existential map converts a ''raw type'' to an existential type.
- * It is necessary because we might have read a raw type of a
- * parameterized Java class from a class file. At the time we read the type
- * the corresponding class file might still not be read, so we do not
- * know what the type parameters of the type are. Therefore
- * the conversion of raw types to existential types might not have taken place
-   * in ClassfileParser.sigToType (where it is usually done).
- */
- def rawToExistential = new TypeMap {
- private var expanded = immutable.Set[Symbol]()
- def apply(tp: Type): Type = tp match {
- case TypeRef(pre, sym, List()) if isRawIfWithoutArgs(sym) =>
- if (expanded contains sym) AnyRefClass.tpe
- else try {
- expanded += sym
- val eparams = mapOver(typeParamsToExistentials(sym))
- existentialAbstraction(eparams, typeRef(apply(pre), sym, eparams map (_.tpe)))
- } finally {
- expanded -= sym
- }
- case _ =>
- mapOver(tp)
- }
- }
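Roughly, and at the source level rather than through the compiler API, the map above turns a raw reference to a parameterized Java class into the corresponding existential, e.g. a raw java.util.List is read as java.util.List[_]. A tiny illustration of that reading on the Scala side (RawTypeSketch is a made-up name):

    object RawTypeSketch {
      def main(args: Array[String]): Unit = {
        // A raw java.util.List coming from Java is typed in Scala as java.util.List[_],
        // i.e. an existential over the erased type parameter, which is the shape
        // rawToExistential produces.
        val fromJava: java.util.List[_] = new java.util.ArrayList[String]()
        println(fromJava.size)  // members whose types don't mention the unknown parameter remain usable
      }
    }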
-
- /** Used by existentialAbstraction.
- */
- class ExistentialExtrapolation(tparams: List[Symbol]) extends VariantTypeMap {
- private val occurCount = mutable.HashMap[Symbol, Int]()
- private def countOccs(tp: Type) = {
- tp foreach {
- case TypeRef(_, sym, _) =>
- if (tparams contains sym)
- occurCount(sym) += 1
- case _ => ()
- }
- }
- def extrapolate(tpe: Type): Type = {
- tparams foreach (t => occurCount(t) = 0)
- countOccs(tpe)
- for (tparam <- tparams)
- countOccs(tparam.info)
-
- apply(tpe)
- }
-
- def apply(tp: Type): Type = {
- val tp1 = mapOver(tp)
- if (variance == 0) tp1
- else tp1 match {
- case TypeRef(pre, sym, args) if tparams contains sym =>
- val repl = if (variance == 1) dropSingletonType(tp1.bounds.hi) else tp1.bounds.lo
- //println("eliminate "+sym+"/"+repl+"/"+occurCount(sym)+"/"+(tparams exists (repl.contains)))//DEBUG
- if (!repl.typeSymbol.isBottomClass && occurCount(sym) == 1 && !(tparams exists (repl.contains)))
- repl
- else tp1
- case _ =>
- tp1
- }
- }
- override def mapOver(tp: Type): Type = tp match {
- case SingleType(pre, sym) =>
- if (sym.isPackageClass) tp // short path
- else {
- val pre1 = this(pre)
- if ((pre1 eq pre) || !pre1.isStable) tp
- else singleType(pre1, sym)
- }
- case _ => super.mapOver(tp)
- }
-
- // Do not discard the types of existential ident's. The
- // symbol of the Ident itself cannot be listed in the
- // existential's parameters, so the resulting existential
- // type would be ill-formed.
- override def mapOver(tree: Tree) = tree match {
- case Ident(_) if tree.tpe.isStable => tree
- case _ => super.mapOver(tree)
- }
- }
-
- def singletonBounds(hi: Type) = TypeBounds.upper(intersectionType(List(hi, SingletonClass.tpe)))
-
- /** Might the given symbol be important when calculating the prefix
- * of a type? When tp.asSeenFrom(pre, clazz) is called on `tp`,
- * the result will be `tp` unchanged if `pre` is trivial and `clazz`
- * is a symbol such that isPossiblePrefix(clazz) == false.
- */
- def isPossiblePrefix(clazz: Symbol) = clazz.isClass && !clazz.isPackageClass
+ def isRawType(tp: Type) = !phase.erasedTypes && (tp match {
+ case TypeRef(_, sym, Nil) => isRawIfWithoutArgs(sym)
+ case _ => false
+ })
- private def skipPrefixOf(pre: Type, clazz: Symbol) = (
- (pre eq NoType) || (pre eq NoPrefix) || !isPossiblePrefix(clazz)
+ @deprecated("Use isRawType", "2.10.1") // presently used by sbt
+ def isRaw(sym: Symbol, args: List[Type]) = (
+ !phase.erasedTypes
+ && args.isEmpty
+ && isRawIfWithoutArgs(sym)
)
- /** A map to compute the asSeenFrom method */
- class AsSeenFromMap(pre: Type, clazz: Symbol) extends TypeMap with KeepOnlyTypeConstraints {
- var capturedSkolems: List[Symbol] = List()
- var capturedParams: List[Symbol] = List()
-
- override def mapOver(tree: Tree, giveup: ()=>Nothing): Tree = {
- object annotationArgRewriter extends TypeMapTransformer {
- private def canRewriteThis(sym: Symbol) = (
- (sym isNonBottomSubClass clazz)
- && (pre.widen.typeSymbol isNonBottomSubClass sym)
- && (pre.isStable || giveup())
- )
- // what symbol should really be used?
- private def newTermSym() = {
- val p = pre.typeSymbol
- p.owner.newValue(p.name.toTermName, p.pos) setInfo pre
- }
- /** Rewrite `This` trees in annotation argument trees */
- override def transform(tree: Tree): Tree = super.transform(tree) match {
- case This(_) if canRewriteThis(tree.symbol) => gen.mkAttributedQualifier(pre, newTermSym())
- case tree => tree
- }
- }
- annotationArgRewriter.transform(tree)
- }
-
- def stabilize(pre: Type, clazz: Symbol): Type = {
- capturedParams find (_.owner == clazz) match {
- case Some(qvar) => qvar.tpe
- case _ =>
- val qvar = clazz freshExistential nme.SINGLETON_SUFFIX setInfo singletonBounds(pre)
- capturedParams ::= qvar
- qvar.tpe
- }
- }
-
- def apply(tp: Type): Type =
- tp match {
- case ThisType(sym) =>
- def toPrefix(pre: Type, clazz: Symbol): Type =
- if (skipPrefixOf(pre, clazz)) tp
- else if ((sym isNonBottomSubClass clazz) &&
- (pre.widen.typeSymbol isNonBottomSubClass sym)) {
- val pre1 = pre match {
- case SuperType(thistp, _) => thistp
- case _ => pre
- }
- if (!(pre1.isStable ||
- pre1.typeSymbol.isPackageClass ||
- pre1.typeSymbol.isModuleClass && pre1.typeSymbol.isStatic)) {
- stabilize(pre1, sym)
- } else {
- pre1
- }
- } else {
- toPrefix(pre.baseType(clazz).prefix, clazz.owner)
- }
- toPrefix(pre, clazz)
- case SingleType(pre, sym) =>
- if (sym.isPackageClass) tp // short path
- else {
- val pre1 = this(pre)
- if (pre1 eq pre) tp
- else if (pre1.isStable) singleType(pre1, sym)
- else pre1.memberType(sym).resultType //todo: this should be rolled into existential abstraction
- }
- // AM: Martin, is this description accurate?
- // walk the owner chain of `clazz` (the original argument to asSeenFrom) until we find the type param's owner (while rewriting pre as we crawl up the owner chain)
- // once we're at the owner, extract the information that pre encodes about the type param,
- // by minimally subsuming pre to the type instance of the class that owns the type param,
- // the type we're looking for is the type instance's type argument at the position corresponding to the type parameter
- // optimisation: skip this type parameter if it's not owned by a class, as those params are not influenced by the prefix through which they are seen
- // (concretely: type params of anonymous type functions, which currently can only arise from normalising type aliases, are owned by the type alias of which they are the eta-expansion)
- // (skolems also aren't affected: they are ruled out by the isTypeParameter check)
- case TypeRef(prefix, sym, args) if (sym.isTypeParameter && sym.owner.isClass) =>
- def toInstance(pre: Type, clazz: Symbol): Type =
- if (skipPrefixOf(pre, clazz)) mapOver(tp)
- //@M! see test pos/tcpoly_return_overriding.scala why mapOver is necessary
- else {
- def throwError = abort("" + tp + sym.locationString + " cannot be instantiated from " + pre.widen)
-
- val symclazz = sym.owner
- if (symclazz == clazz && !pre.widen.isInstanceOf[TypeVar] && (pre.widen.typeSymbol isNonBottomSubClass symclazz)) {
- // have to deconst because it may be a Class[T].
- pre.baseType(symclazz).deconst match {
- case TypeRef(_, basesym, baseargs) =>
-
- def instParam(ps: List[Symbol], as: List[Type]): Type =
- if (ps.isEmpty) {
- if (forInteractive) {
- val saved = settings.uniqid.value
- try {
- settings.uniqid.value = true
- println("*** stale type parameter: " + tp + sym.locationString + " cannot be instantiated from " + pre.widen)
- println("*** confused with params: " + sym + " in " + sym.owner + " not in " + ps + " of " + basesym)
- println("*** stacktrace = ")
- new Error().printStackTrace()
- } finally settings.uniqid.value = saved
- instParamRelaxed(basesym.typeParams, baseargs)
- } else throwError
- } else if (sym eq ps.head)
- // @M! don't just replace the whole thing, might be followed by type application
- appliedType(as.head, args mapConserve (this)) // @M: was as.head
- else instParam(ps.tail, as.tail)
-
- /** Relaxed version of instParams which matches on names not symbols.
- * This is a last fallback in interactive mode because races in calls
- * from the IDE to the compiler may in rare cases lead to symbols referring
- * to type parameters that are no longer current.
- */
- def instParamRelaxed(ps: List[Symbol], as: List[Type]): Type =
- if (ps.isEmpty) throwError
- else if (sym.name == ps.head.name)
- // @M! don't just replace the whole thing, might be followed by type application
- appliedType(as.head, args mapConserve (this)) // @M: was as.head
- else instParamRelaxed(ps.tail, as.tail)
-
- //Console.println("instantiating " + sym + " from " + basesym + " with " + basesym.typeParams + " and " + baseargs+", pre = "+pre+", symclazz = "+symclazz);//DEBUG
- if (sameLength(basesym.typeParams, baseargs))
- instParam(basesym.typeParams, baseargs)
- else
- if (symclazz.tpe.parents exists typeIsErroneous)
-                     ErrorType // don't be too overzealous with throwing exceptions, see #2641
- else
- throw new Error(
- "something is wrong (wrong class file?): "+basesym+
- " with type parameters "+
- basesym.typeParams.map(_.name).mkString("[",",","]")+
- " gets applied to arguments "+baseargs.mkString("[",",","]")+", phase = "+phase)
- case ExistentialType(tparams, qtpe) =>
- capturedSkolems = capturedSkolems union tparams
- toInstance(qtpe, clazz)
- case t =>
- throwError
- }
- } else toInstance(pre.baseType(clazz).prefix, clazz.owner)
- }
- toInstance(pre, clazz)
- case _ =>
- mapOver(tp)
- }
- }
-
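What asSeenFrom computes can be pictured at the source level: the declared type of a member mentions type parameters and this-types of its owners, and viewing it from a concrete prefix instantiates them. A hypothetical illustration, with made-up names (Outer, Inner), not the compiler's own API:

    object AsSeenFromSketch {
      class Outer[A](val a: A) { class Inner { def get: A = a } }

      def main(args: Array[String]): Unit = {
        val o = new Outer(42)
        val i = new o.Inner
        val x: Int = i.get   // the declared type `A`, seen from prefix `o.type` and class Outer, is `Int`
        println(x)           // 42
      }
    }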
- /** A base class to compute all substitutions */
- abstract class SubstMap[T](from: List[Symbol], to: List[T]) extends TypeMap {
- assert(sameLength(from, to), "Unsound substitution from "+ from +" to "+ to)
-
- /** Are `sym` and `sym1` the same? Can be tuned by subclasses. */
- protected def matches(sym: Symbol, sym1: Symbol): Boolean = sym eq sym1
-
- /** Map target to type, can be tuned by subclasses */
- protected def toType(fromtp: Type, tp: T): Type
-
- protected def renameBoundSyms(tp: Type): Type = tp match {
- case MethodType(ps, restp) =>
- createFromClonedSymbols(ps, restp)((ps1, tp1) => copyMethodType(tp, ps1, renameBoundSyms(tp1)))
- case PolyType(bs, restp) =>
- createFromClonedSymbols(bs, restp)((ps1, tp1) => PolyType(ps1, renameBoundSyms(tp1)))
- case ExistentialType(bs, restp) =>
- createFromClonedSymbols(bs, restp)(newExistentialType)
- case _ =>
- tp
- }
-
- def apply(tp0: Type): Type = if (from.isEmpty) tp0 else {
- @tailrec def subst(tp: Type, sym: Symbol, from: List[Symbol], to: List[T]): Type =
- if (from.isEmpty) tp
- // else if (to.isEmpty) error("Unexpected substitution on '%s': from = %s but to == Nil".format(tp, from))
- else if (matches(from.head, sym)) toType(tp, to.head)
- else subst(tp, sym, from.tail, to.tail)
-
- val boundSyms = tp0.boundSyms
- val tp1 = if (boundSyms.nonEmpty && (boundSyms exists from.contains)) renameBoundSyms(tp0) else tp0
- val tp = mapOver(tp1)
-
- tp match {
- // @M
- // 1) arguments must also be substituted (even when the "head" of the
- // applied type has already been substituted)
- // example: (subst RBound[RT] from [type RT,type RBound] to
- // [type RT&,type RBound&]) = RBound&[RT&]
- // 2) avoid loops (which occur because alpha-conversion is
- // not performed properly imo)
- // e.g. if in class Iterable[a] there is a new Iterable[(a,b)],
- // we must replace the a in Iterable[a] by (a,b)
- // (must not recurse --> loops)
- // 3) replacing m by List in m[Int] should yield List[Int], not just List
- case TypeRef(NoPrefix, sym, args) =>
- appliedType(subst(tp, sym, from, to), args) // if args.isEmpty, appliedType is the identity
- case SingleType(NoPrefix, sym) =>
- subst(tp, sym, from, to)
- case _ =>
- tp
- }
- }
- }
-
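The heart of the SubstMap removed above is a lockstep walk over the parallel `from` and `to` lists: return the replacement paired with the first matching symbol, or fall back to the original. A standalone sketch of just that lookup (SubstSketch and its names are illustrative):

    object SubstSketch {
      @annotation.tailrec
      def subst[A, B](key: A, from: List[A], to: List[B], orElse: B): B =
        if (from.isEmpty) orElse
        else if (from.head == key) to.head
        else subst(key, from.tail, to.tail, orElse)

      def main(args: Array[String]): Unit = {
        println(subst("T", List("S", "T"), List("Int", "String"), "T"))  // String
        println(subst("U", List("S", "T"), List("Int", "String"), "U"))  // U (no match: identity)
      }
    }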
- /** A map to implement the `substSym` method. */
- class SubstSymMap(from: List[Symbol], to: List[Symbol]) extends SubstMap(from, to) {
- def this(pairs: (Symbol, Symbol)*) = this(pairs.toList.map(_._1), pairs.toList.map(_._2))
-
- protected def toType(fromtp: Type, sym: Symbol) = fromtp match {
- case TypeRef(pre, _, args) => copyTypeRef(fromtp, pre, sym, args)
- case SingleType(pre, _) => singleType(pre, sym)
- }
- override def apply(tp: Type): Type = if (from.isEmpty) tp else {
- @tailrec def subst(sym: Symbol, from: List[Symbol], to: List[Symbol]): Symbol =
- if (from.isEmpty) sym
- // else if (to.isEmpty) error("Unexpected substitution on '%s': from = %s but to == Nil".format(sym, from))
- else if (matches(from.head, sym)) to.head
- else subst(sym, from.tail, to.tail)
- tp match {
- case TypeRef(pre, sym, args) if pre ne NoPrefix =>
- val newSym = subst(sym, from, to)
- // mapOver takes care of subst'ing in args
- mapOver ( if (sym eq newSym) tp else copyTypeRef(tp, pre, newSym, args) )
- // assert(newSym.typeParams.length == sym.typeParams.length, "typars mismatch in SubstSymMap: "+(sym, sym.typeParams, newSym, newSym.typeParams))
- case SingleType(pre, sym) if pre ne NoPrefix =>
- val newSym = subst(sym, from, to)
- mapOver( if (sym eq newSym) tp else singleType(pre, newSym) )
- case _ =>
- super.apply(tp)
- }
- }
-
- object mapTreeSymbols extends TypeMapTransformer {
- val strictCopy = newStrictTreeCopier
-
- def termMapsTo(sym: Symbol) = from indexOf sym match {
- case -1 => None
- case idx => Some(to(idx))
- }
-
- // if tree.symbol is mapped to another symbol, passes the new symbol into the
- // constructor `trans` and sets the symbol and the type on the resulting tree.
- def transformIfMapped(tree: Tree)(trans: Symbol => Tree) = termMapsTo(tree.symbol) match {
- case Some(toSym) => trans(toSym) setSymbol toSym setType tree.tpe
- case None => tree
- }
-
- // changes trees which refer to one of the mapped symbols. trees are copied before attributes are modified.
- override def transform(tree: Tree) = {
- // super.transform maps symbol references in the types of `tree`. it also copies trees where necessary.
- super.transform(tree) match {
- case id @ Ident(_) =>
- transformIfMapped(id)(toSym =>
- strictCopy.Ident(id, toSym.name))
-
- case sel @ Select(qual, name) =>
- transformIfMapped(sel)(toSym =>
- strictCopy.Select(sel, qual, toSym.name))
-
- case tree => tree
- }
- }
- }
- override def mapOver(tree: Tree, giveup: ()=>Nothing): Tree = {
- mapTreeSymbols.transform(tree)
- }
- }
-
- /** A map to implement the `subst` method. */
- class SubstTypeMap(from: List[Symbol], to: List[Type])
- extends SubstMap(from, to) {
- protected def toType(fromtp: Type, tp: Type) = tp
-
- override def mapOver(tree: Tree, giveup: () => Nothing): Tree = {
- object trans extends TypeMapTransformer {
- override def transform(tree: Tree) = tree match {
- case Ident(name) =>
- from indexOf tree.symbol match {
- case -1 => super.transform(tree)
- case idx =>
- val totpe = to(idx)
- if (totpe.isStable) tree.duplicate setType totpe
- else giveup()
- }
- case _ =>
- super.transform(tree)
- }
- }
- trans.transform(tree)
- }
- }
-
- /** A map to implement the `substThis` method. */
- class SubstThisMap(from: Symbol, to: Type) extends TypeMap {
- def apply(tp: Type): Type = tp match {
- case ThisType(sym) if (sym == from) => to
- case _ => mapOver(tp)
- }
- }
-
- class SubstWildcardMap(from: List[Symbol]) extends TypeMap {
- def apply(tp: Type): Type = try {
- tp match {
- case TypeRef(_, sym, _) if from contains sym =>
- BoundedWildcardType(sym.info.bounds)
- case _ =>
- mapOver(tp)
- }
- } catch {
- case ex: MalformedType =>
- WildcardType
- }
- }
-
-// dependent method types
- object IsDependentCollector extends TypeCollector(false) {
- def traverse(tp: Type) {
- if (tp.isImmediatelyDependent) result = true
- else if (!result) mapOver(tp)
- }
- }
-
- object ApproximateDependentMap extends TypeMap {
- def apply(tp: Type): Type =
- if (tp.isImmediatelyDependent) WildcardType
- else mapOver(tp)
- }
-
- /** Note: This map is needed even for non-dependent method types, despite what the name might imply.
- */
- class InstantiateDependentMap(params: List[Symbol], actuals0: List[Type]) extends TypeMap with KeepOnlyTypeConstraints {
- private val actuals = actuals0.toIndexedSeq
- private val existentials = new Array[Symbol](actuals.size)
- def existentialsNeeded: List[Symbol] = existentials.filter(_ ne null).toList
-
- private object StableArg {
- def unapply(param: Symbol) = Arg unapply param map actuals filter (tp =>
- tp.isStable && (tp.typeSymbol != NothingClass)
- )
- }
- private object Arg {
- def unapply(param: Symbol) = Some(params indexOf param) filter (_ >= 0)
- }
-
- def apply(tp: Type): Type = mapOver(tp) match {
- // unsound to replace args by unstable actual #3873
- case SingleType(NoPrefix, StableArg(arg)) => arg
- // (soundly) expand type alias selections on implicit arguments,
- // see depmet_implicit_oopsla* test cases -- typically, `param.isImplicit`
- case tp1 @ TypeRef(SingleType(NoPrefix, Arg(pid)), sym, targs) =>
- val arg = actuals(pid)
- val res = typeRef(arg, sym, targs)
- if (res.typeSymbolDirect.isAliasType) res.dealias else tp1
- // don't return the original `tp`, which may be different from `tp1`,
- // due to dropping annotations
- case tp1 => tp1
- }
-
- /* Return the type symbol for referencing a parameter inside the existential quantifier.
- * (Only needed if the actual is unstable.)
- */
- private def existentialFor(pid: Int) = {
- if (existentials(pid) eq null) {
- val param = params(pid)
- existentials(pid) = (
- param.owner.newExistential(param.name.toTypeName append nme.SINGLETON_SUFFIX, param.pos, param.flags)
- setInfo singletonBounds(actuals(pid))
- )
- }
- existentials(pid)
- }
-
- //AM propagate more info to annotations -- this seems a bit ad-hoc... (based on code by spoon)
- override def mapOver(arg: Tree, giveup: ()=>Nothing): Tree = {
- // TODO: this should be simplified; in the stable case, one can
- // probably just use an Ident to the tree.symbol.
- //
- // @PP: That leads to failure here, where stuff no longer has type
- // 'String @Annot("stuff")' but 'String @Annot(x)'.
- //
- // def m(x: String): String @Annot(x) = x
- // val stuff = m("stuff")
- //
- // (TODO cont.) Why an existential in the non-stable case?
- //
- // @PP: In the following:
- //
- // def m = { val x = "three" ; val y: String @Annot(x) = x; y }
- //
- // m is typed as 'String @Annot(x) forSome { val x: String }'.
- //
- // Both examples are from run/constrained-types.scala.
- object treeTrans extends Transformer {
- override def transform(tree: Tree): Tree = tree.symbol match {
- case StableArg(actual) =>
- gen.mkAttributedQualifier(actual, tree.symbol)
- case Arg(pid) =>
- val sym = existentialFor(pid)
- Ident(sym) copyAttrs tree setType typeRef(NoPrefix, sym, Nil)
- case _ =>
- super.transform(tree)
- }
- }
- treeTrans transform arg
- }
- }
-
- object StripAnnotationsMap extends TypeMap {
- def apply(tp: Type): Type = tp match {
- case AnnotatedType(_, atp, _) =>
- mapOver(atp)
- case tp =>
- mapOver(tp)
- }
- }
-
- /** A map to convert every occurrence of a wildcard type to a fresh
- * type variable */
- object wildcardToTypeVarMap extends TypeMap {
- def apply(tp: Type): Type = tp match {
- case WildcardType =>
- TypeVar(tp, new TypeConstraint)
- case BoundedWildcardType(bounds) =>
- TypeVar(tp, new TypeConstraint(bounds))
- case _ =>
- mapOver(tp)
- }
- }
-
-   /** A map to convert every occurrence of a type variable to its origin type. */
- object typeVarToOriginMap extends TypeMap {
- def apply(tp: Type): Type = tp match {
- case TypeVar(origin, _) => origin
- case _ => mapOver(tp)
- }
- }
-
- /** A map to implement the `contains` method. */
- class ContainsCollector(sym: Symbol) extends TypeCollector(false) {
- def traverse(tp: Type) {
- if (!result) {
- tp.normalize match {
- case TypeRef(_, sym1, _) if (sym == sym1) => result = true
- case SingleType(_, sym1) if (sym == sym1) => result = true
- case _ => mapOver(tp)
- }
- }
- }
-
- override def mapOver(arg: Tree) = {
- for (t <- arg) {
- traverse(t.tpe)
- if (t.symbol == sym)
- result = true
- }
- arg
- }
- }
-
- /** A map to implement the `contains` method. */
- class ContainsTypeCollector(t: Type) extends TypeCollector(false) {
- def traverse(tp: Type) {
- if (!result) {
- if (tp eq t) result = true
- else mapOver(tp)
- }
- }
- override def mapOver(arg: Tree) = {
- for (t <- arg)
- traverse(t.tpe)
-
- arg
- }
- }
-
- /** A map to implement the `filter` method. */
- class FilterTypeCollector(p: Type => Boolean) extends TypeCollector[List[Type]](Nil) {
- def withFilter(q: Type => Boolean) = new FilterTypeCollector(tp => p(tp) && q(tp))
-
- override def collect(tp: Type) = super.collect(tp).reverse
-
- def traverse(tp: Type) {
- if (p(tp)) result ::= tp
- mapOver(tp)
- }
- }
-
- /** A map to implement the `collect` method. */
- class CollectTypeCollector[T](pf: PartialFunction[Type, T]) extends TypeCollector[List[T]](Nil) {
- override def collect(tp: Type) = super.collect(tp).reverse
-
- def traverse(tp: Type) {
- if (pf.isDefinedAt(tp)) result ::= pf(tp)
- mapOver(tp)
- }
- }
-
- class ForEachTypeTraverser(f: Type => Unit) extends TypeTraverser {
- def traverse(tp: Type) {
- f(tp)
- mapOver(tp)
- }
- }
-
-   /** A map to implement the `find` method. */
- class FindTypeCollector(p: Type => Boolean) extends TypeCollector[Option[Type]](None) {
- def traverse(tp: Type) {
- if (result.isEmpty) {
- if (p(tp)) result = Some(tp)
- mapOver(tp)
- }
- }
- }
-
-   /** A map to implement the `isErroneous` method. */
- object ErroneousCollector extends TypeCollector(false) {
- def traverse(tp: Type) {
- if (!result) {
- result = tp.isError
- mapOver(tp)
- }
- }
- }
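The collectors removed above (ContainsCollector through ErroneousCollector) all specialize one pattern from TypeCollector: reset an accumulator, traverse, then read the accumulator back. A self-contained sketch of that pattern over a toy tree (Tree, Num, Add and Collector are made-up stand-ins for Type and TypeCollector):

    object CollectorSketch {
      sealed trait Tree
      case class Num(n: Int) extends Tree
      case class Add(l: Tree, r: Tree) extends Tree

      abstract class Collector[T](initial: T) {
        var result: T = _
        def traverse(t: Tree): Unit
        def collect(t: Tree): T = { result = initial; traverse(t); result }
      }

      object numCollector extends Collector[List[Int]](Nil) {
        def traverse(t: Tree): Unit = t match {
          case Num(n)    => result ::= n
          case Add(l, r) => traverse(l); traverse(r)
        }
        override def collect(t: Tree) = super.collect(t).reverse  // same reversal as FilterTypeCollector
      }

      def main(args: Array[String]): Unit =
        println(numCollector.collect(Add(Num(1), Add(Num(2), Num(3)))))  // List(1, 2, 3)
    }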
+ def singletonBounds(hi: Type) = TypeBounds.upper(intersectionType(List(hi, SingletonClass.tpe)))
/**
* A more persistent version of `Type#memberType` which does not require
@@ -5059,204 +3800,10 @@ trait Types extends api.Types { self: SymbolTable =>
result
}
- /** The most deeply nested owner that contains all the symbols
- * of thistype or prefixless typerefs/singletype occurrences in given type.
- */
- private def commonOwner(t: Type): Symbol = commonOwner(t :: Nil)
-
- /** The most deeply nested owner that contains all the symbols
- * of thistype or prefixless typerefs/singletype occurrences in given list
- * of types.
- */
- private def commonOwner(tps: List[Type]): Symbol = {
- if (tps.isEmpty) NoSymbol
- else {
- commonOwnerMap.clear()
- tps foreach (commonOwnerMap traverse _)
- if (commonOwnerMap.result ne null) commonOwnerMap.result else NoSymbol
- }
- }
-
- protected def commonOwnerMap: CommonOwnerMap = commonOwnerMapObj
-
- protected class CommonOwnerMap extends TypeTraverserWithResult[Symbol] {
- var result: Symbol = _
-
- def clear() { result = null }
-
- private def register(sym: Symbol) {
- // First considered type is the trivial result.
- if ((result eq null) || (sym eq NoSymbol))
- result = sym
- else
- while ((result ne NoSymbol) && (result ne sym) && !(sym isNestedIn result))
- result = result.owner
- }
- def traverse(tp: Type) = tp.normalize match {
- case ThisType(sym) => register(sym)
- case TypeRef(NoPrefix, sym, args) => register(sym.owner) ; args foreach traverse
- case SingleType(NoPrefix, sym) => register(sym.owner)
- case _ => mapOver(tp)
- }
- }
-
- private lazy val commonOwnerMapObj = new CommonOwnerMap
-
class MissingAliasControl extends ControlThrowable
val missingAliasException = new MissingAliasControl
class MissingTypeControl extends ControlThrowable
- object adaptToNewRunMap extends TypeMap {
-
- private def adaptToNewRun(pre: Type, sym: Symbol): Symbol = {
- if (phase.flatClasses || sym.isRootSymbol || (pre eq NoPrefix) || (pre eq NoType) || sym.isPackageClass)
- sym
- else if (sym.isModuleClass) {
- val sourceModule1 = adaptToNewRun(pre, sym.sourceModule)
-
- sourceModule1.moduleClass orElse sourceModule1.initialize.moduleClass orElse {
- val msg = "Cannot adapt module class; sym = %s, sourceModule = %s, sourceModule.moduleClass = %s => sourceModule1 = %s, sourceModule1.moduleClass = %s"
- debuglog(msg.format(sym, sym.sourceModule, sym.sourceModule.moduleClass, sourceModule1, sourceModule1.moduleClass))
- sym
- }
- }
- else {
- var rebind0 = pre.findMember(sym.name, BRIDGE, 0, true) orElse {
- if (sym.isAliasType) throw missingAliasException
- debugwarn(pre+"."+sym+" does no longer exist, phase = "+phase)
- throw new MissingTypeControl // For build manager and presentation compiler purposes
- }
- /** The two symbols have the same fully qualified name */
- def corresponds(sym1: Symbol, sym2: Symbol): Boolean =
- sym1.name == sym2.name && (sym1.isPackageClass || corresponds(sym1.owner, sym2.owner))
- if (!corresponds(sym.owner, rebind0.owner)) {
- debuglog("ADAPT1 pre = "+pre+", sym = "+sym.fullLocationString+", rebind = "+rebind0.fullLocationString)
- val bcs = pre.baseClasses.dropWhile(bc => !corresponds(bc, sym.owner));
- if (bcs.isEmpty)
-             assert(pre.typeSymbol.isRefinementClass, pre) // if pre is a refinement class it might be a structural type => OK to leave it in.
- else
- rebind0 = pre.baseType(bcs.head).member(sym.name)
- debuglog(
- "ADAPT2 pre = " + pre +
- ", bcs.head = " + bcs.head +
- ", sym = " + sym.fullLocationString +
- ", rebind = " + rebind0.fullLocationString
- )
- }
- rebind0.suchThat(sym => sym.isType || sym.isStable) orElse {
- debuglog("" + phase + " " +phase.flatClasses+sym.owner+sym.name+" "+sym.isType)
- throw new MalformedType(pre, sym.nameString)
- }
- }
- }
- def apply(tp: Type): Type = tp match {
- case ThisType(sym) =>
- try {
- val sym1 = adaptToNewRun(sym.owner.thisType, sym)
- if (sym1 == sym) tp else ThisType(sym1)
- } catch {
- case ex: MissingTypeControl =>
- tp
- }
- case SingleType(pre, sym) =>
- if (sym.isPackage) tp
- else {
- val pre1 = this(pre)
- try {
- val sym1 = adaptToNewRun(pre1, sym)
- if ((pre1 eq pre) && (sym1 eq sym)) tp
- else singleType(pre1, sym1)
- } catch {
- case _: MissingTypeControl =>
- tp
- }
- }
- case TypeRef(pre, sym, args) =>
- if (sym.isPackageClass) tp
- else {
- val pre1 = this(pre)
- val args1 = args mapConserve (this)
- try {
- val sym1 = adaptToNewRun(pre1, sym)
- if ((pre1 eq pre) && (sym1 eq sym) && (args1 eq args)/* && sym.isExternal*/) {
- tp
- } else if (sym1 == NoSymbol) {
- debugwarn("adapt fail: "+pre+" "+pre1+" "+sym)
- tp
- } else {
- copyTypeRef(tp, pre1, sym1, args1)
- }
- } catch {
- case ex: MissingAliasControl =>
- apply(tp.dealias)
- case _: MissingTypeControl =>
- tp
- }
- }
- case MethodType(params, restp) =>
- val restp1 = this(restp)
- if (restp1 eq restp) tp
- else copyMethodType(tp, params, restp1)
- case NullaryMethodType(restp) =>
- val restp1 = this(restp)
- if (restp1 eq restp) tp
- else NullaryMethodType(restp1)
- case PolyType(tparams, restp) =>
- val restp1 = this(restp)
- if (restp1 eq restp) tp
- else PolyType(tparams, restp1)
-
- // Lukas: we need to check (together) whether we should also include parameter types
- // of PolyType and MethodType in adaptToNewRun
-
- case ClassInfoType(parents, decls, clazz) =>
- if (clazz.isPackageClass) tp
- else {
- val parents1 = parents mapConserve (this)
- if (parents1 eq parents) tp
- else ClassInfoType(parents1, decls, clazz)
- }
- case RefinedType(parents, decls) =>
- val parents1 = parents mapConserve (this)
- if (parents1 eq parents) tp
- else refinedType(parents1, tp.typeSymbol.owner, decls, tp.typeSymbol.owner.pos)
- case SuperType(_, _) => mapOver(tp)
- case TypeBounds(_, _) => mapOver(tp)
- case TypeVar(_, _) => mapOver(tp)
- case AnnotatedType(_,_,_) => mapOver(tp)
- case NotNullType(_) => mapOver(tp)
- case ExistentialType(_, _) => mapOver(tp)
- case _ => tp
- }
- }
-
- class SubTypePair(val tp1: Type, val tp2: Type) {
- override def hashCode = tp1.hashCode * 41 + tp2.hashCode
- override def equals(other: Any) = other match {
- case stp: SubTypePair =>
- // suspend TypeVars in types compared by =:=,
- // since we don't want to mutate them simply to check whether a subtype test is pending
-        // since we don't want to mutate them simply to check whether a subtype test is pending.
-        // In addition to making subtyping "more correct" for type vars,
- // this method is only called when subtyping hits a recursion threshold (subsametypeRecursions >= LogPendingSubTypesThreshold)
- def suspend(tp: Type) =
- if (tp.isGround) null else suspendTypeVarsInType(tp)
- def revive(suspension: List[TypeVar]) =
- if (suspension ne null) suspension foreach (_.suspended = false)
-
- val suspensions = Array(tp1, stp.tp1, tp2, stp.tp2) map suspend
-
- val sameTypes = (tp1 =:= stp.tp1) && (tp2 =:= stp.tp2)
-
- suspensions foreach revive
-
- sameTypes
- case _ =>
- false
- }
- override def toString = tp1+" <:<? "+tp2
- }
-
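SubTypePair.equals above relies on a suspend/compare/always-revive discipline so that the =:= checks cannot leave constraints behind on the type variables involved. The general shape of that pattern, sketched with a plain mutable flag instead of TypeVar.suspended (all names here are illustrative):

    object SuspendSketch {
      final class Suspendable { var suspended = false }

      def withSuspended[A](xs: List[Suspendable])(body: => A): A = {
        val toRevive = xs filterNot (_.suspended)   // only revive what this call actually suspended
        toRevive foreach (_.suspended = true)
        try body
        finally toRevive foreach (_.suspended = false)
      }

      def main(args: Array[String]): Unit = {
        val vs = List(new Suspendable, new Suspendable)
        withSuspended(vs) { println(vs forall (_.suspended)) } // true inside the block
        println(vs exists (_.suspended))                       // false afterwards
      }
    }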
// Helper Methods -------------------------------------------------------------
/** The maximum allowable depth of lubs or glbs over types `ts`.
@@ -5272,47 +3819,15 @@ trait Types extends api.Types { self: SymbolTable =>
* the maximum depth `bd` of all types in the base type sequences of these types.
*/
private def lubDepthAdjust(td: Int, bd: Int): Int =
- if (settings.XfullLubs.value) bd
+ if (settings.XfullLubs) bd
else if (bd <= 3) bd
else if (bd <= 5) td max (bd - 1)
else if (bd <= 7) td max (bd - 2)
else (td - 1) max (bd - 3)
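A quick worked example of the clamping performed by lubDepthAdjust, as a standalone copy of the same thresholds (omitting the XfullLubs escape hatch):

    object LubDepthSketch {
      def lubDepthAdjust(td: Int, bd: Int): Int =
        if (bd <= 3) bd
        else if (bd <= 5) td max (bd - 1)
        else if (bd <= 7) td max (bd - 2)
        else (td - 1) max (bd - 3)

      def main(args: Array[String]): Unit = {
        println(lubDepthAdjust(2, 3))   // 3: shallow base type sequences are used as-is
        println(lubDepthAdjust(2, 6))   // 4: deeper ones are discounted
        println(lubDepthAdjust(2, 10))  // 7
      }
    }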
- /** The maximum depth of type `tp` */
- def typeDepth(tp: Type): Int = tp match {
- case TypeRef(pre, sym, args) =>
- typeDepth(pre) max typeDepth(args) + 1
- case RefinedType(parents, decls) =>
- typeDepth(parents) max typeDepth(decls.toList.map(_.info)) + 1
- case TypeBounds(lo, hi) =>
- typeDepth(lo) max typeDepth(hi)
- case MethodType(paramtypes, result) =>
- typeDepth(result)
- case NullaryMethodType(result) =>
- typeDepth(result)
- case PolyType(tparams, result) =>
- typeDepth(result) max typeDepth(tparams map (_.info)) + 1
- case ExistentialType(tparams, result) =>
- typeDepth(result) max typeDepth(tparams map (_.info)) + 1
- case _ =>
- 1
- }
-
- private def maxDepth(tps: List[Type], by: Type => Int): Int = {
-    //OPT replaced with tail-recursive function to save on #closures
- // was:
- // var d = 0
- // for (tp <- tps) d = d max by(tp) //!!!OPT!!!
- // d
- def loop(tps: List[Type], acc: Int): Int = tps match {
- case tp :: rest => loop(rest, acc max by(tp))
- case _ => acc
- }
- loop(tps, 0)
- }
-
- private def typeDepth(tps: List[Type]): Int = maxDepth(tps, typeDepth)
- private def baseTypeSeqDepth(tps: List[Type]): Int = maxDepth(tps, _.baseTypeSeqDepth)
+ private def symTypeDepth(syms: List[Symbol]): Int = typeDepth(syms map (_.info))
+ private def typeDepth(tps: List[Type]): Int = maxDepth(tps)
+ private def baseTypeSeqDepth(tps: List[Type]): Int = maxBaseTypeSeqDepth(tps)
/** Is intersection of given types populated? That is,
* for all types tp1, tp2 in intersection
@@ -5325,32 +3840,35 @@ trait Types extends api.Types { self: SymbolTable =>
def isPopulated(tp1: Type, tp2: Type): Boolean = {
def isConsistent(tp1: Type, tp2: Type): Boolean = (tp1, tp2) match {
case (TypeRef(pre1, sym1, args1), TypeRef(pre2, sym2, args2)) =>
- assert(sym1 == sym2)
- pre1 =:= pre2 &&
- forall3(args1, args2, sym1.typeParams) { (arg1, arg2, tparam) =>
- //if (tparam.variance == 0 && !(arg1 =:= arg2)) Console.println("inconsistent: "+arg1+"!="+arg2)//DEBUG
- if (tparam.variance == 0) arg1 =:= arg2
- else if (arg1.isInstanceOf[TypeVar])
- // if left-hand argument is a typevar, make it compatible with variance
- // this is for more precise pattern matching
- // todo: work this in the spec of this method
- // also: think what happens if there are embedded typevars?
- if (tparam.variance < 0) arg1 <:< arg2 else arg2 <:< arg1
- else true
- }
+ assert(sym1 == sym2, (sym1, sym2))
+ ( pre1 =:= pre2
+ && forall3(args1, args2, sym1.typeParams) { (arg1, arg2, tparam) =>
+ // if left-hand argument is a typevar, make it compatible with variance
+ // this is for more precise pattern matching
+ // todo: work this in the spec of this method
+ // also: think what happens if there are embedded typevars?
+ if (tparam.variance.isInvariant)
+ arg1 =:= arg2
+ else !arg1.isInstanceOf[TypeVar] || {
+ if (tparam.variance.isContravariant) arg1 <:< arg2
+ else arg2 <:< arg1
+ }
+ }
+ )
case (et: ExistentialType, _) =>
et.withTypeVars(isConsistent(_, tp2))
case (_, et: ExistentialType) =>
et.withTypeVars(isConsistent(tp1, _))
}
- def check(tp1: Type, tp2: Type) =
+ def check(tp1: Type, tp2: Type) = (
if (tp1.typeSymbol.isClass && tp1.typeSymbol.hasFlag(FINAL))
tp1 <:< tp2 || isNumericValueClass(tp1.typeSymbol) && isNumericValueClass(tp2.typeSymbol)
else tp1.baseClasses forall (bc =>
tp2.baseTypeIndex(bc) < 0 || isConsistent(tp1.baseType(bc), tp2.baseType(bc)))
+ )
-     check(tp1, tp2)/* && check(tp2, tp1)*/ // need to investigate why this can't be made symmetric -- neg/gadts1 fails, and run/existials also.
+ check(tp1, tp2) && check(tp2, tp1)
}
/** Does a pattern of type `patType` need an outer test when executed against
@@ -5393,85 +3911,14 @@ trait Types extends api.Types { self: SymbolTable =>
}
}
- private var subsametypeRecursions: Int = 0
-
- private def isUnifiable(pre1: Type, pre2: Type) =
- (beginsWithTypeVarOrIsRefined(pre1) || beginsWithTypeVarOrIsRefined(pre2)) && (pre1 =:= pre2)
-
- /** Returns true iff we are past phase specialize,
- * sym1 and sym2 are two existential skolems with equal names and bounds,
- * and pre1 and pre2 are equal prefixes
- */
- private def isSameSpecializedSkolem(sym1: Symbol, sym2: Symbol, pre1: Type, pre2: Type) = {
- sym1.isExistentialSkolem && sym2.isExistentialSkolem &&
- sym1.name == sym2.name &&
- phase.specialized &&
- sym1.info =:= sym2.info &&
- pre1 =:= pre2
- }
-
- private def isSubPre(pre1: Type, pre2: Type, sym: Symbol) =
- if ((pre1 ne pre2) && (pre1 ne NoPrefix) && (pre2 ne NoPrefix) && pre1 <:< pre2) {
- if (settings.debug.value) println(s"new isSubPre $sym: $pre1 <:< $pre2")
- true
- } else
- false
-
- private def equalSymsAndPrefixes(sym1: Symbol, pre1: Type, sym2: Symbol, pre2: Type): Boolean =
- if (sym1 == sym2) sym1.hasPackageFlag || sym1.owner.hasPackageFlag || phase.erasedTypes || pre1 =:= pre2
- else (sym1.name == sym2.name) && isUnifiable(pre1, pre2)
-
- /** Do `tp1` and `tp2` denote equivalent types? */
- def isSameType(tp1: Type, tp2: Type): Boolean = try {
- if (Statistics.canEnable) Statistics.incCounter(sametypeCount)
- subsametypeRecursions += 1
- //OPT cutdown on Function0 allocation
- //was:
-// undoLog undoUnless {
-// isSameType1(tp1, tp2)
-// }
-
- undoLog.lock()
- try {
- val before = undoLog.log
- var result = false
-
- try result = {
- isSameType1(tp1, tp2)
- } finally if (!result) undoLog.undoTo(before)
- result
- } finally undoLog.unlock()
- } finally {
- subsametypeRecursions -= 1
- // XXX AM TODO: figure out when it is safe and needed to clear the log -- the commented approach below is too eager (it breaks #3281, #3866)
- // it doesn't help to keep separate recursion counts for the three methods that now share it
- // if (subsametypeRecursions == 0) undoLog.clear()
- }
-
- def isDifferentType(tp1: Type, tp2: Type): Boolean = try {
- subsametypeRecursions += 1
- undoLog undo { // undo type constraints that arise from operations in this block
- !isSameType1(tp1, tp2)
- }
- } finally {
- subsametypeRecursions -= 1
- // XXX AM TODO: figure out when it is safe and needed to clear the log -- the commented approach below is too eager (it breaks #3281, #3866)
- // it doesn't help to keep separate recursion counts for the three methods that now share it
- // if (subsametypeRecursions == 0) undoLog.clear()
- }
-
- def isDifferentTypeConstructor(tp1: Type, tp2: Type): Boolean = tp1 match {
- case TypeRef(pre1, sym1, _) =>
- tp2 match {
- case TypeRef(pre2, sym2, _) => sym1 != sym2 || isDifferentType(pre1, pre2)
- case _ => true
- }
- case _ => true
- }
-
- def normalizePlus(tp: Type) =
+ def normalizePlus(tp: Type) = (
if (isRawType(tp)) rawToExistential(tp)
- else tp.normalize
+ else tp.normalize match {
+ // Unify the two representations of module classes
+ case st @ SingleType(_, sym) if sym.isModule => st.underlying.normalize
+ case _ => tp.normalize
+ }
+ )
/*
todo: change to:
@@ -5483,288 +3930,7 @@ trait Types extends api.Types { self: SymbolTable =>
case _ => tp.normalize
}
*/
-/*
- private def isSameType0(tp1: Type, tp2: Type): Boolean = {
- if (tp1 eq tp2) return true
- ((tp1, tp2) match {
- case (ErrorType, _) => true
- case (WildcardType, _) => true
- case (_, ErrorType) => true
- case (_, WildcardType) => true
-
- case (NoType, _) => false
- case (NoPrefix, _) => tp2.typeSymbol.isPackageClass
- case (_, NoType) => false
- case (_, NoPrefix) => tp1.typeSymbol.isPackageClass
-
- case (ThisType(sym1), ThisType(sym2))
- if (sym1 == sym2) =>
- true
- case (SingleType(pre1, sym1), SingleType(pre2, sym2))
- if (equalSymsAndPrefixes(sym1, pre1, sym2, pre2)) =>
- true
-/*
- case (SingleType(pre1, sym1), ThisType(sym2))
- if (sym1.isModule &&
- sym1.moduleClass == sym2 &&
- pre1 =:= sym2.owner.thisType) =>
- true
- case (ThisType(sym1), SingleType(pre2, sym2))
- if (sym2.isModule &&
- sym2.moduleClass == sym1 &&
- pre2 =:= sym1.owner.thisType) =>
- true
-*/
- case (ConstantType(value1), ConstantType(value2)) =>
- value1 == value2
- case (TypeRef(pre1, sym1, args1), TypeRef(pre2, sym2, args2)) =>
- equalSymsAndPrefixes(sym1, pre1, sym2, pre2) &&
- ((tp1.isHigherKinded && tp2.isHigherKinded && tp1.normalize =:= tp2.normalize) ||
- isSameTypes(args1, args2))
- // @M! normalize reduces higher-kinded case to PolyType's
- case (RefinedType(parents1, ref1), RefinedType(parents2, ref2)) =>
- def isSubScope(s1: Scope, s2: Scope): Boolean = s2.toList.forall {
- sym2 =>
- var e1 = s1.lookupEntry(sym2.name)
- (e1 ne null) && {
- val substSym = sym2.info.substThis(sym2.owner, e1.sym.owner.thisType)
- var isEqual = false
- while (!isEqual && (e1 ne null)) {
- isEqual = e1.sym.info =:= substSym
- e1 = s1.lookupNextEntry(e1)
- }
- isEqual
- }
- }
- //Console.println("is same? " + tp1 + " " + tp2 + " " + tp1.typeSymbol.owner + " " + tp2.typeSymbol.owner)//DEBUG
- isSameTypes(parents1, parents2) && isSubScope(ref1, ref2) && isSubScope(ref2, ref1)
- case (MethodType(params1, res1), MethodType(params2, res2)) =>
- // new dependent types: probably fix this, use substSym as done for PolyType
- (isSameTypes(tp1.paramTypes, tp2.paramTypes) &&
- res1 =:= res2 &&
- tp1.isImplicit == tp2.isImplicit)
- case (PolyType(tparams1, res1), PolyType(tparams2, res2)) =>
- // assert((tparams1 map (_.typeParams.length)) == (tparams2 map (_.typeParams.length)))
- (tparams1.length == tparams2.length) && (tparams1 corresponds tparams2)(_.info =:= _.info.substSym(tparams2, tparams1)) && // @M looks like it might suffer from same problem as #2210
- res1 =:= res2.substSym(tparams2, tparams1)
- case (ExistentialType(tparams1, res1), ExistentialType(tparams2, res2)) =>
- (tparams1.length == tparams2.length) && (tparams1 corresponds tparams2)(_.info =:= _.info.substSym(tparams2, tparams1)) && // @M looks like it might suffer from same problem as #2210
- res1 =:= res2.substSym(tparams2, tparams1)
- case (TypeBounds(lo1, hi1), TypeBounds(lo2, hi2)) =>
- lo1 =:= lo2 && hi1 =:= hi2
- case (BoundedWildcardType(bounds), _) =>
- bounds containsType tp2
- case (_, BoundedWildcardType(bounds)) =>
- bounds containsType tp1
- case (tv @ TypeVar(_,_), tp) =>
- tv.registerTypeEquality(tp, true)
- case (tp, tv @ TypeVar(_,_)) =>
- tv.registerTypeEquality(tp, false)
- case (AnnotatedType(_,_,_), _) =>
- annotationsConform(tp1, tp2) && annotationsConform(tp2, tp1) && tp1.withoutAnnotations =:= tp2.withoutAnnotations
- case (_, AnnotatedType(_,_,_)) =>
- annotationsConform(tp1, tp2) && annotationsConform(tp2, tp1) && tp1.withoutAnnotations =:= tp2.withoutAnnotations
- case (_: SingletonType, _: SingletonType) =>
- var origin1 = tp1
- while (origin1.underlying.isInstanceOf[SingletonType]) {
- assert(origin1 ne origin1.underlying, origin1)
- origin1 = origin1.underlying
- }
- var origin2 = tp2
- while (origin2.underlying.isInstanceOf[SingletonType]) {
- assert(origin2 ne origin2.underlying, origin2)
- origin2 = origin2.underlying
- }
- ((origin1 ne tp1) || (origin2 ne tp2)) && (origin1 =:= origin2)
- case _ =>
- false
- }) || {
- val tp1n = normalizePlus(tp1)
- val tp2n = normalizePlus(tp2)
- ((tp1n ne tp1) || (tp2n ne tp2)) && isSameType(tp1n, tp2n)
- }
- }
-*/
- private def isSameType1(tp1: Type, tp2: Type): Boolean = {
- if ((tp1 eq tp2) ||
- (tp1 eq ErrorType) || (tp1 eq WildcardType) ||
- (tp2 eq ErrorType) || (tp2 eq WildcardType))
- true
- else if ((tp1 eq NoType) || (tp2 eq NoType))
- false
- else if (tp1 eq NoPrefix) // !! I do not see how this would be warranted by the spec
- tp2.typeSymbol.isPackageClass
- else if (tp2 eq NoPrefix) // !! I do not see how this would be warranted by the spec
- tp1.typeSymbol.isPackageClass
- else {
- isSameType2(tp1, tp2) || {
- val tp1n = normalizePlus(tp1)
- val tp2n = normalizePlus(tp2)
- ((tp1n ne tp1) || (tp2n ne tp2)) && isSameType(tp1n, tp2n)
- }
- }
- }
- def isSameType2(tp1: Type, tp2: Type): Boolean = {
- tp1 match {
- case tr1: TypeRef =>
- tp2 match {
- case tr2: TypeRef =>
- return (equalSymsAndPrefixes(tr1.sym, tr1.pre, tr2.sym, tr2.pre) &&
- ((tp1.isHigherKinded && tp2.isHigherKinded && tp1.normalize =:= tp2.normalize) ||
- isSameTypes(tr1.args, tr2.args))) ||
- ((tr1.pre, tr2.pre) match {
- case (tv @ TypeVar(_,_), _) => tv.registerTypeSelection(tr1.sym, tr2)
- case (_, tv @ TypeVar(_,_)) => tv.registerTypeSelection(tr2.sym, tr1)
- case _ => false
- })
- case _: SingleType =>
- return isSameType2(tp2, tp1) // put singleton type on the left, caught below
- case _ =>
- }
- case tt1: ThisType =>
- tp2 match {
- case tt2: ThisType =>
- if (tt1.sym == tt2.sym) return true
- case _ =>
- }
- case st1: SingleType =>
- tp2 match {
- case st2: SingleType =>
- if (equalSymsAndPrefixes(st1.sym, st1.pre, st2.sym, st2.pre)) return true
- case TypeRef(pre2, sym2, Nil) =>
- if (sym2.isModuleClass && equalSymsAndPrefixes(st1.sym, st1.pre, sym2.sourceModule, pre2)) return true
- case _ =>
- }
- case ct1: ConstantType =>
- tp2 match {
- case ct2: ConstantType =>
- return (ct1.value == ct2.value)
- case _ =>
- }
- case rt1: RefinedType =>
- tp2 match {
- case rt2: RefinedType => //
- def isSubScope(s1: Scope, s2: Scope): Boolean = s2.toList.forall {
- sym2 =>
- var e1 = s1.lookupEntry(sym2.name)
- (e1 ne null) && {
- val substSym = sym2.info.substThis(sym2.owner, e1.sym.owner)
- var isEqual = false
- while (!isEqual && (e1 ne null)) {
- isEqual = e1.sym.info =:= substSym
- e1 = s1.lookupNextEntry(e1)
- }
- isEqual
- }
- }
- //Console.println("is same? " + tp1 + " " + tp2 + " " + tp1.typeSymbol.owner + " " + tp2.typeSymbol.owner)//DEBUG
- return isSameTypes(rt1.parents, rt2.parents) && {
- val decls1 = rt1.decls
- val decls2 = rt2.decls
- isSubScope(decls1, decls2) && isSubScope(decls2, decls1)
- }
- case _ =>
- }
- case mt1: MethodType =>
- tp2 match {
- case mt2: MethodType =>
- return isSameTypes(mt1.paramTypes, mt2.paramTypes) &&
- mt1.resultType =:= mt2.resultType.substSym(mt2.params, mt1.params) &&
- mt1.isImplicit == mt2.isImplicit
- // note: no case NullaryMethodType(restpe) => return mt1.params.isEmpty && mt1.resultType =:= restpe
- case _ =>
- }
- case NullaryMethodType(restpe1) =>
- tp2 match {
- // note: no case mt2: MethodType => return mt2.params.isEmpty && restpe =:= mt2.resultType
- case NullaryMethodType(restpe2) =>
- return restpe1 =:= restpe2
- case _ =>
- }
- case PolyType(tparams1, res1) =>
- tp2 match {
- case PolyType(tparams2, res2) =>
-// assert((tparams1 map (_.typeParams.length)) == (tparams2 map (_.typeParams.length)))
- // @M looks like it might suffer from same problem as #2210
- return (
- (sameLength(tparams1, tparams2)) && // corresponds does not check length of two sequences before checking the predicate
- (tparams1 corresponds tparams2)(_.info =:= _.info.substSym(tparams2, tparams1)) &&
- res1 =:= res2.substSym(tparams2, tparams1)
- )
- case _ =>
- }
- case ExistentialType(tparams1, res1) =>
- tp2 match {
- case ExistentialType(tparams2, res2) =>
- // @M looks like it might suffer from same problem as #2210
- return (
- // corresponds does not check length of two sequences before checking the predicate -- faster & needed to avoid crasher in #2956
- sameLength(tparams1, tparams2) &&
- (tparams1 corresponds tparams2)(_.info =:= _.info.substSym(tparams2, tparams1)) &&
- res1 =:= res2.substSym(tparams2, tparams1)
- )
- case _ =>
- }
- case TypeBounds(lo1, hi1) =>
- tp2 match {
- case TypeBounds(lo2, hi2) =>
- return lo1 =:= lo2 && hi1 =:= hi2
- case _ =>
- }
- case BoundedWildcardType(bounds) =>
- return bounds containsType tp2
- case _ =>
- }
- tp2 match {
- case BoundedWildcardType(bounds) =>
- return bounds containsType tp1
- case _ =>
- }
- tp1 match {
- case tv @ TypeVar(_,_) =>
- return tv.registerTypeEquality(tp2, true)
- case _ =>
- }
- tp2 match {
- case tv @ TypeVar(_,_) =>
- return tv.registerTypeEquality(tp1, false)
- case _ =>
- }
- tp1 match {
- case _: AnnotatedType =>
- return annotationsConform(tp1, tp2) && annotationsConform(tp2, tp1) && tp1.withoutAnnotations =:= tp2.withoutAnnotations
- case _ =>
- }
- tp2 match {
- case _: AnnotatedType =>
- return annotationsConform(tp1, tp2) && annotationsConform(tp2, tp1) && tp1.withoutAnnotations =:= tp2.withoutAnnotations
- case _ =>
- }
- tp1 match {
- case _: SingletonType =>
- tp2 match {
- case _: SingletonType =>
- def chaseDealiasedUnderlying(tp: Type): Type = {
- var origin = tp
- var next = origin.underlying.dealias
- while (next.isInstanceOf[SingletonType]) {
- assert(origin ne next, origin)
- origin = next
- next = origin.underlying.dealias
- }
- origin
- }
- val origin1 = chaseDealiasedUnderlying(tp1)
- val origin2 = chaseDealiasedUnderlying(tp2)
- ((origin1 ne tp1) || (origin2 ne tp2)) && (origin1 =:= origin2)
- case _ =>
- false
- }
- case _ =>
- false
- }
- }
/** Are `tps1` and `tps2` lists of pairwise equivalent types? */
def isSameTypes(tps1: List[Type], tps2: List[Type]): Boolean = (tps1 corresponds tps2)(_ =:= _)
@@ -5782,64 +3948,9 @@ trait Types extends api.Types { self: SymbolTable =>
*/
final def hasLength(xs: List[_], len: Int) = xs.lengthCompare(len) == 0
- private val pendingSubTypes = new mutable.HashSet[SubTypePair]
private var basetypeRecursions: Int = 0
private val pendingBaseTypes = new mutable.HashSet[Type]
- def isSubType(tp1: Type, tp2: Type): Boolean = isSubType(tp1, tp2, AnyDepth)
-
- def isSubType(tp1: Type, tp2: Type, depth: Int): Boolean = try {
- subsametypeRecursions += 1
-
- //OPT cutdown on Function0 allocation
- //was:
-// undoLog undoUnless { // if subtype test fails, it should not affect constraints on typevars
-// if (subsametypeRecursions >= LogPendingSubTypesThreshold) {
-// val p = new SubTypePair(tp1, tp2)
-// if (pendingSubTypes(p))
-// false
-// else
-// try {
-// pendingSubTypes += p
-// isSubType2(tp1, tp2, depth)
-// } finally {
-// pendingSubTypes -= p
-// }
-// } else {
-// isSubType2(tp1, tp2, depth)
-// }
-// }
-
- undoLog.lock()
- try {
- val before = undoLog.log
- var result = false
-
- try result = { // if subtype test fails, it should not affect constraints on typevars
- if (subsametypeRecursions >= LogPendingSubTypesThreshold) {
- val p = new SubTypePair(tp1, tp2)
- if (pendingSubTypes(p))
- false
- else
- try {
- pendingSubTypes += p
- isSubType2(tp1, tp2, depth)
- } finally {
- pendingSubTypes -= p
- }
- } else {
- isSubType2(tp1, tp2, depth)
- }
- } finally if (!result) undoLog.undoTo(before)
-
- result
- } finally undoLog.unlock()
- } finally {
- subsametypeRecursions -= 1
- // XXX AM TODO: figure out when it is safe and needed to clear the log -- the commented approach below is too eager (it breaks #3281, #3866)
- // it doesn't help to keep separate recursion counts for the three methods that now share it
- // if (subsametypeRecursions == 0) undoLog.clear()
- }
/** Does this type have a prefix that begins with a type variable,
* or is it a refinement type? For type prefixes that fulfil this condition,
@@ -5857,18 +3968,6 @@ trait Types extends api.Types { self: SymbolTable =>
false
}
- @deprecated("The compiler doesn't use this so you shouldn't either - it will be removed", "2.10.0")
- def instTypeVar(tp: Type): Type = tp match {
- case TypeRef(pre, sym, args) =>
- copyTypeRef(tp, instTypeVar(pre), sym, args)
- case SingleType(pre, sym) =>
- singleType(instTypeVar(pre), sym)
- case TypeVar(_, constr) =>
- instTypeVar(constr.inst)
- case _ =>
- tp
- }
-
def isErrorOrWildcard(tp: Type) = (tp eq ErrorType) || (tp eq WildcardType)
def isSingleType(tp: Type) = tp match {
@@ -5881,6 +3980,16 @@ trait Types extends api.Types { self: SymbolTable =>
case _ => false
}
+ def isExistentialType(tp: Type): Boolean = tp.dealias match {
+ case ExistentialType(_, _) => true
+ case _ => false
+ }
+
+ def isImplicitMethodType(tp: Type) = tp match {
+ case mt: MethodType => mt.isImplicit
+ case _ => false
+ }
+
/** This is defined and named as it is because the goal is to exclude source
* level types which are not value types (e.g. MethodType) without excluding
* necessary internal types such as WildcardType. There are also non-value
@@ -5906,10 +4015,11 @@ trait Types extends api.Types { self: SymbolTable =>
* types which are used internally in type applications and
* types which are not.
*/
+ /**** Not used right now, but kept around to document which Types
+ * land in which bucket.
private def isInternalTypeNotUsedAsTypeArg(tp: Type): Boolean = tp match {
case AntiPolyType(pre, targs) => true
case ClassInfoType(parents, defs, clazz) => true
- case DeBruijnIndex(level, index, args) => true
case ErasedValueType(tref) => true
case NoPrefix => true
case NoType => true
@@ -5917,6 +4027,7 @@ trait Types extends api.Types { self: SymbolTable =>
case TypeBounds(lo, hi) => true
case _ => false
}
+ ****/
private def isInternalTypeUsedAsTypeArg(tp: Type): Boolean = tp match {
case WildcardType => true
case BoundedWildcardType(_) => true
@@ -5962,7 +4073,7 @@ trait Types extends api.Types { self: SymbolTable =>
* useful as documentation; it is likely that !isNonValueType(tp)
* will serve better than isValueType(tp).
*/
- def isValueType(tp: Type) = isValueElseNonValue(tp)
+ /** def isValueType(tp: Type) = isValueElseNonValue(tp) */
/** SLS 3.3, Non-Value Types
* Is the given type definitely a non-value type, as defined in SLS 3.3?
@@ -5973,7 +4084,7 @@ trait Types extends api.Types { self: SymbolTable =>
* not designated non-value types because there is code which depends on using
* them as type arguments, but their precise status is unclear.
*/
- def isNonValueType(tp: Type) = !isValueElseNonValue(tp)
+ /** def isNonValueType(tp: Type) = !isValueElseNonValue(tp) */
def isNonRefinementClassType(tpe: Type) = tpe match {
case SingleType(_, sym) => sym.isModuleClass
@@ -5982,285 +4093,41 @@ trait Types extends api.Types { self: SymbolTable =>
case _ => false
}
- // @assume tp1.isHigherKinded || tp2.isHigherKinded
- def isHKSubType0(tp1: Type, tp2: Type, depth: Int): Boolean = (
- tp1.typeSymbol == NothingClass
- ||
- tp2.typeSymbol == AnyClass // @M Any and Nothing are super-type resp. subtype of every well-kinded type
- || // @M! normalize reduces higher-kinded case to PolyType's
- ((tp1.normalize.withoutAnnotations , tp2.normalize.withoutAnnotations) match {
- case (PolyType(tparams1, res1), PolyType(tparams2, res2)) => // @assume tp1.isHigherKinded && tp2.isHigherKinded (as they were both normalized to PolyType)
- sameLength(tparams1, tparams2) && {
- if (tparams1.head.owner.isMethod) { // fast-path: polymorphic method type -- type params cannot be captured
- (tparams1 corresponds tparams2)((p1, p2) => p2.info.substSym(tparams2, tparams1) <:< p1.info) &&
- res1 <:< res2.substSym(tparams2, tparams1)
- } else { // normalized higher-kinded type
- //@M for an example of why we need to generate fresh symbols, see neg/tcpoly_ticket2101.scala
- val tpsFresh = cloneSymbols(tparams1)
-
- (tparams1 corresponds tparams2)((p1, p2) =>
- p2.info.substSym(tparams2, tpsFresh) <:< p1.info.substSym(tparams1, tpsFresh)) &&
- res1.substSym(tparams1, tpsFresh) <:< res2.substSym(tparams2, tpsFresh)
-
- //@M the forall in the previous test could be optimised to the following,
- // but not worth the extra complexity since it only shaves 1s from quick.comp
- // (List.forall2(tpsFresh/*optimisation*/, tparams2)((p1, p2) =>
- // p2.info.substSym(tparams2, tpsFresh) <:< p1.info /*optimisation, == (p1 from tparams1).info.substSym(tparams1, tpsFresh)*/) &&
- // this optimisation holds because inlining cloneSymbols in `val tpsFresh = cloneSymbols(tparams1)` gives:
- // val tpsFresh = tparams1 map (_.cloneSymbol)
- // for (tpFresh <- tpsFresh) tpFresh.setInfo(tpFresh.info.substSym(tparams1, tpsFresh))
- }
- } && annotationsConform(tp1.normalize, tp2.normalize)
- case (_, _) => false // @assume !tp1.isHigherKinded || !tp2.isHigherKinded
- // --> thus, cannot be subtypes (Any/Nothing has already been checked)
- }))
-
def isSubArgs(tps1: List[Type], tps2: List[Type], tparams: List[Symbol], depth: Int): Boolean = {
- def isSubArg(t1: Type, t2: Type, variance: Int) =
- (variance > 0 || isSubType(t2, t1, depth)) &&
- (variance < 0 || isSubType(t1, t2, depth))
+ def isSubArg(t1: Type, t2: Type, variance: Variance) = (
+ (variance.isContravariant || isSubType(t1, t2, depth))
+ && (variance.isCovariant || isSubType(t2, t1, depth))
+ )
+
corresponds3(tps1, tps2, tparams map (_.variance))(isSubArg)
}
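For illustration only, not part of this patch: a standalone sketch of the variance-aware argument check that the new isSubArg above encodes. The names SubArgSketch, conformsLR and conformsRL are invented for the example; the two booleans stand in for the compiler's isSubType results, and Variance is the value class added by this patch.

  // Hypothetical sketch: models the isSubArg check with plain booleans.
  // conformsLR stands for isSubType(t1, t2); conformsRL for isSubType(t2, t1).
  import scala.reflect.internal.Variance
  import Variance._

  object SubArgSketch {
    // Covariant parameter: only t1 <:< t2 is required.
    // Contravariant parameter: only t2 <:< t1.
    // Invariant parameter: both directions must hold.
    def isSubArg(conformsLR: Boolean, conformsRL: Boolean, v: Variance): Boolean =
      (v.isContravariant || conformsLR) && (v.isCovariant || conformsRL)

    def main(args: Array[String]): Unit = {
      assert(isSubArg(conformsLR = true,  conformsRL = false, Covariant))     // e.g. List[String] vs List[Any]
      assert(isSubArg(conformsLR = false, conformsRL = true,  Contravariant)) // e.g. a Function1 parameter position
      assert(!isSubArg(conformsLR = true, conformsRL = false, Invariant))     // e.g. Array, which is invariant
    }
  }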
- def differentOrNone(tp1: Type, tp2: Type) = if (tp1 eq tp2) NoType else tp1
-
- /** Does type `tp1` conform to `tp2`? */
- private def isSubType2(tp1: Type, tp2: Type, depth: Int): Boolean = {
- if ((tp1 eq tp2) || isErrorOrWildcard(tp1) || isErrorOrWildcard(tp2)) return true
- if ((tp1 eq NoType) || (tp2 eq NoType)) return false
- if (tp1 eq NoPrefix) return (tp2 eq NoPrefix) || tp2.typeSymbol.isPackageClass // !! I do not see how the "isPackageClass" would be warranted by the spec
- if (tp2 eq NoPrefix) return tp1.typeSymbol.isPackageClass
- if (isSingleType(tp1) && isSingleType(tp2) || isConstantType(tp1) && isConstantType(tp2)) return tp1 =:= tp2
- if (tp1.isHigherKinded || tp2.isHigherKinded) return isHKSubType0(tp1, tp2, depth)
-
- /** First try, on the right:
- * - unwrap Annotated types, BoundedWildcardTypes,
- * - bind TypeVars on the right, if lhs is not Annotated nor BoundedWildcard
- * - handle common cases for first-kind TypeRefs on both sides as a fast path.
- */
- def firstTry = tp2 match {
- // fast path: two typerefs, none of them HK
- case tr2: TypeRef =>
- tp1 match {
- case tr1: TypeRef =>
- val sym1 = tr1.sym
- val sym2 = tr2.sym
- val pre1 = tr1.pre
- val pre2 = tr2.pre
- (((if (sym1 == sym2) phase.erasedTypes || sym1.owner.hasPackageFlag || isSubType(pre1, pre2, depth)
- else (sym1.name == sym2.name && !sym1.isModuleClass && !sym2.isModuleClass &&
- (isUnifiable(pre1, pre2) ||
- isSameSpecializedSkolem(sym1, sym2, pre1, pre2) ||
- sym2.isAbstractType && isSubPre(pre1, pre2, sym2)))) &&
- isSubArgs(tr1.args, tr2.args, sym1.typeParams, depth))
- ||
- sym2.isClass && {
- val base = tr1 baseType sym2
- (base ne tr1) && isSubType(base, tr2, depth)
- }
- ||
- thirdTryRef(tr1, tr2))
- case _ =>
- secondTry
- }
- case AnnotatedType(_, _, _) =>
- isSubType(tp1.withoutAnnotations, tp2.withoutAnnotations, depth) &&
- annotationsConform(tp1, tp2)
- case BoundedWildcardType(bounds) =>
- isSubType(tp1, bounds.hi, depth)
- case tv2 @ TypeVar(_, constr2) =>
- tp1 match {
- case AnnotatedType(_, _, _) | BoundedWildcardType(_) =>
- secondTry
- case _ =>
- tv2.registerBound(tp1, true)
- }
- case _ =>
- secondTry
- }
-
- /** Second try, on the left:
- * - unwrap AnnotatedTypes, BoundedWildcardTypes,
- * - bind typevars,
- * - handle existential types by skolemization.
- */
- def secondTry = tp1 match {
- case AnnotatedType(_, _, _) =>
- isSubType(tp1.withoutAnnotations, tp2.withoutAnnotations, depth) &&
- annotationsConform(tp1, tp2)
- case BoundedWildcardType(bounds) =>
- isSubType(tp1.bounds.lo, tp2, depth)
- case tv @ TypeVar(_,_) =>
- tv.registerBound(tp2, false)
- case ExistentialType(_, _) =>
- try {
- skolemizationLevel += 1
- isSubType(tp1.skolemizeExistential, tp2, depth)
- } finally {
- skolemizationLevel -= 1
- }
- case _ =>
- thirdTry
- }
-
- def thirdTryRef(tp1: Type, tp2: TypeRef): Boolean = {
- val sym2 = tp2.sym
- sym2 match {
- case NotNullClass => tp1.isNotNull
- case SingletonClass => tp1.isStable || fourthTry
- case _: ClassSymbol =>
- if (isRaw(sym2, tp2.args))
- isSubType(tp1, rawToExistential(tp2), depth)
- else if (sym2.name == tpnme.REFINE_CLASS_NAME)
- isSubType(tp1, sym2.info, depth)
- else
- fourthTry
- case _: TypeSymbol =>
- if (sym2 hasFlag DEFERRED) {
- val tp2a = tp2.bounds.lo
- isDifferentTypeConstructor(tp2, tp2a) &&
- isSubType(tp1, tp2a, depth) ||
- fourthTry
- } else {
- isSubType(tp1.normalize, tp2.normalize, depth)
- }
- case _ =>
- fourthTry
- }
- }
-
- /** Third try, on the right:
- * - decompose refined types.
- * - handle typerefs, existentials, and notnull types.
- * - handle left+right method types, polytypes, typebounds
- */
- def thirdTry = tp2 match {
- case tr2: TypeRef =>
- thirdTryRef(tp1, tr2)
- case rt2: RefinedType =>
- (rt2.parents forall (isSubType(tp1, _, depth))) &&
- (rt2.decls forall (specializesSym(tp1, _, depth)))
- case et2: ExistentialType =>
- et2.withTypeVars(isSubType(tp1, _, depth), depth) || fourthTry
- case nn2: NotNullType =>
- tp1.isNotNull && isSubType(tp1, nn2.underlying, depth)
- case mt2: MethodType =>
- tp1 match {
- case mt1 @ MethodType(params1, res1) =>
- val params2 = mt2.params
- val res2 = mt2.resultType
- (sameLength(params1, params2) &&
- mt1.isImplicit == mt2.isImplicit &&
- matchingParams(params1, params2, mt1.isJava, mt2.isJava) &&
- isSubType(res1.substSym(params1, params2), res2, depth))
- // TODO: if mt1.params.isEmpty, consider NullaryMethodType?
- case _ =>
- false
- }
- case pt2 @ NullaryMethodType(_) =>
- tp1 match {
- // TODO: consider MethodType mt for which mt.params.isEmpty??
- case pt1 @ NullaryMethodType(_) =>
- isSubType(pt1.resultType, pt2.resultType, depth)
- case _ =>
- false
- }
- case TypeBounds(lo2, hi2) =>
- tp1 match {
- case TypeBounds(lo1, hi1) =>
- isSubType(lo2, lo1, depth) && isSubType(hi1, hi2, depth)
- case _ =>
- false
- }
- case _ =>
- fourthTry
- }
-
- /** Fourth try, on the left:
- * - handle typerefs, refined types, notnull and singleton types.
- */
- def fourthTry = tp1 match {
- case tr1 @ TypeRef(pre1, sym1, _) =>
- sym1 match {
- case NothingClass => true
- case NullClass =>
- tp2 match {
- case TypeRef(_, sym2, _) =>
- containsNull(sym2)
- case _ =>
- isSingleType(tp2) && isSubType(tp1, tp2.widen, depth)
- }
- case _: ClassSymbol =>
- if (isRaw(sym1, tr1.args))
- isSubType(rawToExistential(tp1), tp2, depth)
- else if (sym1.isModuleClass) tp2 match {
- case SingleType(pre2, sym2) => equalSymsAndPrefixes(sym1.sourceModule, pre1, sym2, pre2)
- case _ => false
- }
- else if (sym1.isRefinementClass)
- isSubType(sym1.info, tp2, depth)
- else false
-
- case _: TypeSymbol =>
- if (sym1 hasFlag DEFERRED) {
- val tp1a = tp1.bounds.hi
- isDifferentTypeConstructor(tp1, tp1a) && isSubType(tp1a, tp2, depth)
- } else {
- isSubType(tp1.normalize, tp2.normalize, depth)
- }
- case _ =>
- false
- }
- case RefinedType(parents1, _) =>
- parents1 exists (isSubType(_, tp2, depth))
- case _: SingletonType | _: NotNullType =>
- isSubType(tp1.underlying, tp2, depth)
- case _ =>
- false
- }
+ def specializesSym(tp: Type, sym: Symbol, depth: Int): Boolean = {
+ def directlySpecializedBy(member: Symbol): Boolean = (
+ member == sym
+ || specializesSym(tp.narrow, member, sym.owner.thisType, sym, depth)
+ )
+ // Closure reduction, else this would be simply `member exists directlySpecializedBy`
+ def specializedBy(member: Symbol): Boolean = (
+ if (member eq NoSymbol) false
+ else if (member.isOverloaded) member.alternatives exists directlySpecializedBy
+ else directlySpecializedBy(member)
+ )
- firstTry
+ ( (tp.typeSymbol isBottomSubClass sym.owner)
+ || specializedBy(tp nonPrivateMember sym.name)
+ )
}
- private def containsNull(sym: Symbol): Boolean =
- sym.isClass && sym != NothingClass &&
- !(sym isNonBottomSubClass AnyValClass) &&
- !(sym isNonBottomSubClass NotNullClass)
-
- /** Are `tps1` and `tps2` lists of equal length such that all elements
- * of `tps1` conform to corresponding elements of `tps2`?
- */
- def isSubTypes(tps1: List[Type], tps2: List[Type]): Boolean = (tps1 corresponds tps2)(_ <:< _)
-
- /** Does type `tp` implement symbol `sym` with same or
- * stronger type? Exact only if `sym` is a member of some
- * refinement type, otherwise we might return false negatives.
- */
- def specializesSym(tp: Type, sym: Symbol): Boolean =
- specializesSym(tp, sym, AnyDepth)
-
- def specializesSym(tp: Type, sym: Symbol, depth: Int): Boolean =
- tp.typeSymbol == NothingClass ||
- tp.typeSymbol == NullClass && containsNull(sym.owner) || {
- def specializedBy(membr: Symbol): Boolean =
- membr == sym || specializesSym(tp.narrow, membr, sym.owner.thisType, sym, depth)
- val member = tp.nonPrivateMember(sym.name)
- if (member eq NoSymbol) false
- else if (member.isOverloaded) member.alternatives exists specializedBy
- else specializedBy(member)
- // was
- // (tp.nonPrivateMember(sym.name).alternatives exists
- // (alt => sym == alt || specializesSym(tp.narrow, alt, sym.owner.thisType, sym, depth)))
- }
-
/** Does member `sym1` of `tp1` have a stronger type
* than member `sym2` of `tp2`?
*/
- private def specializesSym(tp1: Type, sym1: Symbol, tp2: Type, sym2: Symbol, depth: Int): Boolean = {
+ protected[internal] def specializesSym(tp1: Type, sym1: Symbol, tp2: Type, sym2: Symbol, depth: Int): Boolean = {
+ require((sym1 ne NoSymbol) && (sym2 ne NoSymbol), ((tp1, sym1, tp2, sym2, depth)))
val info1 = tp1.memberInfo(sym1)
val info2 = tp2.memberInfo(sym2).substThis(tp2.typeSymbol, tp1)
//System.out.println("specializes "+tp1+"."+sym1+":"+info1+sym1.locationString+" AND "+tp2+"."+sym2+":"+info2)//DEBUG
- ( sym2.isTerm && isSubType(info1, info2, depth) && (!sym2.isStable || sym1.isStable)
+ ( sym2.isTerm && isSubType(info1, info2, depth) && (!sym2.isStable || sym1.isStable) && (!sym1.hasVolatileType || sym2.hasVolatileType)
|| sym2.isAbstractType && {
val memberTp1 = tp1.memberType(sym1)
// println("kinds conform? "+(memberTp1, tp1, sym2, kindsConform(List(sym2), List(memberTp1), tp2, sym2.owner)))
@@ -6280,7 +4147,7 @@ trait Types extends api.Types { self: SymbolTable =>
def lastTry =
tp2 match {
case ExistentialType(_, res2) if alwaysMatchSimple =>
- matchesType(tp1, res2, true)
+ matchesType(tp1, res2, alwaysMatchSimple = true)
case MethodType(_, _) =>
false
case PolyType(_, _) =>
@@ -6300,7 +4167,7 @@ trait Types extends api.Types { self: SymbolTable =>
if (params1.isEmpty) matchesType(res1, res2, alwaysMatchSimple)
else matchesType(tp1, res2, alwaysMatchSimple)
case ExistentialType(_, res2) =>
- alwaysMatchSimple && matchesType(tp1, res2, true)
+ alwaysMatchSimple && matchesType(tp1, res2, alwaysMatchSimple = true)
case TypeRef(_, sym, Nil) =>
params1.isEmpty && sym.isModuleClass && matchesType(res1, tp2, alwaysMatchSimple)
case _ =>
@@ -6313,7 +4180,7 @@ trait Types extends api.Types { self: SymbolTable =>
case NullaryMethodType(res2) =>
matchesType(res1, res2, alwaysMatchSimple)
case ExistentialType(_, res2) =>
- alwaysMatchSimple && matchesType(tp1, res2, true)
+ alwaysMatchSimple && matchesType(tp1, res2, alwaysMatchSimple = true)
case TypeRef(_, sym, Nil) if sym.isModuleClass =>
matchesType(res1, tp2, alwaysMatchSimple)
case _ =>
@@ -6327,7 +4194,7 @@ trait Types extends api.Types { self: SymbolTable =>
else
matchesQuantified(tparams1, tparams2, res1, res2)
case ExistentialType(_, res2) =>
- alwaysMatchSimple && matchesType(tp1, res2, true)
+ alwaysMatchSimple && matchesType(tp1, res2, alwaysMatchSimple = true)
case _ =>
false // remember that tparams1.nonEmpty is now an invariant of PolyType
}
@@ -6336,7 +4203,7 @@ trait Types extends api.Types { self: SymbolTable =>
case ExistentialType(tparams2, res2) =>
matchesQuantified(tparams1, tparams2, res1, res2)
case _ =>
- if (alwaysMatchSimple) matchesType(res1, tp2, true)
+ if (alwaysMatchSimple) matchesType(res1, tp2, alwaysMatchSimple = true)
else lastTry
}
case TypeRef(_, sym, Nil) if sym.isModuleClass =>
@@ -6389,7 +4256,7 @@ trait Types extends api.Types { self: SymbolTable =>
*/
/** Are `syms1` and `syms2` parameter lists with pairwise equivalent types? */
- private def matchingParams(syms1: List[Symbol], syms2: List[Symbol], syms1isJava: Boolean, syms2isJava: Boolean): Boolean = syms1 match {
+ protected[internal] def matchingParams(syms1: List[Symbol], syms2: List[Symbol], syms1isJava: Boolean, syms2isJava: Boolean): Boolean = syms1 match {
case Nil =>
syms2.isEmpty
case sym1 :: rest1 =>
@@ -6410,7 +4277,7 @@ trait Types extends api.Types { self: SymbolTable =>
* `f` maps all elements to themselves.
*/
def map2Conserve[A <: AnyRef, B](xs: List[A], ys: List[B])(f: (A, B) => A): List[A] =
- if (xs.isEmpty) xs
+ if (xs.isEmpty || ys.isEmpty) xs
else {
val x1 = f(xs.head, ys.head)
val xs1 = map2Conserve(xs.tail, ys.tail)(f)
@@ -6418,87 +4285,6 @@ trait Types extends api.Types { self: SymbolTable =>
else x1 :: xs1
}
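As a small aside, and only as a sketch: the snippet below shows what "conserve" means for map2Conserve and what the ys.isEmpty guard added above buys. If f returns every element unchanged by reference, the original list instance comes back untouched, and a ys shorter than xs no longer triggers a NoSuchElementException on ys.head. The method body mirrors the patched version so it can run standalone; the object name is invented here.

  object Map2ConserveSketch {
    // Same shape as the patched method: returns xs itself when nothing changed,
    // and the ys.isEmpty guard keeps ys.head from throwing when ys runs out first.
    def map2Conserve[A <: AnyRef, B](xs: List[A], ys: List[B])(f: (A, B) => A): List[A] =
      if (xs.isEmpty || ys.isEmpty) xs
      else {
        val x1  = f(xs.head, ys.head)
        val xs1 = map2Conserve(xs.tail, ys.tail)(f)
        if ((x1 eq xs.head) && (xs1 eq xs.tail)) xs else x1 :: xs1
      }

    def main(args: Array[String]): Unit = {
      val names = List("a", "b", "c")
      val same  = map2Conserve(names, List(1, 2, 3))((s, _) => s)
      assert(same eq names)                      // identity-preserving: no new list allocated
      val upper = map2Conserve(names, List(1, 2, 3))((s, _) => s.toUpperCase)
      assert(upper == List("A", "B", "C"))       // changed elements produce a fresh list
      assert(map2Conserve(names, Nil)((s, _: Int) => s) eq names) // shorter ys: no exception
    }
  }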
- /** Solve constraint collected in types `tvars`.
- *
- * @param tvars All type variables to be instantiated.
- * @param tparams The type parameters corresponding to `tvars`
- * @param variances The variances of type parameters; need to reverse
- * solution direction for all contravariant variables.
- * @param upper When `true` search for max solution else min.
- */
- def solve(tvars: List[TypeVar], tparams: List[Symbol],
- variances: List[Int], upper: Boolean): Boolean =
- solve(tvars, tparams, variances, upper, AnyDepth)
-
- def solve(tvars: List[TypeVar], tparams: List[Symbol],
- variances: List[Int], upper: Boolean, depth: Int): Boolean = {
-
- def solveOne(tvar: TypeVar, tparam: Symbol, variance: Int) {
- if (tvar.constr.inst == NoType) {
- val up = if (variance != CONTRAVARIANT) upper else !upper
- tvar.constr.inst = null
- val bound: Type = if (up) tparam.info.bounds.hi else tparam.info.bounds.lo
- //Console.println("solveOne0(tv, tp, v, b)="+(tvar, tparam, variance, bound))
- var cyclic = bound contains tparam
- foreach3(tvars, tparams, variances)((tvar2, tparam2, variance2) => {
- val ok = (tparam2 != tparam) && (
- (bound contains tparam2)
- || up && (tparam2.info.bounds.lo =:= tparam.tpeHK)
- || !up && (tparam2.info.bounds.hi =:= tparam.tpeHK)
- )
- if (ok) {
- if (tvar2.constr.inst eq null) cyclic = true
- solveOne(tvar2, tparam2, variance2)
- }
- })
- if (!cyclic) {
- if (up) {
- if (bound.typeSymbol != AnyClass) {
- log(s"$tvar addHiBound $bound.instantiateTypeParams($tparams, $tvars)")
- tvar addHiBound bound.instantiateTypeParams(tparams, tvars)
- }
- for (tparam2 <- tparams)
- tparam2.info.bounds.lo.dealias match {
- case TypeRef(_, `tparam`, _) =>
- log(s"$tvar addHiBound $tparam2.tpeHK.instantiateTypeParams($tparams, $tvars)")
- tvar addHiBound tparam2.tpeHK.instantiateTypeParams(tparams, tvars)
- case _ =>
- }
- } else {
- if (bound.typeSymbol != NothingClass && bound.typeSymbol != tparam) {
- log(s"$tvar addLoBound $bound.instantiateTypeParams($tparams, $tvars)")
- tvar addLoBound bound.instantiateTypeParams(tparams, tvars)
- }
- for (tparam2 <- tparams)
- tparam2.info.bounds.hi.dealias match {
- case TypeRef(_, `tparam`, _) =>
- log(s"$tvar addLoBound $tparam2.tpeHK.instantiateTypeParams($tparams, $tvars)")
- tvar addLoBound tparam2.tpeHK.instantiateTypeParams(tparams, tvars)
- case _ =>
- }
- }
- }
- tvar.constr.inst = NoType // necessary because hibounds/lobounds may contain tvar
-
- //println("solving "+tvar+" "+up+" "+(if (up) (tvar.constr.hiBounds) else tvar.constr.loBounds)+((if (up) (tvar.constr.hiBounds) else tvar.constr.loBounds) map (_.widen)))
- val newInst = (
- if (up) {
- if (depth != AnyDepth) glb(tvar.constr.hiBounds, depth) else glb(tvar.constr.hiBounds)
- } else {
- if (depth != AnyDepth) lub(tvar.constr.loBounds, depth) else lub(tvar.constr.loBounds)
- }
- )
- log(s"$tvar setInst $newInst")
- tvar setInst newInst
- //Console.println("solving "+tvar+" "+up+" "+(if (up) (tvar.constr.hiBounds) else tvar.constr.loBounds)+((if (up) (tvar.constr.hiBounds) else tvar.constr.loBounds) map (_.widen))+" = "+tvar.constr.inst)//@MDEBUG
- }
- }
-
- // println("solving "+tvars+"/"+tparams+"/"+(tparams map (_.info)))
- foreach3(tvars, tparams, variances)(solveOne)
- tvars forall (tvar => tvar.constr.isWithinBounds(tvar.constr.inst))
- }
-
/** Do type arguments `targs` conform to formal parameters `tparams`?
*/
def isWithinBounds(pre: Type, owner: Symbol, tparams: List[Symbol], targs: List[Type]): Boolean = {
@@ -6511,563 +4297,12 @@ trait Types extends api.Types { self: SymbolTable =>
def instantiatedBounds(pre: Type, owner: Symbol, tparams: List[Symbol], targs: List[Type]): List[TypeBounds] =
tparams map (_.info.asSeenFrom(pre, owner).instantiateTypeParams(tparams, targs).bounds)
-// Lubs and Glbs ---------------------------------------------------------
-
- private def printLubMatrix(btsMap: Map[Type, List[Type]], depth: Int) {
- import util.TableDef
- import TableDef.Column
- def str(tp: Type) = {
- if (tp == NoType) ""
- else {
- val s = ("" + tp).replaceAll("""[\w.]+\.(\w+)""", "$1")
- if (s.length < 60) s
- else (s take 57) + "..."
- }
- }
-
- val sorted = btsMap.toList.sortWith((x, y) => x._1.typeSymbol isLess y._1.typeSymbol)
- val maxSeqLength = sorted.map(_._2.size).max
- val padded = sorted map (_._2.padTo(maxSeqLength, NoType))
- val transposed = padded.transpose
-
- val columns: List[Column[List[Type]]] = mapWithIndex(sorted) {
- case ((k, v), idx) =>
- Column(str(k), (xs: List[Type]) => str(xs(idx)), true)
- }
-
- val tableDef = TableDef(columns: _*)
- val formatted = tableDef.table(transposed)
- println("** Depth is " + depth + "\n" + formatted)
- }
-
- /** From a list of types, find any which take type parameters
- * where the type parameter bounds contain references to any
- * other types in the list (including itself).
- *
- * @return List of symbol pairs holding the recursive type
- * parameter and the parameter which references it.
- */
- def findRecursiveBounds(ts: List[Type]): List[(Symbol, Symbol)] = {
- if (ts.isEmpty) Nil
- else {
- val sym = ts.head.typeSymbol
- require(ts.tail forall (_.typeSymbol == sym), ts)
- for (p <- sym.typeParams ; in <- sym.typeParams ; if in.info.bounds contains p) yield
- p -> in
- }
- }
-
- /** Given a matrix `tsBts` whose columns are basetype sequences (and the symbols `tsParams` that should be interpreted as type parameters in this matrix),
- * compute its least sorted upwards closed upper bound relative to the following ordering <= between lists of types:
- *
- * xs <= ys iff forall y in ys exists x in xs such that x <: y
- *
- * @arg tsParams for each type in the original list of types `ts0`, its list of type parameters (if that type is a type constructor)
- * (these type parameters may be referred to by type arguments in the BTS column of those types,
- * and must be interpreted as bound variables; i.e., under a type lambda that wraps the types that refer to these type params)
- * @arg tsBts a matrix whose columns are basetype sequences
- * the first row is the original list of types for which we're computing the lub
- * (except that type constructors have been applied to their dummyArgs)
- * @See baseTypeSeq for a definition of sorted and upwards closed.
- */
- private def lubList(ts: List[Type], depth: Int): List[Type] = {
- // Matching the type params of one of the initial types means dummies.
- val initialTypeParams = ts map (_.typeParams)
- def isHotForTs(xs: List[Type]) = initialTypeParams contains (xs map (_.typeSymbol))
-
- def elimHigherOrderTypeParam(tp: Type) = tp match {
- case TypeRef(pre, sym, args) if args.nonEmpty && isHotForTs(args) => tp.typeConstructor
- case _ => tp
- }
- var lubListDepth = 0
- def loop(tsBts: List[List[Type]]): List[Type] = {
- lubListDepth += 1
-
- if (tsBts.isEmpty || (tsBts exists typeListIsEmpty)) Nil
- else if (tsBts.tail.isEmpty) tsBts.head
- else {
- // ts0 is the 1-dimensional frontier of symbols cutting through 2-dimensional tsBts.
- // Invariant: all symbols "under" (closer to the first row) the frontier
- // are smaller (according to _.isLess) than the ones "on and beyond" the frontier
- val ts0 = tsBts map (_.head)
-
- // Is the frontier made up of types with the same symbol?
- val isUniformFrontier = (ts0: @unchecked) match {
- case t :: ts => ts forall (_.typeSymbol == t.typeSymbol)
- }
-
- // Produce a single type for this frontier by merging the prefixes and arguments of those
- // typerefs that share the same symbol: that symbol is the current maximal symbol for which
- // the invariant holds, i.e., the one that conveys most information regarding subtyping. Before
- // merging, strip targs that refer to bound tparams (when we're computing the lub of type
- // constructors.) Also filter out all types that are a subtype of some other type.
- if (isUniformFrontier) {
- if (settings.debug.value || printLubs) {
- val fbounds = findRecursiveBounds(ts0)
- if (fbounds.nonEmpty) {
- println("Encountered " + fbounds.size + " recursive bounds while lubbing " + ts0.size + " types.")
- for ((p0, p1) <- fbounds) {
- val desc = if (p0 == p1) "its own bounds" else "the bounds of " + p1
-
- println(" " + p0.fullLocationString + " appears in " + desc)
- println(" " + p1 + " " + p1.info.bounds)
- }
- println("")
- }
- }
- val tails = tsBts map (_.tail)
- mergePrefixAndArgs(elimSub(ts0 map elimHigherOrderTypeParam, depth), 1, depth) match {
- case Some(tp) => tp :: loop(tails)
- case _ => loop(tails)
- }
- }
- else {
- // frontier is not uniform yet, move it beyond the current minimal symbol;
- // lather, rinse, repeat
- val sym = minSym(ts0)
- val newtps = tsBts map (ts => if (ts.head.typeSymbol == sym) ts.tail else ts)
- if (printLubs) {
- val str = (newtps.zipWithIndex map { case (tps, idx) =>
- tps.map(" " + _ + "\n").mkString(" (" + idx + ")\n", "", "\n")
- }).mkString("")
-
- println("Frontier(\n" + str + ")")
- printLubMatrix((ts zip tsBts).toMap, lubListDepth)
- }
-
- loop(newtps)
- }
- }
- }
-
- val initialBTSes = ts map (_.baseTypeSeq.toList)
- if (printLubs)
- printLubMatrix((ts zip initialBTSes).toMap, depth)
-
- loop(initialBTSes)
- }
-
- /** The minimal symbol of a list of types (as determined by `Symbol.isLess`). */
- private def minSym(tps: List[Type]): Symbol =
- (tps.head.typeSymbol /: tps.tail) {
- (sym1, tp2) => if (tp2.typeSymbol isLess sym1) tp2.typeSymbol else sym1
- }
-
- /** A minimal type list which has a given list of types as its base type sequence */
- def spanningTypes(ts: List[Type]): List[Type] = ts match {
- case List() => List()
- case first :: rest =>
- first :: spanningTypes(
- rest filter (t => !first.typeSymbol.isSubClass(t.typeSymbol)))
- }
-
- /** Eliminate from list of types all elements which are a supertype
- * of some other element of the list. */
- private def elimSuper(ts: List[Type]): List[Type] = ts match {
- case List() => List()
- case List(t) => List(t)
- case t :: ts1 =>
- val rest = elimSuper(ts1 filter (t1 => !(t <:< t1)))
- if (rest exists (t1 => t1 <:< t)) rest else t :: rest
- }
-
def elimAnonymousClass(t: Type) = t match {
case TypeRef(pre, clazz, Nil) if clazz.isAnonymousClass =>
clazz.classBound.asSeenFrom(pre, clazz.owner)
case _ =>
t
}
- def elimRefinement(t: Type) = t match {
- case RefinedType(parents, decls) if !decls.isEmpty => intersectionType(parents)
- case _ => t
- }
-
- /** Eliminate from list of types all elements which are a subtype
- * of some other element of the list. */
- private def elimSub(ts: List[Type], depth: Int): List[Type] = {
- def elimSub0(ts: List[Type]): List[Type] = ts match {
- case List() => List()
- case List(t) => List(t)
- case t :: ts1 =>
- val rest = elimSub0(ts1 filter (t1 => !isSubType(t1, t, decr(depth))))
- if (rest exists (t1 => isSubType(t, t1, decr(depth)))) rest else t :: rest
- }
- val ts0 = elimSub0(ts)
- if (ts0.isEmpty || ts0.tail.isEmpty) ts0
- else {
- val ts1 = ts0 mapConserve (t => elimAnonymousClass(t.dealiasWiden))
- if (ts1 eq ts0) ts0
- else elimSub(ts1, depth)
- }
- }
-
- private def stripExistentialsAndTypeVars(ts: List[Type]): (List[Type], List[Symbol]) = {
- val quantified = ts flatMap {
- case ExistentialType(qs, _) => qs
- case t => List()
- }
- def stripType(tp: Type): Type = tp match {
- case ExistentialType(_, res) =>
- res
- case tv@TypeVar(_, constr) =>
- if (tv.instValid) stripType(constr.inst)
- else if (tv.untouchable) tv
- else abort("trying to do lub/glb of typevar "+tp)
- case t => t
- }
- val strippedTypes = ts mapConserve stripType
- (strippedTypes, quantified)
- }
-
- def weakLub(ts: List[Type]) =
- if (ts.nonEmpty && (ts forall isNumericValueType)) (numericLub(ts), true)
- else if (ts exists typeHasAnnotations)
- (annotationsLub(lub(ts map (_.withoutAnnotations)), ts), true)
- else (lub(ts), false)
-
- def weakGlb(ts: List[Type]) = {
- if (ts.nonEmpty && (ts forall isNumericValueType)) {
- val nglb = numericGlb(ts)
- if (nglb != NoType) (nglb, true)
- else (glb(ts), false)
- } else if (ts exists typeHasAnnotations) {
- (annotationsGlb(glb(ts map (_.withoutAnnotations)), ts), true)
- } else (glb(ts), false)
- }
-
- def numericLub(ts: List[Type]) =
- ts reduceLeft ((t1, t2) =>
- if (isNumericSubType(t1, t2)) t2
- else if (isNumericSubType(t2, t1)) t1
- else IntClass.tpe)
-
- def numericGlb(ts: List[Type]) =
- ts reduceLeft ((t1, t2) =>
- if (isNumericSubType(t1, t2)) t1
- else if (isNumericSubType(t2, t1)) t2
- else NoType)
-
- def isWeakSubType(tp1: Type, tp2: Type) =
- tp1.deconst.normalize match {
- case TypeRef(_, sym1, _) if isNumericValueClass(sym1) =>
- tp2.deconst.normalize match {
- case TypeRef(_, sym2, _) if isNumericValueClass(sym2) =>
- isNumericSubClass(sym1, sym2)
- case tv2 @ TypeVar(_, _) =>
- tv2.registerBound(tp1, isLowerBound = true, isNumericBound = true)
- case _ =>
- isSubType(tp1, tp2)
- }
- case tv1 @ TypeVar(_, _) =>
- tp2.deconst.normalize match {
- case TypeRef(_, sym2, _) if isNumericValueClass(sym2) =>
- tv1.registerBound(tp2, isLowerBound = false, isNumericBound = true)
- case _ =>
- isSubType(tp1, tp2)
- }
- case _ =>
- isSubType(tp1, tp2)
- }
-
- /** The isNumericValueType tests appear redundant, but without them
- * test/continuations-neg/function3.scala goes into an infinite loop.
- * (Even if the calls are to typeSymbolDirect.)
- */
- def isNumericSubType(tp1: Type, tp2: Type): Boolean = (
- isNumericValueType(tp1)
- && isNumericValueType(tp2)
- && isNumericSubClass(tp1.typeSymbol, tp2.typeSymbol)
- )
-
- private val lubResults = new mutable.HashMap[(Int, List[Type]), Type]
- private val glbResults = new mutable.HashMap[(Int, List[Type]), Type]
-
- def lub(ts: List[Type]): Type = ts match {
- case List() => NothingClass.tpe
- case List(t) => t
- case _ =>
- if (Statistics.canEnable) Statistics.incCounter(lubCount)
- val start = if (Statistics.canEnable) Statistics.pushTimer(typeOpsStack, lubNanos) else null
- try {
- lub(ts, lubDepth(ts))
- } finally {
- lubResults.clear()
- glbResults.clear()
- if (Statistics.canEnable) Statistics.popTimer(typeOpsStack, start)
- }
- }
-
- /** The least upper bound wrt <:< of a list of types */
- private def lub(ts: List[Type], depth: Int): Type = {
- def lub0(ts0: List[Type]): Type = elimSub(ts0, depth) match {
- case List() => NothingClass.tpe
- case List(t) => t
- case ts @ PolyType(tparams, _) :: _ =>
- val tparams1 = map2(tparams, matchingBounds(ts, tparams).transpose)((tparam, bounds) =>
- tparam.cloneSymbol.setInfo(glb(bounds, depth)))
- PolyType(tparams1, lub0(matchingInstTypes(ts, tparams1)))
- case ts @ (mt @ MethodType(params, _)) :: rest =>
- MethodType(params, lub0(matchingRestypes(ts, mt.paramTypes)))
- case ts @ NullaryMethodType(_) :: rest =>
- NullaryMethodType(lub0(matchingRestypes(ts, Nil)))
- case ts @ TypeBounds(_, _) :: rest =>
- TypeBounds(glb(ts map (_.bounds.lo), depth), lub(ts map (_.bounds.hi), depth))
- case ts @ AnnotatedType(annots, tpe, _) :: rest =>
- annotationsLub(lub0(ts map (_.withoutAnnotations)), ts)
- case ts =>
- lubResults get (depth, ts) match {
- case Some(lubType) =>
- lubType
- case None =>
- lubResults((depth, ts)) = AnyClass.tpe
- val res = if (depth < 0) AnyClass.tpe else lub1(ts)
- lubResults((depth, ts)) = res
- res
- }
- }
- def lub1(ts0: List[Type]): Type = {
- val (ts, tparams) = stripExistentialsAndTypeVars(ts0)
- val lubBaseTypes: List[Type] = lubList(ts, depth)
- val lubParents = spanningTypes(lubBaseTypes)
- val lubOwner = commonOwner(ts)
- val lubBase = intersectionType(lubParents, lubOwner)
- val lubType =
- if (phase.erasedTypes || depth == 0) lubBase
- else {
- val lubRefined = refinedType(lubParents, lubOwner)
- val lubThisType = lubRefined.typeSymbol.thisType
- val narrowts = ts map (_.narrow)
- def excludeFromLub(sym: Symbol) = (
- sym.isClass
- || sym.isConstructor
- || !sym.isPublic
- || isGetClass(sym)
- || narrowts.exists(t => !refines(t, sym))
- )
- def lubsym(proto: Symbol): Symbol = {
- val prototp = lubThisType.memberInfo(proto)
- val syms = narrowts map (t =>
- t.nonPrivateMember(proto.name).suchThat(sym =>
- sym.tpe matches prototp.substThis(lubThisType.typeSymbol, t)))
- if (syms contains NoSymbol) NoSymbol
- else {
- val symtypes =
- map2(narrowts, syms)((t, sym) => t.memberInfo(sym).substThis(t.typeSymbol, lubThisType))
- if (proto.isTerm) // possible problem: owner of info is still the old one, instead of new refinement class
- proto.cloneSymbol(lubRefined.typeSymbol).setInfoOwnerAdjusted(lub(symtypes, decr(depth)))
- else if (symtypes.tail forall (symtypes.head =:= _))
- proto.cloneSymbol(lubRefined.typeSymbol).setInfoOwnerAdjusted(symtypes.head)
- else {
- def lubBounds(bnds: List[TypeBounds]): TypeBounds =
- TypeBounds(glb(bnds map (_.lo), decr(depth)), lub(bnds map (_.hi), decr(depth)))
- lubRefined.typeSymbol.newAbstractType(proto.name.toTypeName, proto.pos)
- .setInfoOwnerAdjusted(lubBounds(symtypes map (_.bounds)))
- }
- }
- }
- def refines(tp: Type, sym: Symbol): Boolean = {
- val syms = tp.nonPrivateMember(sym.name).alternatives;
- !syms.isEmpty && (syms forall (alt =>
- // todo alt != sym is strictly speaking not correct, but without it we lose
- // efficiency.
- alt != sym && !specializesSym(lubThisType, sym, tp, alt, depth)))
- }
- // add a refinement symbol for all non-class members of lubBase
- // which are refined by every type in ts.
- for (sym <- lubBase.nonPrivateMembers ; if !excludeFromLub(sym)) {
- try {
- val lsym = lubsym(sym)
- if (lsym != NoSymbol) addMember(lubThisType, lubRefined, lsym, depth)
- } catch {
- case ex: NoCommonType =>
- }
- }
- if (lubRefined.decls.isEmpty) lubBase
- else if (!verifyLubs) lubRefined
- else {
- // Verify that every given type conforms to the calculated lub.
- // In theory this should not be necessary, but higher-order type
- // parameters are not handled correctly.
- val ok = ts forall { t =>
- isSubType(t, lubRefined, depth) || {
- if (settings.debug.value || printLubs) {
- Console.println(
- "Malformed lub: " + lubRefined + "\n" +
- "Argument " + t + " does not conform. Falling back to " + lubBase
- )
- }
- false
- }
- }
- // If not, fall back on the more conservative calculation.
- if (ok) lubRefined
- else lubBase
- }
- }
- // dropRepeatedParamType is a localized fix for SI-6897. We should probably
- // integrate that transformation at a lower level in master, but lubs are
- // the likely and maybe only spot they escape, so fixing here for 2.10.1.
- existentialAbstraction(tparams, dropRepeatedParamType(lubType))
- }
- if (printLubs) {
- println(indent + "lub of " + ts + " at depth "+depth)//debug
- indent = indent + " "
- assert(indent.length <= 100)
- }
- if (Statistics.canEnable) Statistics.incCounter(nestedLubCount)
- val res = lub0(ts)
- if (printLubs) {
- indent = indent stripSuffix " "
- println(indent + "lub of " + ts + " is " + res)//debug
- }
- if (ts forall typeIsNotNull) res.notNull else res
- }
-
- val GlbFailure = new Throwable
-
- /** A global counter for glb calls in the `specializes` query connected to the `addMembers`
- * call in `glb`. There's a possible infinite recursion when `specializes` calls
- * memberType, which calls baseTypeSeq, which calls mergePrefixAndArgs, which calls glb.
- * The counter breaks this recursion after two calls.
- * If the recursion is broken, no member is added to the glb.
- */
- private var globalGlbDepth = 0
- private final val globalGlbLimit = 2
-
- /** The greatest lower bound of a list of types (as determined by `<:<`). */
- def glb(ts: List[Type]): Type = elimSuper(ts) match {
- case List() => AnyClass.tpe
- case List(t) => t
- case ts0 =>
- if (Statistics.canEnable) Statistics.incCounter(lubCount)
- val start = if (Statistics.canEnable) Statistics.pushTimer(typeOpsStack, lubNanos) else null
- try {
- glbNorm(ts0, lubDepth(ts0))
- } finally {
- lubResults.clear()
- glbResults.clear()
- if (Statistics.canEnable) Statistics.popTimer(typeOpsStack, start)
- }
- }
-
- private def glb(ts: List[Type], depth: Int): Type = elimSuper(ts) match {
- case List() => AnyClass.tpe
- case List(t) => t
- case ts0 => glbNorm(ts0, depth)
- }
-
- /** The greatest lower bound of a list of types (as determined by `<:<`), which have been normalized
- * with regard to `elimSuper`. */
- protected def glbNorm(ts: List[Type], depth: Int): Type = {
- def glb0(ts0: List[Type]): Type = ts0 match {
- case List() => AnyClass.tpe
- case List(t) => t
- case ts @ PolyType(tparams, _) :: _ =>
- val tparams1 = map2(tparams, matchingBounds(ts, tparams).transpose)((tparam, bounds) =>
- tparam.cloneSymbol.setInfo(lub(bounds, depth)))
- PolyType(tparams1, glbNorm(matchingInstTypes(ts, tparams1), depth))
- case ts @ (mt @ MethodType(params, _)) :: rest =>
- MethodType(params, glbNorm(matchingRestypes(ts, mt.paramTypes), depth))
- case ts @ NullaryMethodType(_) :: rest =>
- NullaryMethodType(glbNorm(matchingRestypes(ts, Nil), depth))
- case ts @ TypeBounds(_, _) :: rest =>
- TypeBounds(lub(ts map (_.bounds.lo), depth), glb(ts map (_.bounds.hi), depth))
- case ts =>
- glbResults get (depth, ts) match {
- case Some(glbType) =>
- glbType
- case _ =>
- glbResults((depth, ts)) = NothingClass.tpe
- val res = if (depth < 0) NothingClass.tpe else glb1(ts)
- glbResults((depth, ts)) = res
- res
- }
- }
- def glb1(ts0: List[Type]): Type = {
- try {
- val (ts, tparams) = stripExistentialsAndTypeVars(ts0)
- val glbOwner = commonOwner(ts)
- def refinedToParents(t: Type): List[Type] = t match {
- case RefinedType(ps, _) => ps flatMap refinedToParents
- case _ => List(t)
- }
- def refinedToDecls(t: Type): List[Scope] = t match {
- case RefinedType(ps, decls) =>
- val dss = ps flatMap refinedToDecls
- if (decls.isEmpty) dss else decls :: dss
- case _ => List()
- }
- val ts1 = ts flatMap refinedToParents
- val glbBase = intersectionType(ts1, glbOwner)
- val glbType =
- if (phase.erasedTypes || depth == 0) glbBase
- else {
- val glbRefined = refinedType(ts1, glbOwner)
- val glbThisType = glbRefined.typeSymbol.thisType
- def glbsym(proto: Symbol): Symbol = {
- val prototp = glbThisType.memberInfo(proto)
- val syms = for (t <- ts;
- alt <- (t.nonPrivateMember(proto.name).alternatives);
- if glbThisType.memberInfo(alt) matches prototp
- ) yield alt
- val symtypes = syms map glbThisType.memberInfo
- assert(!symtypes.isEmpty)
- proto.cloneSymbol(glbRefined.typeSymbol).setInfoOwnerAdjusted(
- if (proto.isTerm) glb(symtypes, decr(depth))
- else {
- def isTypeBound(tp: Type) = tp match {
- case TypeBounds(_, _) => true
- case _ => false
- }
- def glbBounds(bnds: List[Type]): TypeBounds = {
- val lo = lub(bnds map (_.bounds.lo), decr(depth))
- val hi = glb(bnds map (_.bounds.hi), decr(depth))
- if (lo <:< hi) TypeBounds(lo, hi)
- else throw GlbFailure
- }
- val symbounds = symtypes filter isTypeBound
- var result: Type =
- if (symbounds.isEmpty)
- TypeBounds.empty
- else glbBounds(symbounds)
- for (t <- symtypes if !isTypeBound(t))
- if (result.bounds containsType t) result = t
- else throw GlbFailure
- result
- })
- }
- if (globalGlbDepth < globalGlbLimit)
- try {
- globalGlbDepth += 1
- val dss = ts flatMap refinedToDecls
- for (ds <- dss; sym <- ds.iterator)
- if (globalGlbDepth < globalGlbLimit && !specializesSym(glbThisType, sym, depth))
- try {
- addMember(glbThisType, glbRefined, glbsym(sym), depth)
- } catch {
- case ex: NoCommonType =>
- }
- } finally {
- globalGlbDepth -= 1
- }
- if (glbRefined.decls.isEmpty) glbBase else glbRefined
- }
- existentialAbstraction(tparams, glbType)
- } catch {
- case GlbFailure =>
- if (ts forall (t => NullClass.tpe <:< t)) NullClass.tpe
- else NothingClass.tpe
- }
- }
- // if (settings.debug.value) { println(indent + "glb of " + ts + " at depth "+depth); indent = indent + " " } //DEBUG
-
- if (Statistics.canEnable) Statistics.incCounter(nestedLubCount)
- val res = glb0(ts)
-
- // if (settings.debug.value) { indent = indent.substring(0, indent.length() - 2); log(indent + "glb of " + ts + " is " + res) }//DEBUG
-
- if (ts exists typeIsNotNull) res.notNull else res
- }
/** A list of the typevars in a type. */
def typeVarsInType(tp: Type): List[TypeVar] = {
@@ -7078,29 +4313,33 @@ trait Types extends api.Types { self: SymbolTable =>
}
tvs.reverse
}
- /** Make each type var in this type use its original type for comparisons instead
- * of collecting constraints.
- */
- def suspendTypeVarsInType(tp: Type): List[TypeVar] = {
- val tvs = typeVarsInType(tp)
- // !!! Is it somehow guaranteed that this will not break under nesting?
- // In general one has to save and restore the contents of the field...
+
+ // If this type contains type variables, put them to sleep for a while.
+ // Don't just wipe them out by replacing them by the corresponding type
+ // parameter, as that messes up (e.g.) type variables in type refinements.
+ // Without this, the matchesType call would lead to type variables on both
+ // sides of a subtyping/equality judgement, which can lead to recursive types
+ // being constructed. See pos/t0851 for a situation where this happens.
+ @inline final def suspendingTypeVars[T](tvs: List[TypeVar])(op: => T): T = {
+ val saved = tvs map (_.suspended)
tvs foreach (_.suspended = true)
- tvs
+
+ try op
+ finally foreach2(tvs, saved)(_.suspended = _)
}
- /** Compute lub (if `variance == 1`) or glb (if `variance == -1`) of given list
+ /** Compute lub (if `variance == Covariant`) or glb (if `variance == Contravariant`) of given list
* of types `tps`. All types in `tps` are typerefs or singletypes
* with the same symbol.
* Return `Some(x)` if the computation succeeds with result `x`.
* Return `None` if the computation fails.
*/
- def mergePrefixAndArgs(tps: List[Type], variance: Int, depth: Int): Option[Type] = tps match {
+ def mergePrefixAndArgs(tps: List[Type], variance: Variance, depth: Int): Option[Type] = tps match {
case List(tp) =>
Some(tp)
case TypeRef(_, sym, _) :: rest =>
val pres = tps map (_.prefix) // prefix normalizes automatically
- val pre = if (variance == 1) lub(pres, depth) else glb(pres, depth)
+ val pre = if (variance.isPositive) lub(pres, depth) else glb(pres, depth)
val argss = tps map (_.normalize.typeArgs) // symbol equality (of the tp in tps) was checked using typeSymbol, which normalizes, so should normalize before retrieving arguments
val capturedParams = new ListBuffer[Symbol]
try {
@@ -7113,7 +4352,7 @@ trait Types extends api.Types { self: SymbolTable =>
} else {
val args = argss map (_.head)
if (args.tail forall (_ =:= args.head)) Some(typeRef(pre, sym, List(args.head)))
- else if (args exists (arg => isPrimitiveValueClass(arg.typeSymbol))) Some(ObjectClass.tpe)
+ else if (args exists (arg => isPrimitiveValueClass(arg.typeSymbol))) Some(ObjectTpe)
else Some(typeRef(pre, sym, List(lub(args))))
}
}
@@ -7122,30 +4361,28 @@ trait Types extends api.Types { self: SymbolTable =>
// transpose freaked out because of irregular argss
// catching just in case (shouldn't happen, but also doesn't cost us)
// [JZ] It happens: see SI-5683.
- debuglog("transposed irregular matrix!?" +(tps, argss))
+ debuglog(s"transposed irregular matrix!? tps=$tps argss=$argss")
None
case Some(argsst) =>
- val args = map2(sym.typeParams, argsst) { (tparam, as) =>
- if (depth == 0) {
- if (tparam.variance == variance) {
- // Take the intersection of the upper bounds of the type parameters
- // rather than falling all the way back to "Any", otherwise we end up not
- // conforming to bounds.
- val bounds0 = sym.typeParams map (_.info.bounds.hi) filterNot (_.typeSymbol == AnyClass)
- if (bounds0.isEmpty) AnyClass.tpe
- else intersectionType(bounds0 map (b => b.asSeenFrom(tps.head, sym)))
- }
- else if (tparam.variance == -variance) NothingClass.tpe
- else NoType
+ val args = map2(sym.typeParams, argsst) { (tparam, as0) =>
+ val as = as0.distinct
+ if (as.size == 1) as.head
+ else if (depth == 0) {
+ log("Giving up merging args: can't unify %s under %s".format(as.mkString(", "), tparam.fullLocationString))
+ // Don't return "Any" (or "Nothing") when we have to give up due to
+ // recursion depth. Return NoType, which prevents us from poisoning
+ // lublist's results. It can recognize the recursion and deal with it, but
+ // only if we aren't returning invalid types.
+ NoType
}
else {
if (tparam.variance == variance) lub(as, decr(depth))
- else if (tparam.variance == -variance) glb(as, decr(depth))
+ else if (tparam.variance == variance.flip) glb(as, decr(depth))
else {
val l = lub(as, decr(depth))
val g = glb(as, decr(depth))
if (l <:< g) l
- else { // Martin: I removed this, because incomplete. Not sure there is a good way to fix it. For the moment we
+ else { // Martin: I removed this, because incomplete. Not sure there is a good way to fix it. For the moment we
// just err on the conservative side, i.e. with a bound that is too high.
// if(!(tparam.info.bounds contains tparam)) //@M can't deal with f-bounds, see #2251
@@ -7164,7 +4401,7 @@ trait Types extends api.Types { self: SymbolTable =>
}
case SingleType(_, sym) :: rest =>
val pres = tps map (_.prefix)
- val pre = if (variance == 1) lub(pres, depth) else glb(pres, depth)
+ val pre = if (variance.isPositive) lub(pres, depth) else glb(pres, depth)
try {
Some(singleType(pre, sym))
} catch {
@@ -7188,7 +4425,7 @@ trait Types extends api.Types { self: SymbolTable =>
if (sym.isTerm)
for (alt <- tp.nonPrivateDecl(sym.name).alternatives)
if (specializesSym(thistp, sym, thistp, alt, depth))
- tp.decls unlink alt;
+ tp.decls unlink alt
tp.decls enter sym
}
}
@@ -7201,51 +4438,6 @@ trait Types extends api.Types { self: SymbolTable =>
def inheritsJavaVarArgsMethod(clazz: Symbol) =
clazz.thisType.baseClasses exists isJavaVarargsAncestor
- /** All types in list must be polytypes with type parameter lists of
- * same length as tparams.
- * Returns list of list of bounds infos, where corresponding type
- * parameters are renamed to tparams.
- */
- private def matchingBounds(tps: List[Type], tparams: List[Symbol]): List[List[Type]] = {
- def getBounds(tp: Type): List[Type] = tp match {
- case PolyType(tparams1, _) if sameLength(tparams1, tparams) =>
- tparams1 map (tparam => tparam.info.substSym(tparams1, tparams))
- case tp =>
- if (tp ne tp.normalize) getBounds(tp.normalize)
- else throw new NoCommonType(tps)
- }
- tps map getBounds
- }
-
- /** All types in list must be polytypes with type parameter lists of
- * same length as tparams.
- * Returns list of instance types, where corresponding type
- * parameters are renamed to tparams.
- */
- private def matchingInstTypes(tps: List[Type], tparams: List[Symbol]): List[Type] = {
- def transformResultType(tp: Type): Type = tp match {
- case PolyType(tparams1, restpe) if sameLength(tparams1, tparams) =>
- restpe.substSym(tparams1, tparams)
- case tp =>
- if (tp ne tp.normalize) transformResultType(tp.normalize)
- else throw new NoCommonType(tps)
- }
- tps map transformResultType
- }
-
- /** All types in list must be method types with equal parameter types.
- * Returns list of their result types.
- */
- private def matchingRestypes(tps: List[Type], pts: List[Type]): List[Type] =
- tps map {
- case mt @ MethodType(params1, res) if isSameTypes(mt.paramTypes, pts) =>
- res
- case NullaryMethodType(res) if pts.isEmpty =>
- res
- case _ =>
- throw new NoCommonType(tps)
- }
-
// Errors and Diagnostics -----------------------------------------------------
/** A throwable signalling a type error */
@@ -7258,7 +4450,7 @@ trait Types extends api.Types { self: SymbolTable =>
/** An exception for cyclic references from which we can recover */
case class RecoverableCyclicReference(sym: Symbol)
extends TypeError("illegal cyclic reference involving " + sym) {
- if (settings.debug.value) printStackTrace()
+ if (settings.debug) printStackTrace()
}
class NoCommonType(tps: List[Type]) extends Throwable(
@@ -7270,26 +4462,26 @@ trait Types extends api.Types { self: SymbolTable =>
}
/** The current indentation string for traces */
- private var indent: String = ""
+ protected[internal] var indent: String = ""
/** Perform operation `p` on arguments `tp1`, `arg2` and print trace of computation. */
protected def explain[T](op: String, p: (Type, T) => Boolean, tp1: Type, arg2: T): Boolean = {
- Console.println(indent + tp1 + " " + op + " " + arg2 + "?" /* + "("+tp1.getClass+","+arg2.getClass+")"*/)
+ inform(indent + tp1 + " " + op + " " + arg2 + "?" /* + "("+tp1.getClass+","+arg2.getClass+")"*/)
indent = indent + " "
val result = p(tp1, arg2)
indent = indent stripSuffix " "
- Console.println(indent + result)
+ inform(indent + result)
result
}
/** If option `explaintypes` is set, print a subtype trace for `found <:< required`. */
def explainTypes(found: Type, required: Type) {
- if (settings.explaintypes.value) withTypesExplained(found <:< required)
+ if (settings.explaintypes) withTypesExplained(found <:< required)
}
/** If option `explaintypes` is set, print a subtype trace for `op(found, required)`. */
def explainTypes(op: (Type, Type) => Any, found: Type, required: Type) {
- if (settings.explaintypes.value) withTypesExplained(op(found, required))
+ if (settings.explaintypes) withTypesExplained(op(found, required))
}
/** Execute `op` while printing a trace of the operations on types executed. */
@@ -7299,28 +4491,31 @@ trait Types extends api.Types { self: SymbolTable =>
}
def isUnboundedGeneric(tp: Type) = tp match {
- case t @ TypeRef(_, sym, _) => sym.isAbstractType && !(t <:< AnyRefClass.tpe)
+ case t @ TypeRef(_, sym, _) => sym.isAbstractType && !(t <:< AnyRefTpe)
case _ => false
}
def isBoundedGeneric(tp: Type) = tp match {
- case TypeRef(_, sym, _) if sym.isAbstractType => (tp <:< AnyRefClass.tpe)
+ case TypeRef(_, sym, _) if sym.isAbstractType => (tp <:< AnyRefTpe)
case TypeRef(_, sym, _) => !isPrimitiveValueClass(sym)
case _ => false
}
// Add serializable to a list of parents, unless one of them already is
def addSerializable(ps: Type*): List[Type] = (
if (ps exists typeIsSubTypeOfSerializable) ps.toList
- else (ps :+ SerializableClass.tpe).toList
+ else (ps :+ SerializableTpe).toList
)
/** Members of the given class, other than those inherited
* from Any or AnyRef.
*/
- def nonTrivialMembers(clazz: Symbol): Iterable[Symbol] =
- clazz.info.members filterNot (sym => sym.owner == ObjectClass || sym.owner == AnyClass)
+ def nonTrivialMembers(clazz: Symbol): Scope = clazz.info.members filterNot isUniversalMember
+
+ /** Members which can be imported into other scopes.
+ */
+ def importableMembers(pre: Type): Scope = pre.members filter isImportable
def objToAny(tp: Type): Type =
- if (!phase.erasedTypes && tp.typeSymbol == ObjectClass) AnyClass.tpe
+ if (!phase.erasedTypes && tp.typeSymbol == ObjectClass) AnyTpe
else tp
val shorthands = Set(
@@ -7333,32 +4528,8 @@ trait Types extends api.Types { self: SymbolTable =>
"scala.collection.IndexedSeq",
"scala.collection.Iterator")
-
- /** The maximum number of recursions allowed in toString
- */
- final val maxTostringRecursions = 50
-
- private var tostringRecursions = 0
-
- protected def typeToString(tpe: Type): String =
- if (tostringRecursions >= maxTostringRecursions) {
- debugwarn("Exceeded recursion depth attempting to print type.")
- if (settings.debug.value)
- (new Throwable).printStackTrace
-
- "..."
- }
- else
- try {
- tostringRecursions += 1
- tpe.safeToString
- } finally {
- tostringRecursions -= 1
- }
-
// ----- Hoisted closures and convenience methods, for compile time reductions -------
- private[scala] val typeIsNotNull = (tp: Type) => tp.isNotNull
private[scala] val isTypeVar = (tp: Type) => tp.isInstanceOf[TypeVar]
private[scala] val typeContainsTypeVar = (tp: Type) => tp exists isTypeVar
private[scala] val typeIsNonClassType = (tp: Type) => tp.typeSymbolDirect.isNonClassType
@@ -7368,11 +4539,54 @@ trait Types extends api.Types { self: SymbolTable =>
private[scala] val typeHasAnnotations = (tp: Type) => tp.annotations.nonEmpty
private[scala] val boundsContainType = (bounds: TypeBounds, tp: Type) => bounds containsType tp
private[scala] val typeListIsEmpty = (ts: List[Type]) => ts.isEmpty
- private[scala] val typeIsSubTypeOfSerializable = (tp: Type) => tp <:< SerializableClass.tpe
+ private[scala] val typeIsSubTypeOfSerializable = (tp: Type) => tp <:< SerializableTpe
private[scala] val typeIsNothing = (tp: Type) => tp.typeSymbolDirect eq NothingClass
private[scala] val typeIsAny = (tp: Type) => tp.typeSymbolDirect eq AnyClass
private[scala] val typeIsHigherKinded = (tp: Type) => tp.isHigherKinded
+ /** The maximum depth of type `tp` */
+ def typeDepth(tp: Type): Int = tp match {
+ case TypeRef(pre, sym, args) =>
+ math.max(typeDepth(pre), typeDepth(args) + 1)
+ case RefinedType(parents, decls) =>
+ math.max(typeDepth(parents), symTypeDepth(decls.toList) + 1)
+ case TypeBounds(lo, hi) =>
+ math.max(typeDepth(lo), typeDepth(hi))
+ case MethodType(paramtypes, result) =>
+ typeDepth(result)
+ case NullaryMethodType(result) =>
+ typeDepth(result)
+ case PolyType(tparams, result) =>
+ math.max(typeDepth(result), symTypeDepth(tparams) + 1)
+ case ExistentialType(tparams, result) =>
+ math.max(typeDepth(result), symTypeDepth(tparams) + 1)
+ case _ =>
+ 1
+ }
+
+ def withUncheckedVariance(tp: Type): Type =
+ tp withAnnotation (AnnotationInfo marker uncheckedVarianceClass.tpe)
+
+ //OPT replaced with tailrecursive function to save on #closures
+ // was:
+ // var d = 0
+ // for (tp <- tps) d = d max by(tp) //!!!OPT!!!
+ // d
+ private[scala] def maxDepth(tps: List[Type]): Int = {
+ @tailrec def loop(tps: List[Type], acc: Int): Int = tps match {
+ case tp :: rest => loop(rest, math.max(acc, typeDepth(tp)))
+ case _ => acc
+ }
+ loop(tps, 0)
+ }
+ private[scala] def maxBaseTypeSeqDepth(tps: List[Type]): Int = {
+ @tailrec def loop(tps: List[Type], acc: Int): Int = tps match {
+ case tp :: rest => loop(rest, math.max(acc, tp.baseTypeSeqDepth))
+ case _ => acc
+ }
+ loop(tps, 0)
+ }
+
@tailrec private def typesContain(tps: List[Type], sym: Symbol): Boolean = tps match {
case tp :: rest => (tp contains sym) || typesContain(rest, sym)
case _ => false
@@ -7412,7 +4626,6 @@ trait Types extends api.Types { self: SymbolTable =>
object TypesStats {
import BaseTypeSeqsStats._
val rawTypeCount = Statistics.newCounter ("#raw type creations")
- val asSeenFromCount = Statistics.newCounter ("#asSeenFrom ops")
val subtypeCount = Statistics.newCounter ("#subtype ops")
val sametypeCount = Statistics.newCounter ("#sametype ops")
val lubCount = Statistics.newCounter ("#toplevel lubs/glbs")
@@ -7434,7 +4647,7 @@ object TypesStats {
val singletonBaseTypeSeqCount = Statistics.newSubCounter(" of which for singletons", baseTypeSeqCount)
val typeOpsStack = Statistics.newTimerStack()
- /** Commented out, because right now this does not inline, so creates a closure which will distort statistics
+ /* Commented out, because right now this does not inline, so creates a closure which will distort statistics
@inline final def timedTypeOp[T](c: Statistics.StackableTimer)(op: => T): T = {
val start = Statistics.pushTimer(typeOpsStack, c)
try op
diff --git a/src/reflect/scala/reflect/internal/Variance.scala b/src/reflect/scala/reflect/internal/Variance.scala
new file mode 100644
index 0000000000..3480161567
--- /dev/null
+++ b/src/reflect/scala/reflect/internal/Variance.scala
@@ -0,0 +1,91 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author Paul Phillips
+ */
+
+package scala
+package reflect
+package internal
+
+import Variance._
+
+/** Variances form a lattice:
+ *
+ *            - Covariant -
+ *           /             \
+ *  Invariant               Bivariant
+ *           \             /
+ *            Contravariant
+ *
+ * The variance of a symbol within a type is calculated based on variance
+ * annotations, e.g. +A or -A, and the positions of the types in which the
+ * symbol appears. The actual mechanics are beyond the scope of this
+ * comment, but the essential operations on a Variance are:
+ *
+ * '&' - like bitwise AND. Unless all inputs have compatible variance,
+ * folding them across & will be invariant.
+ * '*' - like multiplication across { -1, 0, 1 } with contravariance as -1.
+ * flip - if contravariant or covariant, flip to the other; otherwise leave unchanged.
+ * cut - if bivariant, remain bivariant; otherwise become invariant.
+ *
+ * There is an important distinction between "isPositive" and "isCovariant".
+ * The former is true for both Covariant and Bivariant, but the latter is true
+ * only for Covariant.
+ */
+final class Variance private (val flags: Int) extends AnyVal {
+ def isBivariant = flags == 2
+ def isCovariant = flags == 1 // excludes bivariant
+ def isInvariant = flags == 0
+ def isContravariant = flags == -1 // excludes bivariant
+ def isPositive = flags > 0 // covariant or bivariant
+
+ def &(other: Variance): Variance = (
+ if (this == other) this
+ else if (this.isBivariant) other
+ else if (other.isBivariant) this
+ else Invariant
+ )
+
+ def *(other: Variance): Variance = (
+ if (other.isPositive) this
+ else if (other.isContravariant) this.flip
+ else this.cut
+ )
+
+ /** Flip between covariant and contravariant. I chose not to use unary_- because it doesn't stand out enough. */
+ def flip = if (isCovariant) Contravariant else if (isContravariant) Covariant else this
+
+ /** Map everything below bivariant to invariant. */
+ def cut = if (isBivariant) this else Invariant
+
+ /** The symbolic annotation used to indicate the given kind of variance. */
+ def symbolicString = (
+ if (isBivariant) "+/-"
+ else if (isCovariant) "+"
+ else if (isContravariant) "-"
+ else ""
+ )
+
+ override def toString = (
+ if (isContravariant) "contravariant"
+ else if (isCovariant) "covariant"
+ else if (isInvariant) "invariant"
+ else "" // noisy to print bivariant on everything without type parameters
+ )
+}
+
+object Variance {
+ implicit class SbtCompat(val v: Variance) {
+ def < (other: Int) = v.flags < other
+ def > (other: Int) = v.flags > other
+ }
+
+ def fold(variances: List[Variance]): Variance = (
+ if (variances.isEmpty) Bivariant
+ else variances reduceLeft (_ & _)
+ )
+ val Bivariant = new Variance(2)
+ val Covariant = new Variance(1)
+ val Contravariant = new Variance(-1)
+ val Invariant = new Variance(0)
+}
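Since the scaladoc above describes the algebra only informally, here is a quick self-check; the sketch assumes the class above compiles to scala.reflect.internal.Variance and is available on the classpath:

import scala.reflect.internal.Variance
import Variance._

object VarianceAlgebraDemo {
  def main(args: Array[String]): Unit = {
    assert(Covariant.flip == Contravariant)               // flip swaps + and -
    assert(Invariant.flip == Invariant)                   // flip leaves invariant and bivariant unchanged
    assert((Covariant & Contravariant) == Invariant)      // incompatible inputs fold to invariant
    assert((Bivariant & Covariant) == Covariant)          // bivariant is the identity of &
    assert((Covariant * Contravariant) == Contravariant)  // * flips when seen from a - position
    assert(fold(Nil) == Bivariant)                        // an empty fold is the top of the lattice
    println("variance algebra checks passed")
  }
}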
diff --git a/src/reflect/scala/reflect/internal/Variances.scala b/src/reflect/scala/reflect/internal/Variances.scala
new file mode 100644
index 0000000000..78df3c9617
--- /dev/null
+++ b/src/reflect/scala/reflect/internal/Variances.scala
@@ -0,0 +1,197 @@
+/* NSC -- new scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author Martin Odersky
+ */
+
+package scala
+package reflect
+package internal
+
+import Variance._
+import scala.collection.{ mutable, immutable }
+import scala.annotation.tailrec
+
+/** See comments at scala.reflect.internal.Variance.
+ */
+trait Variances {
+ self: SymbolTable =>
+
+ /** Used in Refchecks.
+ * TODO - eliminate duplication with varianceInType
+ */
+ class VarianceValidator extends Traverser {
+ private val escapedLocals = mutable.HashSet[Symbol]()
+ // A flag for when we're in a refinement, meaning method parameter types
+ // need to be checked.
+ private var inRefinement = false
+ @inline private def withinRefinement(body: => Type): Type = {
+ val saved = inRefinement
+ inRefinement = true
+ try body finally inRefinement = saved
+ }
+
+ /** Is every symbol in the owner chain between `site` and the owner of `sym`
+ * either a term symbol or private[this]? If not, add `sym` to the set of
+ * escaped locals.
+ * @pre sym.hasLocalFlag
+ */
+ @tailrec final def checkForEscape(sym: Symbol, site: Symbol) {
+ if (site == sym.owner || site == sym.owner.moduleClass || site.isPackage) () // done
+ else if (site.isTerm || site.isPrivateLocal) checkForEscape(sym, site.owner) // ok - recurse to owner
+ else escapedLocals += sym
+ }
+
+ protected def issueVarianceError(base: Symbol, sym: Symbol, required: Variance): Unit = ()
+
+ // Flip occurrences of type parameters and parameters, unless
+ // - it's a constructor, or case class factory or extractor
+ // - it's a type parameter of tvar's owner.
+ def shouldFlip(sym: Symbol, tvar: Symbol) = (
+ sym.isParameter
+ && !(tvar.isTypeParameterOrSkolem && sym.isTypeParameterOrSkolem && tvar.owner == sym.owner)
+ )
+ // return Bivariant if `sym` is local to a term
+ // or is private[this] or protected[this]
+ def isLocalOnly(sym: Symbol) = !sym.owner.isClass || (
+ sym.isTerm
+ && (sym.hasLocalFlag || sym.isSuperAccessor) // super accessors are implicitly local #4345
+ && !escapedLocals(sym)
+ )
+
+ private object ValidateVarianceMap extends TypeMap(trackVariance = true) {
+ private var base: Symbol = _
+
+ /** The variance of a symbol occurrence of `tvar` seen at the level of the definition of `base`.
+ * The search proceeds from `base` to the owner of `tvar`.
+ * Initially the state is covariant, but it might change along the search.
+ *
+ * An alias which does not override anything is treated as Bivariant;
+ * this is OK because we always expand aliases for variance checking.
+ * However if it does override a type in a base class, we must assume Invariant
+ * because there may be references to the type parameter that are not checked.
+ */
+ def relativeVariance(tvar: Symbol): Variance = {
+ def nextVariance(sym: Symbol, v: Variance): Variance = (
+ if (shouldFlip(sym, tvar)) v.flip
+ else if (isLocalOnly(sym)) Bivariant
+ else if (sym.isAliasType) Invariant
+ else v
+ )
+ def loop(sym: Symbol, v: Variance): Variance = (
+ if (sym == tvar.owner || v.isBivariant) v
+ else loop(sym.owner, nextVariance(sym, v))
+ )
+ loop(base, Covariant)
+ }
+ def isUncheckedVariance(tp: Type) = tp match {
+ case AnnotatedType(annots, _, _) => annots exists (_ matches definitions.uncheckedVarianceClass)
+ case _ => false
+ }
+
+ private def checkVarianceOfSymbol(sym: Symbol) {
+ val relative = relativeVariance(sym)
+ val required = relative * variance
+ if (!relative.isBivariant) {
+ log(s"verifying $sym (${sym.variance}${sym.locationString}) is $required at $base in ${base.owner}")
+ if (sym.variance != required)
+ issueVarianceError(base, sym, required)
+ }
+ }
+ override def mapOver(decls: Scope): Scope = {
+ decls foreach (sym => withVariance(if (sym.isAliasType) Invariant else variance)(this(sym.info)))
+ decls
+ }
+ private def resultTypeOnly(tp: Type) = tp match {
+ case mt: MethodType => !inRefinement
+ case pt: PolyType => true
+ case _ => false
+ }
+
+ /** For PolyTypes, type parameters are skipped because they are defined
+ * explicitly (their TypeDefs will be passed here.) For MethodTypes, the
+ * same is true of the parameters (ValDefs) unless we are inside a
+ * refinement, in which case they are checked from here.
+ */
+ def apply(tp: Type): Type = tp match {
+ case _ if isUncheckedVariance(tp) => tp
+ case _ if resultTypeOnly(tp) => this(tp.resultType)
+ case TypeRef(_, sym, _) if sym.isAliasType => this(tp.normalize)
+ case TypeRef(_, sym, _) if !sym.variance.isInvariant => checkVarianceOfSymbol(sym) ; mapOver(tp)
+ case RefinedType(_, _) => withinRefinement(mapOver(tp))
+ case ClassInfoType(parents, _, _) => parents foreach this ; tp
+ case mt @ MethodType(_, result) => flipped(mt.paramTypes foreach this) ; this(result)
+ case _ => mapOver(tp)
+ }
+ def validateDefinition(base: Symbol) {
+ val saved = this.base
+ this.base = base
+ try apply(base.info)
+ finally this.base = saved
+ }
+ }
+
+ /** Validate variance of info of symbol `base` */
+ private def validateVariance(base: Symbol) {
+ ValidateVarianceMap validateDefinition base
+ }
+
+ override def traverse(tree: Tree) {
+ def sym = tree.symbol
+ // No variance check for object-private/protected methods/values.
+ // Or constructors, or case class factory or extractor.
+ def skip = (
+ sym.hasLocalFlag
+ || sym.owner.isConstructor
+ || sym.owner.isCaseApplyOrUnapply
+ )
+ tree match {
+ case defn: MemberDef if skip =>
+ log(s"Skipping variance check of ${sym.defString}")
+ case ClassDef(_, _, _, _) | TypeDef(_, _, _, _) =>
+ validateVariance(sym)
+ super.traverse(tree)
+ // ModuleDefs need not be considered because they have been eliminated already
+ case ValDef(_, _, _, _) =>
+ validateVariance(sym)
+ case DefDef(_, _, tparams, vparamss, _, _) =>
+ validateVariance(sym)
+ traverseTrees(tparams)
+ traverseTreess(vparamss)
+ case Template(_, _, _) =>
+ super.traverse(tree)
+ case _ =>
+ }
+ }
+ }
+
+ /** Compute variance of type parameter `tparam` in all types `tps`. */
+ def varianceInTypes(tps: List[Type])(tparam: Symbol): Variance =
+ fold(tps map (tp => varianceInType(tp)(tparam)))
+
+ /** Compute variance of type parameter `tparam` in type `tp`. */
+ def varianceInType(tp: Type)(tparam: Symbol): Variance = {
+ def inArgs(sym: Symbol, args: List[Type]): Variance = fold(map2(args, sym.typeParams)((a, p) => inType(a) * p.variance))
+ def inSyms(syms: List[Symbol]): Variance = fold(syms map inSym)
+ def inTypes(tps: List[Type]): Variance = fold(tps map inType)
+
+ def inSym(sym: Symbol): Variance = if (sym.isAliasType) inType(sym.info).cut else inType(sym.info)
+ def inType(tp: Type): Variance = tp match {
+ case ErrorType | WildcardType | NoType | NoPrefix => Bivariant
+ case ThisType(_) | ConstantType(_) => Bivariant
+ case TypeRef(_, `tparam`, _) => Covariant
+ case BoundedWildcardType(bounds) => inType(bounds)
+ case NullaryMethodType(restpe) => inType(restpe)
+ case SingleType(pre, sym) => inType(pre)
+ case TypeRef(pre, _, _) if tp.isHigherKinded => inType(pre) // a type constructor cannot occur in tp's args
+ case TypeRef(pre, sym, args) => inType(pre) & inArgs(sym, args)
+ case TypeBounds(lo, hi) => inType(lo).flip & inType(hi)
+ case RefinedType(parents, defs) => inTypes(parents) & inSyms(defs.toList)
+ case MethodType(params, restpe) => inSyms(params).flip & inType(restpe)
+ case PolyType(tparams, restpe) => inSyms(tparams).flip & inType(restpe)
+ case ExistentialType(tparams, restpe) => inSyms(tparams) & inType(restpe)
+ case AnnotatedType(annots, tp, _) => inTypes(annots map (_.atp)) & inType(tp)
+ }
+
+ inType(tp)
+ }
+}
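To make the parameter-position flip in inType concrete, here is a toy occurrence calculation over a hypothetical Fn constructor (standing in for MethodType); it reuses the Variance lattice added above, again assuming it is on the classpath as scala.reflect.internal.Variance:

import scala.reflect.internal.Variance
import Variance._

object OccurrenceSketch {
  sealed trait T
  case object P                  extends T // the tracked type parameter
  case object Other              extends T
  case class  Fn(arg: T, res: T) extends T // stands in for a method/function type

  def inType(t: T): Variance = t match {
    case P            => Covariant
    case Other        => Bivariant
    case Fn(arg, res) => inType(arg).flip & inType(res) // the parameter side is flipped
  }

  def main(args: Array[String]): Unit = {
    println(inType(Fn(P, Other))) // contravariant: P occurs only in parameter position
    println(inType(Fn(Other, P))) // covariant: P occurs only in result position
    println(inType(Fn(P, P)))     // invariant: P occurs on both sides
  }
}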
diff --git a/src/reflect/scala/reflect/internal/annotations/compileTimeOnly.scala b/src/reflect/scala/reflect/internal/annotations/compileTimeOnly.scala
index 058ff61fbf..2c9f909629 100644
--- a/src/reflect/scala/reflect/internal/annotations/compileTimeOnly.scala
+++ b/src/reflect/scala/reflect/internal/annotations/compileTimeOnly.scala
@@ -1,4 +1,5 @@
-package scala.reflect
+package scala
+package reflect
package internal
package annotations
diff --git a/src/reflect/scala/reflect/internal/pickling/ByteCodecs.scala b/src/reflect/scala/reflect/internal/pickling/ByteCodecs.scala
index 367a3b8b19..eb266e8125 100644
--- a/src/reflect/scala/reflect/internal/pickling/ByteCodecs.scala
+++ b/src/reflect/scala/reflect/internal/pickling/ByteCodecs.scala
@@ -5,7 +5,8 @@
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-package scala.reflect.internal.pickling
+package scala
+package reflect.internal.pickling
object ByteCodecs {
@@ -127,7 +128,7 @@ object ByteCodecs {
var j = 0
val dstlen = (srclen * 7 + 7) / 8
while (i + 7 < srclen) {
- var out: Int = src(i)
+ var out: Int = src(i).toInt
var in: Byte = src(i + 1)
src(j) = (out | (in & 0x01) << 7).toByte
out = in >>> 1
@@ -152,7 +153,7 @@ object ByteCodecs {
j += 7
}
if (i < srclen) {
- var out: Int = src(i)
+ var out: Int = src(i).toInt
if (i + 1 < srclen) {
var in: Byte = src(i + 1)
src(j) = (out | (in & 0x01) << 7).toByte; j += 1
@@ -195,10 +196,10 @@ object ByteCodecs {
*
* Sometimes returns (length+1) of the decoded array. Example:
*
- * scala> val enc = reflect.generic.ByteCodecs.encode(Array(1,2,3))
+ * scala> val enc = scala.reflect.generic.ByteCodecs.encode(Array(1,2,3))
* enc: Array[Byte] = Array(2, 5, 13, 1)
*
- * scala> reflect.generic.ByteCodecs.decode(enc)
+ * scala> scala.reflect.generic.ByteCodecs.decode(enc)
* res43: Int = 4
*
* scala> enc
diff --git a/src/reflect/scala/reflect/internal/pickling/PickleBuffer.scala b/src/reflect/scala/reflect/internal/pickling/PickleBuffer.scala
index 6170fcbb90..a814256f8e 100644
--- a/src/reflect/scala/reflect/internal/pickling/PickleBuffer.scala
+++ b/src/reflect/scala/reflect/internal/pickling/PickleBuffer.scala
@@ -3,7 +3,8 @@
* @author Martin Odersky
*/
-package scala.reflect
+package scala
+package reflect
package internal
package pickling
@@ -62,11 +63,8 @@ class PickleBuffer(data: Array[Byte], from: Int, to: Int) {
writeByte((x & 0x7f).toInt)
}
- /** Write a natural number <code>x</code> at position <code>pos</code>.
+ /** Write a natural number `x` at position `pos`.
* If number is more than one byte, shift rest of array to make space.
- *
- * @param pos ...
- * @param x ...
*/
def patchNat(pos: Int, x: Int) {
def patchNatPrefix(x: Int) {
@@ -81,7 +79,7 @@ class PickleBuffer(data: Array[Byte], from: Int, to: Int) {
if (y != 0) patchNatPrefix(y)
}
- /** Write a long number <code>x</code> in signed big endian format, base 256.
+ /** Write a long number `x` in signed big endian format, base 256.
*
* @param x The long number to be written.
*/
@@ -94,12 +92,9 @@ class PickleBuffer(data: Array[Byte], from: Int, to: Int) {
// -- Basic input routines --------------------------------------------
- /** Peek at the current byte without moving the read index */
- def peekByte(): Int = bytes(readIndex)
-
/** Read a byte */
def readByte(): Int = {
- val x = bytes(readIndex); readIndex += 1; x
+ val x = bytes(readIndex).toInt; readIndex += 1; x
}
/** Read a natural number in big endian format, base 128.
@@ -110,9 +105,9 @@ class PickleBuffer(data: Array[Byte], from: Int, to: Int) {
var b = 0L
var x = 0L
do {
- b = readByte()
+ b = readByte().toLong
x = (x << 7) + (b & 0x7f)
- } while ((b & 0x80) != 0L);
+ } while ((b & 0x80) != 0L)
x
}
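For reference, the nat encoding touched by this hunk stores seven payload bits per byte, big endian, with the high bit set on every byte except the last; a standalone round-trip sketch (NatCodecSketch is a hypothetical helper, not PickleBuffer itself):

object NatCodecSketch {
  def writeNat(x: Long): List[Int] = {
    // continuation bytes first (big endian), each carrying the 0x80 marker
    def prefix(y: Long): List[Int] =
      if (y == 0) Nil else prefix(y >>> 7) :+ ((y & 0x7f) | 0x80).toInt
    prefix(x >>> 7) :+ (x & 0x7f).toInt
  }
  // same accumulation step as readNat above: x = (x << 7) + (b & 0x7f)
  def readNat(bytes: List[Int]): Long =
    bytes.foldLeft(0L)((acc, b) => (acc << 7) + (b & 0x7f))

  def main(args: Array[String]): Unit = {
    val enc = writeNat(300L)
    println(enc.map(b => f"0x$b%02x")) // List(0x82, 0x2c)
    println(readNat(enc))              // 300
  }
}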
@@ -151,18 +146,14 @@ class PickleBuffer(data: Array[Byte], from: Int, to: Int) {
result.toIndexedSeq
}
- /** Perform operation <code>op</code> until the condition
- * <code>readIndex == end</code> is satisfied.
+ /** Perform operation `op` until the condition
+ * `readIndex == end` is satisfied.
* Concatenate results into a list.
- *
- * @param end ...
- * @param op ...
- * @return ...
*/
def until[T](end: Int, op: () => T): List[T] =
- if (readIndex == end) List() else op() :: until(end, op);
+ if (readIndex == end) List() else op() :: until(end, op)
- /** Perform operation <code>op</code> the number of
+ /** Perform operation `op` the number of
* times specified. Concatenate the results into a list.
*/
def times[T](n: Int, op: ()=>T): List[T] =
diff --git a/src/reflect/scala/reflect/internal/pickling/PickleFormat.scala b/src/reflect/scala/reflect/internal/pickling/PickleFormat.scala
index 16747af08a..ce0ceec688 100644
--- a/src/reflect/scala/reflect/internal/pickling/PickleFormat.scala
+++ b/src/reflect/scala/reflect/internal/pickling/PickleFormat.scala
@@ -1,4 +1,5 @@
-package scala.reflect
+package scala
+package reflect
package internal
package pickling
@@ -56,7 +57,7 @@ object PickleFormat {
* | 42 ANNOTATEDtpe len_Nat [sym_Ref /* no longer needed */] tpe_Ref {annotinfo_Ref}
* | 43 ANNOTINFO len_Nat AnnotInfoBody
* | 44 ANNOTARGARRAY len_Nat {constAnnotArg_Ref}
- * | 47 DEBRUIJNINDEXtpe len_Nat level_Nat index_Nat
+ * | 47 DEBRUIJNINDEXtpe len_Nat level_Nat index_Nat /* no longer needed */
* | 48 EXISTENTIALtpe len_Nat type_Ref {symbol_Ref}
* | 49 TREE len_Nat 1 EMPTYtree
* | 49 TREE len_Nat 2 PACKAGEtree type_Ref sym_Ref mods_Ref name_Ref {tree_Ref}
@@ -115,7 +116,6 @@ object PickleFormat {
*/
val MajorVersion = 5
val MinorVersion = 0
- def VersionString = "V" + MajorVersion + "." + MinorVersion
final val TERMname = 1
final val TYPEname = 2
@@ -161,7 +161,7 @@ object PickleFormat {
final val ANNOTARGARRAY = 44
final val SUPERtpe = 46
- final val DEBRUIJNINDEXtpe = 47
+ final val DEBRUIJNINDEXtpe = 47 // no longer generated
final val EXISTENTIALtpe = 48
final val TREE = 49 // prefix code that means a tree is coming
diff --git a/src/reflect/scala/reflect/internal/pickling/UnPickler.scala b/src/reflect/scala/reflect/internal/pickling/UnPickler.scala
index 603fff4f1c..6cffdbc193 100644
--- a/src/reflect/scala/reflect/internal/pickling/UnPickler.scala
+++ b/src/reflect/scala/reflect/internal/pickling/UnPickler.scala
@@ -3,7 +3,8 @@
* @author Martin Odersky
*/
-package scala.reflect
+package scala
+package reflect
package internal
package pickling
@@ -28,8 +29,8 @@ abstract class UnPickler {
* from an array of bytes.
* @param bytes bytearray from which we unpickle
* @param offset offset from which unpickling starts
- * @param classroot the top-level class which is unpickled, or NoSymbol if inapplicable
- * @param moduleroot the top-level module which is unpickled, or NoSymbol if inapplicable
+ * @param classRoot the top-level class which is unpickled, or NoSymbol if inapplicable
+ * @param moduleRoot the top-level module which is unpickled, or NoSymbol if inapplicable
* @param filename filename associated with bytearray, only used for error messages
*/
def unpickle(bytes: Array[Byte], offset: Int, classRoot: Symbol, moduleRoot: Symbol, filename: String) {
@@ -159,9 +160,9 @@ abstract class UnPickler {
result
}
- /** If entry at <code>i</code> is undefined, define it by performing
- * operation <code>op</code> with <code>readIndex at start of i'th
- * entry. Restore <code>readIndex</code> afterwards.
+ /** If entry at `i` is undefined, define it by performing
+ * operation `op` with `readIndex` at the start of the i'th
+ * entry. Restore `readIndex` afterwards.
*/
protected def at[T <: AnyRef](i: Int, op: () => T): T = {
var r = entries(i)
@@ -186,13 +187,12 @@ abstract class UnPickler {
case _ => errorBadSignature("bad name tag: " + tag)
}
}
- protected def readTermName(): TermName = readName().toTermName
- protected def readTypeName(): TypeName = readName().toTypeName
+ private def readEnd() = readNat() + readIndex
/** Read a symbol */
protected def readSymbol(): Symbol = {
val tag = readByte()
- val end = readNat() + readIndex
+ val end = readEnd()
def atEnd = readIndex == end
def readExtSymbol(): Symbol = {
@@ -325,7 +325,7 @@ abstract class UnPickler {
*/
protected def readType(forceProperType: Boolean = false): Type = {
val tag = readByte()
- val end = readNat() + readIndex
+ val end = readEnd()
(tag: @switch) match {
case NOtpe =>
NoType
@@ -344,7 +344,7 @@ abstract class UnPickler {
case TYPEREFtpe =>
val pre = readTypeRef()
val sym = readSymbolRef()
- var args = until(end, readTypeRef)
+ val args = until(end, readTypeRef)
TypeRef(pre, sym, args)
case TYPEBOUNDStpe =>
TypeBounds(readTypeRef(), readTypeRef())
@@ -431,7 +431,7 @@ abstract class UnPickler {
protected def readChildren() {
val tag = readByte()
assert(tag == CHILDREN)
- val end = readNat() + readIndex
+ val end = readEnd()
val target = readSymbolRef()
while (readIndex != end) target addChild readSymbolRef()
}
@@ -450,7 +450,7 @@ abstract class UnPickler {
*/
private def readArrayAnnot() = {
readByte() // skip the `annotargarray` tag
- val end = readNat() + readIndex
+ val end = readEnd()
until(end, () => readClassfileAnnotArg(readNat())).toArray(JavaArgumentTag)
}
protected def readClassfileAnnotArg(i: Int): ClassfileAnnotArg = bytes(index(i)) match {
@@ -486,7 +486,7 @@ abstract class UnPickler {
val tag = readByte()
if (tag != SYMANNOT)
errorBadSignature("symbol annotation expected ("+ tag +")")
- val end = readNat() + readIndex
+ val end = readEnd()
val target = readSymbolRef()
target.addAnnotation(readAnnotationInfo(end))
}
@@ -497,7 +497,7 @@ abstract class UnPickler {
val tag = readByte()
if (tag != ANNOTINFO)
errorBadSignature("annotation expected (" + tag + ")")
- val end = readNat() + readIndex
+ val end = readEnd()
readAnnotationInfo(end)
}
@@ -506,7 +506,7 @@ abstract class UnPickler {
val outerTag = readByte()
if (outerTag != TREE)
errorBadSignature("tree expected (" + outerTag + ")")
- val end = readNat() + readIndex
+ val end = readEnd()
val tag = readByte()
val tpe = if (tag == EMPTYtree) NoType else readTypeRef()
@@ -517,18 +517,18 @@ abstract class UnPickler {
var mods: Modifiers = null
var name: Name = null
- /** Read a Symbol, Modifiers, and a Name */
+ /* Read a Symbol, Modifiers, and a Name */
def setSymModsName() {
symbol = readSymbolRef()
mods = readModifiersRef()
name = readNameRef()
}
- /** Read a Symbol and a Name */
+ /* Read a Symbol and a Name */
def setSymName() {
symbol = readSymbolRef()
name = readNameRef()
}
- /** Read a Symbol */
+ /* Read a Symbol */
def setSym() {
symbol = readSymbolRef()
}
@@ -764,7 +764,8 @@ abstract class UnPickler {
val tag = readNat()
if (tag != MODIFIERS)
errorBadSignature("expected a modifiers tag (" + tag + ")")
- val end = readNat() + readIndex
+
+ readEnd()
val pflagsHi = readNat()
val pflagsLo = readNat()
val pflags = (pflagsHi.toLong << 32) + pflagsLo
@@ -796,7 +797,6 @@ abstract class UnPickler {
protected def readTreeRef(): Tree = at(readNat(), readTree)
protected def readTypeNameRef(): TypeName = readNameRef().toTypeName
- protected def readTermNameRef(): TermName = readNameRef().toTermName
protected def readTemplateRef(): Template =
readTreeRef() match {
@@ -843,7 +843,6 @@ abstract class UnPickler {
* error reporting, so we rely on the typechecker to report the error).
*/
def toTypeError(e: MissingRequirementError) = {
- // e.printStackTrace()
new TypeError(e.msg)
}
@@ -853,7 +852,8 @@ abstract class UnPickler {
private val p = phase
override def complete(sym: Symbol) : Unit = try {
val tp = at(i, () => readType(sym.isTerm)) // after NMT_TRANSITION, revert `() => readType(sym.isTerm)` to `readType`
- atPhase(p) (sym setInfo tp)
+ if (p ne null)
+ enteringPhase(p) (sym setInfo tp)
if (currentRunId != definedAtRunId)
sym.setInfo(adaptToNewRunMap(tp))
}
@@ -871,7 +871,7 @@ abstract class UnPickler {
super.complete(sym)
var alias = at(j, readSymbol)
if (alias.isOverloaded)
- alias = atPhase(picklerPhase)((alias suchThat (alt => sym.tpe =:= sym.owner.thisType.memberType(alt))))
+ alias = enteringPhase(picklerPhase)((alias suchThat (alt => sym.tpe =:= sym.owner.thisType.memberType(alt))))
sym.asInstanceOf[TermSymbol].setAlias(alias)
}
diff --git a/src/reflect/scala/reflect/internal/settings/AbsSettings.scala b/src/reflect/scala/reflect/internal/settings/AbsSettings.scala
index a6fb4187ca..859f703d97 100644
--- a/src/reflect/scala/reflect/internal/settings/AbsSettings.scala
+++ b/src/reflect/scala/reflect/internal/settings/AbsSettings.scala
@@ -3,7 +3,8 @@
* @author Paul Phillips
*/
-package scala.reflect.internal
+package scala
+package reflect.internal
package settings
/** A Settings abstraction boiled out of the original highly mutable Settings
diff --git a/src/reflect/scala/reflect/internal/settings/MutableSettings.scala b/src/reflect/scala/reflect/internal/settings/MutableSettings.scala
index 81368df7a6..e21e95903b 100644
--- a/src/reflect/scala/reflect/internal/settings/MutableSettings.scala
+++ b/src/reflect/scala/reflect/internal/settings/MutableSettings.scala
@@ -4,7 +4,8 @@
*/
// $Id$
-package scala.reflect.internal
+package scala
+package reflect.internal
package settings
/** A mutable Settings object.
@@ -35,16 +36,22 @@ abstract class MutableSettings extends AbsSettings {
def overrideObjects: BooleanSetting
def printtypes: BooleanSetting
def debug: BooleanSetting
- def Ynotnull: BooleanSetting
def explaintypes: BooleanSetting
def verbose: BooleanSetting
def uniqid: BooleanSetting
def Yshowsymkinds: BooleanSetting
+ def Yposdebug: BooleanSetting
+ def Yrangepos: BooleanSetting
def Xprintpos: BooleanSetting
def Yrecursion: IntSetting
def maxClassfileName: IntSetting
def Xexperimental: BooleanSetting
- def XoldPatmat: BooleanSetting
def XnoPatmatAnalysis: BooleanSetting
def XfullLubs: BooleanSetting
+ def breakCycles: BooleanSetting
+}
+object MutableSettings {
+ import scala.language.implicitConversions
+ /** Support the common use case, `if (settings.debug) println("Hello, martin.")` */
+ @inline implicit def reflectSettingToBoolean(s: MutableSettings#BooleanSetting): Boolean = s.value
}
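The implicit conversion above exists purely so call sites can test a setting directly; a minimal self-contained sketch of the same pattern (SettingsSketch and its BooleanSetting are illustrative, not the compiler's Settings):

object SettingsSketch {
  import scala.language.implicitConversions

  final class BooleanSetting(var value: Boolean)
  implicit def settingToBoolean(s: BooleanSetting): Boolean = s.value

  val debug = new BooleanSetting(false)

  def main(args: Array[String]): Unit = {
    debug.value = true
    if (debug) println("debug output enabled") // the condition uses the implicit conversion
  }
}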
diff --git a/src/reflect/scala/reflect/internal/tpe/CommonOwners.scala b/src/reflect/scala/reflect/internal/tpe/CommonOwners.scala
new file mode 100644
index 0000000000..f879960407
--- /dev/null
+++ b/src/reflect/scala/reflect/internal/tpe/CommonOwners.scala
@@ -0,0 +1,51 @@
+package scala
+package reflect
+package internal
+package tpe
+
+private[internal] trait CommonOwners {
+ self: SymbolTable =>
+
+ /** The most deeply nested owner that contains all the symbols
+ * of thistype or prefixless typerefs/singletype occurrences in given type.
+ */
+ protected[internal] def commonOwner(t: Type): Symbol = commonOwner(t :: Nil)
+
+ /** The most deeply nested owner that contains all the symbols
+ * of thistype or prefixless typerefs/singletype occurrences in given list
+ * of types.
+ */
+ protected[internal] def commonOwner(tps: List[Type]): Symbol = {
+ if (tps.isEmpty) NoSymbol
+ else {
+ commonOwnerMap.clear()
+ tps foreach (commonOwnerMap traverse _)
+ if (commonOwnerMap.result ne null) commonOwnerMap.result else NoSymbol
+ }
+ }
+
+ protected def commonOwnerMap: CommonOwnerMap = commonOwnerMapObj
+
+ protected class CommonOwnerMap extends TypeTraverserWithResult[Symbol] {
+ var result: Symbol = _
+
+ def clear() { result = null }
+
+ private def register(sym: Symbol) {
+ // First considered type is the trivial result.
+ if ((result eq null) || (sym eq NoSymbol))
+ result = sym
+ else
+ while ((result ne NoSymbol) && (result ne sym) && !(sym isNestedIn result))
+ result = result.owner
+ }
+ def traverse(tp: Type) = tp.normalize match {
+ case ThisType(sym) => register(sym)
+ case TypeRef(NoPrefix, sym, args) => register(sym.owner) ; args foreach traverse
+ case SingleType(NoPrefix, sym) => register(sym.owner)
+ case _ => mapOver(tp)
+ }
+ }
+
+ private lazy val commonOwnerMapObj = new CommonOwnerMap
+}
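register folds a new symbol into the running result by walking result up its owner chain until it encloses the symbol; a toy sketch of that walk over a hypothetical Owner tree (not compiler Symbols):

object CommonOwnerSketch {
  case class Owner(name: String, parent: Option[Owner]) {
    def chain: List[Owner] = this :: parent.map(_.chain).getOrElse(Nil)
    def isNestedIn(other: Owner): Boolean = chain contains other
  }
  // walk `a` up its owner chain until it encloses `b`; assumes a shared root
  def commonOwner(a: Owner, b: Owner): Owner = {
    var result = a
    while (!(b isNestedIn result)) result = result.parent.get
    result
  }
  def main(args: Array[String]): Unit = {
    val root  = Owner("root", None)
    val pkg   = Owner("pkg", Some(root))
    val clazz = Owner("C", Some(pkg))
    val meth  = Owner("m", Some(clazz))
    println(commonOwner(meth, Owner("D", Some(pkg))).name) // pkg
  }
}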
diff --git a/src/reflect/scala/reflect/internal/tpe/GlbLubs.scala b/src/reflect/scala/reflect/internal/tpe/GlbLubs.scala
new file mode 100644
index 0000000000..0a7a2a127c
--- /dev/null
+++ b/src/reflect/scala/reflect/internal/tpe/GlbLubs.scala
@@ -0,0 +1,613 @@
+package scala
+package reflect
+package internal
+package tpe
+
+import scala.collection.mutable
+import scala.annotation.tailrec
+import util.Statistics
+import Variance._
+
+private[internal] trait GlbLubs {
+ self: SymbolTable =>
+ import definitions._
+ import TypesStats._
+
+ private final val printLubs = scala.sys.props contains "scalac.debug.lub"
+
+ /** In case anyone wants to turn off lub verification without reverting anything. */
+ private final val verifyLubs = true
+
+
+ private def printLubMatrix(btsMap: Map[Type, List[Type]], depth: Int) {
+ import util.TableDef
+ import TableDef.Column
+ def str(tp: Type) = {
+ if (tp == NoType) ""
+ else {
+ val s = ("" + tp).replaceAll("""[\w.]+\.(\w+)""", "$1")
+ if (s.length < 60) s
+ else (s take 57) + "..."
+ }
+ }
+
+ val sorted = btsMap.toList.sortWith((x, y) => x._1.typeSymbol isLess y._1.typeSymbol)
+ val maxSeqLength = sorted.map(_._2.size).max
+ val padded = sorted map (_._2.padTo(maxSeqLength, NoType))
+ val transposed = padded.transpose
+
+ val columns: List[Column[List[Type]]] = mapWithIndex(sorted) {
+ case ((k, v), idx) =>
+ Column(str(k), (xs: List[Type]) => str(xs(idx)), left = true)
+ }
+
+ val tableDef = TableDef(columns: _*)
+ val formatted = tableDef.table(transposed)
+ println("** Depth is " + depth + "\n" + formatted)
+ }
+
+ /** From a list of types, find any which take type parameters
+ * where the type parameter bounds contain references to
+ * any other types in the list (including the type itself).
+ *
+ * @return List of symbol pairs holding the recursive type
+ * parameter and the parameter which references it.
+ */
+ def findRecursiveBounds(ts: List[Type]): List[(Symbol, Symbol)] = {
+ if (ts.isEmpty) Nil
+ else {
+ val sym = ts.head.typeSymbol
+ require(ts.tail forall (_.typeSymbol == sym), ts)
+ for (p <- sym.typeParams ; in <- sym.typeParams ; if in.info.bounds contains p) yield
+ p -> in
+ }
+ }
+
+ /** Given a matrix `tsBts` whose columns are basetype sequences (and the symbols `tsParams` that should be interpreted as type parameters in this matrix),
+ * compute its least sorted upwards closed upper bound relative to the following ordering <= between lists of types:
+ *
+ * xs <= ys iff forall y in ys exists x in xs such that x <: y
+ *
+ * @arg tsParams for each type in the original list of types `ts0`, its list of type parameters (if that type is a type constructor)
+ * (these type parameters may be referred to by type arguments in the BTS column of those types,
+ * and must be interpreted as bound variables; i.e., under a type lambda that wraps the types that refer to these type params)
+ * @arg tsBts a matrix whose columns are basetype sequences
+ * the first row is the original list of types for which we're computing the lub
+ * (except that type constructors have been applied to their dummyArgs)
+ * @see baseTypeSeq for a definition of sorted and upwards closed.
+ */
+ def lubList(ts: List[Type], depth: Int): List[Type] = {
+ var lubListDepth = 0
+ // This catches some recursive situations which would otherwise
+ // befuddle us, e.g. pos/hklub0.scala
+ def isHotForTs(xs: List[Type]) = ts exists (_.typeParams == xs.map(_.typeSymbol))
+
+ def elimHigherOrderTypeParam(tp: Type) = tp match {
+ case TypeRef(_, _, args) if args.nonEmpty && isHotForTs(args) =>
+ logResult("Retracting dummies from " + tp + " in lublist")(tp.typeConstructor)
+ case _ => tp
+ }
+ // pretypes is a tail-recursion-preserving accumulator.
+ @tailrec def loop(pretypes: List[Type], tsBts: List[List[Type]]): List[Type] = {
+ lubListDepth += 1
+
+ if (tsBts.isEmpty || (tsBts exists typeListIsEmpty)) pretypes.reverse
+ else if (tsBts.tail.isEmpty) pretypes.reverse ++ tsBts.head
+ else {
+ // ts0 is the 1-dimensional frontier of symbols cutting through 2-dimensional tsBts.
+ // Invariant: all symbols "under" (closer to the first row) the frontier
+ // are smaller (according to _.isLess) than the ones "on and beyond" the frontier
+ val ts0 = tsBts map (_.head)
+
+ // Is the frontier made up of types with the same symbol?
+ val isUniformFrontier = (ts0: @unchecked) match {
+ case t :: ts => ts forall (_.typeSymbol == t.typeSymbol)
+ }
+
+ // Produce a single type for this frontier by merging the prefixes and arguments of those
+ // typerefs that share the same symbol: that symbol is the current maximal symbol for which
+ // the invariant holds, i.e., the one that conveys most information regarding subtyping. Before
+ // merging, strip targs that refer to bound tparams (when we're computing the lub of type
+ // constructors.) Also filter out all types that are a subtype of some other type.
+ if (isUniformFrontier) {
+ val fbounds = findRecursiveBounds(ts0) map (_._2)
+ val tcLubList = typeConstructorLubList(ts0)
+ def isRecursive(tp: Type) = tp.typeSymbol.typeParams exists fbounds.contains
+
+ val ts1 = ts0 map { t =>
+ if (isRecursive(t)) {
+ tcLubList map (t baseType _.typeSymbol) find (t => !isRecursive(t)) match {
+ case Some(tp) => logResult(s"Breaking recursion in lublist, substituting weaker type.\n Was: $t\n Now")(tp)
+ case _ => t
+ }
+ }
+ else t
+ }
+ val tails = tsBts map (_.tail)
+ mergePrefixAndArgs(elimSub(ts1, depth) map elimHigherOrderTypeParam, Covariant, depth) match {
+ case Some(tp) => loop(tp :: pretypes, tails)
+ case _ => loop(pretypes, tails)
+ }
+ }
+ else {
+ // frontier is not uniform yet, move it beyond the current minimal symbol;
+ // lather, rinse, repeat
+ val sym = minSym(ts0)
+ val newtps = tsBts map (ts => if (ts.head.typeSymbol == sym) ts.tail else ts)
+ if (printLubs) {
+ val str = (newtps.zipWithIndex map { case (tps, idx) =>
+ tps.map(" " + _ + "\n").mkString(" (" + idx + ")\n", "", "\n")
+ }).mkString("")
+
+ println("Frontier(\n" + str + ")")
+ printLubMatrix((ts zip tsBts).toMap, lubListDepth)
+ }
+
+ loop(pretypes, newtps)
+ }
+ }
+ }
+
+ val initialBTSes = ts map (_.baseTypeSeq.toList)
+ if (printLubs)
+ printLubMatrix((ts zip initialBTSes).toMap, depth)
+
+ loop(Nil, initialBTSes)
+ }
+
+ /** The minimal symbol of a list of types (as determined by `Symbol.isLess`). */
+ private def minSym(tps: List[Type]): Symbol =
+ (tps.head.typeSymbol /: tps.tail) {
+ (sym1, tp2) => if (tp2.typeSymbol isLess sym1) tp2.typeSymbol else sym1
+ }
+
+ /** A minimal type list which has a given list of types as its base type sequence */
+ def spanningTypes(ts: List[Type]): List[Type] = ts match {
+ case List() => List()
+ case first :: rest =>
+ first :: spanningTypes(
+ rest filter (t => !first.typeSymbol.isSubClass(t.typeSymbol)))
+ }
+
+ /** Eliminate from list of types all elements which are a supertype
+ * of some other element of the list. */
+ private def elimSuper(ts: List[Type]): List[Type] = ts match {
+ case List() => List()
+ case List(t) => List(t)
+ case t :: ts1 =>
+ val rest = elimSuper(ts1 filter (t1 => !(t <:< t1)))
+ if (rest exists (t1 => t1 <:< t)) rest else t :: rest
+ }
+
+ /** Eliminate from list of types all elements which are a subtype
+ * of some other element of the list. */
+ private def elimSub(ts: List[Type], depth: Int): List[Type] = {
+ def elimSub0(ts: List[Type]): List[Type] = ts match {
+ case List() => List()
+ case List(t) => List(t)
+ case t :: ts1 =>
+ val rest = elimSub0(ts1 filter (t1 => !isSubType(t1, t, decr(depth))))
+ if (rest exists (t1 => isSubType(t, t1, decr(depth)))) rest else t :: rest
+ }
+ val ts0 = elimSub0(ts)
+ if (ts0.isEmpty || ts0.tail.isEmpty) ts0
+ else {
+ val ts1 = ts0 mapConserve (t => elimAnonymousClass(t.dealiasWiden))
+ if (ts1 eq ts0) ts0
+ else elimSub(ts1, depth)
+ }
+ }
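A toy run of the elimSuper shape (elimSub is its mirror image) over a hard-coded subtype relation; the string names and isSub are illustrative only:

object ElimSketch {
  // isSub(a, b) encodes a tiny, hard-coded subtype relation
  val pairs = Set("Int" -> "AnyVal", "AnyVal" -> "Any", "Int" -> "Any", "String" -> "Any")
  def isSub(a: String, b: String): Boolean = a == b || pairs(a -> b)

  // same shape as elimSuper: drop any element that is a supertype of another
  def elimSuper(ts: List[String]): List[String] = ts match {
    case Nil => Nil
    case t :: ts1 =>
      val rest = elimSuper(ts1 filterNot (t1 => isSub(t, t1)))
      if (rest exists (t1 => isSub(t1, t))) rest else t :: rest
  }

  def main(args: Array[String]): Unit =
    println(elimSuper(List("Any", "Int", "String"))) // List(Int, String): Any is a supertype of both
}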
+
+ private def stripExistentialsAndTypeVars(ts: List[Type]): (List[Type], List[Symbol]) = {
+ val quantified = ts flatMap {
+ case ExistentialType(qs, _) => qs
+ case t => List()
+ }
+ def stripType(tp: Type): Type = tp match {
+ case ExistentialType(_, res) =>
+ res
+ case tv@TypeVar(_, constr) =>
+ if (tv.instValid) stripType(constr.inst)
+ else if (tv.untouchable) tv
+ else abort("trying to do lub/glb of typevar "+tp)
+ case t => t
+ }
+ val strippedTypes = ts mapConserve stripType
+ (strippedTypes, quantified)
+ }
+
+ /** Does this set of types have the same weak lub as
+ * it does regular lub? This is exposed so lub callers
+ * can discover whether the trees they are typing
+ * may require further adaptation. It may return false
+ * negatives, but it will not return false positives.
+ */
+ def sameWeakLubAsLub(tps: List[Type]) = tps match {
+ case Nil => true
+ case tp :: Nil => !typeHasAnnotations(tp)
+ case tps => !(tps exists typeHasAnnotations) && !(tps forall isNumericValueType)
+ }
+
+ /** If the arguments are all numeric value types, the numeric
+ * lub according to the weak conformance spec. If any argument
+ * has type annotations, take the lub of the unannotated type
+ * and call the analyzerPlugin method annotationsLub so it can
+ * be further altered. Otherwise, the regular lub.
+ */
+ def weakLub(tps: List[Type]): Type = (
+ if (tps.isEmpty)
+ NothingTpe
+ else if (tps forall isNumericValueType)
+ numericLub(tps)
+ else if (tps exists typeHasAnnotations)
+ annotationsLub(lub(tps map (_.withoutAnnotations)), tps)
+ else
+ lub(tps)
+ )
+
+ def numericLub(ts: List[Type]) =
+ ts reduceLeft ((t1, t2) =>
+ if (isNumericSubType(t1, t2)) t2
+ else if (isNumericSubType(t2, t1)) t1
+ else IntTpe)
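numericLub leans on weak conformance between the primitive numeric types; as a toy illustration (plain names, not compiler Types), the same reduce over the widening chain Byte <: Short <: Int <: Long <: Float <: Double (Char, which weakly conforms to Int, is left out for brevity):

object NumericLubSketch {
  private val rank = List("Byte", "Short", "Int", "Long", "Float", "Double").zipWithIndex.toMap
  private def isNumericSub(a: String, b: String) = rank(a) <= rank(b)

  def numericLub(ts: List[String]): String =
    ts reduceLeft ((t1, t2) =>
      if (isNumericSub(t1, t2)) t2
      else if (isNumericSub(t2, t1)) t1
      else "Int") // unreachable on a total chain; kept to mirror the original

  def main(args: Array[String]): Unit =
    println(numericLub(List("Byte", "Int", "Float"))) // Float
}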
+
+ private val lubResults = new mutable.HashMap[(Int, List[Type]), Type]
+ private val glbResults = new mutable.HashMap[(Int, List[Type]), Type]
+
+ /** Given a list of types, finds all the base classes they have in
+ * common, then returns a list of type constructors derived directly
+ * from the symbols (so any more specific type information is ignored.)
+ * The list is filtered such that every type constructor in the list
+ * expects the same number of type arguments, which is chosen based
+ * on the deepest class among the common baseclasses.
+ */
+ def typeConstructorLubList(ts: List[Type]): List[Type] = {
+ val bcs = ts.flatMap(_.baseClasses).distinct sortWith (_ isLess _)
+ val tcons = bcs filter (clazz => ts forall (_.typeSymbol isSubClass clazz))
+
+ tcons map (_.typeConstructor) match {
+ case Nil => Nil
+ case t :: ts => t :: ts.filter(_.typeParams.size == t.typeParams.size)
+ }
+ }
+
+ def lub(ts: List[Type]): Type = ts match {
+ case Nil => NothingTpe
+ case t :: Nil => t
+ case _ =>
+ if (Statistics.canEnable) Statistics.incCounter(lubCount)
+ val start = if (Statistics.canEnable) Statistics.pushTimer(typeOpsStack, lubNanos) else null
+ try {
+ val res = lub(ts, lubDepth(ts))
+ // If the number of unapplied type parameters in all incoming
+ // types is consistent, and the lub does not match that, return
+ // the type constructor of the calculated lub instead. This
+ // is because lubbing type constructors tends to result in types
+ // which have been applied to dummies or Nothing.
+ ts.map(_.typeParams.size).distinct match {
+ case x :: Nil if res.typeParams.size != x =>
+ logResult(s"Stripping type args from lub because $res is not consistent with $ts")(res.typeConstructor)
+ case _ =>
+ res
+ }
+ }
+ finally {
+ lubResults.clear()
+ glbResults.clear()
+ if (Statistics.canEnable) Statistics.popTimer(typeOpsStack, start)
+ }
+ }
+
+ /** The least upper bound wrt <:< of a list of types */
+ protected[internal] def lub(ts: List[Type], depth: Int): Type = {
+ def lub0(ts0: List[Type]): Type = elimSub(ts0, depth) match {
+ case List() => NothingTpe
+ case List(t) => t
+ case ts @ PolyType(tparams, _) :: _ =>
+ val tparams1 = map2(tparams, matchingBounds(ts, tparams).transpose)((tparam, bounds) =>
+ tparam.cloneSymbol.setInfo(glb(bounds, depth)))
+ PolyType(tparams1, lub0(matchingInstTypes(ts, tparams1)))
+ case ts @ (mt @ MethodType(params, _)) :: rest =>
+ MethodType(params, lub0(matchingRestypes(ts, mt.paramTypes)))
+ case ts @ NullaryMethodType(_) :: rest =>
+ NullaryMethodType(lub0(matchingRestypes(ts, Nil)))
+ case ts @ TypeBounds(_, _) :: rest =>
+ TypeBounds(glb(ts map (_.bounds.lo), depth), lub(ts map (_.bounds.hi), depth))
+ case ts @ AnnotatedType(annots, tpe, _) :: rest =>
+ annotationsLub(lub0(ts map (_.withoutAnnotations)), ts)
+ case ts =>
+ lubResults get ((depth, ts)) match {
+ case Some(lubType) =>
+ lubType
+ case None =>
+ lubResults((depth, ts)) = AnyTpe
+ val res = if (depth < 0) AnyTpe else lub1(ts)
+ lubResults((depth, ts)) = res
+ res
+ }
+ }
+ def lub1(ts0: List[Type]): Type = {
+ val (ts, tparams) = stripExistentialsAndTypeVars(ts0)
+ val lubBaseTypes: List[Type] = lubList(ts, depth)
+ val lubParents = spanningTypes(lubBaseTypes)
+ val lubOwner = commonOwner(ts)
+ val lubBase = intersectionType(lubParents, lubOwner)
+ val lubType =
+ if (phase.erasedTypes || depth == 0 ) lubBase
+ else {
+ val lubRefined = refinedType(lubParents, lubOwner)
+ val lubThisType = lubRefined.typeSymbol.thisType
+ val narrowts = ts map (_.narrow)
+ def excludeFromLub(sym: Symbol) = (
+ sym.isClass
+ || sym.isConstructor
+ || !sym.isPublic
+ || isGetClass(sym)
+ || sym.isFinal
+ || narrowts.exists(t => !refines(t, sym))
+ )
+ def lubsym(proto: Symbol): Symbol = {
+ val prototp = lubThisType.memberInfo(proto)
+ val syms = narrowts map (t =>
+ t.nonPrivateMember(proto.name).suchThat(sym =>
+ sym.tpe matches prototp.substThis(lubThisType.typeSymbol, t)))
+
+ if (syms contains NoSymbol) NoSymbol
+ else {
+ val symtypes =
+ map2(narrowts, syms)((t, sym) => t.memberInfo(sym).substThis(t.typeSymbol, lubThisType))
+ if (proto.isTerm) // possible problem: owner of info is still the old one, instead of new refinement class
+ proto.cloneSymbol(lubRefined.typeSymbol).setInfoOwnerAdjusted(lub(symtypes, decr(depth)))
+ else if (symtypes.tail forall (symtypes.head =:= _))
+ proto.cloneSymbol(lubRefined.typeSymbol).setInfoOwnerAdjusted(symtypes.head)
+ else {
+ def lubBounds(bnds: List[TypeBounds]): TypeBounds =
+ TypeBounds(glb(bnds map (_.lo), decr(depth)), lub(bnds map (_.hi), decr(depth)))
+ lubRefined.typeSymbol.newAbstractType(proto.name.toTypeName, proto.pos)
+ .setInfoOwnerAdjusted(lubBounds(symtypes map (_.bounds)))
+ }
+ }
+ }
+ def refines(tp: Type, sym: Symbol): Boolean = {
+ val syms = tp.nonPrivateMember(sym.name).alternatives
+ !syms.isEmpty && (syms forall (alt =>
+ // todo alt != sym is strictly speaking not correct, but without it we lose
+ // efficiency.
+ alt != sym && !specializesSym(lubThisType, sym, tp, alt, depth)))
+ }
+ // add a refinement symbol for all non-class members of lubBase
+ // which are refined by every type in ts.
+ for (sym <- lubBase.nonPrivateMembers ; if !excludeFromLub(sym)) {
+ try lubsym(sym) andAlso (addMember(lubThisType, lubRefined, _, depth))
+ catch {
+ case ex: NoCommonType =>
+ }
+ }
+ if (lubRefined.decls.isEmpty) lubBase
+ else if (!verifyLubs) lubRefined
+ else {
+ // Verify that every given type conforms to the calculated lub.
+ // In theory this should not be necessary, but higher-order type
+ // parameters are not handled correctly.
+ val ok = ts forall { t =>
+ isSubType(t, lubRefined, depth) || {
+ if (settings.debug || printLubs) {
+ Console.println(
+ "Malformed lub: " + lubRefined + "\n" +
+ "Argument " + t + " does not conform. Falling back to " + lubBase
+ )
+ }
+ false
+ }
+ }
+ // If not, fall back on the more conservative calculation.
+ if (ok) lubRefined
+ else lubBase
+ }
+ }
+ // dropIllegalStarTypes is a localized fix for SI-6897. We should probably
+ // integrate that transformation at a lower level in master, but lubs are
+ // the likely and maybe only spot they escape, so fixing here for 2.10.1.
+ existentialAbstraction(tparams, dropIllegalStarTypes(lubType))
+ }
+ if (printLubs) {
+ println(indent + "lub of " + ts + " at depth "+depth)//debug
+ indent = indent + " "
+ assert(indent.length <= 100)
+ }
+ if (Statistics.canEnable) Statistics.incCounter(nestedLubCount)
+ val res = lub0(ts)
+ if (printLubs) {
+ indent = indent stripSuffix " "
+ println(indent + "lub of " + ts + " is " + res)//debug
+ }
+ res
+ }
+
+ val GlbFailure = new Throwable
+
+ /** A global counter for glb calls in the `specializes` query connected to the `addMembers`
+ * call in `glb`. There's a possible infinite recursion when `specializes` calls
+ * memberType, which calls baseTypeSeq, which calls mergePrefixAndArgs, which calls glb.
+ * The counter breaks this recursion after two calls.
+ * If the recursion is broken, no member is added to the glb.
+ */
+ private var globalGlbDepth = 0
+ private final val globalGlbLimit = 2
+
+ /** The greatest lower bound of a list of types (as determined by `<:<`). */
+ def glb(ts: List[Type]): Type = elimSuper(ts) match {
+ case List() => AnyTpe
+ case List(t) => t
+ case ts0 =>
+ if (Statistics.canEnable) Statistics.incCounter(lubCount)
+ val start = if (Statistics.canEnable) Statistics.pushTimer(typeOpsStack, lubNanos) else null
+ try {
+ glbNorm(ts0, lubDepth(ts0))
+ } finally {
+ lubResults.clear()
+ glbResults.clear()
+ if (Statistics.canEnable) Statistics.popTimer(typeOpsStack, start)
+ }
+ }
+
+ protected[internal] def glb(ts: List[Type], depth: Int): Type = elimSuper(ts) match {
+ case List() => AnyTpe
+ case List(t) => t
+ case ts0 => glbNorm(ts0, depth)
+ }
+
+ /** The greatest lower bound of a list of types (as determined by `<:<`), which have been normalized
+ * with regard to `elimSuper`. */
+ protected def glbNorm(ts: List[Type], depth: Int): Type = {
+ def glb0(ts0: List[Type]): Type = ts0 match {
+ case List() => AnyTpe
+ case List(t) => t
+ case ts @ PolyType(tparams, _) :: _ =>
+ val tparams1 = map2(tparams, matchingBounds(ts, tparams).transpose)((tparam, bounds) =>
+ tparam.cloneSymbol.setInfo(lub(bounds, depth)))
+ PolyType(tparams1, glbNorm(matchingInstTypes(ts, tparams1), depth))
+ case ts @ (mt @ MethodType(params, _)) :: rest =>
+ MethodType(params, glbNorm(matchingRestypes(ts, mt.paramTypes), depth))
+ case ts @ NullaryMethodType(_) :: rest =>
+ NullaryMethodType(glbNorm(matchingRestypes(ts, Nil), depth))
+ case ts @ TypeBounds(_, _) :: rest =>
+ TypeBounds(lub(ts map (_.bounds.lo), depth), glb(ts map (_.bounds.hi), depth))
+ case ts =>
+ glbResults get ((depth, ts)) match {
+ case Some(glbType) =>
+ glbType
+ case _ =>
+ glbResults((depth, ts)) = NothingTpe
+ val res = if (depth < 0) NothingTpe else glb1(ts)
+ glbResults((depth, ts)) = res
+ res
+ }
+ }
+ def glb1(ts0: List[Type]): Type = {
+ try {
+ val (ts, tparams) = stripExistentialsAndTypeVars(ts0)
+ val glbOwner = commonOwner(ts)
+ def refinedToParents(t: Type): List[Type] = t match {
+ case RefinedType(ps, _) => ps flatMap refinedToParents
+ case _ => List(t)
+ }
+ def refinedToDecls(t: Type): List[Scope] = t match {
+ case RefinedType(ps, decls) =>
+ val dss = ps flatMap refinedToDecls
+ if (decls.isEmpty) dss else decls :: dss
+ case _ => List()
+ }
+ val ts1 = ts flatMap refinedToParents
+ val glbBase = intersectionType(ts1, glbOwner)
+ val glbType =
+ if (phase.erasedTypes || depth == 0) glbBase
+ else {
+ val glbRefined = refinedType(ts1, glbOwner)
+ val glbThisType = glbRefined.typeSymbol.thisType
+ def glbsym(proto: Symbol): Symbol = {
+ val prototp = glbThisType.memberInfo(proto)
+ val syms = for (t <- ts;
+ alt <- (t.nonPrivateMember(proto.name).alternatives)
+ if glbThisType.memberInfo(alt) matches prototp
+ ) yield alt
+ val symtypes = syms map glbThisType.memberInfo
+ assert(!symtypes.isEmpty)
+ proto.cloneSymbol(glbRefined.typeSymbol).setInfoOwnerAdjusted(
+ if (proto.isTerm) glb(symtypes, decr(depth))
+ else {
+ def isTypeBound(tp: Type) = tp match {
+ case TypeBounds(_, _) => true
+ case _ => false
+ }
+ def glbBounds(bnds: List[Type]): TypeBounds = {
+ val lo = lub(bnds map (_.bounds.lo), decr(depth))
+ val hi = glb(bnds map (_.bounds.hi), decr(depth))
+ if (lo <:< hi) TypeBounds(lo, hi)
+ else throw GlbFailure
+ }
+ val symbounds = symtypes filter isTypeBound
+ var result: Type =
+ if (symbounds.isEmpty)
+ TypeBounds.empty
+ else glbBounds(symbounds)
+ for (t <- symtypes if !isTypeBound(t))
+ if (result.bounds containsType t) result = t
+ else throw GlbFailure
+ result
+ })
+ }
+ if (globalGlbDepth < globalGlbLimit)
+ try {
+ globalGlbDepth += 1
+ val dss = ts flatMap refinedToDecls
+ for (ds <- dss; sym <- ds.iterator)
+ if (globalGlbDepth < globalGlbLimit && !specializesSym(glbThisType, sym, depth))
+ try {
+ addMember(glbThisType, glbRefined, glbsym(sym), depth)
+ } catch {
+ case ex: NoCommonType =>
+ }
+ } finally {
+ globalGlbDepth -= 1
+ }
+ if (glbRefined.decls.isEmpty) glbBase else glbRefined
+ }
+ existentialAbstraction(tparams, glbType)
+ } catch {
+ case GlbFailure =>
+ if (ts forall (t => NullTpe <:< t)) NullTpe
+ else NothingTpe
+ }
+ }
+ // if (settings.debug.value) { println(indent + "glb of " + ts + " at depth "+depth); indent = indent + " " } //DEBUG
+ if (Statistics.canEnable) Statistics.incCounter(nestedLubCount)
+ glb0(ts)
+ // if (settings.debug.value) { indent = indent.substring(0, indent.length() - 2); log(indent + "glb of " + ts + " is " + res) }//DEBUG
+ }
+
+ /** All types in list must be polytypes with type parameter lists of
+ * same length as tparams.
+ * Returns list of list of bounds infos, where corresponding type
+ * parameters are renamed to tparams.
+ */
+ private def matchingBounds(tps: List[Type], tparams: List[Symbol]): List[List[Type]] = {
+ def getBounds(tp: Type): List[Type] = tp match {
+ case PolyType(tparams1, _) if sameLength(tparams1, tparams) =>
+ tparams1 map (tparam => tparam.info.substSym(tparams1, tparams))
+ case tp =>
+ if (tp ne tp.normalize) getBounds(tp.normalize)
+ else throw new NoCommonType(tps)
+ }
+ tps map getBounds
+ }
+
+ /** All types in list must be polytypes with type parameter lists of
+ * same length as tparams.
+ * Returns list of instance types, where corresponding type
+ * parameters are renamed to tparams.
+ */
+ private def matchingInstTypes(tps: List[Type], tparams: List[Symbol]): List[Type] = {
+ def transformResultType(tp: Type): Type = tp match {
+ case PolyType(tparams1, restpe) if sameLength(tparams1, tparams) =>
+ restpe.substSym(tparams1, tparams)
+ case tp =>
+ if (tp ne tp.normalize) transformResultType(tp.normalize)
+ else throw new NoCommonType(tps)
+ }
+ tps map transformResultType
+ }
+
+ /** All types in list must be method types with equal parameter types.
+ * Returns list of their result types.
+ */
+ private def matchingRestypes(tps: List[Type], pts: List[Type]): List[Type] =
+ tps map {
+ case mt @ MethodType(params1, res) if isSameTypes(mt.paramTypes, pts) =>
+ res
+ case NullaryMethodType(res) if pts.isEmpty =>
+ res
+ case _ =>
+ throw new NoCommonType(tps)
+ }
+}
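This trait is mixed into SymbolTable (note the self type above), so the same lub/glb are reachable through the public reflection API; assuming scala-reflect (2.10+) is on the classpath, a quick check from plain user code looks like this:

import scala.reflect.runtime.universe._

object LubGlbDemo {
  def main(args: Array[String]): Unit = {
    println(lub(List(typeOf[Int], typeOf[String])))         // Any
    println(lub(List(typeOf[Int], typeOf[Double])))         // AnyVal
    println(glb(List(typeOf[List[Int]], typeOf[Seq[Int]]))) // List[Int]
  }
}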
diff --git a/src/reflect/scala/reflect/internal/tpe/TypeComparers.scala b/src/reflect/scala/reflect/internal/tpe/TypeComparers.scala
new file mode 100644
index 0000000000..63f17dff34
--- /dev/null
+++ b/src/reflect/scala/reflect/internal/tpe/TypeComparers.scala
@@ -0,0 +1,581 @@
+package scala
+package reflect
+package internal
+package tpe
+
+import scala.collection.{ mutable }
+import util.{ Statistics, TriState }
+import scala.annotation.tailrec
+
+trait TypeComparers {
+ self: SymbolTable =>
+ import definitions._
+ import TypesStats._
+
+ private final val LogPendingSubTypesThreshold = DefaultLogThreshhold
+
+ private val pendingSubTypes = new mutable.HashSet[SubTypePair]
+
+ class SubTypePair(val tp1: Type, val tp2: Type) {
+ override def hashCode = tp1.hashCode * 41 + tp2.hashCode
+ override def equals(other: Any) = (this eq other.asInstanceOf[AnyRef]) || (other match {
+ // suspend TypeVars in types compared by =:=,
+ // since we don't want to mutate them simply to check whether a subtype test is pending
+ // in addition to making subtyping "more correct" for type vars,
+ // it should avoid the stackoverflow that's been plaguing us (https://groups.google.com/d/topic/scala-internals/2gHzNjtB4xA/discussion)
+ // this method is only called when subtyping hits a recursion threshold (subsametypeRecursions >= LogPendingSubTypesThreshold)
+ case stp: SubTypePair =>
+ val tvars = List(tp1, stp.tp1, tp2, stp.tp2) flatMap (t => if (t.isGround) Nil else typeVarsInType(t))
+ suspendingTypeVars(tvars)(tp1 =:= stp.tp1 && tp2 =:= stp.tp2)
+ case _ =>
+ false
+ })
+ override def toString = tp1+" <:<? "+tp2
+ }
+
+ private var subsametypeRecursions: Int = 0
+
+ private def isUnifiable(pre1: Type, pre2: Type) =
+ (beginsWithTypeVarOrIsRefined(pre1) || beginsWithTypeVarOrIsRefined(pre2)) && (pre1 =:= pre2)
+
+ /** Returns true iff we are past phase specialize,
+ * sym1 and sym2 are two existential skolems with equal names and bounds,
+ * and pre1 and pre2 are equal prefixes
+ */
+ private def isSameSpecializedSkolem(sym1: Symbol, sym2: Symbol, pre1: Type, pre2: Type) = {
+ sym1.isExistentialSkolem && sym2.isExistentialSkolem &&
+ sym1.name == sym2.name &&
+ phase.specialized &&
+ sym1.info =:= sym2.info &&
+ pre1 =:= pre2
+ }
+
+ private def isSubPre(pre1: Type, pre2: Type, sym: Symbol) =
+ if ((pre1 ne pre2) && (pre1 ne NoPrefix) && (pre2 ne NoPrefix) && pre1 <:< pre2) {
+ if (settings.debug) println(s"new isSubPre $sym: $pre1 <:< $pre2")
+ true
+ } else
+ false
+
+ private def equalSymsAndPrefixes(sym1: Symbol, pre1: Type, sym2: Symbol, pre2: Type): Boolean = (
+ if (sym1 == sym2)
+ sym1.hasPackageFlag || sym1.owner.hasPackageFlag || phase.erasedTypes || pre1 =:= pre2
+ else
+ (sym1.name == sym2.name) && isUnifiable(pre1, pre2)
+ )
+
+
+ def isDifferentType(tp1: Type, tp2: Type): Boolean = try {
+ subsametypeRecursions += 1
+ undoLog undo { // undo type constraints that arise from operations in this block
+ !isSameType1(tp1, tp2)
+ }
+ } finally {
+ subsametypeRecursions -= 1
+ // XXX AM TODO: figure out when it is safe and needed to clear the log -- the commented approach below is too eager (it breaks #3281, #3866)
+ // it doesn't help to keep separate recursion counts for the three methods that now share it
+ // if (subsametypeRecursions == 0) undoLog.clear()
+ }
+
+ def isDifferentTypeConstructor(tp1: Type, tp2: Type) = !isSameTypeConstructor(tp1, tp2)
+
+ private def isSameTypeConstructor(tr1: TypeRef, tr2: TypeRef): Boolean = (
+ (tr1.sym == tr2.sym)
+ && !isDifferentType(tr1.pre, tr2.pre)
+ )
+ private def isSameTypeConstructor(tp1: Type, tp2: Type): Boolean = (
+ tp1.isInstanceOf[TypeRef]
+ && tp2.isInstanceOf[TypeRef]
+ && isSameTypeConstructor(tp1.asInstanceOf[TypeRef], tp2.asInstanceOf[TypeRef])
+ )
+
+ /** Do `tp1` and `tp2` denote equivalent types? */
+ def isSameType(tp1: Type, tp2: Type): Boolean = try {
+ if (Statistics.canEnable) Statistics.incCounter(sametypeCount)
+ subsametypeRecursions += 1
+ //OPT cutdown on Function0 allocation
+ //was:
+ // undoLog undoUnless {
+ // isSameType1(tp1, tp2)
+ // }
+
+ undoLog.lock()
+ try {
+ val before = undoLog.log
+ var result = false
+ try {
+ result = isSameType1(tp1, tp2)
+ }
+ finally if (!result) undoLog.undoTo(before)
+ result
+ }
+ finally undoLog.unlock()
+ }
+ finally {
+ subsametypeRecursions -= 1
+ // XXX AM TODO: figure out when it is safe and needed to clear the log -- the commented approach below is too eager (it breaks #3281, #3866)
+ // it doesn't help to keep separate recursion counts for the three methods that now share it
+ // if (subsametypeRecursions == 0) undoLog.clear()
+ }
+
+ // @pre: at least one argument has annotations
+ private def sameAnnotatedTypes(tp1: Type, tp2: Type) = (
+ annotationsConform(tp1, tp2)
+ && annotationsConform(tp2, tp1)
+ && (tp1.withoutAnnotations =:= tp2.withoutAnnotations)
+ )
+ // We flush out any AnnotatedTypes before calling isSameType2 because
+ // unlike most other subclasses of Type, we have to allow for equivalence of any
+ // combination of { tp1, tp2 } { is, is not } an AnnotatedType - this because the
+ // logic of "annotationsConform" is arbitrary and unknown.
+ private def isSameType1(tp1: Type, tp2: Type): Boolean = typeRelationPreCheck(tp1, tp2) match {
+ case state if state.isKnown => state.booleanValue
+ case _ if typeHasAnnotations(tp1) || typeHasAnnotations(tp2) => sameAnnotatedTypes(tp1, tp2)
+ case _ => isSameType2(tp1, tp2)
+ }
+
+ private def isSameHKTypes(tp1: Type, tp2: Type) = (
+ tp1.isHigherKinded
+ && tp2.isHigherKinded
+ && (tp1.normalize =:= tp2.normalize)
+ )
+ private def isSameTypeRef(tr1: TypeRef, tr2: TypeRef) = (
+ equalSymsAndPrefixes(tr1.sym, tr1.pre, tr2.sym, tr2.pre)
+ && (isSameHKTypes(tr1, tr2) || isSameTypes(tr1.args, tr2.args))
+ )
+
+ private def isSameSingletonType(tp1: SingletonType, tp2: SingletonType): Boolean = {
+ // We don't use dealiasWiden here because we are looking for the SAME type,
+ // and widening leads to a less specific type. The logic is along the lines of
+ // dealiasAndFollowUnderlyingAsLongAsTheTypeIsEquivalent. This method is only
+ // called after a surface comparison has failed, so if chaseDealiasedUnderlying
+ // does not produce a type other than tp1 and tp2, return false.
+ @tailrec def chaseDealiasedUnderlying(tp: Type): Type = tp.underlying.dealias match {
+ case next: SingletonType if tp ne next => chaseDealiasedUnderlying(next)
+ case _ => tp
+ }
+ val origin1 = chaseDealiasedUnderlying(tp1)
+ val origin2 = chaseDealiasedUnderlying(tp2)
+ ((origin1 ne tp1) || (origin2 ne tp2)) && (origin1 =:= origin2)
+ }
+
+ private def isSameMethodType(mt1: MethodType, mt2: MethodType) = (
+ isSameTypes(mt1.paramTypes, mt2.paramTypes)
+ && (mt1.resultType =:= mt2.resultType.substSym(mt2.params, mt1.params))
+ && (mt1.isImplicit == mt2.isImplicit)
+ )
+
+ private def equalTypeParamsAndResult(tparams1: List[Symbol], res1: Type, tparams2: List[Symbol], res2: Type) = {
+ def subst(info: Type) = info.substSym(tparams2, tparams1)
+ // corresponds does not check length of two sequences before checking the predicate,
+ // but SubstMap assumes it has been checked (SI-2956)
+ ( sameLength(tparams1, tparams2)
+ && (tparams1 corresponds tparams2)((p1, p2) => p1.info =:= subst(p2.info))
+ && (res1 =:= subst(res2))
+ )
+ }
+
+ def isSameType2(tp1: Type, tp2: Type): Boolean = {
+ def retry(lhs: Type, rhs: Type) = ((lhs ne tp1) || (rhs ne tp2)) && isSameType(lhs, rhs)
+
+ /* Here we highlight those unfortunate type-like constructs which
+ * are hidden bundles of mutable state, cruising the type system picking
+ * up any type constraints naive enough to get into their hot rods.
+ */
+ def mutateNonTypeConstructs(lhs: Type, rhs: Type) = lhs match {
+ case BoundedWildcardType(bounds) => bounds containsType rhs
+ case tv @ TypeVar(_, _) => tv.registerTypeEquality(rhs, typeVarLHS = lhs eq tp1)
+ case TypeRef(tv @ TypeVar(_, _), sym, _) => tv.registerTypeSelection(sym, rhs)
+ case _ => false
+ }
+ /* SingletonType receives this additional scrutiny because there are
+ * a variety of Types which must be treated as equivalent even if they
+ * arrive in different guises. For instance, object Foo in the following
+ * might appear in (at least) the four given below.
+ *
+ * package pkg { object Foo ; type Bar = Foo.type }
+ *
+ * ModuleClassTypeRef(pkg.type, Foo: ModuleClassSymbol, Nil)
+ * ThisType(Foo: ModuleClassSymbol)
+ * SingleType(pkg.type, Foo: ModuleSymbol)
+ * AliasTypeRef(NoPrefix, sym: AliasSymbol, Nil) where sym.info is one of the above
+ */
+ def sameSingletonType = tp1 match {
+ case tp1: SingletonType => tp2 match {
+ case tp2: SingletonType => isSameSingletonType(tp1, tp2)
+ case _ => false
+ }
+ case _ => false
+ }
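+
+ /* A rough sketch of that equivalence through the public reflection API
+  * (pkg, Foo and Bar are the hypothetical definitions from the comment above):
+  *
+  *   import scala.reflect.runtime.universe._
+  *   typeOf[pkg.Foo.type] =:= typeOf[pkg.Bar]   // expected: true
+  *
+  * even though one side arrives as the module's SingleType and the other as an alias.
+  */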
+ /* Those false cases certainly are ugly. There's a proposed SIP to deuglify it.
+ * https://docs.google.com/a/improving.org/document/d/1onPrzSqyDpHScc9PS_hpxJwa3FlPtthxw-bAuuEe8uA
+ */
+ def sameTypeAndSameCaseClass = tp1 match {
+ case tp1: TypeRef => tp2 match { case tp2: TypeRef => isSameTypeRef(tp1, tp2) ; case _ => false }
+ case tp1: MethodType => tp2 match { case tp2: MethodType => isSameMethodType(tp1, tp2) ; case _ => false }
+ case RefinedType(ps1, decls1) => tp2 match { case RefinedType(ps2, decls2) => isSameTypes(ps1, ps2) && (decls1 isSameScope decls2) ; case _ => false }
+ case SingleType(pre1, sym1) => tp2 match { case SingleType(pre2, sym2) => equalSymsAndPrefixes(sym1, pre1, sym2, pre2) ; case _ => false }
+ case PolyType(ps1, res1) => tp2 match { case PolyType(ps2, res2) => equalTypeParamsAndResult(ps1, res1, ps2, res2) ; case _ => false }
+ case ExistentialType(qs1, res1) => tp2 match { case ExistentialType(qs2, res2) => equalTypeParamsAndResult(qs1, res1, qs2, res2) ; case _ => false }
+ case ThisType(sym1) => tp2 match { case ThisType(sym2) => sym1 == sym2 ; case _ => false }
+ case ConstantType(c1) => tp2 match { case ConstantType(c2) => c1 == c2 ; case _ => false }
+ case NullaryMethodType(res1) => tp2 match { case NullaryMethodType(res2) => res1 =:= res2 ; case _ => false }
+ case TypeBounds(lo1, hi1) => tp2 match { case TypeBounds(lo2, hi2) => lo1 =:= lo2 && hi1 =:= hi2 ; case _ => false }
+ case _ => false
+ }
+
+ ( sameTypeAndSameCaseClass
+ || sameSingletonType
+ || mutateNonTypeConstructs(tp1, tp2)
+ || mutateNonTypeConstructs(tp2, tp1)
+ || retry(normalizePlus(tp1), normalizePlus(tp2))
+ )
+ }
+
+ def isSubType(tp1: Type, tp2: Type): Boolean = isSubType(tp1, tp2, AnyDepth)
+
+ def isSubType(tp1: Type, tp2: Type, depth: Int): Boolean = try {
+ subsametypeRecursions += 1
+
+ //OPT cutdown on Function0 allocation
+ //was:
+ // undoLog undoUnless { // if subtype test fails, it should not affect constraints on typevars
+ // if (subsametypeRecursions >= LogPendingSubTypesThreshold) {
+ // val p = new SubTypePair(tp1, tp2)
+ // if (pendingSubTypes(p))
+ // false
+ // else
+ // try {
+ // pendingSubTypes += p
+ // isSubType2(tp1, tp2, depth)
+ // } finally {
+ // pendingSubTypes -= p
+ // }
+ // } else {
+ // isSubType2(tp1, tp2, depth)
+ // }
+ // }
+
+ undoLog.lock()
+ try {
+ val before = undoLog.log
+ var result = false
+
+ try result = { // if subtype test fails, it should not affect constraints on typevars
+ if (subsametypeRecursions >= LogPendingSubTypesThreshold) {
+ val p = new SubTypePair(tp1, tp2)
+ if (pendingSubTypes(p))
+ false
+ else
+ try {
+ pendingSubTypes += p
+ isSubType1(tp1, tp2, depth)
+ } finally {
+ pendingSubTypes -= p
+ }
+ } else {
+ isSubType1(tp1, tp2, depth)
+ }
+ } finally if (!result) undoLog.undoTo(before)
+
+ result
+ } finally undoLog.unlock()
+ } finally {
+ subsametypeRecursions -= 1
+ // XXX AM TODO: figure out when it is safe and needed to clear the log -- the commented approach below is too eager (it breaks #3281, #3866)
+ // it doesn't help to keep separate recursion counts for the three methods that now share it
+ // if (subsametypeRecursions == 0) undoLog.clear()
+ }
+
+ /** Check whether the subtype or type equivalence relationship
+ * between the arguments is predetermined. Returns a tri-state
+ * value: True means the arguments are always sub/same types,
+ * False means they never are, and Unknown means the caller
+ * will have to figure things out.
+ */
+ private def typeRelationPreCheck(tp1: Type, tp2: Type): TriState = {
+ def isTrue = (
+ (tp1 eq tp2)
+ || isErrorOrWildcard(tp1)
+ || isErrorOrWildcard(tp2)
+ || (tp1 eq NoPrefix) && tp2.typeSymbol.isPackageClass // !! I do not see how this would be warranted by the spec
+ || (tp2 eq NoPrefix) && tp1.typeSymbol.isPackageClass // !! I do not see how this would be warranted by the spec
+ )
+ // isFalse, assuming !isTrue
+ def isFalse = (
+ (tp1 eq NoType)
+ || (tp2 eq NoType)
+ || (tp1 eq NoPrefix)
+ || (tp2 eq NoPrefix)
+ )
+
+ if (isTrue) TriState.True
+ else if (isFalse) TriState.False
+ else TriState.Unknown
+ }
+
+ private def isSubType1(tp1: Type, tp2: Type, depth: Int): Boolean = typeRelationPreCheck(tp1, tp2) match {
+ case state if state.isKnown => state.booleanValue
+ case _ if typeHasAnnotations(tp1) || typeHasAnnotations(tp2) => annotationsConform(tp1, tp2) && (tp1.withoutAnnotations <:< tp2.withoutAnnotations)
+ case _ => isSubType2(tp1, tp2, depth)
+ }
+
+ private def isPolySubType(tp1: PolyType, tp2: PolyType): Boolean = {
+ val PolyType(tparams1, res1) = tp1
+ val PolyType(tparams2, res2) = tp2
+
+ sameLength(tparams1, tparams2) && {
+ // fast-path: polymorphic method type -- type params cannot be captured
+ val isMethod = tparams1.head.owner.isMethod
+ //@M for an example of why we need to generate fresh symbols otherwise, see neg/tcpoly_ticket2101.scala
+ val substitutes = if (isMethod) tparams1 else cloneSymbols(tparams1)
+ def sub1(tp: Type) = if (isMethod) tp else tp.substSym(tparams1, substitutes)
+ def sub2(tp: Type) = tp.substSym(tparams2, substitutes)
+ def cmp(p1: Symbol, p2: Symbol) = sub2(p2.info) <:< sub1(p1.info)
+
+ (tparams1 corresponds tparams2)(cmp) && (sub1(res1) <:< sub2(res2))
+ }
+ }
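+
+ /* In source terms (an illustrative sketch of the checks above): the polymorphic
+  * method type  [A](x: A): A  conforms to  [A <: AnyRef](x: A): A  because each
+  * parameter's bounds on the right, [Nothing, AnyRef], lie within the corresponding
+  * bounds on the left, [Nothing, Any] -- the direction checked by `cmp` -- and the
+  * substituted result types conform.
+  */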
+
+ // @assume tp1.isHigherKinded || tp2.isHigherKinded
+ def isHKSubType(tp1: Type, tp2: Type, depth: Int): Boolean = {
+ def isSub(ntp1: Type, ntp2: Type) = (ntp1.withoutAnnotations, ntp2.withoutAnnotations) match {
+ case (TypeRef(_, AnyClass, _), _) => false // avoid some warnings when Nothing/Any are on the other side
+ case (_, TypeRef(_, NothingClass, _)) => false
+ case (pt1: PolyType, pt2: PolyType) => isPolySubType(pt1, pt2) // @assume both .isHigherKinded (both normalized to PolyType)
+ case (_: PolyType, MethodType(ps, _)) if ps exists (_.tpe.isWildcard) => false // don't warn on HasMethodMatching on right hand side
+ case _ => // @assume !(both .isHigherKinded) thus cannot be subtypes
+ def tp_s(tp: Type): String = f"$tp%-20s ${util.shortClassOfInstance(tp)}%s"
+ devWarning(s"HK subtype check on $tp1 and $tp2, but both don't normalize to polytypes:\n tp1=${tp_s(ntp1)}\n tp2=${tp_s(ntp2)}")
+ false
+ }
+
+ ( tp1.typeSymbol == NothingClass // @M Nothing is subtype of every well-kinded type
+ || tp2.typeSymbol == AnyClass // @M Any is supertype of every well-kinded type (@PP: is it? What about continuations plugin?)
+ || isSub(tp1.normalize, tp2.normalize) && annotationsConform(tp1, tp2) // @M! normalize reduces higher-kinded case to PolyType's
+ )
+ }
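+
+ /* Illustrative check via the public API (assuming scala-reflect on the classpath):
+  *
+  *   import scala.reflect.runtime.universe._
+  *   typeOf[List[Any]].typeConstructor <:< typeOf[Iterable[Any]].typeConstructor
+  *
+  * is expected to reach this method and hold: both constructors normalize to
+  * PolyTypes, and the substituted results satisfy List[A] <:< Iterable[A].
+  */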
+
+ /** Does type `tp1` conform to `tp2`? */
+ private def isSubType2(tp1: Type, tp2: Type, depth: Int): Boolean = {
+ def retry(lhs: Type, rhs: Type) = ((lhs ne tp1) || (rhs ne tp2)) && isSubType(lhs, rhs, depth)
+
+ if (isSingleType(tp1) && isSingleType(tp2) || isConstantType(tp1) && isConstantType(tp2))
+ return (tp1 =:= tp2) || retry(tp1.underlying, tp2)
+
+ if (tp1.isHigherKinded || tp2.isHigherKinded)
+ return isHKSubType(tp1, tp2, depth)
+
+ /* First try, on the right:
+ * - unwrap Annotated types, BoundedWildcardTypes,
+ * - bind TypeVars on the right, if lhs is not Annotated nor BoundedWildcard
+ * - handle common cases for first-kind TypeRefs on both sides as a fast path.
+ */
+ def firstTry = tp2 match {
+ // fast path: two typerefs, none of them HK
+ case tr2: TypeRef =>
+ tp1 match {
+ case tr1: TypeRef =>
+ // TODO - dedicate a method to TypeRef/TypeRef subtyping.
+ // These typerefs are pattern matched up and down far more
+ // than is necessary.
+ val sym1 = tr1.sym
+ val sym2 = tr2.sym
+ val pre1 = tr1.pre
+ val pre2 = tr2.pre
+ (((if (sym1 == sym2) phase.erasedTypes || sym1.owner.hasPackageFlag || isSubType(pre1, pre2, depth)
+ else (sym1.name == sym2.name && !sym1.isModuleClass && !sym2.isModuleClass &&
+ (isUnifiable(pre1, pre2) ||
+ isSameSpecializedSkolem(sym1, sym2, pre1, pre2) ||
+ sym2.isAbstractType && isSubPre(pre1, pre2, sym2)))) &&
+ isSubArgs(tr1.args, tr2.args, sym1.typeParams, depth))
+ ||
+ sym2.isClass && {
+ val base = tr1 baseType sym2
+ (base ne tr1) && isSubType(base, tr2, depth)
+ }
+ ||
+ thirdTryRef(tr1, tr2))
+ case _ =>
+ secondTry
+ }
+ case AnnotatedType(_, _, _) =>
+ isSubType(tp1.withoutAnnotations, tp2.withoutAnnotations, depth) &&
+ annotationsConform(tp1, tp2)
+ case BoundedWildcardType(bounds) =>
+ isSubType(tp1, bounds.hi, depth)
+ case tv2 @ TypeVar(_, constr2) =>
+ tp1 match {
+ case AnnotatedType(_, _, _) | BoundedWildcardType(_) =>
+ secondTry
+ case _ =>
+ tv2.registerBound(tp1, isLowerBound = true)
+ }
+ case _ =>
+ secondTry
+ }
+
+ /* Second try, on the left:
+ * - unwrap AnnotatedTypes, BoundedWildcardTypes,
+ * - bind typevars,
+ * - handle existential types by skolemization.
+ */
+ def secondTry = tp1 match {
+ case AnnotatedType(_, _, _) =>
+ isSubType(tp1.withoutAnnotations, tp2.withoutAnnotations, depth) &&
+ annotationsConform(tp1, tp2)
+ case BoundedWildcardType(bounds) =>
+ isSubType(bounds.lo, tp2, depth)
+ case tv @ TypeVar(_,_) =>
+ tv.registerBound(tp2, isLowerBound = false)
+ case ExistentialType(_, _) =>
+ try {
+ skolemizationLevel += 1
+ isSubType(tp1.skolemizeExistential, tp2, depth)
+ } finally {
+ skolemizationLevel -= 1
+ }
+ case _ =>
+ thirdTry
+ }
+
+ def thirdTryRef(tp1: Type, tp2: TypeRef): Boolean = {
+ val sym2 = tp2.sym
+ def retry(lhs: Type, rhs: Type) = isSubType(lhs, rhs, depth)
+ def abstractTypeOnRight(lo: Type) = isDifferentTypeConstructor(tp2, lo) && retry(tp1, lo)
+ def classOnRight = (
+ if (isRawType(tp2)) retry(tp1, rawToExistential(tp2))
+ else if (sym2.isRefinementClass) retry(tp1, sym2.info)
+ else fourthTry
+ )
+ sym2 match {
+ case SingletonClass => tp1.isStable || fourthTry
+ case _: ClassSymbol => classOnRight
+ case _: TypeSymbol if sym2.isDeferred => abstractTypeOnRight(tp2.bounds.lo) || fourthTry
+ case _: TypeSymbol => retry(tp1.normalize, tp2.normalize)
+ case _ => fourthTry
+ }
+ }
+
+ /* Third try, on the right:
+ * - decompose refined types.
+ * - handle typerefs and existentials.
+ * - handle left+right method types, polytypes, typebounds
+ */
+ def thirdTry = tp2 match {
+ case tr2: TypeRef =>
+ thirdTryRef(tp1, tr2)
+ case rt2: RefinedType =>
+ (rt2.parents forall (isSubType(tp1, _, depth))) &&
+ (rt2.decls forall (specializesSym(tp1, _, depth)))
+ case et2: ExistentialType =>
+ et2.withTypeVars(isSubType(tp1, _, depth), depth) || fourthTry
+ case mt2: MethodType =>
+ tp1 match {
+ case mt1 @ MethodType(params1, res1) =>
+ val params2 = mt2.params
+ val res2 = mt2.resultType
+ (sameLength(params1, params2) &&
+ mt1.isImplicit == mt2.isImplicit &&
+ matchingParams(params1, params2, mt1.isJava, mt2.isJava) &&
+ isSubType(res1.substSym(params1, params2), res2, depth))
+ // TODO: if mt1.params.isEmpty, consider NullaryMethodType?
+ case _ =>
+ false
+ }
+ case pt2 @ NullaryMethodType(_) =>
+ tp1 match {
+ // TODO: consider MethodType mt for which mt.params.isEmpty??
+ case pt1 @ NullaryMethodType(_) =>
+ isSubType(pt1.resultType, pt2.resultType, depth)
+ case _ =>
+ false
+ }
+ case TypeBounds(lo2, hi2) =>
+ tp1 match {
+ case TypeBounds(lo1, hi1) =>
+ isSubType(lo2, lo1, depth) && isSubType(hi1, hi2, depth)
+ case _ =>
+ false
+ }
+ case _ =>
+ fourthTry
+ }
+
+ /* Fourth try, on the left:
+ * - handle typerefs, refined types, and singleton types.
+ */
+ def fourthTry = {
+ def retry(lhs: Type, rhs: Type) = isSubType(lhs, rhs, depth)
+ def abstractTypeOnLeft(hi: Type) = isDifferentTypeConstructor(tp1, hi) && retry(hi, tp2)
+
+ tp1 match {
+ case tr1 @ TypeRef(pre1, sym1, _) =>
+ def nullOnLeft = tp2 match {
+ case TypeRef(_, sym2, _) => sym1 isBottomSubClass sym2
+ case _ => isSingleType(tp2) && retry(tp1, tp2.widen)
+ }
+ def moduleOnLeft = tp2 match {
+ case SingleType(pre2, sym2) => equalSymsAndPrefixes(sym1.sourceModule, pre1, sym2, pre2)
+ case _ => false
+ }
+ def classOnLeft = (
+ if (isRawType(tp1)) retry(rawToExistential(tp1), tp2)
+ else if (sym1.isModuleClass) moduleOnLeft
+ else sym1.isRefinementClass && retry(sym1.info, tp2)
+ )
+ sym1 match {
+ case NothingClass => true
+ case NullClass => nullOnLeft
+ case _: ClassSymbol => classOnLeft
+ case _: TypeSymbol if sym1.isDeferred => abstractTypeOnLeft(tp1.bounds.hi)
+ case _: TypeSymbol => retry(tp1.normalize, tp2.normalize)
+ case _ => false
+ }
+ case RefinedType(parents, _) => parents exists (retry(_, tp2))
+ case _: SingletonType => retry(tp1.underlying, tp2)
+ case _ => false
+ }
+ }
+
+ firstTry
+ }
+
+
+ def isWeakSubType(tp1: Type, tp2: Type) =
+ tp1.dealiasWiden match {
+ case TypeRef(_, sym1, _) if isNumericValueClass(sym1) =>
+ tp2.deconst.dealias match {
+ case TypeRef(_, sym2, _) if isNumericValueClass(sym2) =>
+ isNumericSubClass(sym1, sym2)
+ case tv2 @ TypeVar(_, _) =>
+ tv2.registerBound(tp1, isLowerBound = true, isNumericBound = true)
+ case _ =>
+ isSubType(tp1, tp2)
+ }
+ case tv1 @ TypeVar(_, _) =>
+ tp2.deconst.dealias match {
+ case TypeRef(_, sym2, _) if isNumericValueClass(sym2) =>
+ tv1.registerBound(tp2, isLowerBound = false, isNumericBound = true)
+ case _ =>
+ isSubType(tp1, tp2)
+ }
+ case _ =>
+ isSubType(tp1, tp2)
+ }
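+
+ /* In public-API terms (illustrative):
+  *
+  *   import scala.reflect.runtime.universe._
+  *   typeOf[Int]  weak_<:< typeOf[Long]   // expected: true (numeric widening)
+  *   typeOf[Long] weak_<:< typeOf[Int]    // expected: false
+  *   typeOf[Int]  <:<      typeOf[Long]   // false: only the weak relation widens
+  */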
+
+ def isNumericSubType(tp1: Type, tp2: Type) = (
+ isNumericSubClass(primitiveBaseClass(tp1.dealiasWiden), primitiveBaseClass(tp2.dealias))
+ )
+
+ /** If the given type has a primitive class among its base classes,
+ * the symbol of that class. Otherwise, NoSymbol.
+ */
+ private def primitiveBaseClass(tp: Type): Symbol = {
+ @tailrec def loop(bases: List[Symbol]): Symbol = bases match {
+ case Nil => NoSymbol
+ case x :: xs => if (isPrimitiveValueClass(x)) x else loop(xs)
+ }
+ loop(tp.baseClasses)
+ }
+}
diff --git a/src/reflect/scala/reflect/internal/tpe/TypeConstraints.scala b/src/reflect/scala/reflect/internal/tpe/TypeConstraints.scala
new file mode 100644
index 0000000000..123c296f95
--- /dev/null
+++ b/src/reflect/scala/reflect/internal/tpe/TypeConstraints.scala
@@ -0,0 +1,283 @@
+package scala
+package reflect
+package internal
+package tpe
+
+import scala.collection.generic.Clearable
+
+
+private[internal] trait TypeConstraints {
+ self: SymbolTable =>
+ import definitions._
+
+ /** A log of type variable with their original constraints. Used in order
+ * to undo constraints in the case of isSubType/isSameType failure.
+ */
+ lazy val undoLog = newUndoLog
+
+ protected def newUndoLog = new UndoLog
+
+ class UndoLog extends Clearable {
+ private type UndoPairs = List[(TypeVar, TypeConstraint)]
+ //OPT this field is public so we can do `manual inlining`
+ var log: UndoPairs = List()
+
+ /*
+ * These two methods provide an explicit locking mechanism that is overridden in SynchronizedUndoLog.
+ *
+ * The idea behind the explicit locking mechanism is that all public methods that access mutable state
+ * have to obtain the lock for their entire execution, so that both reads and writes are kept in the
+ * right order. Originally, that was achieved by overriding those public methods in
+ * `SynchronizedUndoLog`, which was fine but expensive: those public methods take a thunk as argument,
+ * and if we keep them non-final there is no way to have them inlined, so the thunks cannot go away.
+ *
+ * By using explicit locking we can achieve inlining.
+ *
+ * NOTE: They are made public for now so we can apply 'manual inlining' (copy&pasting the
+ * implementation of `undo` or `undoUnless` into hot places). This should be changed back to protected
+ * once the inliner is fixed.
+ */
+ def lock(): Unit = ()
+ def unlock(): Unit = ()
+
+ // register with the auto-clearing cache manager
+ perRunCaches.recordCache(this)
+
+ /** Undo all changes to constraints on type variables up to `limit`. */
+ //OPT this method is public so we can do `manual inlining`
+ def undoTo(limit: UndoPairs) {
+ assertCorrectThread()
+ while ((log ne limit) && log.nonEmpty) {
+ val (tv, constr) = log.head
+ tv.constr = constr
+ log = log.tail
+ }
+ }
+
+ /** No sync necessary, because record should only
+ * be called from within an undo or undoUnless block,
+ * which is already synchronized.
+ */
+ private[reflect] def record(tv: TypeVar) = {
+ log ::= ((tv, tv.constr.cloneInternal))
+ }
+
+ def clear() {
+ lock()
+ try {
+ if (settings.debug)
+ self.log("Clearing " + log.size + " entries from the undoLog.")
+ log = Nil
+ } finally unlock()
+ }
+
+ // `block` should not affect constraints on typevars
+ def undo[T](block: => T): T = {
+ lock()
+ try {
+ val before = log
+
+ try block
+ finally undoTo(before)
+ } finally unlock()
+ }
+ }
+
+ /** @PP: Unable to see why these apparently constant types should need vals
+ * in every TypeConstraint, so I lifted them out.
+ */
+ private lazy val numericLoBound = IntTpe
+ private lazy val numericHiBound = intersectionType(List(ByteTpe, CharTpe), ScalaPackageClass)
+
+ /** A class expressing upper and lower bounds constraints of type variables,
+ * as well as their instantiations.
+ */
+ class TypeConstraint(lo0: List[Type], hi0: List[Type], numlo0: Type, numhi0: Type, avoidWidening0: Boolean = false) {
+ def this(lo0: List[Type], hi0: List[Type]) = this(lo0, hi0, NoType, NoType)
+ def this(bounds: TypeBounds) = this(List(bounds.lo), List(bounds.hi))
+ def this() = this(List(), List())
+
+ /* Syncnote: Type constraints are assumed to be used from only one
+ * thread. They are not exposed in api.Types and are used only locally
+ * in operations that are exposed from types. Hence, no syncing of any
+ * variables should be necessary.
+ */
+
+ /** Guard these lists against AnyClass and NothingClass appearing,
+ * else loBounds.isEmpty will have different results for an empty
+ * constraint and one with Nothing as a lower bound. [Actually
+ * guarding addLoBound/addHiBound somehow broke raw types so it
+ * only guards against being created with them.]
+ */
+ private var lobounds = lo0 filterNot typeIsNothing
+ private var hibounds = hi0 filterNot typeIsAny
+ private var numlo = numlo0
+ private var numhi = numhi0
+ private var avoidWidening = avoidWidening0
+
+ def loBounds: List[Type] = if (numlo == NoType) lobounds else numlo :: lobounds
+ def hiBounds: List[Type] = if (numhi == NoType) hibounds else numhi :: hibounds
+ def avoidWiden: Boolean = avoidWidening
+
+ def addLoBound(tp: Type, isNumericBound: Boolean = false) {
+ // For some reason which is still a bit fuzzy, we must let Nothing through as
+ // a lower bound despite the fact that Nothing is always a lower bound. My current
+ // supposition is that the side-effecting type constraint accumulation mechanism
+ // depends on these subtype tests being performed to make forward progress when
+ // there are mutually recursive type vars.
+ // See pos/t6367 and pos/t6499 for the competing test cases.
+ val mustConsider = tp.typeSymbol match {
+ case NothingClass => true
+ case _ => !(lobounds contains tp)
+ }
+ if (mustConsider) {
+ if (isNumericBound && isNumericValueType(tp)) {
+ if (numlo == NoType || isNumericSubType(numlo, tp))
+ numlo = tp
+ else if (!isNumericSubType(tp, numlo))
+ numlo = numericLoBound
+ }
+ else lobounds ::= tp
+ }
+ }
+
+ def checkWidening(tp: Type) {
+ if(tp.isStable) avoidWidening = true
+ else tp match {
+ case HasTypeMember(_, _) => avoidWidening = true
+ case _ =>
+ }
+ }
+
+ def addHiBound(tp: Type, isNumericBound: Boolean = false) {
+ // My current test case only demonstrates the need to let Nothing through as
+ // a lower bound, but I suspect the situation is symmetrical.
+ val mustConsider = tp.typeSymbol match {
+ case AnyClass => true
+ case _ => !(hibounds contains tp)
+ }
+ if (mustConsider) {
+ checkWidening(tp)
+ if (isNumericBound && isNumericValueType(tp)) {
+ if (numhi == NoType || isNumericSubType(tp, numhi))
+ numhi = tp
+ else if (!isNumericSubType(numhi, tp))
+ numhi = numericHiBound
+ }
+ else hibounds ::= tp
+ }
+ }
+
+ def isWithinBounds(tp: Type): Boolean =
+ lobounds.forall(_ <:< tp) &&
+ hibounds.forall(tp <:< _) &&
+ (numlo == NoType || (numlo weak_<:< tp)) &&
+ (numhi == NoType || (tp weak_<:< numhi))
+
+ var inst: Type = NoType // @M reduce visibility?
+
+ def instValid = (inst ne null) && (inst ne NoType)
+
+ def cloneInternal = {
+ val tc = new TypeConstraint(lobounds, hibounds, numlo, numhi, avoidWidening)
+ tc.inst = inst
+ tc
+ }
+
+ override def toString = {
+ val boundsStr = {
+ val lo = loBounds filterNot typeIsNothing
+ val hi = hiBounds filterNot typeIsAny
+ val lostr = if (lo.isEmpty) Nil else List(lo.mkString(" >: (", ", ", ")"))
+ val histr = if (hi.isEmpty) Nil else List(hi.mkString(" <: (", ", ", ")"))
+
+ lostr ++ histr mkString ("[", " | ", "]")
+ }
+ if (inst eq NoType) boundsStr
+ else boundsStr + " _= " + inst.safeToString
+ }
+ }
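+
+ /* Worked sketch (illustrative, roughly what happens when inferring T for
+  *   def f[T](x: T, y: T): T ;  f(1, 'a')
+  * ): Int and Char end up registered as numeric lower bounds of T's constraint.
+  * numlo starts out as Int and stays Int when Char arrives, since Char is a
+  * numeric subclass of Int, so T can be instantiated to Int rather than to the
+  * ordinary lub of the two classes, AnyVal.
+  */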
+
+ /** Solve constraint collected in types `tvars`.
+ *
+ * @param tvars All type variables to be instantiated.
+ * @param tparams The type parameters corresponding to `tvars`
+ * @param variances The variances of type parameters; need to reverse
+ * solution direction for all contravariant variables.
+ * @param upper When `true` search for max solution else min.
+ */
+ def solve(tvars: List[TypeVar], tparams: List[Symbol],
+ variances: List[Variance], upper: Boolean): Boolean =
+ solve(tvars, tparams, variances, upper, AnyDepth)
+
+ def solve(tvars: List[TypeVar], tparams: List[Symbol],
+ variances: List[Variance], upper: Boolean, depth: Int): Boolean = {
+
+ def solveOne(tvar: TypeVar, tparam: Symbol, variance: Variance) {
+ if (tvar.constr.inst == NoType) {
+ val up = if (variance.isContravariant) !upper else upper
+ tvar.constr.inst = null
+ val bound: Type = if (up) tparam.info.bounds.hi else tparam.info.bounds.lo
+ //Console.println("solveOne0(tv, tp, v, b)="+(tvar, tparam, variance, bound))
+ var cyclic = bound contains tparam
+ foreach3(tvars, tparams, variances)((tvar2, tparam2, variance2) => {
+ val ok = (tparam2 != tparam) && (
+ (bound contains tparam2)
+ || up && (tparam2.info.bounds.lo =:= tparam.tpeHK)
+ || !up && (tparam2.info.bounds.hi =:= tparam.tpeHK)
+ )
+ if (ok) {
+ if (tvar2.constr.inst eq null) cyclic = true
+ solveOne(tvar2, tparam2, variance2)
+ }
+ })
+ if (!cyclic) {
+ if (up) {
+ if (bound.typeSymbol != AnyClass) {
+ log(s"$tvar addHiBound $bound.instantiateTypeParams($tparams, $tvars)")
+ tvar addHiBound bound.instantiateTypeParams(tparams, tvars)
+ }
+ for (tparam2 <- tparams)
+ tparam2.info.bounds.lo.dealias match {
+ case TypeRef(_, `tparam`, _) =>
+ log(s"$tvar addHiBound $tparam2.tpeHK.instantiateTypeParams($tparams, $tvars)")
+ tvar addHiBound tparam2.tpeHK.instantiateTypeParams(tparams, tvars)
+ case _ =>
+ }
+ } else {
+ if (bound.typeSymbol != NothingClass && bound.typeSymbol != tparam) {
+ log(s"$tvar addLoBound $bound.instantiateTypeParams($tparams, $tvars)")
+ tvar addLoBound bound.instantiateTypeParams(tparams, tvars)
+ }
+ for (tparam2 <- tparams)
+ tparam2.info.bounds.hi.dealias match {
+ case TypeRef(_, `tparam`, _) =>
+ log(s"$tvar addLoBound $tparam2.tpeHK.instantiateTypeParams($tparams, $tvars)")
+ tvar addLoBound tparam2.tpeHK.instantiateTypeParams(tparams, tvars)
+ case _ =>
+ }
+ }
+ }
+ tvar.constr.inst = NoType // necessary because hibounds/lobounds may contain tvar
+
+ //println("solving "+tvar+" "+up+" "+(if (up) (tvar.constr.hiBounds) else tvar.constr.loBounds)+((if (up) (tvar.constr.hiBounds) else tvar.constr.loBounds) map (_.widen)))
+ val newInst = (
+ if (up) {
+ if (depth != AnyDepth) glb(tvar.constr.hiBounds, depth) else glb(tvar.constr.hiBounds)
+ } else {
+ if (depth != AnyDepth) lub(tvar.constr.loBounds, depth) else lub(tvar.constr.loBounds)
+ }
+ )
+ log(s"$tvar setInst $newInst")
+ tvar setInst newInst
+ //Console.println("solving "+tvar+" "+up+" "+(if (up) (tvar.constr.hiBounds) else tvar.constr.loBounds)+((if (up) (tvar.constr.hiBounds) else tvar.constr.loBounds) map (_.widen))+" = "+tvar.constr.inst)//@MDEBUG
+ }
+ }
+
+ // println("solving "+tvars+"/"+tparams+"/"+(tparams map (_.info)))
+ foreach3(tvars, tparams, variances)(solveOne)
+ tvars forall (tvar => tvar.constr.isWithinBounds(tvar.constr.inst))
+ }
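+
+ /* A sketch of the direction handling in solveOne (illustrative): with upper = false,
+  * the usual case when instantiating from argument types, a type variable is set to
+  * lub(loBounds); with upper = true, or when the corresponding type parameter is
+  * contravariant (which flips `up`), it is set to glb(hiBounds). That flip is what lets
+  *   def g[T](f: T => Unit) = () ;  g((x: Int) => ())
+  * infer T = Int from the upper bound contributed by the contravariant function
+  * argument instead of collapsing to Nothing.
+  */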
+}
diff --git a/src/reflect/scala/reflect/internal/tpe/TypeMaps.scala b/src/reflect/scala/reflect/internal/tpe/TypeMaps.scala
new file mode 100644
index 0000000000..c35825dcee
--- /dev/null
+++ b/src/reflect/scala/reflect/internal/tpe/TypeMaps.scala
@@ -0,0 +1,1164 @@
+package scala
+package reflect
+package internal
+package tpe
+
+import scala.collection.{ mutable, immutable }
+import Flags._
+import scala.annotation.tailrec
+import Variance._
+
+private[internal] trait TypeMaps {
+ self: SymbolTable =>
+ import definitions._
+
+ /** Normalize any type aliases within this type (@see Type#normalize).
+ * Note that this depends very much on the call to "normalize", not "dealias",
+ * so it no longer carries the too-stealthy name "deAlias".
+ */
+ object normalizeAliases extends TypeMap {
+ def apply(tp: Type): Type = tp match {
+ case TypeRef(_, sym, _) if sym.isAliasType =>
+ def msg = if (tp.isHigherKinded) s"Normalizing type alias function $tp" else s"Dealiasing type alias $tp"
+ mapOver(logResult(msg)(tp.normalize))
+ case _ => mapOver(tp)
+ }
+ }
+
+ /** Remove any occurrence of type <singleton> from this type and its parents */
+ object dropSingletonType extends TypeMap {
+ def apply(tp: Type): Type = {
+ tp match {
+ case TypeRef(_, SingletonClass, _) =>
+ AnyTpe
+ case tp1 @ RefinedType(parents, decls) =>
+ parents filter (_.typeSymbol != SingletonClass) match {
+ case Nil => AnyTpe
+ case p :: Nil if decls.isEmpty => mapOver(p)
+ case ps => mapOver(copyRefinedType(tp1, ps, decls))
+ }
+ case tp1 =>
+ mapOver(tp1)
+ }
+ }
+ }
+
+ /** Type with all top-level occurrences of abstract types replaced by their bounds */
+ object abstractTypesToBounds extends TypeMap {
+ def apply(tp: Type): Type = tp match {
+ case TypeRef(_, sym, _) if sym.isAliasType => apply(tp.dealias)
+ case TypeRef(_, sym, _) if sym.isAbstractType => apply(tp.bounds.hi)
+ case rtp @ RefinedType(parents, decls) => copyRefinedType(rtp, parents mapConserve this, decls)
+ case AnnotatedType(_, _, _) => mapOver(tp)
+ case _ => tp // no recursion - top level only
+ }
+ }
+
+ // Set to true for A* => Seq[A]
+ // (And it will only rewrite A* in method result types.)
+ // This is the pre-existing behavior.
+ // Or false for Seq[A] => Seq[A]
+ // (It will rewrite A* everywhere but method parameters.)
+ // This is the specified behavior.
+ protected def etaExpandKeepsStar = false
+
+ /** Turn any T* types into Seq[T] except when
+ * in method parameter position.
+ */
+ object dropIllegalStarTypes extends TypeMap {
+ def apply(tp: Type): Type = tp match {
+ case MethodType(params, restpe) =>
+ // Not mapping over params
+ val restpe1 = apply(restpe)
+ if (restpe eq restpe1) tp
+ else MethodType(params, restpe1)
+ case TypeRef(_, RepeatedParamClass, arg :: Nil) =>
+ seqType(arg)
+ case _ =>
+ if (etaExpandKeepsStar) tp else mapOver(tp)
+ }
+ }
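+
+ /* Illustrative: applied to a method type written as (xs: Int*)Int*, this map leaves
+  * the parameter section untouched (params are deliberately not mapped over) but
+  * rewrites the result to Seq[Int], giving (xs: Int*)Seq[Int].
+  */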
+
+ trait AnnotationFilter extends TypeMap {
+ def keepAnnotation(annot: AnnotationInfo): Boolean
+
+ override def mapOver(annot: AnnotationInfo) =
+ if (keepAnnotation(annot)) super.mapOver(annot)
+ else UnmappableAnnotation
+ }
+
+ trait KeepOnlyTypeConstraints extends AnnotationFilter {
+ // filter keeps only type constraint annotations
+ def keepAnnotation(annot: AnnotationInfo) = annot matches TypeConstraintClass
+ }
+
+ // todo. move these into scala.reflect.api
+
+ /** A prototype for mapping a function over all possible types
+ */
+ abstract class TypeMap(trackVariance: Boolean) extends (Type => Type) {
+ def this() = this(trackVariance = false)
+ def apply(tp: Type): Type
+
+ private[this] var _variance: Variance = if (trackVariance) Covariant else Invariant
+
+ def variance_=(x: Variance) = { assert(trackVariance, this) ; _variance = x }
+ def variance = _variance
+
+ /** Map this function over given type */
+ def mapOver(tp: Type): Type = tp match {
+ case tr @ TypeRef(pre, sym, args) =>
+ val pre1 = this(pre)
+ val args1 = (
+ if (trackVariance && args.nonEmpty && !variance.isInvariant && sym.typeParams.nonEmpty)
+ mapOverArgs(args, sym.typeParams)
+ else
+ args mapConserve this
+ )
+ if ((pre1 eq pre) && (args1 eq args)) tp
+ else copyTypeRef(tp, pre1, tr.coevolveSym(pre1), args1)
+ case ThisType(_) => tp
+ case SingleType(pre, sym) =>
+ if (sym.isPackageClass) tp // short path
+ else {
+ val pre1 = this(pre)
+ if (pre1 eq pre) tp
+ else singleType(pre1, sym)
+ }
+ case MethodType(params, result) =>
+ val params1 = flipped(mapOver(params))
+ val result1 = this(result)
+ if ((params1 eq params) && (result1 eq result)) tp
+ else copyMethodType(tp, params1, result1.substSym(params, params1))
+ case PolyType(tparams, result) =>
+ val tparams1 = flipped(mapOver(tparams))
+ val result1 = this(result)
+ if ((tparams1 eq tparams) && (result1 eq result)) tp
+ else PolyType(tparams1, result1.substSym(tparams, tparams1))
+ case NullaryMethodType(result) =>
+ val result1 = this(result)
+ if (result1 eq result) tp
+ else NullaryMethodType(result1)
+ case ConstantType(_) => tp
+ case SuperType(thistp, supertp) =>
+ val thistp1 = this(thistp)
+ val supertp1 = this(supertp)
+ if ((thistp1 eq thistp) && (supertp1 eq supertp)) tp
+ else SuperType(thistp1, supertp1)
+ case TypeBounds(lo, hi) =>
+ val lo1 = flipped(this(lo))
+ val hi1 = this(hi)
+ if ((lo1 eq lo) && (hi1 eq hi)) tp
+ else TypeBounds(lo1, hi1)
+ case BoundedWildcardType(bounds) =>
+ val bounds1 = this(bounds)
+ if (bounds1 eq bounds) tp
+ else BoundedWildcardType(bounds1.asInstanceOf[TypeBounds])
+ case rtp @ RefinedType(parents, decls) =>
+ val parents1 = parents mapConserve this
+ val decls1 = mapOver(decls)
+ copyRefinedType(rtp, parents1, decls1)
+ case ExistentialType(tparams, result) =>
+ val tparams1 = mapOver(tparams)
+ val result1 = this(result)
+ if ((tparams1 eq tparams) && (result1 eq result)) tp
+ else newExistentialType(tparams1, result1.substSym(tparams, tparams1))
+ case OverloadedType(pre, alts) =>
+ val pre1 = if (pre.isInstanceOf[ClassInfoType]) pre else this(pre)
+ if (pre1 eq pre) tp
+ else OverloadedType(pre1, alts)
+ case AntiPolyType(pre, args) =>
+ val pre1 = this(pre)
+ val args1 = args mapConserve this
+ if ((pre1 eq pre) && (args1 eq args)) tp
+ else AntiPolyType(pre1, args1)
+ case tv@TypeVar(_, constr) =>
+ if (constr.instValid) this(constr.inst)
+ else tv.applyArgs(mapOverArgs(tv.typeArgs, tv.params)) //@M !args.isEmpty implies !typeParams.isEmpty
+ case AnnotatedType(annots, atp, selfsym) =>
+ val annots1 = mapOverAnnotations(annots)
+ val atp1 = this(atp)
+ if ((annots1 eq annots) && (atp1 eq atp)) tp
+ else if (annots1.isEmpty) atp1
+ else AnnotatedType(annots1, atp1, selfsym)
+ /*
+ case ErrorType => tp
+ case WildcardType => tp
+ case NoType => tp
+ case NoPrefix => tp
+ case ErasedSingleType(sym) => tp
+ */
+ case _ =>
+ tp
+ // throw new Error("mapOver inapplicable for " + tp);
+ }
+
+ def withVariance[T](v: Variance)(body: => T): T = {
+ val saved = variance
+ variance = v
+ try body finally variance = saved
+ }
+ @inline final def flipped[T](body: => T): T = {
+ if (trackVariance) variance = variance.flip
+ try body
+ finally if (trackVariance) variance = variance.flip
+ }
+ protected def mapOverArgs(args: List[Type], tparams: List[Symbol]): List[Type] = (
+ if (trackVariance)
+ map2Conserve(args, tparams)((arg, tparam) => withVariance(variance * tparam.variance)(this(arg)))
+ else
+ args mapConserve this
+ )
+ /** Applies this map to the symbol's info, setting variance = Invariant
+ * if necessary when the symbol is an alias.
+ */
+ private def applyToSymbolInfo(sym: Symbol): Type = {
+ if (trackVariance && !variance.isInvariant && sym.isAliasType)
+ withVariance(Invariant)(this(sym.info))
+ else
+ this(sym.info)
+ }
+
+ /** Called by mapOver to determine whether the original symbols can
+ * be returned, or whether they must be cloned.
+ */
+ protected def noChangeToSymbols(origSyms: List[Symbol]): Boolean = {
+ @tailrec def loop(syms: List[Symbol]): Boolean = syms match {
+ case Nil => true
+ case x :: xs => (x.info eq applyToSymbolInfo(x)) && loop(xs)
+ }
+ loop(origSyms)
+ }
+
+ /** Map this function over given scope */
+ def mapOver(scope: Scope): Scope = {
+ val elems = scope.toList
+ val elems1 = mapOver(elems)
+ if (elems1 eq elems) scope
+ else newScopeWith(elems1: _*)
+ }
+
+ /** Map this function over given list of symbols */
+ def mapOver(origSyms: List[Symbol]): List[Symbol] = {
+ // fast path in case nothing changes due to map
+ if (noChangeToSymbols(origSyms)) origSyms
+ // map is not the identity --> do cloning properly
+ else cloneSymbolsAndModify(origSyms, TypeMap.this)
+ }
+
+ def mapOver(annot: AnnotationInfo): AnnotationInfo = {
+ val AnnotationInfo(atp, args, assocs) = annot
+ val atp1 = mapOver(atp)
+ val args1 = mapOverAnnotArgs(args)
+ // there is no need to rewrite assocs, as they are constants
+
+ if ((args eq args1) && (atp eq atp1)) annot
+ else if (args1.isEmpty && args.nonEmpty) UnmappableAnnotation // some annotation arg was unmappable
+ else AnnotationInfo(atp1, args1, assocs) setPos annot.pos
+ }
+
+ def mapOverAnnotations(annots: List[AnnotationInfo]): List[AnnotationInfo] = {
+ val annots1 = annots mapConserve mapOver
+ if (annots1 eq annots) annots
+ else annots1 filterNot (_ eq UnmappableAnnotation)
+ }
+
+ /** Map over a set of annotation arguments. If any
+ * of the arguments cannot be mapped, then return Nil. */
+ def mapOverAnnotArgs(args: List[Tree]): List[Tree] = {
+ val args1 = args mapConserve mapOver
+ if (args1 contains UnmappableTree) Nil
+ else args1
+ }
+
+ def mapOver(tree: Tree): Tree =
+ mapOver(tree, () => return UnmappableTree)
+
+ /** Map a tree that is part of an annotation argument.
+ * If the tree cannot be mapped, then invoke giveup().
+ * The default is to transform the tree with
+ * TypeMapTransformer.
+ */
+ def mapOver(tree: Tree, giveup: ()=>Nothing): Tree =
+ (new TypeMapTransformer).transform(tree)
+
+ /** This transformer leaves the tree alone except to remap
+ * its types. */
+ class TypeMapTransformer extends Transformer {
+ override def transform(tree: Tree) = {
+ val tree1 = super.transform(tree)
+ val tpe1 = TypeMap.this(tree1.tpe)
+ if ((tree eq tree1) && (tree.tpe eq tpe1))
+ tree
+ else
+ tree1.shallowDuplicate.setType(tpe1)
+ }
+ }
+ }
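+
+ /* A minimal sketch of a concrete map over this base class (illustrative, not defined
+  * in this file): rewrite every occurrence of Int to Long, letting mapOver handle the
+  * recursion through every other shape of type.
+  *
+  *   object intToLong extends TypeMap {
+  *     def apply(tp: Type): Type = tp match {
+  *       case TypeRef(_, IntClass, Nil) => LongTpe
+  *       case _                         => mapOver(tp)
+  *     }
+  *   }
+  *
+  * Applied to the type List[Int] => Int it would produce List[Long] => Long.
+  */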
+
+ abstract class TypeTraverser extends TypeMap {
+ def traverse(tp: Type): Unit
+ def apply(tp: Type): Type = { traverse(tp); tp }
+ }
+
+ abstract class TypeTraverserWithResult[T] extends TypeTraverser {
+ def result: T
+ def clear(): Unit
+ }
+
+ abstract class TypeCollector[T](initial: T) extends TypeTraverser {
+ var result: T = _
+ def collect(tp: Type) = {
+ result = initial
+ traverse(tp)
+ result
+ }
+ }
+
+ /** The raw to existential map converts a ''raw type'' to an existential type.
+ * It is necessary because we might have read a raw type of a
+ * parameterized Java class from a class file. At the time we read the type,
+ * the corresponding class file might not have been read yet, so we do not
+ * know what the type parameters of the type are. Therefore
+ * the conversion of raw types to existential types might not have taken place
+ * in ClassfileParser.sigToType (where it is usually done).
+ */
+ def rawToExistential = new TypeMap {
+ private var expanded = immutable.Set[Symbol]()
+ def apply(tp: Type): Type = tp match {
+ case TypeRef(pre, sym, List()) if isRawIfWithoutArgs(sym) =>
+ if (expanded contains sym) AnyRefTpe
+ else try {
+ expanded += sym
+ val eparams = mapOver(typeParamsToExistentials(sym))
+ existentialAbstraction(eparams, typeRef(apply(pre), sym, eparams map (_.tpe)))
+ } finally {
+ expanded -= sym
+ }
+ case _ =>
+ mapOver(tp)
+ }
+ }
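+
+ /* e.g. (illustrative) a raw java.util.Map read from a classfile is converted here to
+  * the existential java.util.Map[K, V] forSome { type K; type V }, with the fresh
+  * existential parameters produced by typeParamsToExistentials.
+  */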
+ /***
+ *@M: I think this is more desirable, but Martin prefers to leave raw-types as-is as much as possible
+ object rawToExistentialInJava extends TypeMap {
+ def apply(tp: Type): Type = tp match {
+ // any symbol that occurs in a java sig, not just java symbols
+ // see http://lampsvn.epfl.ch/trac/scala/ticket/2454#comment:14
+ case TypeRef(pre, sym, List()) if !sym.typeParams.isEmpty =>
+ val eparams = typeParamsToExistentials(sym, sym.typeParams)
+ existentialAbstraction(eparams, TypeRef(pre, sym, eparams map (_.tpe)))
+ case _ =>
+ mapOver(tp)
+ }
+ }
+ */
+
+ /** Used by existentialAbstraction.
+ */
+ class ExistentialExtrapolation(tparams: List[Symbol]) extends TypeMap(trackVariance = true) {
+ private val occurCount = mutable.HashMap[Symbol, Int]()
+ private def countOccs(tp: Type) = {
+ tp foreach {
+ case TypeRef(_, sym, _) =>
+ if (tparams contains sym)
+ occurCount(sym) += 1
+ case _ => ()
+ }
+ }
+ def extrapolate(tpe: Type): Type = {
+ tparams foreach (t => occurCount(t) = 0)
+ countOccs(tpe)
+ for (tparam <- tparams)
+ countOccs(tparam.info)
+
+ apply(tpe)
+ }
+
+ /** If these conditions all hold:
+ * 1) we are in covariant (or contravariant) position
+ * 2) this type occurs exactly once in the existential scope
+ * 3) the widened upper (or lower) bound of this type contains no references to tparams
+ * Then we replace this lone occurrence of the type with the widened upper (or lower) bound.
+ * All other types pass through unchanged.
+ */
+ def apply(tp: Type): Type = {
+ val tp1 = mapOver(tp)
+ if (variance.isInvariant) tp1
+ else tp1 match {
+ case TypeRef(pre, sym, args) if tparams contains sym =>
+ val repl = if (variance.isPositive) dropSingletonType(tp1.bounds.hi) else tp1.bounds.lo
+ val count = occurCount(sym)
+ val containsTypeParam = tparams exists (repl contains _)
+ def msg = {
+ val word = if (variance.isPositive) "upper" else "lower"
+ s"Widened lone occurrence of $tp1 inside existential to $word bound"
+ }
+ if (!repl.typeSymbol.isBottomClass && count == 1 && !containsTypeParam)
+ logResult(msg)(repl)
+ else
+ tp1
+ case _ =>
+ tp1
+ }
+ }
+ override def mapOver(tp: Type): Type = tp match {
+ case SingleType(pre, sym) =>
+ if (sym.isPackageClass) tp // short path
+ else {
+ val pre1 = this(pre)
+ if ((pre1 eq pre) || !pre1.isStable) tp
+ else singleType(pre1, sym)
+ }
+ case _ => super.mapOver(tp)
+ }
+
+ // Do not discard the types of existential ident's. The
+ // symbol of the Ident itself cannot be listed in the
+ // existential's parameters, so the resulting existential
+ // type would be ill-formed.
+ override def mapOver(tree: Tree) = tree match {
+ case Ident(_) if tree.tpe.isStable => tree
+ case _ => super.mapOver(tree)
+ }
+ }
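+
+ /* Sketch (illustrative): extrapolating T in
+  *   Option[T] forSome { type T <: Number }
+  * finds a single covariant occurrence of T whose upper bound mentions no other
+  * quantified parameter, so the type collapses to Option[Number] and
+  * existentialAbstraction does not need to quantify at all.
+  */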
+
+ /** Might the given symbol be important when calculating the prefix
+ * of a type? When tp.asSeenFrom(pre, clazz) is called on `tp`,
+ * the result will be `tp` unchanged if `pre` is trivial and `clazz`
+ * is a symbol such that isPossiblePrefix(clazz) == false.
+ */
+ def isPossiblePrefix(clazz: Symbol) = clazz.isClass && !clazz.isPackageClass
+
+ protected[internal] def skipPrefixOf(pre: Type, clazz: Symbol) = (
+ (pre eq NoType) || (pre eq NoPrefix) || !isPossiblePrefix(clazz)
+ )
+
+ def newAsSeenFromMap(pre: Type, clazz: Symbol): AsSeenFromMap =
+ new AsSeenFromMap(pre, clazz)
+
+ /** A map to compute the asSeenFrom method.
+ */
+ class AsSeenFromMap(seenFromPrefix: Type, seenFromClass: Symbol) extends TypeMap with KeepOnlyTypeConstraints {
+ // Some example source constructs relevant in asSeenFrom:
+ //
+ // object CaptureThis {
+ // trait X[A] { def f: this.type = this }
+ // class Y[A] { def f: this.type = this }
+ // // Created new existential to represent This(CaptureThis.X) seen from CaptureThis.X[B]: type _1.type <: CaptureThis.X[B] with Singleton
+ // def f1[B] = new X[B] { }
+ // // TODO - why is the behavior different when it's a class?
+ // def f2[B] = new Y[B] { }
+ // }
+ // class CaptureVal[T] {
+ // val f: java.util.List[_ <: T] = null
+ // // Captured existential skolem for type _$1 seen from CaptureVal.this.f.type: type _$1
+ // def g = f get 0
+ // }
+ // class ClassParam[T] {
+ // // AsSeenFromMap(Inner.this.type, class Inner)/classParameterAsSeen(T)#loop(ClassParam.this.type, class ClassParam)
+ // class Inner(lhs: T) { def f = lhs }
+ // }
+ def capturedParams: List[Symbol] = _capturedParams
+ def capturedSkolems: List[Symbol] = _capturedSkolems
+
+ def apply(tp: Type): Type = tp match {
+ case tp @ ThisType(_) => thisTypeAsSeen(tp)
+ case tp @ SingleType(_, sym) => if (sym.isPackageClass) tp else singleTypeAsSeen(tp)
+ case tp @ TypeRef(_, sym, _) if isTypeParamOfEnclosingClass(sym) => classParameterAsSeen(tp)
+ case _ => mapOver(tp)
+ }
+
+ private var _capturedSkolems: List[Symbol] = Nil
+ private var _capturedParams: List[Symbol] = Nil
+ private val isStablePrefix = seenFromPrefix.isStable
+
+ // isBaseClassOfEnclosingClassOrInfoIsNotYetComplete would be a more accurate
+ // but less succinct name.
+ private def isBaseClassOfEnclosingClass(base: Symbol) = {
+ def loop(encl: Symbol): Boolean = (
+ isPossiblePrefix(encl)
+ && ((encl isSubClass base) || loop(encl.owner.enclClass))
+ )
+ // The hasCompleteInfo guard is necessary to avoid cycles during the typing
+ // of certain classes, notably ones defined inside package objects.
+ !base.hasCompleteInfo || loop(seenFromClass)
+ }
+
+ /** Is the symbol a class type parameter from one of the enclosing
+ * classes, or a base class of one of them?
+ */
+ private def isTypeParamOfEnclosingClass(sym: Symbol): Boolean = (
+ sym.isTypeParameter
+ && sym.owner.isClass
+ && isBaseClassOfEnclosingClass(sym.owner)
+ )
+
+ /** Creates an existential representing a type parameter which appears
+ * in the prefix of a ThisType.
+ */
+ protected def captureThis(pre: Type, clazz: Symbol): Type = {
+ capturedParams find (_.owner == clazz) match {
+ case Some(p) => p.tpe
+ case _ =>
+ val qvar = clazz freshExistential nme.SINGLETON_SUFFIX setInfo singletonBounds(pre)
+ _capturedParams ::= qvar
+ debuglog(s"Captured This(${clazz.fullNameString}) seen from $seenFromPrefix: ${qvar.defString}")
+ qvar.tpe
+ }
+ }
+ protected def captureSkolems(skolems: List[Symbol]) {
+ for (p <- skolems; if !(capturedSkolems contains p)) {
+ debuglog(s"Captured $p seen from $seenFromPrefix")
+ _capturedSkolems ::= p
+ }
+ }
+
+ /** Find the type argument in an applied type which corresponds to a type parameter.
+ * The arguments are required to be related as follows, through intermediary `clazz`.
+ * An exception will be thrown if this is violated.
+ *
+ * @param lhs its symbol is a type parameter of `clazz`
+ * @param rhs a type application constructed from `clazz`
+ */
+ private def correspondingTypeArgument(lhs: Type, rhs: Type): Type = {
+ val TypeRef(_, lhsSym, lhsArgs) = lhs
+ val TypeRef(_, rhsSym, rhsArgs) = rhs
+ require(lhsSym.safeOwner == rhsSym, s"$lhsSym is not a type parameter of $rhsSym")
+
+ // Find the type parameter position; we'll use the corresponding argument.
+ // Why are we checking by name rather than by equality? Because for
+ // reasons which aren't yet fully clear, we can arrive here holding a type
+ // parameter whose owner is rhsSym, and which shares the name of an actual
+ // type parameter of rhsSym, but which is not among the type parameters of
+ // rhsSym. One can see examples of it at SI-4365.
+ val argIndex = rhsSym.typeParams indexWhere (lhsSym.name == _.name)
+ // don't be too zealous with the exceptions, see #2641
+ if (argIndex < 0 && rhs.parents.exists(typeIsErroneous))
+ ErrorType
+ else {
+ // It's easy to get here when working on hardcore type machinery (not to
+ // mention when not doing so, see above) so let's provide a standout error.
+ def own_s(s: Symbol) = s.nameString + " in " + s.safeOwner.nameString
+ def explain =
+ sm"""| sought ${own_s(lhsSym)}
+ | classSym ${own_s(rhsSym)}
+ | tparams ${rhsSym.typeParams map own_s mkString ", "}
+ |"""
+
+ if (argIndex < 0)
+ abort(s"Something is wrong: cannot find $lhs in applied type $rhs\n" + explain)
+ else {
+ val targ = rhsArgs(argIndex)
+ // @M! don't just replace the whole thing, might be followed by type application
+ val result = appliedType(targ, lhsArgs mapConserve this)
+ def msg = s"Created $result, though could not find ${own_s(lhsSym)} among tparams of ${own_s(rhsSym)}"
+ if (!rhsSym.typeParams.contains(lhsSym))
+ devWarning(s"Inconsistent tparam/owner views: had to fall back on names\n$msg\n$explain")
+
+ result
+ }
+ }
+ }
+
+ // 0) @pre: `classParam` is a class type parameter
+ // 1) Walk the owner chain of `seenFromClass` until we find the class which owns `classParam`
+ // 2) Take the base type of the prefix at that point with respect to the owning class
+ // 3) Solve for the type parameters through correspondence with the type args of the base type
+ //
+ // Only class type parameters (and not skolems) are considered, because other type parameters
+ // are not influenced by the prefix through which they are seen. Note that type params of
+ // anonymous type functions, which currently can only arise from normalising type aliases, are
+ // owned by the type alias of which they are the eta-expansion.
+ private def classParameterAsSeen(classParam: Type): Type = {
+ val TypeRef(_, tparam, _) = classParam
+
+ def loop(pre: Type, clazz: Symbol): Type = {
+ // have to deconst because it may be a Class[T]
+ def nextBase = (pre baseType clazz).deconst
+ //@M! see test pos/tcpoly_return_overriding.scala why mapOver is necessary
+ if (skipPrefixOf(pre, clazz))
+ mapOver(classParam)
+ else if (!matchesPrefixAndClass(pre, clazz)(tparam.owner))
+ loop(nextBase.prefix, clazz.owner)
+ else nextBase match {
+ case applied @ TypeRef(_, _, _) => correspondingTypeArgument(classParam, applied)
+ case ExistentialType(eparams, qtpe) => captureSkolems(eparams) ; loop(qtpe, clazz)
+ case t => abort(s"$tparam in ${tparam.owner} cannot be instantiated from ${seenFromPrefix.widen}")
+ }
+ }
+ loop(seenFromPrefix, seenFromClass)
+ }
+
+ // Does the candidate symbol match the given prefix and class?
+ // Since pre may be something like ThisType(A) where trait A { self: B => },
+ // we have to test the typeSymbol of the widened type, not pre.typeSymbol, or
+ // B will not be considered.
+ private def matchesPrefixAndClass(pre: Type, clazz: Symbol)(candidate: Symbol) = pre.widen match {
+ case _: TypeVar => false
+ case wide => (clazz == candidate) && (wide.typeSymbol isSubClass clazz)
+ }
+
+ // Whether the annotation tree currently being mapped over has had a This(_) node rewritten.
+ private[this] var wroteAnnotation = false
+ private object annotationArgRewriter extends TypeMapTransformer {
+ private def matchesThis(thiz: Symbol) = matchesPrefixAndClass(seenFromPrefix, seenFromClass)(thiz)
+
+ // what symbol should really be used?
+ private def newThis(): Tree = {
+ wroteAnnotation = true
+ val presym = seenFromPrefix.widen.typeSymbol
+ val thisSym = presym.owner.newValue(presym.name.toTermName, presym.pos) setInfo seenFromPrefix
+ gen.mkAttributedQualifier(seenFromPrefix, thisSym)
+ }
+
+ /** Rewrite `This` trees in annotation argument trees */
+ override def transform(tree: Tree): Tree = super.transform(tree) match {
+ case This(_) if matchesThis(tree.symbol) => newThis()
+ case tree => tree
+ }
+ }
+
+ // This becomes considerably cheaper if we optimize for the common cases:
+ // where the prefix is stable and where no This nodes are rewritten. If
+ // either is true, then we don't need to worry about calling giveup. So if
+ // the prefix is unstable, use a stack variable to indicate whether the tree
+ // was touched. This takes us to one allocation per AsSeenFromMap rather
+ // than an allocation on every call to mapOver, and no extra work when the
+ // tree only has its types remapped.
+ override def mapOver(tree: Tree, giveup: ()=>Nothing): Tree = {
+ if (isStablePrefix)
+ annotationArgRewriter transform tree
+ else {
+ val saved = wroteAnnotation
+ wroteAnnotation = false
+ try annotationArgRewriter transform tree
+ finally if (wroteAnnotation) giveup() else wroteAnnotation = saved
+ }
+ }
+
+ private def thisTypeAsSeen(tp: ThisType): Type = {
+ def loop(pre: Type, clazz: Symbol): Type = {
+ val pre1 = pre match {
+ case SuperType(thistpe, _) => thistpe
+ case _ => pre
+ }
+ if (skipPrefixOf(pre, clazz))
+ mapOver(tp) // TODO - is mapOver necessary here?
+ else if (!matchesPrefixAndClass(pre, clazz)(tp.sym))
+ loop((pre baseType clazz).prefix, clazz.owner)
+ else if (pre1.isStable)
+ pre1
+ else
+ captureThis(pre1, clazz)
+ }
+ loop(seenFromPrefix, seenFromClass)
+ }
+
+ private def singleTypeAsSeen(tp: SingleType): Type = {
+ val SingleType(pre, sym) = tp
+
+ val pre1 = this(pre)
+ if (pre1 eq pre) tp
+ else if (pre1.isStable) singleType(pre1, sym)
+ else pre1.memberType(sym).resultType //todo: this should be rolled into existential abstraction
+ }
+
+ override def toString = s"AsSeenFromMap($seenFromPrefix, $seenFromClass)"
+ }
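+
+ /* The same computation is reachable through the public mirror API as
+  * Type#asSeenFrom / Symbol#typeSignatureIn; an illustrative sketch:
+  *
+  *   import scala.reflect.runtime.universe._
+  *   class Holder[A] { def get: A = ??? }
+  *   val getSym = typeOf[Holder[Int]].member(TermName("get"))
+  *   getSym.typeSignature                          // => A, as declared
+  *   getSym.typeSignatureIn(typeOf[Holder[Int]])   // expected: => Int
+  */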
+
+ /** A base class to compute all substitutions */
+ abstract class SubstMap[T](from: List[Symbol], to: List[T]) extends TypeMap {
+ assert(sameLength(from, to), "Unsound substitution from "+ from +" to "+ to)
+
+ /** Are `sym` and `sym1` the same? Can be tuned by subclasses. */
+ protected def matches(sym: Symbol, sym1: Symbol): Boolean = sym eq sym1
+
+ /** Map target to type, can be tuned by subclasses */
+ protected def toType(fromtp: Type, tp: T): Type
+
+ protected def renameBoundSyms(tp: Type): Type = tp match {
+ case MethodType(ps, restp) =>
+ createFromClonedSymbols(ps, restp)((ps1, tp1) => copyMethodType(tp, ps1, renameBoundSyms(tp1)))
+ case PolyType(bs, restp) =>
+ createFromClonedSymbols(bs, restp)((ps1, tp1) => PolyType(ps1, renameBoundSyms(tp1)))
+ case ExistentialType(bs, restp) =>
+ createFromClonedSymbols(bs, restp)(newExistentialType)
+ case _ =>
+ tp
+ }
+
+ @tailrec private def subst(tp: Type, sym: Symbol, from: List[Symbol], to: List[T]): Type = (
+ if (from.isEmpty) tp
+ // else if (to.isEmpty) error("Unexpected substitution on '%s': from = %s but to == Nil".format(tp, from))
+ else if (matches(from.head, sym)) toType(tp, to.head)
+ else subst(tp, sym, from.tail, to.tail)
+ )
+
+ def apply(tp0: Type): Type = if (from.isEmpty) tp0 else {
+ val boundSyms = tp0.boundSyms
+ val tp1 = if (boundSyms.nonEmpty && (boundSyms exists from.contains)) renameBoundSyms(tp0) else tp0
+ val tp = mapOver(tp1)
+ def substFor(sym: Symbol) = subst(tp, sym, from, to)
+
+ tp match {
+ // @M
+ // 1) arguments must also be substituted (even when the "head" of the
+ // applied type has already been substituted)
+ // example: (subst RBound[RT] from [type RT,type RBound] to
+ // [type RT&,type RBound&]) = RBound&[RT&]
+ // 2) avoid loops (which occur because alpha-conversion is
+ // not performed properly imo)
+ // e.g. if in class Iterable[a] there is a new Iterable[(a,b)],
+ // we must replace the a in Iterable[a] by (a,b)
+ // (must not recurse --> loops)
+ // 3) replacing m by List in m[Int] should yield List[Int], not just List
+ case TypeRef(NoPrefix, sym, args) =>
+ val tcon = substFor(sym)
+ if ((tp eq tcon) || args.isEmpty) tcon
+ else appliedType(tcon.typeConstructor, args)
+ case SingleType(NoPrefix, sym) =>
+ substFor(sym)
+ case _ =>
+ tp
+ }
+ }
+ }
+
+ /** A map to implement the `substSym` method. */
+ class SubstSymMap(from: List[Symbol], to: List[Symbol]) extends SubstMap(from, to) {
+ def this(pairs: (Symbol, Symbol)*) = this(pairs.toList.map(_._1), pairs.toList.map(_._2))
+
+ protected def toType(fromtp: Type, sym: Symbol) = fromtp match {
+ case TypeRef(pre, _, args) => copyTypeRef(fromtp, pre, sym, args)
+ case SingleType(pre, _) => singleType(pre, sym)
+ }
+ @tailrec private def subst(sym: Symbol, from: List[Symbol], to: List[Symbol]): Symbol = (
+ if (from.isEmpty) sym
+ // else if (to.isEmpty) error("Unexpected substitution on '%s': from = %s but to == Nil".format(sym, from))
+ else if (matches(from.head, sym)) to.head
+ else subst(sym, from.tail, to.tail)
+ )
+ private def substFor(sym: Symbol) = subst(sym, from, to)
+
+ override def apply(tp: Type): Type = (
+ if (from.isEmpty) tp
+ else tp match {
+ case TypeRef(pre, sym, args) if pre ne NoPrefix =>
+ val newSym = substFor(sym)
+ // mapOver takes care of subst'ing in args
+ mapOver ( if (sym eq newSym) tp else copyTypeRef(tp, pre, newSym, args) )
+ // assert(newSym.typeParams.length == sym.typeParams.length, "typars mismatch in SubstSymMap: "+(sym, sym.typeParams, newSym, newSym.typeParams))
+ case SingleType(pre, sym) if pre ne NoPrefix =>
+ val newSym = substFor(sym)
+ mapOver( if (sym eq newSym) tp else singleType(pre, newSym) )
+ case _ =>
+ super.apply(tp)
+ }
+ )
+
+ object mapTreeSymbols extends TypeMapTransformer {
+ val strictCopy = newStrictTreeCopier
+
+ def termMapsTo(sym: Symbol) = from indexOf sym match {
+ case -1 => None
+ case idx => Some(to(idx))
+ }
+
+ // if tree.symbol is mapped to another symbol, passes the new symbol into the
+ // constructor `trans` and sets the symbol and the type on the resulting tree.
+ def transformIfMapped(tree: Tree)(trans: Symbol => Tree) = termMapsTo(tree.symbol) match {
+ case Some(toSym) => trans(toSym) setSymbol toSym setType tree.tpe
+ case None => tree
+ }
+
+ // changes trees which refer to one of the mapped symbols. trees are copied before attributes are modified.
+ override def transform(tree: Tree) = {
+ // super.transform maps symbol references in the types of `tree`. it also copies trees where necessary.
+ super.transform(tree) match {
+ case id @ Ident(_) =>
+ transformIfMapped(id)(toSym =>
+ strictCopy.Ident(id, toSym.name))
+
+ case sel @ Select(qual, name) =>
+ transformIfMapped(sel)(toSym =>
+ strictCopy.Select(sel, qual, toSym.name))
+
+ case tree => tree
+ }
+ }
+ }
+ override def mapOver(tree: Tree, giveup: ()=>Nothing): Tree = {
+ mapTreeSymbols.transform(tree)
+ }
+ }
+
+ /** A map to implement the `subst` method. */
+ class SubstTypeMap(from: List[Symbol], to: List[Type])
+ extends SubstMap(from, to) {
+ protected def toType(fromtp: Type, tp: Type) = tp
+
+ override def mapOver(tree: Tree, giveup: () => Nothing): Tree = {
+ object trans extends TypeMapTransformer {
+ override def transform(tree: Tree) = tree match {
+ case Ident(name) =>
+ from indexOf tree.symbol match {
+ case -1 => super.transform(tree)
+ case idx =>
+ val totpe = to(idx)
+ if (totpe.isStable) tree.duplicate setType totpe
+ else giveup()
+ }
+ case _ =>
+ super.transform(tree)
+ }
+ }
+ trans.transform(tree)
+ }
+ }
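+
+ /* Both substitution maps are surfaced on Type as substituteSymbols and
+  * substituteTypes in the public API; an illustrative sketch:
+  *
+  *   import scala.reflect.runtime.universe._
+  *   class Box[A] { def get: A = ??? }
+  *   val A      = typeOf[Box[Any]].typeSymbol.asClass.typeParams.head
+  *   val getTpe = typeOf[Box[Any]].member(TermName("get")).typeSignature   // => A
+  *   getTpe.substituteTypes(A :: Nil, typeOf[Int] :: Nil)                  // expected: => Int
+  */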
+
+ /** A map to implement the `substThis` method. */
+ class SubstThisMap(from: Symbol, to: Type) extends TypeMap {
+ def apply(tp: Type): Type = tp match {
+ case ThisType(sym) if (sym == from) => to
+ case _ => mapOver(tp)
+ }
+ }
+
+ class SubstWildcardMap(from: List[Symbol]) extends TypeMap {
+ def apply(tp: Type): Type = try {
+ tp match {
+ case TypeRef(_, sym, _) if from contains sym =>
+ BoundedWildcardType(sym.info.bounds)
+ case _ =>
+ mapOver(tp)
+ }
+ } catch {
+ case ex: MalformedType =>
+ WildcardType
+ }
+ }
+
+ // dependent method types
+ object IsDependentCollector extends TypeCollector(false) {
+ def traverse(tp: Type) {
+ if (tp.isImmediatelyDependent) result = true
+ else if (!result) mapOver(tp)
+ }
+ }
+
+ object ApproximateDependentMap extends TypeMap {
+ def apply(tp: Type): Type =
+ if (tp.isImmediatelyDependent) WildcardType
+ else mapOver(tp)
+ }
+
+ /** Note: This map is needed even for non-dependent method types, despite what the name might imply.
+ */
+ class InstantiateDependentMap(params: List[Symbol], actuals0: List[Type]) extends TypeMap with KeepOnlyTypeConstraints {
+ private val actuals = actuals0.toIndexedSeq
+ private val existentials = new Array[Symbol](actuals.size)
+ def existentialsNeeded: List[Symbol] = existentials.filter(_ ne null).toList
+
+ private object StableArg {
+ def unapply(param: Symbol) = Arg unapply param map actuals filter (tp =>
+ tp.isStable && (tp.typeSymbol != NothingClass)
+ )
+ }
+ private object Arg {
+ def unapply(param: Symbol) = Some(params indexOf param) filter (_ >= 0)
+ }
+
+ def apply(tp: Type): Type = mapOver(tp) match {
+ // unsound to replace args by unstable actual #3873
+ case SingleType(NoPrefix, StableArg(arg)) => arg
+ // (soundly) expand type alias selections on implicit arguments,
+ // see depmet_implicit_oopsla* test cases -- typically, `param.isImplicit`
+ case tp1 @ TypeRef(SingleType(NoPrefix, Arg(pid)), sym, targs) =>
+ val arg = actuals(pid)
+ val res = typeRef(arg, sym, targs)
+ if (res.typeSymbolDirect.isAliasType) res.dealias else tp1
+ // don't return the original `tp`, which may be different from `tp1`,
+ // due to dropping annotations
+ case tp1 => tp1
+ }
+
+ /* Return the type symbol for referencing a parameter inside the existential quantifier.
+ * (Only needed if the actual is unstable.)
+ */
+ private def existentialFor(pid: Int) = {
+ if (existentials(pid) eq null) {
+ val param = params(pid)
+ existentials(pid) = (
+ param.owner.newExistential(param.name.toTypeName append nme.SINGLETON_SUFFIX, param.pos, param.flags)
+ setInfo singletonBounds(actuals(pid))
+ )
+ }
+ existentials(pid)
+ }
+
+ //AM propagate more info to annotations -- this seems a bit ad-hoc... (based on code by spoon)
+ override def mapOver(arg: Tree, giveup: ()=>Nothing): Tree = {
+ // TODO: this should be simplified; in the stable case, one can
+ // probably just use an Ident to the tree.symbol.
+ //
+ // @PP: That leads to failure here, where stuff no longer has type
+ // 'String @Annot("stuff")' but 'String @Annot(x)'.
+ //
+ // def m(x: String): String @Annot(x) = x
+ // val stuff = m("stuff")
+ //
+ // (TODO cont.) Why an existential in the non-stable case?
+ //
+ // @PP: In the following:
+ //
+ // def m = { val x = "three" ; val y: String @Annot(x) = x; y }
+ //
+ // m is typed as 'String @Annot(x) forSome { val x: String }'.
+ //
+ // Both examples are from run/constrained-types.scala.
+ object treeTrans extends Transformer {
+ override def transform(tree: Tree): Tree = tree.symbol match {
+ case StableArg(actual) =>
+ gen.mkAttributedQualifier(actual, tree.symbol)
+ case Arg(pid) =>
+ val sym = existentialFor(pid)
+ Ident(sym) copyAttrs tree setType typeRef(NoPrefix, sym, Nil)
+ case _ =>
+ super.transform(tree)
+ }
+ }
+ treeTrans transform arg
+ }
+ }
+
+ /** A map to convert every occurrence of a wildcard type to a fresh
+ * type variable */
+ object wildcardToTypeVarMap extends TypeMap {
+ def apply(tp: Type): Type = tp match {
+ case WildcardType =>
+ TypeVar(tp, new TypeConstraint)
+ case BoundedWildcardType(bounds) =>
+ TypeVar(tp, new TypeConstraint(bounds))
+ case _ =>
+ mapOver(tp)
+ }
+ }
+
+ /** A map to convert every occurrence of a type variable to a wildcard type. */
+ object typeVarToOriginMap extends TypeMap {
+ def apply(tp: Type): Type = tp match {
+ case TypeVar(origin, _) => origin
+ case _ => mapOver(tp)
+ }
+ }
+
+ /** A map to implement the `contains` method. */
+ class ContainsCollector(sym: Symbol) extends TypeCollector(false) {
+ def traverse(tp: Type) {
+ if (!result) {
+ tp.normalize match {
+ case TypeRef(_, sym1, _) if (sym == sym1) => result = true
+ case SingleType(_, sym1) if (sym == sym1) => result = true
+ case _ => mapOver(tp)
+ }
+ }
+ }
+
+ override def mapOver(arg: Tree) = {
+ for (t <- arg) {
+ traverse(t.tpe)
+ if (t.symbol == sym)
+ result = true
+ }
+ arg
+ }
+ }
+
+ /** A map to implement the `contains` method. */
+ class ContainsTypeCollector(t: Type) extends TypeCollector(false) {
+ def traverse(tp: Type) {
+ if (!result) {
+ if (tp eq t) result = true
+ else mapOver(tp)
+ }
+ }
+ override def mapOver(arg: Tree) = {
+ for (t <- arg)
+ traverse(t.tpe)
+
+ arg
+ }
+ }
+
+ /** A map to implement the `filter` method. */
+ class FilterTypeCollector(p: Type => Boolean) extends TypeCollector[List[Type]](Nil) {
+ override def collect(tp: Type) = super.collect(tp).reverse
+
+ def traverse(tp: Type) {
+ if (p(tp)) result ::= tp
+ mapOver(tp)
+ }
+ }
+
+ /** A map to implement the `collect` method. */
+ class CollectTypeCollector[T](pf: PartialFunction[Type, T]) extends TypeCollector[List[T]](Nil) {
+ override def collect(tp: Type) = super.collect(tp).reverse
+
+ def traverse(tp: Type) {
+ if (pf.isDefinedAt(tp)) result ::= pf(tp)
+ mapOver(tp)
+ }
+ }
+
+ class ForEachTypeTraverser(f: Type => Unit) extends TypeTraverser {
+ def traverse(tp: Type) {
+ f(tp)
+ mapOver(tp)
+ }
+ }
+
+ /** A map to implement the `filter` method. */
+ class FindTypeCollector(p: Type => Boolean) extends TypeCollector[Option[Type]](None) {
+ def traverse(tp: Type) {
+ if (result.isEmpty) {
+ if (p(tp)) result = Some(tp)
+ mapOver(tp)
+ }
+ }
+ }
+
+ /** A map to implement the `contains` method. */
+ object ErroneousCollector extends TypeCollector(false) {
+ def traverse(tp: Type) {
+ if (!result) {
+ result = tp.isError
+ mapOver(tp)
+ }
+ }
+ }
+
+ object adaptToNewRunMap extends TypeMap {
+
+ private def adaptToNewRun(pre: Type, sym: Symbol): Symbol = {
+ if (phase.flatClasses || sym.isRootSymbol || (pre eq NoPrefix) || (pre eq NoType) || sym.isPackageClass)
+ sym
+ else if (sym.isModuleClass) {
+ val sourceModule1 = adaptToNewRun(pre, sym.sourceModule)
+
+ sourceModule1.moduleClass orElse sourceModule1.initialize.moduleClass orElse {
+ val msg = "Cannot adapt module class; sym = %s, sourceModule = %s, sourceModule.moduleClass = %s => sourceModule1 = %s, sourceModule1.moduleClass = %s"
+ debuglog(msg.format(sym, sym.sourceModule, sym.sourceModule.moduleClass, sourceModule1, sourceModule1.moduleClass))
+ sym
+ }
+ }
+ else {
+ var rebind0 = pre.findMember(sym.name, BRIDGE, 0, stableOnly = true) orElse {
+ if (sym.isAliasType) throw missingAliasException
+ devWarning(s"$pre.$sym no longer exist at phase $phase")
+ throw new MissingTypeControl // For build manager and presentation compiler purposes
+ }
+ /* The two symbols have the same fully qualified name */
+ def corresponds(sym1: Symbol, sym2: Symbol): Boolean =
+ sym1.name == sym2.name && (sym1.isPackageClass || corresponds(sym1.owner, sym2.owner))
+ if (!corresponds(sym.owner, rebind0.owner)) {
+ debuglog("ADAPT1 pre = "+pre+", sym = "+sym.fullLocationString+", rebind = "+rebind0.fullLocationString)
+ val bcs = pre.baseClasses.dropWhile(bc => !corresponds(bc, sym.owner))
+ if (bcs.isEmpty)
+ assert(pre.typeSymbol.isRefinementClass, pre) // if pre is a refinement class it might be a structural type => OK to leave it in.
+ else
+ rebind0 = pre.baseType(bcs.head).member(sym.name)
+ debuglog(
+ "ADAPT2 pre = " + pre +
+ ", bcs.head = " + bcs.head +
+ ", sym = " + sym.fullLocationString +
+ ", rebind = " + rebind0.fullLocationString
+ )
+ }
+ rebind0.suchThat(sym => sym.isType || sym.isStable) orElse {
+ debuglog("" + phase + " " +phase.flatClasses+sym.owner+sym.name+" "+sym.isType)
+ throw new MalformedType(pre, sym.nameString)
+ }
+ }
+ }
+ def apply(tp: Type): Type = tp match {
+ case ThisType(sym) =>
+ try {
+ val sym1 = adaptToNewRun(sym.owner.thisType, sym)
+ if (sym1 == sym) tp else ThisType(sym1)
+ } catch {
+ case ex: MissingTypeControl =>
+ tp
+ }
+ case SingleType(pre, sym) =>
+ if (sym.isPackage) tp
+ else {
+ val pre1 = this(pre)
+ try {
+ val sym1 = adaptToNewRun(pre1, sym)
+ if ((pre1 eq pre) && (sym1 eq sym)) tp
+ else singleType(pre1, sym1)
+ } catch {
+ case _: MissingTypeControl =>
+ tp
+ }
+ }
+ case TypeRef(pre, sym, args) =>
+ if (sym.isPackageClass) tp
+ else {
+ val pre1 = this(pre)
+ val args1 = args mapConserve (this)
+ try {
+ val sym1 = adaptToNewRun(pre1, sym)
+ if ((pre1 eq pre) && (sym1 eq sym) && (args1 eq args)/* && sym.isExternal*/) {
+ tp
+ } else if (sym1 == NoSymbol) {
+ devWarning(s"adapt to new run failed: pre=$pre pre1=$pre1 sym=$sym")
+ tp
+ } else {
+ copyTypeRef(tp, pre1, sym1, args1)
+ }
+ } catch {
+ case ex: MissingAliasControl =>
+ apply(tp.dealias)
+ case _: MissingTypeControl =>
+ tp
+ }
+ }
+ case MethodType(params, restp) =>
+ val restp1 = this(restp)
+ if (restp1 eq restp) tp
+ else copyMethodType(tp, params, restp1)
+ case NullaryMethodType(restp) =>
+ val restp1 = this(restp)
+ if (restp1 eq restp) tp
+ else NullaryMethodType(restp1)
+ case PolyType(tparams, restp) =>
+ val restp1 = this(restp)
+ if (restp1 eq restp) tp
+ else PolyType(tparams, restp1)
+
+ // Lukas: we need to check (together) whether we should also include parameter types
+ // of PolyType and MethodType in adaptToNewRun
+
+ case ClassInfoType(parents, decls, clazz) =>
+ if (clazz.isPackageClass) tp
+ else {
+ val parents1 = parents mapConserve (this)
+ if (parents1 eq parents) tp
+ else ClassInfoType(parents1, decls, clazz)
+ }
+ case RefinedType(parents, decls) =>
+ val parents1 = parents mapConserve (this)
+ if (parents1 eq parents) tp
+ else refinedType(parents1, tp.typeSymbol.owner, decls, tp.typeSymbol.owner.pos)
+ case SuperType(_, _) => mapOver(tp)
+ case TypeBounds(_, _) => mapOver(tp)
+ case TypeVar(_, _) => mapOver(tp)
+ case AnnotatedType(_,_,_) => mapOver(tp)
+ case ExistentialType(_, _) => mapOver(tp)
+ case _ => tp
+ }
+ }
+
+}
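The maps and collectors above all share one shape: a TypeMap's apply handles the cases it cares about and falls back to mapOver for structural recursion, while a TypeCollector prepends hits during traversal and reverses once at the end (hence the overridden collect in FilterTypeCollector and CollectTypeCollector). The sketch below mirrors that shape on a toy expression type; every name in it (Expr, ExprMap, ReplaceLit, Collect) is hypothetical and none of the compiler's Type machinery is involved.

  object TypeMapSketch {
    sealed trait Expr
    case class Lit(n: Int)           extends Expr
    case class Add(l: Expr, r: Expr) extends Expr

    // Analogue of TypeMap: `apply` handles the interesting cases,
    // `mapOver` performs the default structural recursion.
    abstract class ExprMap extends (Expr => Expr) {
      def mapOver(e: Expr): Expr = e match {
        case Add(l, r) => Add(apply(l), apply(r))
        case _         => e
      }
    }

    // Analogue of SubstThisMap / wildcardToTypeVarMap: rewrite one case, recurse otherwise.
    class ReplaceLit(from: Int, to: Int) extends ExprMap {
      def apply(e: Expr): Expr = e match {
        case Lit(`from`) => Lit(to)
        case _           => mapOver(e)
      }
    }

    // Analogue of CollectTypeCollector: prepend hits, reverse once at the end.
    class Collect[T](pf: PartialFunction[Expr, T]) {
      private var result: List[T] = Nil
      def traverse(e: Expr): Unit = {
        if (pf.isDefinedAt(e)) result ::= pf(e)
        e match {
          case Add(l, r) => traverse(l); traverse(r)
          case _         => ()
        }
      }
      def collect(e: Expr): List[T] = { result = Nil; traverse(e); result.reverse }
    }

    def demo(): Unit = {
      val e = Add(Lit(1), Add(Lit(2), Lit(1)))
      println(new ReplaceLit(1, 42)(e))                     // Add(Lit(42),Add(Lit(2),Lit(42)))
      println(new Collect({ case Lit(n) => n }).collect(e)) // List(1, 2, 1)
    }
  }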
diff --git a/src/reflect/scala/reflect/internal/tpe/TypeToStrings.scala b/src/reflect/scala/reflect/internal/tpe/TypeToStrings.scala
new file mode 100644
index 0000000000..16929cca0f
--- /dev/null
+++ b/src/reflect/scala/reflect/internal/tpe/TypeToStrings.scala
@@ -0,0 +1,30 @@
+package scala
+package reflect
+package internal
+package tpe
+
+private[internal] trait TypeToStrings {
+ self: SymbolTable =>
+
+ /** The maximum number of recursions allowed in toString
+ */
+ final val maxTostringRecursions = 50
+
+ private var tostringRecursions = 0
+
+ protected def typeToString(tpe: Type): String =
+ if (tostringRecursions >= maxTostringRecursions) {
+ devWarning("Exceeded recursion depth attempting to print " + util.shortClassOfInstance(tpe))
+ if (settings.debug)
+ (new Throwable).printStackTrace
+
+ "..."
+ }
+ else
+ try {
+ tostringRecursions += 1
+ tpe.safeToString
+ } finally {
+ tostringRecursions -= 1
+ }
+}
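The typeToString guard above is a general pattern: bump a depth counter inside try, print a placeholder once the cap is reached, and always restore the counter in finally so one oversized type does not poison later calls. A minimal standalone sketch of the same idea follows; the names (DepthGuardedToString, guarded, Cons) are hypothetical and not compiler API.

  object DepthGuardedToString {
    final val maxDepth = 50
    private var depth = 0

    def guarded(render: => String): String =
      if (depth >= maxDepth) "..."        // give up rather than overflow the stack
      else try { depth += 1; render }
      finally depth -= 1

    // A deeply nested cons list whose toString recurses through `guarded`.
    sealed trait L { override def toString = guarded(show); def show: String }
    object Nl extends L { def show = "Nil" }
    final class Cons(h: Int, t: L) extends L { def show = s"$h :: $t" }

    def main(args: Array[String]): Unit = {
      val xs = (1 to 1000).foldLeft(Nl: L)((t, h) => new Cons(h, t))
      println(xs)   // prints roughly 50 levels, then "..."
    }
  }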
diff --git a/src/reflect/scala/reflect/internal/transform/Erasure.scala b/src/reflect/scala/reflect/internal/transform/Erasure.scala
index 52d1657dc3..580ada8254 100644
--- a/src/reflect/scala/reflect/internal/transform/Erasure.scala
+++ b/src/reflect/scala/reflect/internal/transform/Erasure.scala
@@ -1,4 +1,5 @@
-package scala.reflect
+package scala
+package reflect
package internal
package transform
@@ -16,7 +17,7 @@ trait Erasure {
/** Is `tp` an unbounded generic type (i.e. one that could be instantiated
* with primitive as well as class types)?
*/
- private def genericCore(tp: Type): Type = tp.normalize match {
+ private def genericCore(tp: Type): Type = tp.dealiasWiden match {
/* A Java Array<T> is erased to Array[Object] (T can only be a reference type), whereas a Scala Array[T] is
* erased to Object. However, there is only one symbol for the Array class. So to make the distinction between
* a Java and a Scala array, we check if the owner of T comes from a Java class.
@@ -36,7 +37,7 @@ trait Erasure {
* then Some((N, T)) where N is the number of Array constructors enclosing `T`,
* otherwise None. Existentials on any level are ignored.
*/
- def unapply(tp: Type): Option[(Int, Type)] = tp.normalize match {
+ def unapply(tp: Type): Option[(Int, Type)] = tp.dealiasWiden match {
case TypeRef(_, ArrayClass, List(arg)) =>
genericCore(arg) match {
case NoType =>
@@ -54,9 +55,13 @@ trait Erasure {
}
}
+ /** Arrays despite their finality may turn up as refined type parents,
+ * e.g. with "tagged types" like Array[Int] with T.
+ */
protected def unboundedGenericArrayLevel(tp: Type): Int = tp match {
- case GenericArray(level, core) if !(core <:< AnyRefClass.tpe) => level
- case _ => 0
+ case GenericArray(level, core) if !(core <:< AnyRefTpe) => level
+ case RefinedType(ps, _) if ps.nonEmpty => logResult(s"Unbounded generic level for $tp is")(ps map unboundedGenericArrayLevel max)
+ case _ => 0
}
// @M #2585 when generating a java generic signature that includes
@@ -69,7 +74,7 @@ trait Erasure {
//
// This requires that cls.isClass.
protected def rebindInnerClass(pre: Type, cls: Symbol): Type = {
- if (cls.owner.isClass) cls.owner.tpe else pre // why not cls.isNestedClass?
+ if (cls.owner.isClass) cls.owner.tpe_* else pre // why not cls.isNestedClass?
}
def unboxDerivedValueClassMethod(clazz: Symbol): Symbol =
@@ -101,13 +106,10 @@ trait Erasure {
def valueClassIsParametric(clazz: Symbol): Boolean = {
assert(!phase.erasedTypes)
clazz.typeParams contains
- clazz.derivedValueClassUnbox.tpe.resultType.normalize.typeSymbol
+ clazz.derivedValueClassUnbox.tpe.resultType.typeSymbol
}
abstract class ErasureMap extends TypeMap {
- private lazy val ObjectArray = arrayType(ObjectClass.tpe)
- private lazy val ErasedObject = erasedTypeRef(ObjectClass)
-
def mergeParents(parents: List[Type]): Type
def eraseNormalClassRef(pre: Type, clazz: Symbol): Type =
@@ -122,15 +124,15 @@ trait Erasure {
apply(st.supertype)
case tref @ TypeRef(pre, sym, args) =>
if (sym == ArrayClass)
- if (unboundedGenericArrayLevel(tp) == 1) ObjectClass.tpe
- else if (args.head.typeSymbol.isBottomClass) ObjectArray
+ if (unboundedGenericArrayLevel(tp) == 1) ObjectTpe
+ else if (args.head.typeSymbol.isBottomClass) arrayType(ObjectTpe)
else typeRef(apply(pre), sym, args map applyInArray)
- else if (sym == AnyClass || sym == AnyValClass || sym == SingletonClass || sym == NotNullClass) ErasedObject
- else if (sym == UnitClass) erasedTypeRef(BoxedUnitClass)
+ else if (sym == AnyClass || sym == AnyValClass || sym == SingletonClass) ObjectTpe
+ else if (sym == UnitClass) BoxedUnitTpe
else if (sym.isRefinementClass) apply(mergeParents(tp.parents))
else if (sym.isDerivedValueClass) eraseDerivedValueClassRef(tref)
else if (sym.isClass) eraseNormalClassRef(pre, sym)
- else apply(sym.info) // alias type or abstract type
+ else apply(sym.info asSeenFrom (pre, sym.owner)) // alias type or abstract type
case PolyType(tparams, restpe) =>
apply(restpe)
case ExistentialType(tparams, restpe) =>
@@ -138,7 +140,7 @@ trait Erasure {
case mt @ MethodType(params, restpe) =>
MethodType(
cloneSymbolsAndModify(params, ErasureMap.this),
- if (restpe.typeSymbol == UnitClass) erasedTypeRef(UnitClass)
+ if (restpe.typeSymbol == UnitClass) UnitTpe
// this replaces each typeref that refers to an argument
// by the type `p.tpe` of the actual argument p (p in params)
else apply(mt.resultType(mt.paramTypes)))
@@ -149,7 +151,7 @@ trait Erasure {
case ClassInfoType(parents, decls, clazz) =>
ClassInfoType(
if (clazz == ObjectClass || isPrimitiveValueClass(clazz)) Nil
- else if (clazz == ArrayClass) List(ErasedObject)
+ else if (clazz == ArrayClass) ObjectTpe :: Nil
else removeLaterObjects(parents map this),
decls, clazz)
case _ =>
@@ -214,9 +216,6 @@ trait Erasure {
specialConstructorErasure(clazz, restpe)
case ExistentialType(tparams, restpe) =>
specialConstructorErasure(clazz, restpe)
- case RefinedType(parents, decls) =>
- specialConstructorErasure(
- clazz, specialScalaErasure.mergeParents(parents))
case mt @ MethodType(params, restpe) =>
MethodType(
cloneSymbolsAndModify(params, specialScalaErasure),
@@ -225,15 +224,7 @@ trait Erasure {
typeRef(pre, clazz, List())
case tp =>
if (!(clazz == ArrayClass || tp.isError))
- // See SI-6556. It seems in some cases the result constructor
- // type of an anonymous class is a different version of the class.
- // This has nothing to do with value classes per se.
- // We simply used a less discriminating transform before, that
- // did not look at the cases in detail.
- // It seems there is a deeper problem here, which needs
- // following up to. But we will not risk regressions
- // in 2.10 because of it.
- log(s"!!! unexpected constructor erasure $tp for $clazz")
+ assert(clazz == ArrayClass || tp.isError, s"!!! unexpected constructor erasure $tp for $clazz")
specialScalaErasure(tp)
}
}
@@ -263,7 +254,7 @@ trait Erasure {
* An intersection such as `Object with Trait` erases to Object.
*/
def mergeParents(parents: List[Type]): Type =
- if (parents.isEmpty) ObjectClass.tpe
+ if (parents.isEmpty) ObjectTpe
else parents.head
}
@@ -311,7 +302,7 @@ trait Erasure {
* - Otherwise, the dominator is the first element of the span.
*/
def intersectionDominator(parents: List[Type]): Type = {
- if (parents.isEmpty) ObjectClass.tpe
+ if (parents.isEmpty) ObjectTpe
else {
val psyms = parents map (_.typeSymbol)
if (psyms contains ArrayClass) {
@@ -333,10 +324,6 @@ trait Erasure {
}
}
- /** Type reference after erasure */
- def erasedTypeRef(sym: Symbol): Type =
- typeRef(erasure(sym)(sym.owner.tpe), sym, Nil)
-
/** The symbol's erased info. This is the type's erasure, except for the following symbols:
*
* - For $asInstanceOf : [T]T
@@ -364,8 +351,7 @@ trait Erasure {
else if (sym.name == nme.update)
(tp: @unchecked) match {
case MethodType(List(index, tvar), restpe) =>
- MethodType(List(index.cloneSymbol.setInfo(specialErasure(sym)(index.tpe)), tvar),
- erasedTypeRef(UnitClass))
+ MethodType(List(index.cloneSymbol.setInfo(specialErasure(sym)(index.tpe)), tvar), UnitTpe)
}
else specialErasure(sym)(tp)
} else if (
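The comment on unboundedGenericArrayLevel earlier in this file mentions "tagged types" like Array[Int] with T as the way arrays end up as parents of refined types. The Tagged/@@ encoding below is one common way user code produces such a type; it is purely illustrative and not part of this diff.

  object TaggedArrays {
    type Tagged[U] = { type Tag = U }
    type @@[T, U]  = T with Tagged[U]

    trait Meters

    // The static type here is Array[Int] with Tagged[Meters]: an array appearing
    // as a parent of a refined type, which is the case the new RefinedType branch
    // of unboundedGenericArrayLevel handles.
    def tagged(xs: Array[Int]): Array[Int] @@ Meters = xs.asInstanceOf[Array[Int] @@ Meters]

    val distances: Array[Int] @@ Meters = tagged(Array(1, 2, 3))
  }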
diff --git a/src/reflect/scala/reflect/internal/transform/RefChecks.scala b/src/reflect/scala/reflect/internal/transform/RefChecks.scala
index d6108ab665..4ca114e781 100644
--- a/src/reflect/scala/reflect/internal/transform/RefChecks.scala
+++ b/src/reflect/scala/reflect/internal/transform/RefChecks.scala
@@ -1,4 +1,5 @@
-package scala.reflect
+package scala
+package reflect
package internal
package transform
@@ -10,4 +11,4 @@ trait RefChecks {
def transformInfo(sym: Symbol, tp: Type): Type =
if (sym.isModule && !sym.isStatic) NullaryMethodType(tp)
else tp
-} \ No newline at end of file
+}
diff --git a/src/reflect/scala/reflect/internal/transform/Transforms.scala b/src/reflect/scala/reflect/internal/transform/Transforms.scala
index 71cc80895d..fa185db22f 100644
--- a/src/reflect/scala/reflect/internal/transform/Transforms.scala
+++ b/src/reflect/scala/reflect/internal/transform/Transforms.scala
@@ -1,4 +1,5 @@
-package scala.reflect
+package scala
+package reflect
package internal
package transform
diff --git a/src/reflect/scala/reflect/internal/transform/UnCurry.scala b/src/reflect/scala/reflect/internal/transform/UnCurry.scala
index 00c7c3de9a..abea8bed9f 100644
--- a/src/reflect/scala/reflect/internal/transform/UnCurry.scala
+++ b/src/reflect/scala/reflect/internal/transform/UnCurry.scala
@@ -1,4 +1,5 @@
-package scala.reflect
+package scala
+package reflect
package internal
package transform
@@ -10,6 +11,14 @@ trait UnCurry {
import global._
import definitions._
+ /** Note: changing tp.normalize to tp.dealias in this method leads to a single
+ * test failure: run/t5688.scala, where instead of the expected output
+ * Vector(ta, tb, tab)
+ * we instead get
+ * Vector(tab, tb, tab)
+ * I think that difference is not the product of sentience but of randomness.
+ * Let us figure out why it is and then change this method.
+ */
private def expandAlias(tp: Type): Type = if (!tp.isHigherKinded) tp.normalize else tp
val uncurry: TypeMap = new TypeMap {
@@ -37,7 +46,7 @@ trait UnCurry {
apply(seqType(arg))
case TypeRef(pre, JavaRepeatedParamClass, arg :: Nil) =>
apply(arrayType(
- if (isUnboundedGeneric(arg)) ObjectClass.tpe else arg))
+ if (isUnboundedGeneric(arg)) ObjectTpe else arg))
case _ =>
expandAlias(mapOver(tp))
}
diff --git a/src/compiler/scala/tools/nsc/interpreter/AbstractFileClassLoader.scala b/src/reflect/scala/reflect/internal/util/AbstractFileClassLoader.scala
index 638bca8a72..10a8b4c812 100644
--- a/src/compiler/scala/tools/nsc/interpreter/AbstractFileClassLoader.scala
+++ b/src/reflect/scala/reflect/internal/util/AbstractFileClassLoader.scala
@@ -2,12 +2,13 @@
* Copyright 2005-2013 LAMP/EPFL
*/
-package scala.tools.nsc
-package interpreter
+package scala
+package reflect.internal.util
-import scala.tools.nsc.io.{ File, AbstractFile }
-import util.ScalaClassLoader
-import java.net.URL
+import scala.reflect.io.AbstractFile
+import java.security.cert.Certificate
+import java.security.{ ProtectionDomain, CodeSource }
+import java.net.{ URL, URLConnection, URLStreamHandler }
import scala.collection.{ mutable, immutable }
/**
@@ -25,15 +26,15 @@ class AbstractFileClassLoader(val root: AbstractFile, parent: ClassLoader)
protected def findAbstractFile(name: String): AbstractFile = {
var file: AbstractFile = root
- val pathParts = classNameToPath(name) split '/'
+ val pathParts = name split '/'
for (dirPart <- pathParts.init) {
- file = file.lookupName(dirPart, true)
+ file = file.lookupName(dirPart, directory = true)
if (file == null)
return null
}
- file.lookupName(pathParts.last, false) match {
+ file.lookupName(pathParts.last, directory = false) match {
case null => null
case file => file
}
@@ -47,28 +48,56 @@ class AbstractFileClassLoader(val root: AbstractFile, parent: ClassLoader)
val pathParts = dirNameToPath(name) split '/'
for (dirPart <- pathParts) {
- file = file.lookupName(dirPart, true)
+ file = file.lookupName(dirPart, directory = true)
if (file == null)
return null
}
- return file
+ file
}
+ // parent delegation in JCL uses getResource; so either add parent.getResAsStream
+ // or implement findResource, which we do here as a study in scarlet (my complexion
+ // after looking at CLs and URLs)
+ override def findResource(name: String): URL = findAbstractFile(name) match {
+ case null => null
+ case file => new URL(null, "repldir:" + file.path, new URLStreamHandler {
+ override def openConnection(url: URL): URLConnection = new URLConnection(url) {
+ override def connect() { }
+ override def getInputStream = file.input
+ }
+ })
+ }
+
+ // this inverts delegation order: super.getResAsStr calls parent.getRes if we fail
override def getResourceAsStream(name: String) = findAbstractFile(name) match {
case null => super.getResourceAsStream(name)
case file => file.input
}
- override def classBytes(name: String): Array[Byte] = findAbstractFile(name) match {
+ // ScalaClassLoader.classBytes uses getResAsStream, so we'll try again before delegating
+ override def classBytes(name: String): Array[Byte] = findAbstractFile(classNameToPath(name)) match {
case null => super.classBytes(name)
case file => file.toByteArray
}
- override def findClass(name: String): JClass = {
+ override def findClass(name: String): Class[_] = {
val bytes = classBytes(name)
if (bytes.length == 0)
throw new ClassNotFoundException(name)
else
- defineClass(name, bytes, 0, bytes.length)
+ defineClass(name, bytes, 0, bytes.length, protectionDomain)
+ }
+
+ lazy val protectionDomain = {
+ val cl = Thread.currentThread().getContextClassLoader()
+ val resource = cl.getResource("scala/runtime/package.class")
+ if (resource == null || resource.getProtocol != "jar") null else {
+ val s = resource.getPath
+ val n = s.lastIndexOf('!')
+ if (n < 0) null else {
+ val path = s.substring(0, n)
+ new ProtectionDomain(new CodeSource(new URL(path), null.asInstanceOf[Array[Certificate]]), null, this, null)
+ }
+ }
}
private val packages = mutable.Map[String, Package]()
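The findResource override added above hands out URLs whose custom URLStreamHandler serves the in-memory AbstractFile directly, so parent delegation via getResource keeps working without touching the filesystem. The standalone sketch below shows the same technique with plain JDK classes; the "mem:" scheme and the bytesFor helper are invented for the example.

  import java.io.{ ByteArrayInputStream, InputStream }
  import java.net.{ URL, URLConnection, URLStreamHandler }

  object InMemoryUrl {
    // Hypothetical byte source standing in for AbstractFile.input.
    def bytesFor(path: String): Array[Byte] = s"contents of $path".getBytes("UTF-8")

    def urlFor(path: String): URL =
      new URL(null, "mem:" + path, new URLStreamHandler {
        override def openConnection(url: URL): URLConnection = new URLConnection(url) {
          override def connect(): Unit = ()
          override def getInputStream: InputStream = new ByteArrayInputStream(bytesFor(path))
        }
      })

    def main(args: Array[String]): Unit = {
      val in = urlFor("scala/Predef.class").openStream()
      println(in.read() != -1)   // true: the stream is served from memory, no file involved
      in.close()
    }
  }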
diff --git a/src/reflect/scala/reflect/internal/util/Collections.scala b/src/reflect/scala/reflect/internal/util/Collections.scala
index 2ba15e0776..e127d577e1 100644
--- a/src/reflect/scala/reflect/internal/util/Collections.scala
+++ b/src/reflect/scala/reflect/internal/util/Collections.scala
@@ -3,7 +3,8 @@
* @author Paul Phillips
*/
-package scala.reflect.internal.util
+package scala
+package reflect.internal.util
import scala.collection.{ mutable, immutable }
import scala.annotation.tailrec
@@ -40,8 +41,6 @@ trait Collections {
mforeach(xss)(x => if ((res eq null) && p(x)) res = Some(x))
if (res eq null) None else res
}
- final def mfilter[A](xss: List[List[A]])(p: A => Boolean) =
- for (xs <- xss; x <- xs; if p(x)) yield x
final def map2[A, B, C](xs1: List[A], xs2: List[B])(f: (A, B) => C): List[C] = {
val lb = new ListBuffer[C]
@@ -141,7 +140,7 @@ trait Collections {
ys1 = ys1.tail
ys2 = ys2.tail
}
- buf.result
+ buf.result()
}
final def foreach2[A, B](xs1: List[A], xs2: List[B])(f: (A, B) => Unit): Unit = {
var ys1 = xs1
@@ -189,18 +188,6 @@ trait Collections {
}
false
}
- final def forall2[A, B](xs1: List[A], xs2: List[B])(f: (A, B) => Boolean): Boolean = {
- var ys1 = xs1
- var ys2 = xs2
- while (!ys1.isEmpty && !ys2.isEmpty) {
- if (!f(ys1.head, ys2.head))
- return false
-
- ys1 = ys1.tail
- ys2 = ys2.tail
- }
- true
- }
final def forall3[A, B, C](xs1: List[A], xs2: List[B], xs3: List[C])(f: (A, B, C) => Boolean): Boolean = {
var ys1 = xs1
var ys2 = xs2
@@ -223,5 +210,4 @@ trait Collections {
}
}
-object Collections extends Collections { }
-
+object Collections extends Collections
diff --git a/src/reflect/scala/reflect/internal/util/HashSet.scala b/src/reflect/scala/reflect/internal/util/HashSet.scala
index 4135f3c469..b4178e055d 100644
--- a/src/reflect/scala/reflect/internal/util/HashSet.scala
+++ b/src/reflect/scala/reflect/internal/util/HashSet.scala
@@ -3,11 +3,11 @@
* @author Martin Odersky
*/
-package scala.reflect.internal.util
+package scala
+package reflect
+package internal.util
object HashSet {
- def apply[T >: Null <: AnyRef](): HashSet[T] = this(16)
- def apply[T >: Null <: AnyRef](label: String): HashSet[T] = this(label, 16)
def apply[T >: Null <: AnyRef](initialCapacity: Int): HashSet[T] = this("No Label", initialCapacity)
def apply[T >: Null <: AnyRef](label: String, initialCapacity: Int): HashSet[T] =
new HashSet[T](label, initialCapacity)
diff --git a/src/reflect/scala/reflect/internal/util/Origins.scala b/src/reflect/scala/reflect/internal/util/Origins.scala
index 3259a12163..2eb4fa29d5 100644
--- a/src/reflect/scala/reflect/internal/util/Origins.scala
+++ b/src/reflect/scala/reflect/internal/util/Origins.scala
@@ -3,12 +3,11 @@
* @author Paul Phillips
*/
-package scala.reflect
+package scala
+package reflect
package internal.util
-import NameTransformer._
import scala.collection.{ mutable, immutable }
-import Origins._
/** A debugging class for logging from whence a method is being called.
* Say you wanted to discover who was calling phase_= in SymbolTable.
diff --git a/src/reflect/scala/reflect/internal/util/Position.scala b/src/reflect/scala/reflect/internal/util/Position.scala
index 8f287a1640..ddd0a64675 100644
--- a/src/reflect/scala/reflect/internal/util/Position.scala
+++ b/src/reflect/scala/reflect/internal/util/Position.scala
@@ -4,7 +4,8 @@
*
*/
-package scala.reflect.internal.util
+package scala
+package reflect.internal.util
import scala.reflect.ClassTag
import scala.reflect.internal.FatalError
@@ -20,18 +21,12 @@ object Position {
else if (posIn.isDefined) posIn.inUltimateSource(posIn.source)
else posIn
)
- def file = pos.source.file
- def prefix = if (shortenFile) file.name else file.path
+ val prefix = if (shortenFile) pos.sourceName else pos.sourcePath
pos match {
case FakePos(fmsg) => fmsg+" "+msg
case NoPosition => msg
- case _ =>
- List(
- "%s:%s: %s".format(prefix, pos.line, msg),
- pos.lineContent.stripLineEnd,
- " " * (pos.column - 1) + "^"
- ) mkString "\n"
+ case _ => "%s:%s: %s\n%s\n%s".format(prefix, pos.line, msg, pos.lineContent, pos.lineCarat)
}
}
}
@@ -129,7 +124,7 @@ abstract class Position extends scala.reflect.api.Position { self =>
def endOrPoint: Int = point
@deprecated("use point instead", "2.9.0")
- def offset: Option[Int] = if (isDefined) Some(point) else None
+ def offset: Option[Int] = if (isDefined) Some(point) else None // used by sbt
/** The same position with a different start value (if a range) */
def withStart(off: Int): Position = this
@@ -206,12 +201,39 @@ abstract class Position extends scala.reflect.api.Position { self =>
def column: Int = throw new UnsupportedOperationException("Position.column")
+ /** A line with a ^ padded with the right number of spaces.
+ */
+ def lineCarat: String = " " * (column - 1) + "^"
+
+ /** The line of code and the corresponding carat pointing line, trimmed
+ * to the maximum specified width, with the trimmed versions oriented
+ * around the point to give maximum context.
+ */
+ def lineWithCarat(maxWidth: Int): (String, String) = {
+ val radius = maxWidth / 2
+ var start = scala.math.max(column - radius, 0)
+ var result = lineContent drop start take maxWidth
+
+ if (result.length < maxWidth) {
+ result = lineContent takeRight maxWidth
+ start = lineContent.length - result.length
+ }
+
+ (result, lineCarat drop start take maxWidth)
+ }
+
/** Convert this to a position around `point` that spans a single source line */
def toSingleLine: Position = this
- def lineContent: String =
- if (isDefined) source.lineToString(line - 1)
- else "NO_LINE"
+ /** The source code corresponding to the range, if this is a range position.
+ * Otherwise the empty string.
+ */
+ def sourceCode = ""
+ def sourceName = "<none>"
+ def sourcePath = "<none>"
+ def lineContent = "<none>"
+ def lengthInChars = 0
+ def lengthInLines = 0
/** Map this position to a position in an original source
* file. If the SourceFile is a normal SourceFile, simply
@@ -240,7 +262,10 @@ class OffsetPosition(override val source: SourceFile, override val point: Int) e
override def withPoint(off: Int) = new OffsetPosition(source, off)
override def withSource(source: SourceFile, shift: Int) = new OffsetPosition(source, point + shift)
- override def line: Int = source.offsetToLine(point) + 1
+ override def line = source.offsetToLine(point) + 1
+ override def sourceName = source.file.name
+ override def sourcePath = source.file.path
+ override def lineContent = source.lineToString(line - 1)
override def column: Int = {
var idx = source.lineToOffset(source.offsetToLine(point))
@@ -266,46 +291,3 @@ class OffsetPosition(override val source: SourceFile, override val point: Int) e
}
override def show = "["+point+"]"
}
-
-/** new for position ranges */
-class RangePosition(source: SourceFile, override val start: Int, point: Int, override val end: Int)
-extends OffsetPosition(source, point) {
- if (start > end) sys.error("bad position: "+show)
- override def isRange: Boolean = true
- override def isOpaqueRange: Boolean = true
- override def startOrPoint: Int = start
- override def endOrPoint: Int = end
- override def withStart(off: Int) = new RangePosition(source, off, point, end)
- override def withEnd(off: Int) = new RangePosition(source, start, point, off)
- override def withPoint(off: Int) = new RangePosition(source, start, off, end)
- override def withSource(source: SourceFile, shift: Int) = new RangePosition(source, start + shift, point + shift, end + shift)
- override def focusStart = new OffsetPosition(source, start)
- override def focus = {
- if (focusCache eq NoPosition) focusCache = new OffsetPosition(source, point)
- focusCache
- }
- override def focusEnd = new OffsetPosition(source, end)
- override def makeTransparent = new TransparentPosition(source, start, point, end)
- override def includes(pos: Position) = pos.isDefined && start <= pos.startOrPoint && pos.endOrPoint <= end
- override def union(pos: Position): Position =
- if (pos.isRange) new RangePosition(source, start min pos.start, point, end max pos.end) else this
-
- override def toSingleLine: Position = source match {
- case bs: BatchSourceFile
- if end > 0 && bs.offsetToLine(start) < bs.offsetToLine(end - 1) =>
- val pointLine = bs.offsetToLine(point)
- new RangePosition(source, bs.lineToOffset(pointLine), point, bs.lineToOffset(pointLine + 1))
- case _ => this
- }
-
- override def toString = "RangePosition("+source.file.canonicalPath+", "+start+", "+point+", "+end+")"
- override def show = "["+start+":"+end+"]"
- private var focusCache: Position = NoPosition
-}
-
-class TransparentPosition(source: SourceFile, start: Int, point: Int, end: Int) extends RangePosition(source, start, point, end) {
- override def isOpaqueRange: Boolean = false
- override def isTransparent = true
- override def makeTransparent = this
- override def show = "<"+start+":"+end+">"
-}
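The lineCarat and lineWithCarat additions above point a caret at the error column and, for long lines, keep a window of maxWidth characters oriented around that column (right-aligning the window near the end of the line). The sketch below lifts the same logic out of Position into free parameters so it can be run in isolation; the object name and arguments are hypothetical.

  object CaratWindow {
    def lineCarat(column: Int): String = " " * (column - 1) + "^"

    def lineWithCarat(lineContent: String, column: Int, maxWidth: Int): (String, String) = {
      val radius = maxWidth / 2
      var start  = math.max(column - radius, 0)
      var result = lineContent drop start take maxWidth

      if (result.length < maxWidth) {        // near the end of the line: right-align the window
        result = lineContent takeRight maxWidth
        start  = lineContent.length - result.length
      }
      (result, lineCarat(column) drop start take maxWidth)
    }

    def main(args: Array[String]): Unit = {
      val (code, carat) = lineWithCarat("val answer = 6 * 7 + somethingVeryLongHere", column = 16, maxWidth = 20)
      println(code)   // a 20-character window around column 16
      println(carat)  // the caret line, trimmed with the same offsets
    }
  }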
diff --git a/src/reflect/scala/reflect/internal/util/RangePosition.scala b/src/reflect/scala/reflect/internal/util/RangePosition.scala
new file mode 100644
index 0000000000..0d09a53cd9
--- /dev/null
+++ b/src/reflect/scala/reflect/internal/util/RangePosition.scala
@@ -0,0 +1,50 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author Paul Phillips
+ */
+
+package scala
+package reflect.internal.util
+
+/** new for position ranges */
+class RangePosition(source: SourceFile, override val start: Int, point: Int, override val end: Int)
+extends OffsetPosition(source, point) {
+ if (start > end) scala.sys.error("bad position: "+show)
+ override def isRange: Boolean = true
+ override def isOpaqueRange: Boolean = true
+ override def startOrPoint: Int = start
+ override def endOrPoint: Int = end
+ override def withStart(off: Int) = new RangePosition(source, off, point, end)
+ override def withEnd(off: Int) = new RangePosition(source, start, point, off)
+ override def withPoint(off: Int) = new RangePosition(source, start, off, end)
+ override def withSource(source: SourceFile, shift: Int) = new RangePosition(source, start + shift, point + shift, end + shift)
+ override def focusStart = new OffsetPosition(source, start)
+ override def focus = {
+ if (focusCache eq NoPosition) focusCache = new OffsetPosition(source, point)
+ focusCache
+ }
+ override def focusEnd = new OffsetPosition(source, end)
+ override def makeTransparent = new TransparentPosition(source, start, point, end)
+ override def includes(pos: Position) = pos.isDefined && start <= pos.startOrPoint && pos.endOrPoint <= end
+ override def union(pos: Position): Position =
+ if (pos.isRange) new RangePosition(source, start min pos.start, point, end max pos.end) else this
+
+ override def toSingleLine: Position = source match {
+ case bs: BatchSourceFile
+ if end > 0 && bs.offsetToLine(start) < bs.offsetToLine(end - 1) =>
+ val pointLine = bs.offsetToLine(point)
+ new RangePosition(source, bs.lineToOffset(pointLine), point, bs.lineToOffset(pointLine + 1))
+ case _ => this
+ }
+
+ override def toString = "RangePosition("+source.file.canonicalPath+", "+start+", "+point+", "+end+")"
+ override def show = "["+start+":"+end+"]"
+ private var focusCache: Position = NoPosition
+}
+
+class TransparentPosition(source: SourceFile, start: Int, point: Int, end: Int) extends RangePosition(source, start, point, end) {
+ override def isOpaqueRange: Boolean = false
+ override def isTransparent = true
+ override def makeTransparent = this
+ override def show = "<"+start+":"+end+">"
+}
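The range semantics in the file above reduce to simple interval arithmetic: includes is containment of start/end, union is the enclosing hull while keeping the receiver's point. A plain-integer restatement, with a hypothetical Span class instead of RangePosition:

  object RangeSemantics {
    final case class Span(start: Int, point: Int, end: Int) {
      require(start <= end, s"bad position: [$start:$end]")
      def includes(other: Span): Boolean = start <= other.start && other.end <= end
      def union(other: Span): Span       = Span(start min other.start, point, end max other.end)
    }

    def main(args: Array[String]): Unit = {
      val a = Span(10, 12, 20)
      println(a includes Span(12, 13, 18))  // true: fully contained
      println(a union Span(15, 16, 30))     // Span(10,12,30): hull of both, point kept from `a`
    }
  }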
diff --git a/src/compiler/scala/tools/nsc/util/ScalaClassLoader.scala b/src/reflect/scala/reflect/internal/util/ScalaClassLoader.scala
index 1f6fa68f57..a7fd787dfc 100644
--- a/src/compiler/scala/tools/nsc/util/ScalaClassLoader.scala
+++ b/src/reflect/scala/reflect/internal/util/ScalaClassLoader.scala
@@ -3,8 +3,8 @@
* @author Paul Phillips
*/
-package scala.tools.nsc
-package util
+package scala
+package reflect.internal.util
import java.lang.{ ClassLoader => JClassLoader }
import java.lang.reflect.{ Constructor, Modifier, Method }
@@ -34,9 +34,9 @@ trait ScalaClassLoader extends JClassLoader {
def setAsContext() { setContext(this) }
/** Load and link a class with this classloader */
- def tryToLoadClass[T <: AnyRef](path: String): Option[Class[T]] = tryClass(path, false)
+ def tryToLoadClass[T <: AnyRef](path: String): Option[Class[T]] = tryClass(path, initialize = false)
/** Load, link and initialize a class with this classloader */
- def tryToInitializeClass[T <: AnyRef](path: String): Option[Class[T]] = tryClass(path, true)
+ def tryToInitializeClass[T <: AnyRef](path: String): Option[Class[T]] = tryClass(path, initialize = true)
private def tryClass[T <: AnyRef](path: String, initialize: Boolean): Option[Class[T]] =
catching(classOf[ClassNotFoundException], classOf[SecurityException]) opt
@@ -46,13 +46,10 @@ trait ScalaClassLoader extends JClassLoader {
def create(path: String): AnyRef =
tryToInitializeClass[AnyRef](path) map (_.newInstance()) orNull
- def constructorsOf[T <: AnyRef : ClassTag]: List[Constructor[T]] =
- classTag[T].runtimeClass.getConstructors.toList map (_.asInstanceOf[Constructor[T]])
-
/** The actual bytes for a class file, or an empty array if it can't be found. */
def classBytes(className: String): Array[Byte] = classAsStream(className) match {
case null => Array()
- case stream => io.Streamable.bytes(stream)
+ case stream => scala.reflect.io.Streamable.bytes(stream)
}
/** An InputStream representing the given class name, or null if not found. */
@@ -71,14 +68,6 @@ trait ScalaClassLoader extends JClassLoader {
try asContext(method.invoke(null, Array(arguments.toArray: AnyRef): _*)) // !!! : AnyRef shouldn't be necessary
catch unwrapHandler({ case ex => throw ex })
}
-
- /** A list comprised of this classloader followed by all its
- * (non-null) parent classloaders, if any.
- */
- def loaderChain: List[ScalaClassLoader] = this :: (getParent match {
- case null => Nil
- case p => p.loaderChain
- })
}
/** Methods for obtaining various classloaders.
@@ -99,35 +88,6 @@ object ScalaClassLoader {
}
def contextLoader = apply(Thread.currentThread.getContextClassLoader)
def appLoader = apply(JClassLoader.getSystemClassLoader)
- def extLoader = apply(appLoader.getParent)
- def bootLoader = apply(null)
- def contextChain = loaderChain(contextLoader)
-
- def pathToErasure[T: ClassTag] = pathToClass(classTag[T].runtimeClass)
- def pathToClass(clazz: Class[_]) = clazz.getName.replace('.', JFile.separatorChar) + ".class"
- def locate[T: ClassTag] = contextLoader getResource pathToErasure[T]
-
- /** Tries to guess the classpath by type matching the context classloader
- * and its parents, looking for any classloaders which will reveal their
- * classpath elements as urls. It it can't find any, creates a classpath
- * from the supplied string.
- */
- def guessClassPathString(default: String = ""): String = {
- val classpathURLs = contextChain flatMap {
- case x: HasClassPath => x.classPathURLs
- case x: JURLClassLoader => x.getURLs.toSeq
- case _ => Nil
- }
- if (classpathURLs.isEmpty) default
- else JavaClassPath.fromURLs(classpathURLs).asClasspathString
- }
-
- def loaderChain(head: JClassLoader) = {
- def loop(cl: JClassLoader): List[JClassLoader] =
- if (cl == null) Nil else cl :: loop(cl.getParent)
-
- loop(head)
- }
def setContext(cl: JClassLoader) =
Thread.currentThread.setContextClassLoader(cl)
def savingContextLoader[T](body: => T): T = {
@@ -142,16 +102,13 @@ object ScalaClassLoader {
with HasClassPath {
private var classloaderURLs: Seq[URL] = urls
- private def classpathString = ClassPath.fromURLs(urls: _*)
def classPathURLs: Seq[URL] = classloaderURLs
- def classPath: ClassPath[_] = JavaClassPath fromURLs classPathURLs
/** Override to widen to public */
override def addURL(url: URL) = {
classloaderURLs :+= url
super.addURL(url)
}
- def toLongString = urls.mkString("URLClassLoader(\n ", "\n ", "\n)\n")
}
def fromURLs(urls: Seq[URL], parent: ClassLoader = null): URLClassLoader =
@@ -162,7 +119,6 @@ object ScalaClassLoader {
fromURLs(urls) tryToLoadClass name isDefined
/** Finding what jar a clazz or instance came from */
- def origin(x: Any): Option[URL] = originOfClass(x.getClass)
def originOfClass(x: Class[_]): Option[URL] =
Option(x.getProtectionDomain.getCodeSource) flatMap (x => Option(x.getLocation))
}
diff --git a/src/reflect/scala/reflect/internal/util/Set.scala b/src/reflect/scala/reflect/internal/util/Set.scala
index 36bdb8174a..75dcfaa59b 100644
--- a/src/reflect/scala/reflect/internal/util/Set.scala
+++ b/src/reflect/scala/reflect/internal/util/Set.scala
@@ -2,7 +2,8 @@
* Copyright 2005-2013 LAMP/EPFL
* @author Martin Odersky
*/
-package scala.reflect.internal.util
+package scala
+package reflect.internal.util
/** A common class for lightweight sets.
*/
@@ -18,8 +19,6 @@ abstract class Set[T <: AnyRef] {
def apply(x: T): Boolean = contains(x)
- @deprecated("use `iterator` instead", "2.9.0") def elements = iterator
-
def contains(x: T): Boolean =
findEntry(x) ne null
diff --git a/src/reflect/scala/reflect/internal/util/SourceFile.scala b/src/reflect/scala/reflect/internal/util/SourceFile.scala
index bc2d0ee4db..ea4c9a9b68 100644
--- a/src/reflect/scala/reflect/internal/util/SourceFile.scala
+++ b/src/reflect/scala/reflect/internal/util/SourceFile.scala
@@ -4,7 +4,8 @@
*/
-package scala.reflect.internal.util
+package scala
+package reflect.internal.util
import scala.reflect.io.{ AbstractFile, VirtualFile }
import scala.collection.mutable.ArrayBuffer
@@ -24,7 +25,6 @@ abstract class SourceFile {
assert(offset < length, file + ": " + offset + " >= " + length)
new OffsetPosition(this, offset)
}
- def position(line: Int, column: Int) : Position = new OffsetPosition(this, lineToOffset(line) + column)
def offsetToLine(offset: Int): Int
def lineToOffset(index : Int): Int
@@ -37,9 +37,6 @@ abstract class SourceFile {
def dbg(offset: Int) = (new OffsetPosition(this, offset)).dbgString
def path = file.path
- def beginsWith(offset: Int, text: String): Boolean =
- (content drop offset) startsWith text
-
def lineToString(index: Int): String =
content drop lineToOffset(index) takeWhile (c => !isLineBreakChar(c.toChar)) mkString ""
@@ -81,7 +78,6 @@ object ScriptSourceFile {
}
else 0
}
- def stripHeader(cs: Array[Char]): Array[Char] = cs drop headerLength(cs)
def apply(file: AbstractFile, content: Array[Char]) = {
val underlying = new BatchSourceFile(file, content)
@@ -91,7 +87,6 @@ object ScriptSourceFile {
stripped
}
}
-import ScriptSourceFile._
class ScriptSourceFile(underlying: BatchSourceFile, content: Array[Char], override val start: Int) extends BatchSourceFile(underlying.file, content) {
override def isSelfContained = false
diff --git a/src/reflect/scala/reflect/internal/util/Statistics.scala b/src/reflect/scala/reflect/internal/util/Statistics.scala
index cbd27b0d65..b583137059 100644
--- a/src/reflect/scala/reflect/internal/util/Statistics.scala
+++ b/src/reflect/scala/reflect/internal/util/Statistics.scala
@@ -1,4 +1,5 @@
-package scala.reflect.internal.util
+package scala
+package reflect.internal.util
import scala.collection.mutable
@@ -102,8 +103,8 @@ quant)
for ((_, q) <- qs if q.underlying == q;
r <- q :: q.children.toList if r.prefix.nonEmpty) yield r
- private def showPercent(x: Double, base: Double) =
- if (base == 0) "" else f" (${x / base * 100}%2.1f%%)"
+ private def showPercent(x: Long, base: Long) =
+ if (base == 0) "" else f" (${x.toDouble / base.toDouble * 100}%2.1f%%)"
/** The base trait for quantities.
* Quantities with non-empty prefix are printed in the statistics info.
@@ -155,7 +156,7 @@ quant)
value = value0 + underlying.value - uvalue0
}
override def toString =
- value + showPercent(value, underlying.value)
+ value + showPercent(value.toLong, underlying.value.toLong)
}
class Timer(val prefix: String, val phases: Seq[String]) extends Quantity {
@@ -257,7 +258,6 @@ quant)
def enabled = _enabled
def enabled_=(cond: Boolean) = {
if (cond && !_enabled) {
- val test = new Timer("", Nil)
val start = System.nanoTime()
var total = 0L
for (i <- 1 to 10000) {
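The showPercent change above switches the parameters to Long, guards against a zero base, and does the division in Double only for formatting. Restated with a small demo (the object name is hypothetical):

  object ShowPercentSketch {
    def showPercent(x: Long, base: Long): String =
      if (base == 0) "" else f" (${x.toDouble / base.toDouble * 100}%2.1f%%)"

    def main(args: Array[String]): Unit = {
      println("hits: 42" + showPercent(42L, 1000L))  // hits: 42 (4.2%)
      println("hits: 42" + showPercent(42L, 0L))     // hits: 42   (no percentage when base is 0)
    }
  }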
diff --git a/src/reflect/scala/reflect/internal/util/StringOps.scala b/src/reflect/scala/reflect/internal/util/StringOps.scala
index 8f6c409e0b..4d98a344d8 100644
--- a/src/reflect/scala/reflect/internal/util/StringOps.scala
+++ b/src/reflect/scala/reflect/internal/util/StringOps.scala
@@ -6,8 +6,8 @@
** |/ **
\* */
-
-package scala.reflect.internal.util
+package scala
+package reflect.internal.util
/** This object provides utility methods to extract elements
* from Strings.
@@ -16,22 +16,14 @@ package scala.reflect.internal.util
* @version 1.0
*/
trait StringOps {
- def onull(s: String) = if (s == null) "" else s
- def oempty(xs: String*) = xs filterNot (x => x == null || x == "")
- def ojoin(xs: String*): String = oempty(xs: _*) mkString " "
- def ojoin(xs: Seq[String], sep: String): String = oempty(xs: _*) mkString sep
- def ojoinOr(xs: Seq[String], sep: String, orElse: String) = {
- val ys = oempty(xs: _*)
- if (ys.isEmpty) orElse else ys mkString sep
- }
- def trimTrailingSpace(s: String) = {
- if (s.length == 0 || !s.charAt(s.length - 1).isWhitespace) s
- else {
- var idx = s.length - 1
- while (idx >= 0 && s.charAt(idx).isWhitespace)
- idx -= 1
-
- s.substring(0, idx + 1)
+ def oempty(xs: String*) = xs filterNot (x => x == null || x == "")
+ def ojoin(xs: String*): String = oempty(xs: _*) mkString " "
+ def longestCommonPrefix(xs: List[String]): String = {
+ if (xs.isEmpty || xs.contains("")) ""
+ else xs.head.head match {
+ case ch =>
+ if (xs.tail forall (_.head == ch)) "" + ch + longestCommonPrefix(xs map (_.tail))
+ else ""
}
}
@@ -49,14 +41,6 @@ trait StringOps {
def words(str: String): List[String] = decompose(str, ' ')
- def stripPrefixOpt(str: String, prefix: String): Option[String] =
- if (str startsWith prefix) Some(str drop prefix.length)
- else None
-
- def stripSuffixOpt(str: String, suffix: String): Option[String] =
- if (str endsWith suffix) Some(str dropRight suffix.length)
- else None
-
def splitWhere(str: String, f: Char => Boolean, doDropIndex: Boolean = false): Option[(String, String)] =
splitAt(str, str indexWhere f, doDropIndex)
@@ -65,10 +49,6 @@ trait StringOps {
else Some((str take idx, str drop (if (doDropIndex) idx + 1 else idx)))
/** Returns a string meaning "n elements".
- *
- * @param n ...
- * @param elements ...
- * @return ...
*/
def countElementsAsString(n: Int, elements: String): String =
n match {
@@ -81,9 +61,6 @@ trait StringOps {
}
/** Turns a count into a friendly English description if n<=4.
- *
- * @param n ...
- * @return ...
*/
def countAsString(n: Int): String =
n match {
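The new longestCommonPrefix helper above peels off one character at a time while every string agrees on it. Restated verbatim in a self-contained object (the object name is hypothetical) with a few hand-checkable cases:

  object LcpSketch {
    def longestCommonPrefix(xs: List[String]): String =
      if (xs.isEmpty || xs.contains("")) ""
      else xs.head.head match {
        case ch =>
          if (xs.tail forall (_.head == ch)) "" + ch + longestCommonPrefix(xs map (_.tail))
          else ""
      }

    def main(args: Array[String]): Unit = {
      println(longestCommonPrefix(List("interspecies", "interstellar", "interstate")))  // inters
      println(longestCommonPrefix(List("throne", "dungeon")))                           // (empty)
      println(longestCommonPrefix(Nil))                                                 // (empty)
    }
  }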
diff --git a/src/reflect/scala/reflect/internal/util/StripMarginInterpolator.scala b/src/reflect/scala/reflect/internal/util/StripMarginInterpolator.scala
index e7579229b2..e622e78d57 100644
--- a/src/reflect/scala/reflect/internal/util/StripMarginInterpolator.scala
+++ b/src/reflect/scala/reflect/internal/util/StripMarginInterpolator.scala
@@ -1,4 +1,5 @@
-package scala.reflect
+package scala
+package reflect
package internal
package util
@@ -6,7 +7,7 @@ trait StripMarginInterpolator {
def stringContext: StringContext
/**
- * A safe combination of `[[scala.collection.immutable.StringLike#stripMargin]]
+ * A safe combination of [[scala.collection.immutable.StringLike#stripMargin]]
* and [[scala.StringContext#raw]].
*
* The margin of each line is defined by whitespace leading up to a '|' character.
diff --git a/src/reflect/scala/reflect/internal/util/TableDef.scala b/src/reflect/scala/reflect/internal/util/TableDef.scala
index 8e2bcc2ff7..1626da2c93 100644
--- a/src/reflect/scala/reflect/internal/util/TableDef.scala
+++ b/src/reflect/scala/reflect/internal/util/TableDef.scala
@@ -1,4 +1,5 @@
-package scala.reflect.internal.util
+package scala
+package reflect.internal.util
import TableDef._
import scala.language.postfixOps
@@ -19,8 +20,8 @@ class TableDef[T](_cols: Column[T]*) {
* if none is specified, a space is used.
*/
def ~(next: Column[T]) = retThis(cols :+= next)
- def >>(pair: (String, T => Any)) = this ~ Column(pair._1, pair._2, false)
- def <<(pair: (String, T => Any)) = this ~ Column(pair._1, pair._2, true)
+ def >>(pair: (String, T => Any)) = this ~ Column(pair._1, pair._2, left = false)
+ def <<(pair: (String, T => Any)) = this ~ Column(pair._1, pair._2, left = true)
def >+(sep: String) = retThis(separators += ((cols.size - 1, sep)))
/** Below this point should all be considered private/internal.
@@ -67,12 +68,6 @@ class TableDef[T](_cols: Column[T]*) {
override def toString = allToSeq mkString "\n"
}
- def formatterFor(rows: Seq[T]): T => String = {
- val formatStr = new Table(rows).rowFormat
-
- x => formatStr.format(colApply(x) : _*)
- }
-
def table(rows: Seq[T]) = new Table(rows)
override def toString = cols.mkString("TableDef(", ", ", ")")
diff --git a/src/reflect/scala/reflect/internal/util/ThreeValues.scala b/src/reflect/scala/reflect/internal/util/ThreeValues.scala
index f89bd9e199..18410510cb 100644
--- a/src/reflect/scala/reflect/internal/util/ThreeValues.scala
+++ b/src/reflect/scala/reflect/internal/util/ThreeValues.scala
@@ -1,4 +1,5 @@
-package scala.reflect.internal.util
+package scala
+package reflect.internal.util
/** A simple three value type for booleans with an unknown value */
object ThreeValues {
diff --git a/src/reflect/scala/reflect/internal/util/TraceSymbolActivity.scala b/src/reflect/scala/reflect/internal/util/TraceSymbolActivity.scala
index fa83f70f3a..97cc19952c 100644
--- a/src/reflect/scala/reflect/internal/util/TraceSymbolActivity.scala
+++ b/src/reflect/scala/reflect/internal/util/TraceSymbolActivity.scala
@@ -1,4 +1,5 @@
-package scala.reflect.internal
+package scala
+package reflect.internal
package util
import scala.collection.{ mutable, immutable }
@@ -12,13 +13,9 @@ trait TraceSymbolActivity {
if (enabled && global.isCompilerUniverse)
scala.sys addShutdownHook showAllSymbols()
- private type Set[T] = scala.collection.immutable.Set[T]
- private val Set = scala.collection.immutable.Set
-
val allSymbols = mutable.Map[Int, Symbol]()
val allChildren = mutable.Map[Int, List[Int]]() withDefaultValue Nil
val prevOwners = mutable.Map[Int, List[(Int, Phase)]]() withDefaultValue Nil
- val symsCaused = mutable.Map[Int, Int]() withDefaultValue 0
val allTrees = mutable.Set[Tree]()
def recordSymbolsInTree(tree: Tree) {
@@ -44,38 +41,6 @@ trait TraceSymbolActivity {
}
}
- /** TODO.
- */
- private def reachableDirectlyFromSymbol(sym: Symbol): List[Symbol] = (
- List(sym.owner, sym.alias, sym.thisSym)
- ++ sym.children
- ++ sym.info.parents.map(_.typeSymbol)
- ++ sym.typeParams
- ++ sym.paramss.flatten
- )
- private def reachable[T](inputs: Traversable[T], mkSymbol: T => Symbol): Set[Symbol] = {
- def loop(seen: Set[Symbol], remaining: List[Symbol]): Set[Symbol] = {
- remaining match {
- case Nil => seen
- case head :: rest =>
- if ((head eq null) || (head eq NoSymbol) || seen(head)) loop(seen, rest)
- else loop(seen + head, rest ++ reachableDirectlyFromSymbol(head).filterNot(seen))
- }
- }
- loop(immutable.Set(), inputs.toList map mkSymbol filterNot (_ eq null) distinct)
- }
- private def treeList(t: Tree) = {
- val buf = mutable.ListBuffer[Tree]()
- t foreach (buf += _)
- buf.toList
- }
-
- private def reachableFromSymbol(root: Symbol): Set[Symbol] =
- reachable[Symbol](List(root, root.info.typeSymbol), x => x)
-
- private def reachableFromTree(tree: Tree): Set[Symbol] =
- reachable[Tree](treeList(tree), _.symbol)
-
private def signature(id: Int) = runBeforeErasure(allSymbols(id).defString)
private def dashes(s: Any): String = ("" + s) map (_ => '-')
@@ -119,7 +84,7 @@ trait TraceSymbolActivity {
}
println("\n")
}
- private def showFreq[T, U](xs: Traversable[T])(groupFn: T => U, showFn: U => String = (x: U) => "" + x) = {
+ private def showFreq[T, U](xs: Traversable[T])(groupFn: T => U, showFn: U => String) = {
showMapFreq(xs.toList groupBy groupFn)(showFn)
}
private lazy val findErasurePhase: Phase = {
@@ -129,7 +94,7 @@ trait TraceSymbolActivity {
}
ph
}
- private def runBeforeErasure[T](body: => T): T = atPhase(findErasurePhase)(body)
+ private def runBeforeErasure[T](body: => T): T = enteringPhase(findErasurePhase)(body)
def showAllSymbols() {
if (!enabled) return
diff --git a/src/reflect/scala/reflect/internal/util/TriState.scala b/src/reflect/scala/reflect/internal/util/TriState.scala
new file mode 100644
index 0000000000..c7a35d4637
--- /dev/null
+++ b/src/reflect/scala/reflect/internal/util/TriState.scala
@@ -0,0 +1,26 @@
+package scala
+package reflect
+package internal
+package util
+
+import TriState._
+
+/** A simple true/false/unknown value, for those days when
+ * true and false don't quite partition the space.
+ */
+final class TriState private (val value: Int) extends AnyVal {
+ def isKnown = this != Unknown
+ def booleanValue = this match {
+ case True => true
+ case False => false
+ case _ => sys.error("Not a Boolean value")
+ }
+}
+
+object TriState {
+ implicit def booleanToTriState(b: Boolean): TriState = if (b) True else False
+
+ val Unknown = new TriState(-1)
+ val False = new TriState(0)
+ val True = new TriState(1)
+}
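A short usage sketch for the TriState value class added above: Booleans lift into it through the implicit conversion in the companion, Unknown is the third state, and booleanValue is only defined for True and False. This assumes the class is on the classpath as scala.reflect.internal.util.TriState (as in this tree); the demo object and sameErasure method are hypothetical.

  import scala.reflect.internal.util.TriState

  object TriStateDemo {
    def sameErasure(known: Boolean, answer: Boolean): TriState =
      if (!known) TriState.Unknown else answer   // Boolean lifts via booleanToTriState

    def main(args: Array[String]): Unit = {
      println(sameErasure(known = true,  answer = true).booleanValue)  // true
      println(sameErasure(known = false, answer = true).isKnown)       // false
      // TriState.Unknown.booleanValue would throw: only True/False carry a Boolean.
    }
  }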
diff --git a/src/reflect/scala/reflect/internal/util/WeakHashSet.scala b/src/reflect/scala/reflect/internal/util/WeakHashSet.scala
index 9882aad5e5..ef62fa481b 100644
--- a/src/reflect/scala/reflect/internal/util/WeakHashSet.scala
+++ b/src/reflect/scala/reflect/internal/util/WeakHashSet.scala
@@ -1,9 +1,7 @@
-package scala.reflect.internal.util
+package scala
+package reflect.internal.util
import scala.collection.mutable
-import scala.collection.mutable.ArrayBuffer
-import scala.collection.mutable.Builder
-import scala.collection.mutable.SetBuilder
import scala.collection.generic.Clearable
import scala.runtime.AbstractFunction1
diff --git a/src/reflect/scala/reflect/internal/util/package.scala b/src/reflect/scala/reflect/internal/util/package.scala
index 6d77235db6..49164d366c 100644
--- a/src/reflect/scala/reflect/internal/util/package.scala
+++ b/src/reflect/scala/reflect/internal/util/package.scala
@@ -1,7 +1,37 @@
-package scala.reflect
+package scala
+package reflect
package internal
+import scala.language.existentials // SI-6541
+
package object util {
+ import StringOps.longestCommonPrefix
+
+ // Shorten a name like Symbols$FooSymbol to FooSymbol.
+ private def shortenName(name: String): String = {
+ if (name == "") return ""
+ val segments = (name split '$').toList
+ val last = segments.last
+
+ if (last.length == 0)
+ segments takeRight 2 mkString "$"
+ else
+ last
+ }
+
+ def shortClassOfInstance(x: AnyRef): String = shortClass(x.getClass)
+ def shortClass(clazz: Class[_]): String = {
+ val name: String = (clazz.getName split '.').last
+ def isModule = name endsWith "$" // object
+ def isAnon = (name split '$').last forall (_.isDigit) // anonymous class
+
+ if (isModule)
+ (name split '$' filterNot (_ == "")).last + "$"
+ else if (isAnon)
+ clazz.getSuperclass :: clazz.getInterfaces.toList map (c => shortClass(c)) mkString " with "
+ else
+ shortenName(name)
+ }
/**
* Adds the `sm` String interpolator to a [[scala.StringContext]].
*/
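The shortClass/shortenName helpers above reduce a JVM class name like Symbols$FooSymbol to its last meaningful segment, with shortClass additionally special-casing module classes (names ending in "$") and anonymous classes (a trailing all-digit segment). The sketch below restates shortenName over plain strings so the outputs can be checked by hand; the object name is hypothetical.

  object ShortNameSketch {
    def shortenName(name: String): String = {
      if (name == "") return ""
      val segments = (name split '$').toList
      val last = segments.last
      if (last.length == 0) segments takeRight 2 mkString "$"
      else last
    }

    def main(args: Array[String]): Unit = {
      println(shortenName("Symbols$FooSymbol"))               // FooSymbol
      println(shortenName("Types$TypeMaps$SubstTypeMap"))     // SubstTypeMap
      println(shortenName(""))                                 // (empty)
    }
  }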
diff --git a/src/reflect/scala/reflect/io/AbstractFile.scala b/src/reflect/scala/reflect/io/AbstractFile.scala
index 15befb67f1..ac1159b2ac 100644
--- a/src/reflect/scala/reflect/io/AbstractFile.scala
+++ b/src/reflect/scala/reflect/io/AbstractFile.scala
@@ -4,19 +4,21 @@
*/
-package scala.reflect
+package scala
+package reflect
package io
-import java.io.{ FileOutputStream, IOException, InputStream, OutputStream, BufferedOutputStream }
+import java.io.{ FileOutputStream, IOException, InputStream, OutputStream, BufferedOutputStream, ByteArrayOutputStream }
import java.io.{ File => JFile }
import java.net.URL
import scala.collection.mutable.ArrayBuffer
+import scala.reflect.internal.util.Statistics
/**
* An abstraction over files for use in the reflection/compiler libraries.
- *
+ *
* ''Note: This library is considered experimental and should not be used unless you know what you are doing.''
- *
+ *
* @author Philippe Altherr
* @version 1.0, 23/03/2004
*/
@@ -27,7 +29,7 @@ object AbstractFile {
/**
* If the specified File exists and is a regular file, returns an
- * abstract regular file backed by it. Otherwise, returns <code>null</code>.
+ * abstract regular file backed by it. Otherwise, returns `null`.
*/
def getFile(file: File): AbstractFile =
if (file.isFile) new PlainFile(file) else null
@@ -38,10 +40,7 @@ object AbstractFile {
/**
* If the specified File exists and is either a directory or a
* readable zip or jar archive, returns an abstract directory
- * backed by it. Otherwise, returns <code>null</code>.
- *
- * @param file ...
- * @return ...
+ * backed by it. Otherwise, returns `null`.
*/
def getDirectory(file: File): AbstractFile =
if (file.isDirectory) new PlainFile(file)
@@ -51,15 +50,14 @@ object AbstractFile {
/**
* If the specified URL exists and is a readable zip or jar archive,
* returns an abstract directory backed by it. Otherwise, returns
- * <code>null</code>.
- *
- * @param file ...
- * @return ...
+ * `null`.
*/
def getURL(url: URL): AbstractFile = {
if (url == null || !Path.isExtensionJarOrZip(url.getPath)) null
else ZipArchive fromURL url
}
+
+ def getResources(url: URL): AbstractFile = ZipArchive fromManifestURL url
}
/**
@@ -80,12 +78,12 @@ object AbstractFile {
* </p>
* <p>
* The interface does <b>not</b> allow access to the content.
- * The class <code>symtab.classfile.AbstractFileReader</code> accesses
+ * The class `symtab.classfile.AbstractFileReader` accesses
* bytes, knowing that the character set of classfiles is UTF-8. For
- * all other cases, the class <code>SourceFile</code> is used, which honors
- * <code>global.settings.encoding.value</code>.
+ * all other cases, the class `SourceFile` is used, which honors
+ * `global.settings.encoding.value`.
* </p>
- *
+ *
* ''Note: This library is considered experimental and should not be used unless you know what you are doing.''
*/
abstract class AbstractFile extends Iterable[AbstractFile] {
@@ -116,7 +114,10 @@ abstract class AbstractFile extends Iterable[AbstractFile] {
def underlyingSource: Option[AbstractFile] = None
/** Does this abstract file denote an existing file? */
- def exists: Boolean = (file eq null) || file.exists
+ def exists: Boolean = {
+ if (Statistics.canEnable) Statistics.incCounter(IOStats.fileExistsCount)
+ (file eq null) || file.exists
+ }
/** Does this abstract file represent something which can contain classfiles? */
def isClassContainer = isDirectory || (file != null && (extension == "jar" || extension == "zip"))
@@ -130,6 +131,9 @@ abstract class AbstractFile extends Iterable[AbstractFile] {
/** Is this abstract file a directory? */
def isDirectory: Boolean
+ /** Does this abstract file correspond to something on-disk? */
+ def isVirtual: Boolean = false
+
/** Returns the time that this abstract file was last modified. */
def lastModified: Long
@@ -148,7 +152,7 @@ abstract class AbstractFile extends Iterable[AbstractFile] {
def toURL: URL = if (file == null) null else file.toURI.toURL
/** Returns contents of file (if applicable) in a Char array.
- * warning: use <code>Global.getSourceFile()</code> to use the proper
+ * warning: use `Global.getSourceFile()` to use the proper
* encoding when converting to the char array.
*/
@throws(classOf[IOException])
@@ -159,24 +163,36 @@ abstract class AbstractFile extends Iterable[AbstractFile] {
@throws(classOf[IOException])
def toByteArray: Array[Byte] = {
val in = input
- var rest = sizeOption.getOrElse(0)
- val arr = new Array[Byte](rest)
- while (rest > 0) {
- val res = in.read(arr, arr.length - rest, rest)
- if (res == -1)
- throw new IOException("read error")
- rest -= res
+ sizeOption match {
+ case Some(size) =>
+ var rest = size
+ val arr = new Array[Byte](rest)
+ while (rest > 0) {
+ val res = in.read(arr, arr.length - rest, rest)
+ if (res == -1)
+ throw new IOException("read error")
+ rest -= res
+ }
+ in.close()
+ arr
+ case None =>
+ val out = new ByteArrayOutputStream()
+ var c = in.read()
+ while(c != -1) {
+ out.write(c)
+ c = in.read()
+ }
+ in.close()
+ out.toByteArray()
}
- in.close()
- arr
}
/** Returns all abstract subfiles of this abstract directory. */
def iterator: Iterator[AbstractFile]
/** Returns the abstract file in this abstract directory with the specified
- * name. If there is no such file, returns <code>null</code>. The argument
- * <code>directory</code> tells whether to look for a directory or
+ * name. If there is no such file, returns `null`. The argument
+ * `directory` tells whether to look for a directory or
* a regular file.
*/
def lookupName(name: String, directory: Boolean): AbstractFile
@@ -186,19 +202,6 @@ abstract class AbstractFile extends Iterable[AbstractFile] {
*/
def lookupNameUnchecked(name: String, directory: Boolean): AbstractFile
- /** Returns the abstract file in this abstract directory with the specified
- * path relative to it, If there is no such file, returns null. The argument
- * <code>directory</code> tells whether to look for a directory or a regular
- * file.
- *
- * @param path ...
- * @param directory ...
- * @return ...
- */
- def lookupPath(path: String, directory: Boolean): AbstractFile = {
- lookup((f, p, dir) => f.lookupName(p, dir), path, directory)
- }
-
/** Return an abstract file that does not check that `path` denotes
* an existing file.
*/
@@ -243,7 +246,7 @@ abstract class AbstractFile extends Iterable[AbstractFile] {
*/
def fileNamed(name: String): AbstractFile = {
assert(isDirectory, "Tried to find '%s' in '%s' but it is not a directory".format(name, path))
- fileOrSubdirectoryNamed(name, false)
+ fileOrSubdirectoryNamed(name, isDir = false)
}
/**
@@ -252,7 +255,7 @@ abstract class AbstractFile extends Iterable[AbstractFile] {
*/
def subdirectoryNamed(name: String): AbstractFile = {
assert (isDirectory, "Tried to find '%s' in '%s' but it is not a directory".format(name, path))
- fileOrSubdirectoryNamed(name, true)
+ fileOrSubdirectoryNamed(name, isDir = true)
}
protected def unsupported(): Nothing = unsupported(null)
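The reworked toByteArray above distinguishes streams of known and unknown length. A minimal standalone sketch of the same control flow follows; the helper name readFully and its parameters are illustrative, not part of the patch:

    import java.io.{ ByteArrayOutputStream, IOException, InputStream }

    // Mirrors the sizeOption match above: pre-size the array when the length
    // is known, otherwise buffer through a ByteArrayOutputStream.
    def readFully(in: InputStream, sizeOption: Option[Int]): Array[Byte] =
      try sizeOption match {
        case Some(size) =>
          val arr  = new Array[Byte](size)
          var rest = size
          while (rest > 0) {
            val res = in.read(arr, arr.length - rest, rest)
            if (res == -1) throw new IOException("read error")
            rest -= res
          }
          arr
        case None =>
          val out = new ByteArrayOutputStream()
          var c   = in.read()
          while (c != -1) { out.write(c); c = in.read() }
          out.toByteArray
      } finally in.close()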
diff --git a/src/reflect/scala/reflect/io/Directory.scala b/src/reflect/scala/reflect/io/Directory.scala
index c040d1eac5..2b965e6d69 100644
--- a/src/reflect/scala/reflect/io/Directory.scala
+++ b/src/reflect/scala/reflect/io/Directory.scala
@@ -6,7 +6,8 @@
** |/ **
\* */
-package scala.reflect
+package scala
+package reflect
package io
import java.io.{ File => JFile }
@@ -14,12 +15,10 @@ import java.io.{ File => JFile }
* ''Note: This library is considered experimental and should not be used unless you know what you are doing.''
*/
object Directory {
- import scala.util.Properties.{ tmpDir, userHome, userDir }
+ import scala.util.Properties.userDir
private def normalizePath(s: String) = Some(apply(Path(s).normalize))
def Current: Option[Directory] = if (userDir == "") None else normalizePath(userDir)
- def Home: Option[Directory] = if (userHome == "") None else normalizePath(userHome)
- def TmpDir: Option[Directory] = if (tmpDir == "") None else normalizePath(tmpDir)
def apply(path: Path): Directory = path.toDirectory
@@ -30,20 +29,18 @@ object Directory {
path.createDirectory()
}
}
-import Path._
/** An abstraction for directories.
*
* @author Paul Phillips
* @since 2.8
- *
+ *
* ''Note: This library is considered experimental and should not be used unless you know what you are doing.''
*/
class Directory(jfile: JFile) extends Path(jfile) {
override def toAbsolute: Directory = if (isAbsolute) this else super.toAbsolute.toDirectory
override def toDirectory: Directory = this
override def toFile: File = new File(jfile)
- override def isValid = jfile.isDirectory() || !jfile.exists()
override def normalize: Directory = super.normalize.toDirectory
/** An iterator over the contents of this directory.
@@ -60,7 +57,6 @@ class Directory(jfile: JFile) extends Path(jfile) {
override def walkFilter(cond: Path => Boolean): Iterator[Path] =
list filter cond flatMap (_ walkFilter cond)
- def deepDirs: Iterator[Directory] = Path.onlyDirs(deepList())
def deepFiles: Iterator[File] = Path.onlyFiles(deepList())
/** If optional depth argument is not given, will recurse
@@ -70,10 +66,4 @@ class Directory(jfile: JFile) extends Path(jfile) {
if (depth < 0) list ++ (dirs flatMap (_ deepList (depth)))
else if (depth == 0) Iterator.empty
else list ++ (dirs flatMap (_ deepList (depth - 1)))
-
- /** An iterator over the directories underneath this directory,
- * to the (optionally) given depth.
- */
- def subdirs(depth: Int = 1): Iterator[Directory] =
- deepList(depth) collect { case x: Directory => x }
}
diff --git a/src/reflect/scala/reflect/io/File.scala b/src/reflect/scala/reflect/io/File.scala
index 736ba5d51e..a9c6807e88 100644
--- a/src/reflect/scala/reflect/io/File.scala
+++ b/src/reflect/scala/reflect/io/File.scala
@@ -7,13 +7,16 @@
\* */
-package scala.reflect
+package scala
+package reflect
package io
import java.io.{
FileInputStream, FileOutputStream, BufferedReader, BufferedWriter, InputStreamReader, OutputStreamWriter,
- BufferedInputStream, BufferedOutputStream, IOException, PrintStream, PrintWriter, Closeable => JCloseable }
-import java.io.{ File => JFile }
+ BufferedInputStream, BufferedOutputStream, IOException, PrintStream, PrintWriter, Closeable => JCloseable,
+ File => JFile
+}
+
import java.nio.channels.{ Channel, FileChannel }
import scala.io.Codec
import scala.language.{reflectiveCalls, implicitConversions}
@@ -22,8 +25,7 @@ import scala.language.{reflectiveCalls, implicitConversions}
*/
object File {
def pathSeparator = java.io.File.pathSeparator
- def separator = java.io.File.separator
-
+ def separator = java.io.File.separator
def apply(path: Path)(implicit codec: Codec) = new File(path.jfile)(codec)
// Create a temporary file, which will be deleted upon jvm exit.
@@ -32,41 +34,7 @@ object File {
jfile.deleteOnExit()
apply(jfile)
}
-
- type HasClose = { def close(): Unit }
-
- def closeQuietly(target: HasClose) {
- try target.close() catch { case e: IOException => }
- }
- def closeQuietly(target: JCloseable) {
- try target.close() catch { case e: IOException => }
- }
-
- // this is a workaround for http://bugs.sun.com/bugdatabase/view_bug.do?bug_id=6503430
- // we are using a static initializer to statically initialize a java class so we don't
- // trigger java.lang.InternalErrors later when using it concurrently. We ignore all
- // the exceptions so as not to cause spurious failures when no write access is available,
- // e.g. google app engine.
- //
- // XXX need to put this behind a setting.
- //
- // try {
- // import Streamable.closing
- // val tmp = java.io.File.createTempFile("bug6503430", null, null)
- // try closing(new FileInputStream(tmp)) { in =>
- // val inc = in.getChannel()
- // closing(new FileOutputStream(tmp, true)) { out =>
- // out.getChannel().transferFrom(inc, 0, 0)
- // }
- // }
- // finally tmp.delete()
- // }
- // catch {
- // case _: IllegalArgumentException | _: IllegalStateException | _: IOException | _: SecurityException => ()
- // }
}
-import File._
-import Path._
/** An abstraction for files. For character data, a Codec
* can be supplied at either creation time or when a method
@@ -76,19 +44,17 @@ import Path._
*
* @author Paul Phillips
* @since 2.8
- *
+ *
* ''Note: This library is considered experimental and should not be used unless you know what you are doing.''
*/
class File(jfile: JFile)(implicit constructorCodec: Codec) extends Path(jfile) with Streamable.Chars {
override val creationCodec = constructorCodec
- def withCodec(codec: Codec): File = new File(jfile)(codec)
override def addExtension(ext: String): File = super.addExtension(ext).toFile
override def toAbsolute: File = if (isAbsolute) this else super.toAbsolute.toFile
override def toDirectory: Directory = new Directory(jfile)
override def toFile: File = this
override def normalize: File = super.normalize.toFile
- override def isValid = jfile.isFile() || !jfile.exists()
override def length = super[Path].length
override def walkFilter(cond: Path => Boolean): Iterator[Path] =
if (cond(this)) Iterator.single(this) else Iterator.empty
@@ -99,26 +65,22 @@ class File(jfile: JFile)(implicit constructorCodec: Codec) extends Path(jfile) w
/** Obtains a OutputStream. */
def outputStream(append: Boolean = false) = new FileOutputStream(jfile, append)
def bufferedOutput(append: Boolean = false) = new BufferedOutputStream(outputStream(append))
- def printStream(append: Boolean = false) = new PrintStream(outputStream(append), true)
/** Obtains an OutputStreamWriter wrapped around a FileOutputStream.
* This should behave like a less broken version of java.io.FileWriter,
* in that unlike the java version you can specify the encoding.
*/
- def writer(): OutputStreamWriter = writer(false)
- def writer(append: Boolean): OutputStreamWriter = writer(append, creationCodec)
def writer(append: Boolean, codec: Codec): OutputStreamWriter =
new OutputStreamWriter(outputStream(append), codec.charSet)
/** Wraps a BufferedWriter around the result of writer().
*/
- def bufferedWriter(): BufferedWriter = bufferedWriter(false)
+ def bufferedWriter(): BufferedWriter = bufferedWriter(append = false)
def bufferedWriter(append: Boolean): BufferedWriter = bufferedWriter(append, creationCodec)
def bufferedWriter(append: Boolean, codec: Codec): BufferedWriter =
new BufferedWriter(writer(append, codec))
def printWriter(): PrintWriter = new PrintWriter(bufferedWriter(), true)
- def printWriter(append: Boolean): PrintWriter = new PrintWriter(bufferedWriter(append), true)
/** Creates a new file and writes all the Strings to it. */
def writeAll(strings: String*): Unit = {
@@ -127,12 +89,6 @@ class File(jfile: JFile)(implicit constructorCodec: Codec) extends Path(jfile) w
finally out.close()
}
- def writeBytes(bytes: Array[Byte]): Unit = {
- val out = bufferedOutput()
- try out write bytes
- finally out.close()
- }
-
def appendAll(strings: String*): Unit = {
val out = bufferedWriter(append = true)
try strings foreach (out write _)
@@ -150,39 +106,6 @@ class File(jfile: JFile)(implicit constructorCodec: Codec) extends Path(jfile) w
try Some(slurp())
catch { case _: IOException => None }
- def copyTo(destPath: Path, preserveFileDate: Boolean = false): Boolean = {
- val CHUNK = 1024 * 1024 * 16 // 16 MB
- val dest = destPath.toFile
- if (!isValid) fail("Source %s is not a valid file." format name)
- if (this.normalize == dest.normalize) fail("Source and destination are the same.")
- if (!dest.parent.exists) fail("Destination cannot be created.")
- if (dest.exists && !dest.canWrite) fail("Destination exists but is not writable.")
- if (dest.isDirectory) fail("Destination exists but is a directory.")
-
- lazy val in_s = inputStream()
- lazy val out_s = dest.outputStream()
- lazy val in = in_s.getChannel()
- lazy val out = out_s.getChannel()
-
- try {
- val size = in.size()
- var pos, count = 0L
- while (pos < size) {
- count = (size - pos) min CHUNK
- pos += out.transferFrom(in, pos, count)
- }
- }
- finally List[HasClose](out, out_s, in, in_s) foreach closeQuietly
-
- if (this.length != dest.length)
- fail("Failed to completely copy %s to %s".format(name, dest.name))
-
- if (preserveFileDate)
- dest.lastModified = this.lastModified
-
- true
- }
-
/** Reflection since we're into the java 6+ API.
*/
def setExecutable(executable: Boolean, ownerOnly: Boolean = true): Boolean = {
diff --git a/src/reflect/scala/reflect/io/FileOperationException.scala b/src/reflect/scala/reflect/io/FileOperationException.scala
index 13a1322798..fdfe0234e0 100644
--- a/src/reflect/scala/reflect/io/FileOperationException.scala
+++ b/src/reflect/scala/reflect/io/FileOperationException.scala
@@ -7,7 +7,8 @@
\* */
-package scala.reflect
+package scala
+package reflect
package io
/** ''Note: This library is considered experimental and should not be used unless you know what you are doing.'' */
case class FileOperationException(msg: String) extends RuntimeException(msg)
diff --git a/src/reflect/scala/reflect/io/IOStats.scala b/src/reflect/scala/reflect/io/IOStats.scala
new file mode 100644
index 0000000000..71f8be330d
--- /dev/null
+++ b/src/reflect/scala/reflect/io/IOStats.scala
@@ -0,0 +1,32 @@
+package scala
+package reflect.io
+
+import scala.reflect.internal.util.Statistics
+
+// Due to limitations in the Statistics machinery, these are only
+// reported if this patch is applied.
+//
+// --- a/src/reflect/scala/reflect/internal/util/Statistics.scala
+// +++ b/src/reflect/scala/reflect/internal/util/Statistics.scala
+// @@ -109,7 +109,7 @@ quant)
+// * Quantities with non-empty prefix are printed in the statistics info.
+// */
+// trait Quantity {
+// - if (enabled && prefix.nonEmpty) {
+// + if (prefix.nonEmpty) {
+// val key = s"${if (underlying != this) underlying.prefix else ""}/$prefix"
+// qs(key) = this
+// }
+// @@ -243,7 +243,7 @@ quant)
+// *
+// * to remove all Statistics code from build
+// */
+// - final val canEnable = _enabled
+// + final val canEnable = true // _enabled
+//
+// We can't commit this change as the first diff reverts a fix for an IDE memory leak.
+private[io] object IOStats {
+ val fileExistsCount = Statistics.newCounter("# File.exists calls")
+ val fileIsDirectoryCount = Statistics.newCounter("# File.isDirectory calls")
+ val fileIsFileCount = Statistics.newCounter("# File.isFile calls")
+}
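The counters declared here are consumed through the Statistics.canEnable guard shown elsewhere in this diff. A hedged sketch of the call-site pattern, using an illustrative counter of its own (MyIOStats and myExistsCount are made-up names; IOStats itself is private[io]):

    import scala.reflect.internal.util.Statistics

    // Illustrative counter following the pattern used by exists/isFile/isDirectory
    // in this diff: the canEnable guard is a compile-time constant, so when
    // statistics support is compiled out the increment disappears entirely.
    object MyIOStats {
      val myExistsCount = Statistics.newCounter("# my exists calls")

      def existsInstrumented(f: java.io.File): Boolean = {
        if (Statistics.canEnable) Statistics.incCounter(myExistsCount)
        f.exists
      }
    }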
diff --git a/src/reflect/scala/reflect/io/NoAbstractFile.scala b/src/reflect/scala/reflect/io/NoAbstractFile.scala
index 8c88d3abf6..a4e869ed41 100644
--- a/src/reflect/scala/reflect/io/NoAbstractFile.scala
+++ b/src/reflect/scala/reflect/io/NoAbstractFile.scala
@@ -3,15 +3,15 @@
* @author Paul Phillips
*/
-package scala.reflect
+package scala
+package reflect
package io
import java.io.InputStream
-import java.io.{ File => JFile }
/** A distinguished object so you can avoid both null
* and Option.
- *
+ *
* ''Note: This library is considered experimental and should not be used unless you know what you are doing.''
*/
object NoAbstractFile extends AbstractFile {
@@ -19,9 +19,10 @@ object NoAbstractFile extends AbstractFile {
def container: AbstractFile = this
def create(): Unit = ???
def delete(): Unit = ???
- def file: JFile = null
+ def file: java.io.File = null
def input: InputStream = null
def isDirectory: Boolean = false
+ override def isVirtual: Boolean = true
def iterator: Iterator[AbstractFile] = Iterator.empty
def lastModified: Long = 0L
def lookupName(name: String, directory: Boolean): AbstractFile = null
diff --git a/src/reflect/scala/reflect/io/Path.scala b/src/reflect/scala/reflect/io/Path.scala
index 36fdc04db4..15fce953f2 100644
--- a/src/reflect/scala/reflect/io/Path.scala
+++ b/src/reflect/scala/reflect/io/Path.scala
@@ -3,16 +3,17 @@
* @author Paul Phillips
*/
-package scala.reflect
+package scala
+package reflect
package io
import java.io.{
FileInputStream, FileOutputStream, BufferedReader, BufferedWriter, InputStreamReader, OutputStreamWriter,
- BufferedInputStream, BufferedOutputStream, RandomAccessFile }
-import java.io.{ File => JFile }
+ BufferedInputStream, BufferedOutputStream, RandomAccessFile, File => JFile }
import java.net.{ URI, URL }
import scala.util.Random.alphanumeric
import scala.language.implicitConversions
+import scala.reflect.internal.util.Statistics
/** An abstraction for filesystem paths. The differences between
* Path, File, and Directory are primarily to communicate intent.
@@ -27,7 +28,7 @@ import scala.language.implicitConversions
*
* @author Paul Phillips
* @since 2.8
- *
+ *
* ''Note: This library is considered experimental and should not be used unless you know what you are doing.''
*/
object Path {
@@ -49,32 +50,28 @@ object Path {
implicit def string2path(s: String): Path = apply(s)
implicit def jfile2path(jfile: JFile): Path = apply(jfile)
- // java 7 style, we don't use it yet
- // object AccessMode extends Enumeration {
- // val EXECUTE, READ, WRITE = Value
- // }
- // def checkAccess(modes: AccessMode*): Boolean = {
- // modes foreach {
- // case EXECUTE => throw new Exception("Unsupported") // can't check in java 5
- // case READ => if (!jfile.canRead()) return false
- // case WRITE => if (!jfile.canWrite()) return false
- // }
- // true
- // }
-
def onlyDirs(xs: Iterator[Path]): Iterator[Directory] = xs filter (_.isDirectory) map (_.toDirectory)
def onlyDirs(xs: List[Path]): List[Directory] = xs filter (_.isDirectory) map (_.toDirectory)
def onlyFiles(xs: Iterator[Path]): Iterator[File] = xs filter (_.isFile) map (_.toFile)
- def onlyFiles(xs: List[Path]): List[File] = xs filter (_.isFile) map (_.toFile)
def roots: List[Path] = java.io.File.listRoots().toList map Path.apply
- def apply(segments: Seq[String]): Path = apply(segments mkString java.io.File.separator)
def apply(path: String): Path = apply(new JFile(path))
- def apply(jfile: JFile): Path =
- if (jfile.isFile) new File(jfile)
- else if (jfile.isDirectory) new Directory(jfile)
+ def apply(jfile: JFile): Path = try {
+ def isFile = {
+ if (Statistics.canEnable) Statistics.incCounter(IOStats.fileIsFileCount)
+ jfile.isFile
+ }
+
+ def isDirectory = {
+ if (Statistics.canEnable) Statistics.incCounter(IOStats.fileIsDirectoryCount)
+ jfile.isDirectory
+ }
+
+ if (isFile) new File(jfile)
+ else if (isDirectory) new Directory(jfile)
else new Path(jfile)
+ } catch { case ex: SecurityException => new Path(jfile) }
/** Avoiding any shell/path issues by only using alphanumerics. */
private[io] def randomPrefix = alphanumeric take 6 mkString ""
@@ -84,19 +81,13 @@ import Path._
/** The Path constructor is private so we can enforce some
* semantics regarding how a Path might relate to the world.
- *
+ *
* ''Note: This library is considered experimental and should not be used unless you know what you are doing.''
*/
class Path private[io] (val jfile: JFile) {
val separator = java.io.File.separatorChar
val separatorStr = java.io.File.separator
- // Validation: this verifies that the type of this object and the
- // contents of the filesystem are in agreement. All objects are
- // valid except File objects whose path points to a directory and
- // Directory objects whose path points to a file.
- def isValid: Boolean = true
-
// conversions
def toFile: File = new File(jfile)
def toDirectory: Directory = new Directory(jfile)
@@ -104,6 +95,7 @@ class Path private[io] (val jfile: JFile) {
def toCanonical: Path = Path(jfile.getCanonicalPath())
def toURI: URI = jfile.toURI()
def toURL: URL = toURI.toURL()
+
/** If this path is absolute, returns it: otherwise, returns an absolute
* path made up of root / this.
*/
@@ -136,7 +128,6 @@ class Path private[io] (val jfile: JFile) {
def name: String = jfile.getName()
def path: String = jfile.getPath()
def normalize: Path = Path(jfile.getAbsolutePath())
- def isRootPath: Boolean = roots exists (_ isSame this)
def resolve(other: Path) = if (other.isAbsolute || isEmpty) other else /(other)
def relativize(other: Path) = {
@@ -152,9 +143,8 @@ class Path private[io] (val jfile: JFile) {
Path(createRelativePath(segments, other.segments))
}
- // derived from identity
- def root: Option[Path] = roots find (this startsWith _)
def segments: List[String] = (path split separator).toList filterNot (_.length == 0)
+
/**
* @return The path of the parent directory, or root if path is already root
*/
@@ -185,10 +175,6 @@ class Path private[io] (val jfile: JFile) {
if (i < 0) ""
else name.substring(i + 1)
}
- // def extension: String = (name lastIndexOf '.') match {
- // case -1 => ""
- // case idx => name drop (idx + 1)
- // }
// compares against extensions in a CASE INSENSITIVE way.
def hasExtension(ext: String, exts: String*) = {
val lower = extension.toLowerCase
@@ -212,23 +198,28 @@ class Path private[io] (val jfile: JFile) {
// Boolean tests
def canRead = jfile.canRead()
def canWrite = jfile.canWrite()
- def exists = jfile.exists()
- def notExists = try !jfile.exists() catch { case ex: SecurityException => false }
+ def exists = {
+ if (Statistics.canEnable) Statistics.incCounter(IOStats.fileExistsCount)
+ try jfile.exists() catch { case ex: SecurityException => false }
+ }
- def isFile = jfile.isFile()
- def isDirectory = jfile.isDirectory()
+ def isFile = {
+ if (Statistics.canEnable) Statistics.incCounter(IOStats.fileIsFileCount)
+ try jfile.isFile() catch { case ex: SecurityException => false }
+ }
+ def isDirectory = {
+ if (Statistics.canEnable) Statistics.incCounter(IOStats.fileIsDirectoryCount)
+ try jfile.isDirectory() catch { case ex: SecurityException => jfile.getPath == "." }
+ }
def isAbsolute = jfile.isAbsolute()
- def isHidden = jfile.isHidden()
def isEmpty = path.length == 0
// Information
def lastModified = jfile.lastModified()
- def lastModified_=(time: Long) = jfile setLastModified time // should use setXXX function?
def length = jfile.length()
// Boolean path comparisons
def endsWith(other: Path) = segments endsWith other.segments
- def startsWith(other: Path) = segments startsWith other.segments
def isSame(other: Path) = toCanonical == other.toCanonical
def isFresher(other: Path) = lastModified > other.lastModified
@@ -248,7 +239,6 @@ class Path private[io] (val jfile: JFile) {
// deletions
def delete() = jfile.delete()
- def deleteIfExists() = if (jfile.exists()) delete() else false
/** Deletes the path recursively. Returns false on failure.
* Use with caution!
@@ -270,16 +260,6 @@ class Path private[io] (val jfile: JFile) {
length == 0
}
- def touch(modTime: Long = System.currentTimeMillis) = {
- createFile()
- if (isFile)
- lastModified = modTime
- }
-
- // todo
- // def copyTo(target: Path, options ...): Boolean
- // def moveTo(target: Path, options ...): Boolean
-
override def toString() = path
override def equals(other: Any) = other match {
case x: Path => path == x.path
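With the reworked Path.apply, classification happens at construction time, and a SecurityException now degrades to a plain Path rather than escaping. An illustrative use of the factory (object and file names are hypothetical):

    import scala.reflect.io.{ Directory, File, Path }

    object PathDemo {
      // The factory decides the subtype up front; under a restrictive
      // SecurityManager it falls back to a plain Path instead of letting
      // the SecurityException escape.
      def describe(s: String): String = Path(s) match {
        case f: File      => s"file of ${f.length} bytes"
        case d: Directory => s"directory with ${d.list.length} entries"
        case p            => s"neither, or not inspectable: $p"
      }
    }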
diff --git a/src/reflect/scala/reflect/io/PlainFile.scala b/src/reflect/scala/reflect/io/PlainFile.scala
index 82b0568657..8f24d84488 100644
--- a/src/reflect/scala/reflect/io/PlainFile.scala
+++ b/src/reflect/scala/reflect/io/PlainFile.scala
@@ -3,23 +3,12 @@
* @author Martin Odersky
*/
-
-package scala.reflect
+package scala
+package reflect
package io
import java.io.{ FileInputStream, FileOutputStream, IOException }
-import PartialFunction._
-/** ''Note: This library is considered experimental and should not be used unless you know what you are doing.'' */
-object PlainFile {
- /**
- * If the specified File exists, returns an abstract file backed
- * by it. Otherwise, returns null.
- */
- def fromPath(file: Path): PlainFile =
- if (file.isDirectory) new PlainDirectory(file.toDirectory)
- else if (file.isFile) new PlainFile(file)
- else null
-}
+
/** ''Note: This library is considered experimental and should not be used unless you know what you are doing.'' */
class PlainDirectory(givenPath: Directory) extends PlainFile(givenPath) {
override def isDirectory = true
@@ -28,7 +17,7 @@ class PlainDirectory(givenPath: Directory) extends PlainFile(givenPath) {
}
/** This class implements an abstract file backed by a File.
- *
+ *
* ''Note: This library is considered experimental and should not be used unless you know what you are doing.''
*/
class PlainFile(val givenPath: Path) extends AbstractFile {
@@ -54,7 +43,7 @@ class PlainFile(val givenPath: Path) extends AbstractFile {
override def sizeOption = Some(givenPath.length.toInt)
override def toString = path
- override def hashCode(): Int = fpath.hashCode
+ override def hashCode(): Int = fpath.hashCode()
override def equals(that: Any): Boolean = that match {
case x: PlainFile => fpath == x.fpath
case _ => false
@@ -68,8 +57,14 @@ class PlainFile(val givenPath: Path) extends AbstractFile {
/** Returns all abstract subfiles of this abstract directory. */
def iterator: Iterator[AbstractFile] = {
+ // Optimization: Assume that the file was not deleted and did not have permissions changed
+ // between the call to `list` and the iteration. This saves a call to `exists`.
+ def existsFast(path: Path) = path match {
+ case (_: Directory | _: io.File) => true
+ case _ => path.exists
+ }
if (!isDirectory) Iterator.empty
- else givenPath.toDirectory.list filter (_.exists) map (new PlainFile(_))
+ else givenPath.toDirectory.list filter existsFast map (new PlainFile(_))
}
/**
@@ -77,10 +72,6 @@ class PlainFile(val givenPath: Path) extends AbstractFile {
* specified name. If there is no such file, returns null. The
* argument "directory" tells whether to look for a directory or
* or a regular file.
- *
- * @param name ...
- * @param directory ...
- * @return ...
*/
def lookupName(name: String, directory: Boolean): AbstractFile = {
val child = givenPath / name
diff --git a/src/reflect/scala/reflect/io/Streamable.scala b/src/reflect/scala/reflect/io/Streamable.scala
index 61ec8a4c23..aa47947672 100644
--- a/src/reflect/scala/reflect/io/Streamable.scala
+++ b/src/reflect/scala/reflect/io/Streamable.scala
@@ -3,7 +3,8 @@
* @author Paul Phillips
*/
-package scala.reflect
+package scala
+package reflect
package io
import java.net.{ URI, URL }
@@ -17,14 +18,14 @@ import Path.fail
*
* @author Paul Phillips
* @since 2.8
- *
+ *
* ''Note: This library is considered experimental and should not be used unless you know what you are doing.''
*/
object Streamable {
/** Traits which can be viewed as a sequence of bytes. Source types
* which know their length should override def length: Long for more
* efficient method implementations.
- *
+ *
* ''Note: This library is considered experimental and should not be used unless you know what you are doing.''
*/
trait Bytes {
@@ -69,7 +70,7 @@ object Streamable {
}
/** For objects which can be viewed as Chars.
- *
+ *
* ''Note: This library is considered experimental and should not be used unless you know what you are doing.''
*/
trait Chars extends Bytes {
@@ -81,7 +82,6 @@ object Streamable {
*/
def creationCodec: Codec = implicitly[Codec]
- def chars(): BufferedSource = chars(creationCodec)
def chars(codec: Codec): BufferedSource = Source.fromInputStream(inputStream())(codec)
def lines(): Iterator[String] = lines(creationCodec)
@@ -89,8 +89,7 @@ object Streamable {
/** Obtains an InputStreamReader wrapped around a FileInputStream.
*/
- def reader(): InputStreamReader = reader(creationCodec)
- def reader(codec: Codec): InputStreamReader = new InputStreamReader(inputStream, codec.charSet)
+ def reader(codec: Codec): InputStreamReader = new InputStreamReader(inputStream(), codec.charSet)
/** Wraps a BufferedReader around the result of reader().
*/
@@ -108,7 +107,10 @@ object Streamable {
/** Convenience function to import entire file into a String.
*/
def slurp(): String = slurp(creationCodec)
- def slurp(codec: Codec) = chars(codec).mkString
+ def slurp(codec: Codec) = {
+ val src = chars(codec)
+ try src.mkString finally src.close() // Always Be Closing
+ }
}
/** Call a function on something Closeable, finally closing it. */
@@ -117,7 +119,9 @@ object Streamable {
finally stream.close()
def bytes(is: => InputStream): Array[Byte] =
- (new Bytes { def inputStream() = is }).toByteArray
+ (new Bytes {
+ def inputStream() = is
+ }).toByteArray()
def slurp(is: => InputStream)(implicit codec: Codec): String =
new Chars { def inputStream() = is } slurp codec
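slurp now closes the Source it opens, and the companion helpers still take a by-name InputStream. A small usage sketch under those assumptions (the demo object is illustrative):

    import java.io.ByteArrayInputStream
    import scala.io.Codec
    import scala.reflect.io.Streamable

    object StreamableDemo {
      // slurp closes the Source it reads from; bytes fully consumes the stream.
      def demo(): Unit = {
        val text  = Streamable.slurp(new ByteArrayInputStream("hello".getBytes("UTF-8")))(Codec.UTF8)
        val bytes = Streamable.bytes(new ByteArrayInputStream(Array[Byte](1, 2, 3)))
        assert(text == "hello" && bytes.length == 3)
      }
    }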
diff --git a/src/reflect/scala/reflect/io/VirtualDirectory.scala b/src/reflect/scala/reflect/io/VirtualDirectory.scala
index 78713c2ae0..aa6ceaa09f 100644
--- a/src/reflect/scala/reflect/io/VirtualDirectory.scala
+++ b/src/reflect/scala/reflect/io/VirtualDirectory.scala
@@ -2,7 +2,8 @@
* Copyright 2005-2013 LAMP/EPFL
*/
-package scala.reflect
+package scala
+package reflect
package io
import scala.collection.mutable
@@ -11,7 +12,7 @@ import scala.collection.mutable
* An in-memory directory.
*
* @author Lex Spoon
- *
+ *
* ''Note: This library is considered experimental and should not be used unless you know what you are doing.''
*/
class VirtualDirectory(val name: String, maybeContainer: Option[VirtualDirectory])
@@ -26,22 +27,23 @@ extends AbstractFile {
def container = maybeContainer.get
def isDirectory = true
- var lastModified: Long = System.currentTimeMillis
+ override def isVirtual = true
+ val lastModified: Long = System.currentTimeMillis
override def file = null
override def input = sys.error("directories cannot be read")
override def output = sys.error("directories cannot be written")
/** Does this abstract file denote an existing file? */
- def create() { unsupported }
+ def create() { unsupported() }
/** Delete the underlying file or directory (recursively). */
- def delete() { unsupported }
+ def delete() { unsupported() }
/** Returns an abstract file with the given name. It does not
* check that it exists.
*/
- def lookupNameUnchecked(name: String, directory: Boolean): AbstractFile = unsupported
+ def lookupNameUnchecked(name: String, directory: Boolean): AbstractFile = unsupported()
private val files = mutable.Map.empty[String, AbstractFile]
@@ -53,20 +55,20 @@ extends AbstractFile {
(files get name filter (_.isDirectory == directory)).orNull
override def fileNamed(name: String): AbstractFile =
- Option(lookupName(name, false)) getOrElse {
+ Option(lookupName(name, directory = false)) getOrElse {
val newFile = new VirtualFile(name, path+'/'+name)
files(name) = newFile
newFile
}
override def subdirectoryNamed(name: String): AbstractFile =
- Option(lookupName(name, true)) getOrElse {
+ Option(lookupName(name, directory = true)) getOrElse {
val dir = new VirtualDirectory(name, Some(this))
files(name) = dir
dir
}
def clear() {
- files.clear();
+ files.clear()
}
}
diff --git a/src/reflect/scala/reflect/io/VirtualFile.scala b/src/reflect/scala/reflect/io/VirtualFile.scala
index 95f4429fad..45f38db745 100644
--- a/src/reflect/scala/reflect/io/VirtualFile.scala
+++ b/src/reflect/scala/reflect/io/VirtualFile.scala
@@ -3,18 +3,17 @@
* @author Martin Odersky
*/
-
-package scala.reflect
+package scala
+package reflect
package io
-import java.io.{ ByteArrayInputStream, ByteArrayOutputStream, InputStream, OutputStream }
-import java.io.{ File => JFile }
+import java.io.{ ByteArrayInputStream, ByteArrayOutputStream, InputStream, OutputStream, File => JFile }
/** This class implements an in-memory file.
*
* @author Philippe Altherr
* @version 1.0, 23/03/2004
- *
+ *
* ''Note: This library is considered experimental and should not be used unless you know what you are doing.''
*/
class VirtualFile(val name: String, override val path: String) extends AbstractFile {
@@ -33,20 +32,16 @@ class VirtualFile(val name: String, override val path: String) extends AbstractF
case _ => false
}
- //########################################################################
- // Private data
private var content = Array.emptyByteArray
- //########################################################################
- // Public Methods
def absolute = this
/** Returns null. */
- final def file: JFile = null
+ def file: JFile = null
- override def sizeOption: Option[Int] = Some(content.size)
+ override def sizeOption: Option[Int] = Some(content.length)
- def input : InputStream = new ByteArrayInputStream(content);
+ def input : InputStream = new ByteArrayInputStream(content)
override def output: OutputStream = {
new ByteArrayOutputStream() {
@@ -62,10 +57,16 @@ class VirtualFile(val name: String, override val path: String) extends AbstractF
/** Is this abstract file a directory? */
def isDirectory: Boolean = false
+ /** @inheritdoc */
+ override def isVirtual: Boolean = true
+
+ // private var _lastModified: Long = 0
+ // _lastModified
+
/** Returns the time that this abstract file was last modified. */
- private var _lastModified: Long = 0
- def lastModified: Long = _lastModified
- def lastModified_=(x: Long) = _lastModified = x
+ // !!! Except it doesn't - it's private and never set - so I replaced it
+ // with constant 0 to save the field.
+ def lastModified: Long = 0
/** Returns all abstract subfiles of this abstract directory. */
def iterator: Iterator[AbstractFile] = {
@@ -74,20 +75,16 @@ class VirtualFile(val name: String, override val path: String) extends AbstractF
}
/** Does this abstract file denote an existing file? */
- def create() { unsupported }
+ def create() { unsupported() }
/** Delete the underlying file or directory (recursively). */
- def delete() { unsupported }
+ def delete() { unsupported() }
/**
* Returns the abstract file in this abstract directory with the
* specified name. If there is no such file, returns null. The
* argument "directory" tells whether to look for a directory or
* or a regular file.
- *
- * @param name ...
- * @param directory ...
- * @return ...
*/
def lookupName(name: String, directory: Boolean): AbstractFile = {
assert(isDirectory, "not a directory '" + this + "'")
@@ -97,7 +94,5 @@ class VirtualFile(val name: String, override val path: String) extends AbstractF
/** Returns an abstract file with the given name. It does not
* check that it exists.
*/
- def lookupNameUnchecked(name: String, directory: Boolean) = unsupported
-
- //########################################################################
+ def lookupNameUnchecked(name: String, directory: Boolean) = unsupported()
}
diff --git a/src/reflect/scala/reflect/io/ZipArchive.scala b/src/reflect/scala/reflect/io/ZipArchive.scala
index 3b57721e89..eabf1dcbab 100644
--- a/src/reflect/scala/reflect/io/ZipArchive.scala
+++ b/src/reflect/scala/reflect/io/ZipArchive.scala
@@ -3,14 +3,17 @@
* @author Paul Phillips
*/
-package scala.reflect
+package scala
+package reflect
package io
import java.net.URL
-import java.io.{ IOException, InputStream, ByteArrayInputStream }
+import java.io.{ IOException, InputStream, ByteArrayInputStream, FilterInputStream }
import java.io.{ File => JFile }
import java.util.zip.{ ZipEntry, ZipFile, ZipInputStream }
+import java.util.jar.Manifest
import scala.collection.{ immutable, mutable }
+import scala.collection.convert.WrapAsScala.asScalaIterator
import scala.annotation.tailrec
/** An abstraction for zip files and streams. Everything is written the way
@@ -20,13 +23,10 @@ import scala.annotation.tailrec
* @author Philippe Altherr (original version)
* @author Paul Phillips (this one)
* @version 2.0,
- *
+ *
* ''Note: This library is considered experimental and should not be used unless you know what you are doing.''
*/
object ZipArchive {
- def fromPath(path: String): FileZipArchive = fromFile(new JFile(path))
- def fromPath(path: Path): FileZipArchive = fromFile(path.toFile)
-
/**
* @param file a File
* @return A ZipArchive if `file` is a readable zip file, otherwise null.
@@ -41,10 +41,11 @@ object ZipArchive {
* @return A ZipArchive backed by the given url.
*/
def fromURL(url: URL): URLZipArchive = new URLZipArchive(url)
- def fromURL(url: String): URLZipArchive = fromURL(new URL(url))
- private def dirName(path: String) = splitPath(path, true)
- private def baseName(path: String) = splitPath(path, false)
+ def fromManifestURL(url: URL): AbstractFile = new ManifestResources(url)
+
+ private def dirName(path: String) = splitPath(path, front = true)
+ private def baseName(path: String) = splitPath(path, front = false)
private def splitPath(path0: String, front: Boolean): String = {
val isDir = path0.charAt(path0.length - 1) == '/'
val path = if (isDir) path0.substring(0, path0.length - 1) else path0
@@ -65,13 +66,13 @@ abstract class ZipArchive(override val file: JFile) extends AbstractFile with Eq
override def underlyingSource = Some(this)
def isDirectory = true
- def lookupName(name: String, directory: Boolean) = unsupported
- def lookupNameUnchecked(name: String, directory: Boolean) = unsupported
- def create() = unsupported
- def delete() = unsupported
- def output = unsupported
- def container = unsupported
- def absolute = unsupported
+ def lookupName(name: String, directory: Boolean) = unsupported()
+ def lookupNameUnchecked(name: String, directory: Boolean) = unsupported()
+ def create() = unsupported()
+ def delete() = unsupported()
+ def output = unsupported()
+ def container = unsupported()
+ def absolute = unsupported()
private def walkIterator(its: Iterator[AbstractFile]): Iterator[AbstractFile] = {
its flatMap { f =>
@@ -79,7 +80,6 @@ abstract class ZipArchive(override val file: JFile) extends AbstractFile with Eq
else Iterator(f)
}
}
- def deepIterator = walkIterator(iterator)
/** ''Note: This library is considered experimental and should not be used unless you know what you are doing.'' */
sealed abstract class Entry(path: String) extends VirtualFile(baseName(path), path) {
// have to keep this name for compat with sbt's compiler-interface
@@ -232,3 +232,63 @@ final class URLZipArchive(val url: URL) extends ZipArchive(null) {
case _ => false
}
}
+
+final class ManifestResources(val url: URL) extends ZipArchive(null) {
+ def iterator = {
+ val root = new DirEntry("/")
+ val dirs = mutable.HashMap[String, DirEntry]("/" -> root)
+ val manifest = new Manifest(input)
+ val iter = manifest.getEntries().keySet().iterator().filter(_.endsWith(".class")).map(new ZipEntry(_))
+
+ while (iter.hasNext) {
+ val zipEntry = iter.next()
+ val dir = getDir(dirs, zipEntry)
+ if (zipEntry.isDirectory) dir
+ else {
+ class FileEntry() extends Entry(zipEntry.getName) {
+ override def lastModified = zipEntry.getTime()
+ override def input = resourceInputStream(path)
+ override def sizeOption = None
+ }
+ val f = new FileEntry()
+ dir.entries(f.name) = f
+ }
+ }
+
+ try root.iterator
+ finally dirs.clear()
+ }
+
+ def name = path
+ def path: String = {
+ val s = url.getPath
+ val n = s.lastIndexOf('!')
+ s.substring(0, n)
+ }
+ def input = url.openStream()
+ def lastModified =
+ try url.openConnection().getLastModified()
+ catch { case _: IOException => 0 }
+
+ override def canEqual(other: Any) = other.isInstanceOf[ManifestResources]
+ override def hashCode() = url.hashCode
+ override def equals(that: Any) = that match {
+ case x: ManifestResources => url == x.url
+ case _ => false
+ }
+
+ private def resourceInputStream(path: String): InputStream = {
+ new FilterInputStream(null) {
+ override def read(): Int = {
+ if(in == null) in = Thread.currentThread().getContextClassLoader().getResourceAsStream(path);
+ if(in == null) throw new RuntimeException(path + " not found")
+ super.read();
+ }
+
+ override def close(): Unit = {
+ super.close();
+ in = null;
+ }
+ }
+ }
+}
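ManifestResources backs the new AbstractFile.getResources entry point: only the .class entries named in the manifest's per-entry sections are exposed, and their bytes are fetched lazily through the context class loader. A hedged usage sketch (the manifest location and demo object are illustrative):

    import java.net.URL
    import scala.reflect.io.AbstractFile

    object ManifestResourcesDemo {
      def main(args: Array[String]): Unit = {
        // Hypothetical manifest URL; getResources wraps it in a ManifestResources archive.
        val url: URL = getClass.getClassLoader.getResource("META-INF/MANIFEST.MF")
        if (url != null) {
          val root = AbstractFile.getResources(url)
          // Entry bytes are only loaded when `input` is read.
          root.iterator take 5 foreach (f => println(f.path))
        }
      }
    }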
diff --git a/src/reflect/scala/reflect/macros/Aliases.scala b/src/reflect/scala/reflect/macros/Aliases.scala
index 92d76f4370..9e05f343e6 100644
--- a/src/reflect/scala/reflect/macros/Aliases.scala
+++ b/src/reflect/scala/reflect/macros/Aliases.scala
@@ -1,4 +1,5 @@
-package scala.reflect
+package scala
+package reflect
package macros
/**
diff --git a/src/reflect/scala/reflect/macros/Attachments.scala b/src/reflect/scala/reflect/macros/Attachments.scala
index 007df3b6e2..c1ab269268 100644
--- a/src/reflect/scala/reflect/macros/Attachments.scala
+++ b/src/reflect/scala/reflect/macros/Attachments.scala
@@ -1,4 +1,5 @@
-package scala.reflect
+package scala
+package reflect
package macros
/**
@@ -56,8 +57,6 @@ abstract class Attachments { self =>
// SI-7018: This used to be an inner class of `Attachments`, but that led to a memory leak in the
// IDE via $outer pointers.
-// Forward compatibility note: This class used to be Attachments$NonemptyAttachments.
-// However it's private, therefore it transcends the compatibility policy for 2.10.x.
private final class NonemptyAttachments[P >: Null](override val pos: P, override val all: Set[Any]) extends Attachments {
type Pos = P
def withPos(newPos: Pos) = new NonemptyAttachments(newPos, all)
diff --git a/src/reflect/scala/reflect/macros/Context.scala b/src/reflect/scala/reflect/macros/Context.scala
index aa1c1db227..434b7c1b9c 100644
--- a/src/reflect/scala/reflect/macros/Context.scala
+++ b/src/reflect/scala/reflect/macros/Context.scala
@@ -1,4 +1,5 @@
-package scala.reflect
+package scala
+package reflect
package macros
// todo. introduce context hierarchy
@@ -36,7 +37,8 @@ trait Context extends Aliases
with Typers
with Parsers
with Evals
- with ExprUtils {
+ with ExprUtils
+ with Synthetics {
/** The compile-time universe. */
val universe: Universe
@@ -51,7 +53,7 @@ trait Context extends Aliases
/** The prefix tree from which the macro is selected.
*
- * For a example, for a macro `filter` defined as an instance method on a collection `Coll`,
+ * For example, for a macro `filter` defined as an instance method on a collection `Coll`,
* `prefix` represents an equivalent of `this` for normal instance methods:
*
* {{{
diff --git a/src/reflect/scala/reflect/macros/Enclosures.scala b/src/reflect/scala/reflect/macros/Enclosures.scala
index a4ad71c348..d6ba5f39cd 100644
--- a/src/reflect/scala/reflect/macros/Enclosures.scala
+++ b/src/reflect/scala/reflect/macros/Enclosures.scala
@@ -1,6 +1,9 @@
-package scala.reflect
+package scala
+package reflect
package macros
+import scala.language.existentials // SI-6541
+
/**
* <span class="badge badge-red" style="float: right;">EXPERIMENTAL</span>
*
@@ -15,19 +18,32 @@ trait Enclosures {
/** The tree that undergoes macro expansion.
* Can be useful to get an offset or a range position of the entire tree being processed.
*/
- val macroApplication: Tree
+ def macroApplication: Tree
+
+ /** The semantic role that `macroApplication` plays in the code.
+ */
+ type MacroRole
+
+ /** The role that represents an application of a term macro,
+ * e.g. `M(2)(3)` in `val x = M(2)(3)` or `M(a, b)` in `x match { case x @ M(a, b) => }`.
+ */
+ def APPLY_ROLE: MacroRole
+
+ /** The semantic role that `macroApplication` plays in the code.
+ */
+ def macroRole: MacroRole
/** Contexts that represent macros in-flight, including the current one. Very much like a stack trace, but for macros only.
* Can be useful for interoperating with other macros and for imposing compiler-friendly limits on macro expansion.
*
* Is also priceless for emitting sane error messages for macros that are called by other macros on synthetic (i.e. position-less) trees.
- * In that dire case navigate the ``enclosingMacros'' stack, and it will most likely contain at least one macro with a position-ful macro application.
- * See ``enclosingPosition'' for a default implementation of this logic.
+ * In that dire case navigate the `enclosingMacros` stack, and it will most likely contain at least one macro with a position-ful macro application.
+ * See `enclosingPosition` for a default implementation of this logic.
*
* Unlike `openMacros`, this is a val, which means that it gets initialized when the context is created
* and always stays the same regardless of whatever happens during macro expansion.
*/
- val enclosingMacros: List[Context]
+ def enclosingMacros: List[Context]
/** Information about one of the currently considered implicit candidates.
* Candidates are used in plural form, because implicit parameters may themselves have implicit parameters,
@@ -39,28 +55,56 @@ trait Enclosures {
* Unlike `openImplicits`, this is a val, which means that it gets initialized when the context is created
* and always stays the same regardless of whatever happens during macro expansion.
*/
- val enclosingImplicits: List[(Type, Tree)]
+ def enclosingImplicits: List[ImplicitCandidate]
/** Tries to guess a position for the enclosing application.
- * But that is simple, right? Just dereference ``pos'' of ``macroApplication''? Not really.
+ * But that is simple, right? Just dereference `pos` of `macroApplication`? Not really.
* If we're in a synthetic macro expansion (no positions), we must do our best to infer the position of something that triggered this expansion.
- * Surprisingly, quite often we can do this by navigation the ``enclosingMacros'' stack.
+ * Surprisingly, quite often we can do this by navigating the `enclosingMacros` stack.
*/
- val enclosingPosition: Position
+ def enclosingPosition: Position
/** Tree that corresponds to the enclosing method, or EmptyTree if not applicable.
*/
- val enclosingMethod: Tree
+ @deprecated("Use enclosingDef instead, but be wary of changes in semantics", "2.10.1")
+ def enclosingMethod: Tree
/** Tree that corresponds to the enclosing class, or EmptyTree if not applicable.
*/
- val enclosingClass: Tree
+ @deprecated("Use enclosingImpl instead, but be wary of changes in semantics", "2.10.1")
+ def enclosingClass: Tree
+
+ /** Tree that corresponds to the enclosing DefDef tree.
+ * Throws `EnclosureException` if there's no such enclosing tree.
+ */
+ def enclosingDef: universe.DefDef
+
+ /** Tree that corresponds to the enclosing Template tree.
+ * Throws `EnclosureException` if there's no such enclosing tree.
+ */
+ def enclosingTemplate: universe.Template
+
+ /** Tree that corresponds to the enclosing ImplDef tree (i.e. either ClassDef or ModuleDef).
+ * Throws `EnclosureException` if there's no such enclosing tree.
+ */
+ def enclosingImpl: universe.ImplDef
+
+ /** Tree that corresponds to the enclosing PackageDef tree.
+ * Throws `EnclosureException` if there's no such enclosing tree.
+ */
+ def enclosingPackage: universe.PackageDef
/** Compilation unit that contains this macro application.
*/
- val enclosingUnit: CompilationUnit
+ def enclosingUnit: CompilationUnit
/** Compilation run that contains this macro application.
*/
- val enclosingRun: Run
-}
\ No newline at end of file
+ def enclosingRun: Run
+
+ /** Indicates that one of the enclosure methods failed to find a tree
+ * of the required type among the enclosing trees.
+ */
+ case class EnclosureException(expected: Class[_], enclosingTrees: List[Tree])
+ extends Exception(s"Couldn't find a tree of type $expected among enclosing trees $enclosingTrees")
+}
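The new enclosingDef/enclosingImpl family throws EnclosureException instead of answering with EmptyTree. An illustrative macro implementation handling that case (the object, method name, and string result are made up for the example):

    import scala.reflect.macros.Context

    object WhereAmI {
      // Illustrative impl side of a def macro: enclosingDef throws
      // EnclosureException when there is no enclosing DefDef, so catch it.
      def impl(c: Context): c.Expr[String] = {
        import c.universe._
        val owner =
          try c.enclosingDef.name.toString
          catch { case c.EnclosureException(expected, _) => s"no enclosing $expected" }
        c.Expr[String](Literal(Constant(owner)))
      }
    }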
diff --git a/src/reflect/scala/reflect/macros/Evals.scala b/src/reflect/scala/reflect/macros/Evals.scala
index 37680c219b..70b2ab58d4 100644
--- a/src/reflect/scala/reflect/macros/Evals.scala
+++ b/src/reflect/scala/reflect/macros/Evals.scala
@@ -1,4 +1,5 @@
-package scala.reflect
+package scala
+package reflect
package macros
/**
@@ -54,4 +55,4 @@ trait Evals {
* refers to a runtime value `x`, which is unknown at compile time.
*/
def eval[T](expr: Expr[T]): T
-}
\ No newline at end of file
+}
diff --git a/src/reflect/scala/reflect/macros/ExprUtils.scala b/src/reflect/scala/reflect/macros/ExprUtils.scala
index 458cde9692..af11bd6efc 100644
--- a/src/reflect/scala/reflect/macros/ExprUtils.scala
+++ b/src/reflect/scala/reflect/macros/ExprUtils.scala
@@ -1,4 +1,5 @@
-package scala.reflect
+package scala
+package reflect
package macros
/**
diff --git a/src/reflect/scala/reflect/macros/FrontEnds.scala b/src/reflect/scala/reflect/macros/FrontEnds.scala
index 67b24088b5..6abd8c335b 100644
--- a/src/reflect/scala/reflect/macros/FrontEnds.scala
+++ b/src/reflect/scala/reflect/macros/FrontEnds.scala
@@ -1,4 +1,5 @@
-package scala.reflect
+package scala
+package reflect
package macros
/**
@@ -44,4 +45,4 @@ trait FrontEnds {
* Use `enclosingPosition` if you're in doubt what position to pass to `pos`.
*/
def abort(pos: Position, msg: String): Nothing
-}
\ No newline at end of file
+}
diff --git a/src/reflect/scala/reflect/macros/Infrastructure.scala b/src/reflect/scala/reflect/macros/Infrastructure.scala
index 99706e84fe..eb63fb7b7f 100644
--- a/src/reflect/scala/reflect/macros/Infrastructure.scala
+++ b/src/reflect/scala/reflect/macros/Infrastructure.scala
@@ -1,4 +1,5 @@
-package scala.reflect
+package scala
+package reflect
package macros
/**
@@ -22,4 +23,4 @@ trait Infrastructure {
/** Exposes current classpath. */
def classPath: List[java.net.URL]
-}
\ No newline at end of file
+}
diff --git a/src/reflect/scala/reflect/macros/Macro.scala b/src/reflect/scala/reflect/macros/Macro.scala
new file mode 100644
index 0000000000..44bedf483d
--- /dev/null
+++ b/src/reflect/scala/reflect/macros/Macro.scala
@@ -0,0 +1,39 @@
+package scala.reflect
+package macros
+
+/**
+ * <span class="badge badge-red" style="float: right;">EXPERIMENTAL</span>
+ *
+ * Traditionally macro implementations are defined as methods,
+ * but this trait provides an alternative way of encoding macro impls as
+ * bundles, traits which extend `scala.reflect.macros.Macro`.
+ *
+ * Instead of:
+ *
+ * def impl[T: c.WeakTypeTag](c: Context)(x: c.Expr[Int]) = ...
+ *
+ * One can write:
+ *
+ * trait Impl extends Macro {
+ * def apply[T: c.WeakTypeTag](x: c.Expr[Int]) = ...
+ * }
+ *
+ * Without changing anything else at all.
+ *
+ * This language feature is useful in itself in cases when macro implementations
+ * are complex and need to be modularized. The state-of-the-art technique for addressing this need is quite heavyweight:
+ * http://docs.scala-lang.org/overviews/macros/overview.html#writing_bigger_macros.
+ *
+ * However, the utility of this approach to writing macros isn't limited to just convenience.
+ * When a macro implementation becomes not just a function, but a full-fledged module,
+ * it can define callbacks that will be called by the compiler upon interesting events.
+ * In subsequent commits I will add support for programmable type inference.
+ */
+trait Macro {
+ /** The context to be used by the macro implementation.
+ *
+ * Vanilla macro implementations have to carry it in their signatures; however, when a macro is a full-fledged module,
+ * it can define the context next to the implementation, which makes the implementation signature more lightweight.
+ */
+ val c: Context
+}
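A slightly fuller sketch of the bundle encoding described in the comment above; only the implementation side is shown, since this commit does not spell out how a `def ... = macro ...` declaration refers to a bundle (AssertImpl is an illustrative name):

    import scala.reflect.macros.{ Context, Macro }

    // Illustrative bundle: the context comes from the inherited `c`,
    // so the signature no longer has to carry it.
    trait AssertImpl extends Macro {
      import c.universe._
      def apply(cond: c.Expr[Boolean], msg: c.Expr[String]): c.Expr[Unit] =
        reify { if (!cond.splice) throw new AssertionError(msg.splice) }
    }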
diff --git a/src/reflect/scala/reflect/macros/Names.scala b/src/reflect/scala/reflect/macros/Names.scala
index 8bbaa5f848..8773175561 100644
--- a/src/reflect/scala/reflect/macros/Names.scala
+++ b/src/reflect/scala/reflect/macros/Names.scala
@@ -1,4 +1,5 @@
-package scala.reflect
+package scala
+package reflect
package macros
/**
@@ -11,13 +12,27 @@ trait Names {
self: Context =>
/** Creates a unique string. */
+ @deprecated("Use freshName instead", "2.11.0")
def fresh(): String
/** Creates a unique string having a given prefix. */
+ @deprecated("Use freshName instead", "2.11.0")
def fresh(name: String): String
/** Creates a unique name having a given name as a prefix and
* having the same flavor (term name or type name) as the given name.
*/
+ @deprecated("Use freshName instead", "2.11.0")
def fresh[NameType <: Name](name: NameType): NameType
+
+ /** Creates a unique string. */
+ def freshName(): String
+
+ /** Creates a unique string having a given prefix. */
+ def freshName(name: String): String
+
+ /** Creates a unique name having a given name as a prefix and
+ * having the same flavor (term name or type name) as the given name.
+ */
+ def freshName[NameType <: Name](name: NameType): NameType
}
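An illustrative macro implementation using the non-deprecated freshName to avoid double evaluation of its argument (the TwiceMacro object and its impl are made up for the example):

    import scala.reflect.macros.Context

    object TwiceMacro {
      // freshName gives a collision-free temporary, so the argument tree is
      // bound once and then referenced twice.
      def impl(c: Context)(x: c.Expr[Int]): c.Expr[Int] = {
        import c.universe._
        val tmp = newTermName(c.freshName("x$"))
        c.Expr[Int](Block(
          List(ValDef(Modifiers(), tmp, TypeTree(), x.tree)),
          Apply(Select(Ident(tmp), newTermName("$plus")), List(Ident(tmp)))))
      }
    }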
diff --git a/src/reflect/scala/reflect/macros/Parsers.scala b/src/reflect/scala/reflect/macros/Parsers.scala
index 93a763792c..3b25309614 100644
--- a/src/reflect/scala/reflect/macros/Parsers.scala
+++ b/src/reflect/scala/reflect/macros/Parsers.scala
@@ -1,4 +1,5 @@
-package scala.reflect
+package scala
+package reflect
package macros
/**
@@ -19,4 +20,4 @@ trait Parsers {
/** Indicates an error during [[scala.reflect.macros.Parsers#parse]].
*/
-case class ParseException(val pos: scala.reflect.api.Position, val msg: String) extends Exception(msg)
+case class ParseException(pos: scala.reflect.api.Position, msg: String) extends Exception(msg)
diff --git a/src/reflect/scala/reflect/macros/Reifiers.scala b/src/reflect/scala/reflect/macros/Reifiers.scala
index 3db7b9af02..6ebd2db730 100644
--- a/src/reflect/scala/reflect/macros/Reifiers.scala
+++ b/src/reflect/scala/reflect/macros/Reifiers.scala
@@ -1,4 +1,5 @@
-package scala.reflect
+package scala
+package reflect
package macros
/**
@@ -11,16 +12,16 @@ trait Reifiers {
self: Context =>
/** Given a tree, generate a tree that when compiled and executed produces the original tree.
- * For more information and examples see the documentation for ``Universe.reify''.
+ * For more information and examples see the documentation for `Universe.reify`.
*
- * The produced tree will be bound to the specified ``universe'' and ``mirror''.
- * Possible values for ``universe'' include ``universe.treeBuild.mkRuntimeUniverseRef''.
- * Possible values for ``mirror'' include ``EmptyTree'' (in that case the reifier will automatically pick an appropriate mirror).
+ * The produced tree will be bound to the specified `universe` and `mirror`.
+ * Possible values for `universe` include `universe.treeBuild.mkRuntimeUniverseRef`.
+ * Possible values for `mirror` include `EmptyTree` (in that case the reifier will automatically pick an appropriate mirror).
*
- * This function is deeply connected to ``Universe.reify'', a macro that reifies arbitrary expressions into runtime trees.
- * They do very similar things (``Universe.reify'' calls ``Context.reifyTree'' to implement itself), but they operate on different metalevels (see below).
+ * This function is deeply connected to `Universe.reify`, a macro that reifies arbitrary expressions into runtime trees.
+ * They do very similar things (`Universe.reify` calls `Context.reifyTree` to implement itself), but they operate on different metalevels (see below).
*
- * Let's study the differences between ``Context.reifyTree'' and ``Universe.reify'' on an example of using them inside a ``fooMacro'' macro:
+ * Let's study the differences between `Context.reifyTree` and `Universe.reify` on an example of using them inside a `fooMacro` macro:
*
* * Since reify itself is a macro, it will be executed when fooMacro is being compiled (metalevel -1)
* and will produce a tree that when evaluated during macro expansion of fooMacro (metalevel 0) will recreate the input tree.
@@ -39,7 +40,7 @@ trait Reifiers {
* * The result of compiling and running the result of reify will be bound to the Universe that called reify.
* This is possible because it's a macro, so it can generate whatever code it wishes.
*
- * * The result of compiling and running the result of reifyTree will be the ``prefix'' that needs to be passed explicitly.
+ * * The result of compiling and running the result of reifyTree will be the `prefix` that needs to be passed explicitly.
* This happens because the Universe of the evaluated result is from a different metalevel than the Context that called reify.
*
* Typical usage of this function is to retain some of the trees received/created by a macro
@@ -48,13 +49,13 @@ trait Reifiers {
def reifyTree(universe: Tree, mirror: Tree, tree: Tree): Tree
/** Given a type, generate a tree that when compiled and executed produces the original type.
- * The produced tree will be bound to the specified ``universe'' and ``mirror''.
- * For more information and examples see the documentation for ``Context.reifyTree'' and ``Universe.reify''.
+ * The produced tree will be bound to the specified `universe` and `mirror`.
+ * For more information and examples see the documentation for `Context.reifyTree` and `Universe.reify`.
*/
def reifyType(universe: Tree, mirror: Tree, tpe: Type, concrete: Boolean = false): Tree
/** Given a type, generate a tree that when compiled and executed produces the runtime class of the original type.
- * If ``concrete'' is true, then this function will bail on types, who refer to abstract types (like `ClassTag` does).
+ * If `concrete` is true, then this function will bail on types which refer to abstract types (like `ClassTag` does).
*/
def reifyRuntimeClass(tpe: Type, concrete: Boolean = true): Tree
@@ -86,10 +87,10 @@ trait Reifiers {
* Such errors represent one of the standard ways for reification to go wrong, e.g.
* an attempt to create a `TypeTag` from a weak type.
*/
-case class ReificationException(val pos: scala.reflect.api.Position, val msg: String) extends Exception(msg)
+case class ReificationException(pos: scala.reflect.api.Position, msg: String) extends Exception(msg)
/** Indicates an unexpected error during one of the `reifyXXX` methods in [[scala.reflect.macros.Reifiers]].
* Such errors wrap random crashes in reification logic and are distinguished from expected [[scala.reflect.macros.ReificationException]]s
* so that the latter can be reported as compilation errors, while the former manifest themselves as compiler crashes.
*/
-case class UnexpectedReificationException(val pos: scala.reflect.api.Position, val msg: String, val cause: Throwable = null) extends Exception(msg, cause)
+case class UnexpectedReificationException(pos: scala.reflect.api.Position, msg: String, cause: Throwable = null) extends Exception(msg, cause)
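
The metalevel discussion above is easiest to see in a concrete macro. The sketch below is illustrative only: `ReifyDemo`/`persist` are made-up names, the `scala.reflect.macros.Context` of this era is assumed, the TreeBuilder instance is assumed to be reachable as `treeBuild` on the macro universe, and passing `EmptyTree` for the mirror argument is assumed to select the default mirror.

    import scala.language.experimental.macros
    import scala.reflect.macros.Context

    object ReifyDemo {
      // Hypothetical macro that retains its argument's AST for runtime use.
      def persist(x: Any): Any = macro persistImpl

      def persistImpl(c: Context)(x: c.Expr[Any]): c.Expr[Any] = {
        import c.universe._
        // reify was already expanded when persistImpl was compiled (metalevel -1);
        // at expansion time (metalevel 0) it yields an Expr bound to c.universe:
        val greeting: c.Expr[Unit] = reify { println("hello from the expandee") }

        // reifyTree instead returns a tree that, once compiled into the expandee
        // and executed at runtime, rebuilds x.tree against the universe passed
        // here as an explicit prefix (scala.reflect.runtime.universe):
        val rebuilt: Tree = c.reifyTree(treeBuild.mkRuntimeUniverseRef, EmptyTree, x.tree)

        c.Expr[Any](Block(List(greeting.tree), rebuilt))
      }
    }
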
diff --git a/src/reflect/scala/reflect/macros/Synthetics.scala b/src/reflect/scala/reflect/macros/Synthetics.scala
new file mode 100644
index 0000000000..5e422ee89f
--- /dev/null
+++ b/src/reflect/scala/reflect/macros/Synthetics.scala
@@ -0,0 +1,107 @@
+package scala
+package reflect
+package macros
+
+/**
+ * <span class="badge badge-red" style="float: right;">EXPERIMENTAL</span>
+ *
+ * A slice of [[scala.reflect.macros.Context the Scala macros context]] that
+ * exposes functions to introduce synthetic definitions.
+ *
+ * @define TOPLEVEL_TREE A top-level tree is a tree that represents a non-inner class or object in one of the currently compiled source files.
+ * Note that top-level isn't equivalent to [[scala.reflect.api.Symbols#SymbolApi.isStatic]],
+ * because static also covers definitions nested in static objects.
+ *
+ * @define INTRODUCE_TOP_LEVEL Allowed definitions include classes (represented by `ClassDef` trees), traits (represented
+ * by `ClassDef` trees having the `TRAIT` flag set in `mods`) and objects (represented by `ModuleDef` trees).
+ *
+ * The definitions are put into the package with a prototype provided in `packagePrototype`.
+ * Supported prototypes are (see [[PackageSpec]] for more details):
+ * * Strings and names representing a fully-qualified name of the package
+ * * Trees that can work as package ids
+ * * Package or package class symbols
+ *
+ * A typical value for a package prototype is a fully-qualified name in a string.
+ * For example, to generate a class available at `foo.bar.Test`, call this method as follows:
+ *
+ * introduceTopLevel("foo.bar", ClassDef(<mods>, TypeName("Test"), <tparams>, <template>))
+ *
+ * It is possible to add definitions to the empty package by using `nme.EMPTY_PACKAGE_NAME.toString`, but
+ * that's not recommended, since such definitions cannot be seen from outside the empty package.
+ *
+ * Only the multi-parameter overload of this method can be used to introduce companions.
+ * If companions are introduced by two different calls, then they will be put into different virtual files, and `scalac`
+ * will show an error about companions being defined in different files. By the way, this also means that there's currently no way
+ * to define a companion for an existing class or module.
+ */
+trait Synthetics {
+ self: Context =>
+
+ import universe._
+
+ /** Looks up a top-level definition tree with a given fully-qualified name
+ * (term name for modules, type name for classes). $TOPLEVEL_TREE.
+ * If such a tree does not exist, returns `EmptyTree`.
+ */
+ def topLevelDef(name: Name): Tree
+
+ /** Returns a reference to a top-level definition tree with a given fully-qualified name
+ * (term name for modules, type name for classes). $TOPLEVEL_TREE.
+ * If such a tree does not exist, returns `EmptyTree`.
+ */
+ def topLevelRef(name: Name): Tree
+
+ /** Adds a top-level definition to the compiler's symbol table. $INTRODUCE_TOP_LEVEL.
+ *
+ * Returns a fully-qualified reference to the introduced definition.
+ */
+ def introduceTopLevel[T: PackageSpec](packagePrototype: T, definition: ImplDef): RefTree
+
+ /** Adds a list of top-level definitions to the compiler's symbol table. $INTRODUCE_TOP_LEVEL.
+ *
+ * Returns a list of fully-qualified references to the introduced definitions.
+ */
+ def introduceTopLevel[T: PackageSpec](packagePrototype: T, definitions: ImplDef*): List[RefTree]
+
+ /** A factory which can create a package def from a prototype and a list of declarations.
+ */
+ trait PackageSpec[T] { def mkPackageDef(prototype: T, stats: List[Tree]): PackageDef }
+
+ /** Hosts supported package specs.
+ */
+ object PackageSpec {
+ /** Package def can be created from a fully-qualified name and a list of definitions.
+ * The name is converted into an Ident or a chain of Selects.
+ */
+ implicit val stringIsPackageSpec = new PackageSpec[String] {
+ def mkPackageDef(prototype: String, stats: List[Tree]): PackageDef = self.mkPackageDef(prototype, stats)
+ }
+
+ /** Package def can be created from a fully-qualified term name and a list of definitions.
+ * The name is converted into an Ident or a chain of Selects.
+ */
+ implicit val termNameIsPackageSpec = new PackageSpec[TermName] {
+ def mkPackageDef(prototype: TermName, stats: List[Tree]): PackageDef = self.mkPackageDef(prototype, stats)
+ }
+
+ /** Package def can be created from a package id tree and a list of definitions.
+ * If the tree is not a valid package id, i.e. is not a term-name ident or a chain of term-name selects,
+ * then the produced PackageDef will fail compilation at some point in the future.
+ */
+ implicit val refTreeIsPackageSpec = new PackageSpec[RefTree] {
+ def mkPackageDef(prototype: RefTree, stats: List[Tree]): PackageDef = self.mkPackageDef(prototype, stats)
+ }
+
+ /** Package def can be created from a package/package class symbol and a list of definitions.
+ * If the provided symbol is not a package symbol or a package class symbol, package construction will throw an exception.
+ */
+ implicit val SymbolIsPackageSpec = new PackageSpec[Symbol] {
+ def mkPackageDef(prototype: Symbol, stats: List[Tree]): PackageDef = self.mkPackageDef(prototype, stats)
+ }
+ }
+
+ protected def mkPackageDef(name: String, stats: List[Tree]): PackageDef
+ protected def mkPackageDef(name: TermName, stats: List[Tree]): PackageDef
+ protected def mkPackageDef(tree: RefTree, stats: List[Tree]): PackageDef
+ protected def mkPackageDef(sym: Symbol, stats: List[Tree]): PackageDef
+}
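
A minimal sketch of driving introduceTopLevel from a def macro. Everything below is made up (`SyntheticsDemo`, `materialize`, the generated `foo.bar.Generated` class); the name constructors follow the `TypeName("Test")` style used in the doc comment above, and the hand-rolled primary constructor is the usual untyped-tree shape rather than anything this file prescribes.

    import scala.language.experimental.macros
    import scala.reflect.macros.Context

    object SyntheticsDemo {
      def materialize: Any = macro materializeImpl

      def materializeImpl(c: Context): c.Expr[Any] = {
        import c.universe._
        // Roughly "class Generated { def answer = 42 }", assembled by hand.
        val ctor = DefDef(Modifiers(NoFlags), nme.CONSTRUCTOR, Nil, List(Nil), TypeTree(),
          Block(List(Apply(Select(Super(This(tpnme.EMPTY), tpnme.EMPTY), nme.CONSTRUCTOR), Nil)),
                Literal(Constant(()))))
        val answer = DefDef(Modifiers(NoFlags), TermName("answer"), Nil, Nil, TypeTree(), Literal(Constant(42)))
        val cls = ClassDef(Modifiers(NoFlags), TypeName("Generated"), Nil,
          Template(List(Ident(TypeName("AnyRef"))), emptyValDef, List(ctor, answer)))

        // The String prototype goes through stringIsPackageSpec; the result is a
        // fully-qualified reference such as foo.bar.Generated.
        val ref: RefTree = c.introduceTopLevel("foo.bar", cls)
        c.Expr[Any](Apply(Select(New(ref), nme.CONSTRUCTOR), Nil)) // new foo.bar.Generated()
      }
    }

Since the generated class lives in its own virtual file, a companion for it would have to be introduced in the same multi-parameter call, per the note above.
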
diff --git a/src/reflect/scala/reflect/macros/TreeBuilder.scala b/src/reflect/scala/reflect/macros/TreeBuilder.scala
index 204dc40858..427b4f70d1 100644
--- a/src/reflect/scala/reflect/macros/TreeBuilder.scala
+++ b/src/reflect/scala/reflect/macros/TreeBuilder.scala
@@ -1,4 +1,5 @@
-package scala.reflect
+package scala
+package reflect
package macros
/**
@@ -11,7 +12,6 @@ abstract class TreeBuilder {
val global: Universe
import global._
- import definitions._
/** Builds a reference to a value whose type is the given stable prefix.
* The type must be suitable for this. For example, it
@@ -28,19 +28,25 @@ abstract class TreeBuilder {
def mkAttributedQualifier(tpe: Type, termSym: Symbol): Tree
/** Builds a typed reference to given symbol with given stable prefix. */
- def mkAttributedRef(pre: Type, sym: Symbol): Tree
+ def mkAttributedRef(pre: Type, sym: Symbol): RefTree
/** Builds a typed reference to given symbol. */
- def mkAttributedRef(sym: Symbol): Tree
+ def mkAttributedRef(sym: Symbol): RefTree
+
+ /** Builds an untyped reference to given symbol. Requires the symbol to be static. */
+ def mkUnattributedRef(sym: Symbol): RefTree
+
+ /** Builds an untyped reference to symbol with given name. Requires the symbol to be static. */
+ def mkUnattributedRef(fullName: Name): RefTree
/** Builds a typed This reference to given symbol. */
- def mkAttributedThis(sym: Symbol): Tree
+ def mkAttributedThis(sym: Symbol): This
/** Builds a typed Ident with an underlying symbol. */
- def mkAttributedIdent(sym: Symbol): Tree
+ def mkAttributedIdent(sym: Symbol): RefTree
/** Builds a typed Select with an underlying symbol. */
- def mkAttributedSelect(qual: Tree, sym: Symbol): Tree
+ def mkAttributedSelect(qual: Tree, sym: Symbol): RefTree
/** A creator for method calls, e.g. fn[T1, T2, ...](v1, v2, ...)
* There are a number of variations.
@@ -67,6 +73,6 @@ abstract class TreeBuilder {
def mkNullaryCall(method: Symbol, targs: List[Type]): Tree
- /** A tree that refers to the runtime reflexive universe, ``scala.reflect.runtime.universe''. */
+ /** A tree that refers to the runtime reflective universe, `scala.reflect.runtime.universe`. */
def mkRuntimeUniverseRef: Tree
}
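
For illustration, a tiny macro that uses this builder; `BuilderDemo`/`hello` are invented names, and the builder instance is assumed to be exposed as `treeBuild` on the macro universe, as in the macro API of this era.

    import scala.language.experimental.macros
    import scala.reflect.macros.Context

    object BuilderDemo {
      def hello: Unit = macro helloImpl

      def helloImpl(c: Context): c.Expr[Unit] = {
        import c.universe._
        // mkAttributedRef now returns a RefTree rather than a bare Tree, so the
        // result can be used wherever an Ident/Select is expected, without casts:
        val predef: RefTree = treeBuild.mkAttributedRef(definitions.PredefModule)
        c.Expr[Unit](Apply(Select(predef, newTermName("println")),
                           List(Literal(Constant("hello from TreeBuilder")))))
      }
    }
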
diff --git a/src/reflect/scala/reflect/macros/Typers.scala b/src/reflect/scala/reflect/macros/Typers.scala
index d36636a6d2..d7aec9b3ef 100644
--- a/src/reflect/scala/reflect/macros/Typers.scala
+++ b/src/reflect/scala/reflect/macros/Typers.scala
@@ -1,4 +1,5 @@
-package scala.reflect
+package scala
+package reflect
package macros
/**
@@ -10,8 +11,6 @@ package macros
trait Typers {
self: Context =>
- import universe._
-
/** Contexts that represent macros in-flight, including the current one. Very much like a stack trace, but for macros only.
* Can be useful for interoperating with other macros and for imposing compiler-friendly limits on macro expansion.
*
@@ -28,13 +27,22 @@ trait Typers {
* Candidates are used in plural form, because implicit parameters may themselves have implicit parameters,
* hence implicit searches can recursively trigger other implicit searches.
*
+ * `pre` and `sym` provide information about the candidate itself.
+ * `pt` and `tree` store the parameters of the implicit search the candidate is participating in.
+ */
+ case class ImplicitCandidate(pre: Type, sym: Symbol, pt: Type, tree: Tree)
+
+ /** Information about one of the currently considered implicit candidates.
+ * Candidates are used in plural form, because implicit parameters may themselves have implicit parameters,
+ * hence implicit searches can recursively trigger other implicit searches.
+ *
* Can be useful to get information about an application with an implicit parameter that is materialized during current macro expansion.
* If we're in an implicit macro being expanded, it's included in this list.
*
* Unlike `enclosingImplicits`, this is a def, which means that it gets recalculated on every invocation,
* so it might change depending on what is going on during macro expansion.
*/
- def openImplicits: List[(Type, Tree)]
+ def openImplicits: List[ImplicitCandidate]
/** Typechecks the provided tree against the expected type `pt` in the macro callsite context.
*
@@ -49,7 +57,7 @@ trait Typers {
*
* @throws [[scala.reflect.macros.TypecheckException]]
*/
- def typeCheck(tree: Tree, pt: Type = WildcardType, silent: Boolean = false, withImplicitViewsDisabled: Boolean = false, withMacrosDisabled: Boolean = false): Tree
+ def typeCheck(tree: Tree, pt: Type = universe.WildcardType, silent: Boolean = false, withImplicitViewsDisabled: Boolean = false, withMacrosDisabled: Boolean = false): Tree
/** Infers an implicit value of the expected type `pt` in the macro callsite context.
* Optional `pos` parameter provides a position that will be associated with the implicit search.
@@ -92,4 +100,4 @@ trait Typers {
/** Indicates an error during one of the methods in [[scala.reflect.macros.Typers]].
*/
-case class TypecheckException(val pos: scala.reflect.api.Position, val msg: String) extends Exception(msg)
+case class TypecheckException(pos: scala.reflect.api.Position, msg: String) extends Exception(msg)
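
A sketch of the typer facade as changed above, seen from inside a def macro; `TyperDemo`/`debugImplicits` are invented names and the log format is only indicative.

    import scala.language.experimental.macros
    import scala.reflect.macros.Context

    object TyperDemo {
      def debugImplicits(expr: Any): Any = macro debugImplicitsImpl

      def debugImplicitsImpl(c: Context)(expr: c.Expr[Any]): c.Expr[Any] = {
        import c.universe._
        // openImplicits now yields full ImplicitCandidate(pre, sym, pt, tree)
        // values instead of the old (pt, tree) pairs:
        c.openImplicits foreach { cand =>
          c.info(c.enclosingPosition, s"searching ${cand.pt} via ${cand.sym.fullName}", false)
        }
        // pt now defaults to universe.WildcardType (see the new signature above);
        // silent = true returns EmptyTree instead of throwing TypecheckException:
        val typed = c.typeCheck(expr.tree, silent = true)
        c.Expr[Any](if (typed.isEmpty) expr.tree else typed)
      }
    }
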
diff --git a/src/reflect/scala/reflect/macros/Universe.scala b/src/reflect/scala/reflect/macros/Universe.scala
index 4e76f7c408..d1d90f53c9 100644
--- a/src/reflect/scala/reflect/macros/Universe.scala
+++ b/src/reflect/scala/reflect/macros/Universe.scala
@@ -1,4 +1,5 @@
-package scala.reflect
+package scala
+package reflect
package macros
/**
@@ -114,7 +115,7 @@ abstract class Universe extends scala.reflect.api.Universe {
def setPos(newpos: Position): Tree
/** Sets the `tpe` of the tree. Returns `Unit`. */
- def tpe_=(t: Type): Unit
+ @deprecated("Use setType", "2.11.0") def tpe_=(t: Type): Unit
/** Sets the `tpe` of the tree. Returns the tree itself. */
def setType(tp: Type): Tree
@@ -238,4 +239,4 @@ abstract class Universe extends scala.reflect.api.Universe {
/** The AST that corresponds to this compilation unit. */
def body: Tree
}
-} \ No newline at end of file
+}
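
With `tpe_=` deprecated, in-place typing of trees inside macros goes through `setType` (and `setPos`), which return the tree and therefore chain; the helper below is just an illustration.

    import scala.reflect.macros.Context

    object TreeOps {
      // Hypothetical helper used from some macro implementation.
      def retype(c: Context)(tree: c.Tree, tp: c.Type): c.Tree =
        tree setType tp // replaces the deprecated `tree.tpe = tp`
    }
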
diff --git a/src/reflect/scala/reflect/macros/package.scala b/src/reflect/scala/reflect/macros/package.scala
index 21d189bb25..2e2e8e79f8 100644
--- a/src/reflect/scala/reflect/macros/package.scala
+++ b/src/reflect/scala/reflect/macros/package.scala
@@ -1,4 +1,5 @@
-package scala.reflect
+package scala
+package reflect
/**
* <span class="badge badge-red" style="float: right;">EXPERIMENTAL</span>
diff --git a/src/reflect/scala/reflect/runtime/JavaMirrors.scala b/src/reflect/scala/reflect/runtime/JavaMirrors.scala
index ccc727451c..a3684f602f 100644
--- a/src/reflect/scala/reflect/runtime/JavaMirrors.scala
+++ b/src/reflect/scala/reflect/runtime/JavaMirrors.scala
@@ -1,4 +1,5 @@
-package scala.reflect
+package scala
+package reflect
package runtime
import scala.ref.WeakReference
@@ -6,23 +7,20 @@ import scala.collection.mutable.WeakHashMap
import java.lang.{Class => jClass, Package => jPackage}
import java.lang.reflect.{
- Method => jMethod, Constructor => jConstructor, Modifier => jModifier, Field => jField,
+ Method => jMethod, Constructor => jConstructor, Field => jField,
Member => jMember, Type => jType, TypeVariable => jTypeVariable, Array => jArray,
+ AccessibleObject => jAccessibleObject,
GenericDeclaration, GenericArrayType, ParameterizedType, WildcardType, AnnotatedElement }
import java.lang.annotation.{Annotation => jAnnotation}
import java.io.IOException
-import internal.MissingRequirementError
+import scala.reflect.internal.{ MissingRequirementError, JavaAccFlags, JMethodOrConstructor }
import internal.pickling.ByteCodecs
-import internal.ClassfileConstants._
import internal.pickling.UnPickler
import scala.collection.mutable.{ HashMap, ListBuffer }
import internal.Flags._
-//import scala.tools.nsc.util.ScalaClassLoader
-//import scala.tools.nsc.util.ScalaClassLoader._
import ReflectionUtils.{staticSingletonInstance, innerSingletonInstance}
import scala.language.existentials
import scala.runtime.{ScalaRunTime, BoxesRunTime}
-import scala.reflect.internal.util.Collections._
private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUniverse { thisUniverse: SymbolTable =>
@@ -63,7 +61,7 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
/** The API of a mirror for a reflective universe */
class JavaMirror(owner: Symbol,
- /** Class loader that is a mastermind behind the reflexive mirror */
+ /* Class loader that is a mastermind behind the reflexive mirror */
val classLoader: ClassLoader
) extends Roots(owner) with super.JavaMirror { thisMirror =>
@@ -91,12 +89,12 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
// ----------- Caching ------------------------------------------------------------------
- private val classCache = new TwoWayCache[jClass[_], ClassSymbol]
- private val packageCache = new TwoWayCache[Package, ModuleSymbol]
- private val methodCache = new TwoWayCache[jMethod, MethodSymbol]
+ private val classCache = new TwoWayCache[jClass[_], ClassSymbol]
+ private val packageCache = new TwoWayCache[Package, ModuleSymbol]
+ private val methodCache = new TwoWayCache[jMethod, MethodSymbol]
private val constructorCache = new TwoWayCache[jConstructor[_], MethodSymbol]
- private val fieldCache = new TwoWayCache[jField, TermSymbol]
- private val tparamCache = new TwoWayCache[jTypeVariable[_ <: GenericDeclaration], TypeSymbol]
+ private val fieldCache = new TwoWayCache[jField, TermSymbol]
+ private val tparamCache = new TwoWayCache[jTypeVariable[_ <: GenericDeclaration], TypeSymbol]
private[runtime] def toScala[J: HasJavaClass, S](cache: TwoWayCache[J, S], key: J)(body: (JavaMirror, J) => S): S =
cache.toScala(key){
@@ -104,39 +102,35 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
body(mirrorDefining(jclazz), key)
}
- private implicit val classHasJavaClass: HasJavaClass[jClass[_]] =
- new HasJavaClass(identity)
- private implicit val methHasJavaClass: HasJavaClass[jMethod]
- = new HasJavaClass(_.getDeclaringClass)
- private implicit val fieldHasJavaClass: HasJavaClass[jField] =
- new HasJavaClass(_.getDeclaringClass)
- private implicit val constrHasJavaClass: HasJavaClass[jConstructor[_]] =
- new HasJavaClass(_.getDeclaringClass)
+ private implicit val classHasJavaClass: HasJavaClass[jClass[_]] = new HasJavaClass(identity)
+ private implicit val methHasJavaClass: HasJavaClass[jMethod] = new HasJavaClass(_.getDeclaringClass)
+ private implicit val fieldHasJavaClass: HasJavaClass[jField] = new HasJavaClass(_.getDeclaringClass)
+ private implicit val constrHasJavaClass: HasJavaClass[jConstructor[_]] = new HasJavaClass(_.getDeclaringClass)
private implicit val tparamHasJavaClass: HasJavaClass[jTypeVariable[_ <: GenericDeclaration]] =
new HasJavaClass ( (tparam: jTypeVariable[_ <: GenericDeclaration]) => {
tparam.getGenericDeclaration match {
- case jclazz: jClass[_] => jclazz
- case jmeth: jMethod => jmeth.getDeclaringClass
+ case jclazz: jClass[_] => jclazz
+ case jmeth: jMethod => jmeth.getDeclaringClass
case jconstr: jConstructor[_] => jconstr.getDeclaringClass
}
})
// ----------- Implementations of mirror operations and classes -------------------
- private def ErrorInnerClass(sym: Symbol) = throw new ScalaReflectionException(s"$sym is an inner class, use reflectClass on an InstanceMirror to obtain its ClassMirror")
- private def ErrorInnerModule(sym: Symbol) = throw new ScalaReflectionException(s"$sym is an inner module, use reflectModule on an InstanceMirror to obtain its ModuleMirror")
- private def ErrorStaticClass(sym: Symbol) = throw new ScalaReflectionException(s"$sym is a static class, use reflectClass on a RuntimeMirror to obtain its ClassMirror")
- private def ErrorStaticModule(sym: Symbol) = throw new ScalaReflectionException(s"$sym is a static module, use reflectModule on a RuntimeMirror to obtain its ModuleMirror")
- private def ErrorNotMember(sym: Symbol, owner: Symbol) = throw new ScalaReflectionException(s"expected a member of $owner, you provided ${sym.kindString} ${sym.fullName}")
- private def ErrorNotField(sym: Symbol) = throw new ScalaReflectionException(s"expected a field or an accessor method symbol, you provided $sym")
- private def ErrorNonExistentField(sym: Symbol) = throw new ScalaReflectionException(
+ private def abort(msg: String) = throw new ScalaReflectionException(msg)
+
+ private def ErrorInnerClass(sym: Symbol) = abort(s"$sym is an inner class, use reflectClass on an InstanceMirror to obtain its ClassMirror")
+ private def ErrorInnerModule(sym: Symbol) = abort(s"$sym is an inner module, use reflectModule on an InstanceMirror to obtain its ModuleMirror")
+ private def ErrorStaticClass(sym: Symbol) = abort(s"$sym is a static class, use reflectClass on a RuntimeMirror to obtain its ClassMirror")
+ private def ErrorStaticModule(sym: Symbol) = abort(s"$sym is a static module, use reflectModule on a RuntimeMirror to obtain its ModuleMirror")
+ private def ErrorNotMember(sym: Symbol, owner: Symbol) = abort(s"expected a member of $owner, you provided ${sym.kindString} ${sym.fullName}")
+ private def ErrorNotField(sym: Symbol) = abort(s"expected a field or an accessor method symbol, you provided $sym")
+ private def ErrorNotConstructor(sym: Symbol, owner: Symbol) = abort(s"expected a constructor of $owner, you provided $sym")
+ private def ErrorFree(member: Symbol, freeType: Symbol) = abort(s"cannot reflect ${member.kindString} ${member.name}, because it's a member of a weak type ${freeType.name}")
+ private def ErrorNonExistentField(sym: Symbol) = abort(
sm"""Scala field ${sym.name} isn't represented as a Java field, neither it has a Java accessor method
|note that private parameters of class constructors don't get mapped onto fields and/or accessors,
|unless they are used outside of their declaring constructors.""")
- @deprecated("corresponding check has been removed from FieldMirror.set, this method is also being phased out", "2.11.0")
- private def ErrorSetImmutableField(sym: Symbol) = throw new ScalaReflectionException(s"cannot set an immutable field ${sym.name}")
- private def ErrorNotConstructor(sym: Symbol, owner: Symbol) = throw new ScalaReflectionException(s"expected a constructor of $owner, you provided $sym")
- private def ErrorFree(member: Symbol, freeType: Symbol) = throw new ScalaReflectionException(s"cannot reflect ${member.kindString} ${member.name}, because it's a member of a weak type ${freeType.name}")
/** Helper functions for extracting typed values from a (Class[_], Any)
* representing an annotation argument.
@@ -214,7 +208,6 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
case _ => body
}
}
-
private def checkMemberOf(sym: Symbol, owner: ClassSymbol) {
if (sym.owner == AnyClass || sym.owner == AnyRefClass || sym.owner == ObjectClass) {
// do nothing
@@ -240,16 +233,12 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
if (staticClazz.isPrimitive) staticClazz else dynamicClazz
}
- private class JavaInstanceMirror[T: ClassTag](val instance: T)
- extends InstanceMirror {
+ private class JavaInstanceMirror[T: ClassTag](val instance: T) extends InstanceMirror {
def symbol = thisMirror.classSymbol(preciseClass(instance))
def reflectField(field: TermSymbol): FieldMirror = {
checkMemberOf(field, symbol)
if ((field.isMethod && !field.isAccessor) || field.isModule) ErrorNotField(field)
- val name =
- if (field.isGetter) nme.getterToLocal(field.name)
- else if (field.isSetter) nme.getterToLocal(nme.setterToGetter(field.name))
- else field.name
+ val name = if (field.isAccessor) field.localName else field.name
val field1 = (field.owner.info decl name).asTerm
try fieldToJava(field1)
catch {
@@ -276,20 +265,14 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
private class JavaFieldMirror(val receiver: Any, val symbol: TermSymbol)
extends FieldMirror {
- lazy val jfield = {
- val jfield = fieldToJava(symbol)
- if (!jfield.isAccessible) jfield.setAccessible(true)
- jfield
- }
- def get = jfield.get(receiver)
+ lazy val jfield = ensureAccessible(fieldToJava(symbol))
+ def get = jfield get receiver
def set(value: Any) = {
// it appears useful to be able to set values of vals, therefore I'm disabling this check
// if (!symbol.isMutable) ErrorSetImmutableField(symbol)
jfield.set(receiver, value)
}
- // this dummy method is necessary to prevent the optimizer from stripping off ErrorSetImmutableField
- // which would break binary compatibility with 2.10.0
- private def dummy(symbol: Symbol) = ErrorSetImmutableField(symbol)
+ def bind(newReceiver: Any) = new JavaFieldMirror(newReceiver, symbol)
override def toString = s"field mirror for ${symbol.fullName} (bound to $receiver)"
}
@@ -320,13 +303,13 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
// the "symbol == Any_getClass || symbol == Object_getClass" test doesn't cut it
// because both AnyVal and its primitive descendants define their own getClass methods
- private def isGetClass(meth: MethodSymbol) = meth.name.toString == "getClass" && meth.paramss.flatten.isEmpty
+ private def isGetClass(meth: MethodSymbol) = (meth.name string_== "getClass") && meth.paramss.flatten.isEmpty
private def isStringConcat(meth: MethodSymbol) = meth == String_+ || (meth.owner.isPrimitiveValueClass && meth.returnType =:= StringClass.toType)
lazy val bytecodelessMethodOwners = Set[Symbol](AnyClass, AnyValClass, AnyRefClass, ObjectClass, ArrayClass) ++ ScalaPrimitiveValueClasses
lazy val bytecodefulObjectMethods = Set[Symbol](Object_clone, Object_equals, Object_finalize, Object_hashCode, Object_toString,
Object_notify, Object_notifyAll) ++ ObjectClass.info.member(nme.wait_).asTerm.alternatives.map(_.asMethod)
private def isBytecodelessMethod(meth: MethodSymbol): Boolean = {
- if (isGetClass(meth) || isStringConcat(meth) || meth.owner.isPrimitiveValueClass || meth == Predef_classOf || meth.isTermMacro) return true
+ if (isGetClass(meth) || isStringConcat(meth) || meth.owner.isPrimitiveValueClass || meth == Predef_classOf || meth.isMacro) return true
bytecodelessMethodOwners(meth.owner) && !bytecodefulObjectMethods(meth)
}
@@ -337,19 +320,23 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
private def mkJavaMethodMirror[T: ClassTag](receiver: T, symbol: MethodSymbol): JavaMethodMirror = {
if (isBytecodelessMethod(symbol)) new JavaBytecodelessMethodMirror(receiver, symbol)
else if (symbol.paramss.flatten exists (p => isByNameParamType(p.info))) new JavaByNameMethodMirror(receiver, symbol)
- else new JavaVanillaMethodMirror(receiver, symbol)
- }
-
- private abstract class JavaMethodMirror(val symbol: MethodSymbol)
- extends MethodMirror {
- lazy val jmeth = {
- val jmeth = methodToJava(symbol)
- if (!jmeth.isAccessible) jmeth.setAccessible(true)
- jmeth
+ else {
+ symbol.paramss.flatten.length match {
+ case 0 => new JavaVanillaMethodMirror0(receiver, symbol)
+ case 1 => new JavaVanillaMethodMirror1(receiver, symbol)
+ case 2 => new JavaVanillaMethodMirror2(receiver, symbol)
+ case 3 => new JavaVanillaMethodMirror3(receiver, symbol)
+ case 4 => new JavaVanillaMethodMirror4(receiver, symbol)
+ case _ => new JavaVanillaMethodMirror(receiver, symbol)
+ }
}
+ }
+ private abstract class JavaMethodMirror(val symbol: MethodSymbol) extends MethodMirror {
+ lazy val jmeth = ensureAccessible(methodToJava(symbol))
+ def jinvokeraw(jmeth: jMethod, receiver: Any, args: Seq[Any]) = jmeth.invoke(receiver, args.asInstanceOf[Seq[AnyRef]]: _*)
def jinvoke(jmeth: jMethod, receiver: Any, args: Seq[Any]): Any = {
- val result = jmeth.invoke(receiver, args.asInstanceOf[Seq[AnyRef]]: _*)
+ val result = jinvokeraw(jmeth, receiver, args)
if (jmeth.getReturnType == java.lang.Void.TYPE) ()
else result
}
@@ -359,11 +346,43 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
private class JavaVanillaMethodMirror(val receiver: Any, symbol: MethodSymbol)
extends JavaMethodMirror(symbol) {
+ def bind(newReceiver: Any) = new JavaVanillaMethodMirror(newReceiver, symbol)
def apply(args: Any*): Any = jinvoke(jmeth, receiver, args)
}
+ private class JavaVanillaMethodMirror0(receiver: Any, symbol: MethodSymbol)
+ extends JavaVanillaMethodMirror(receiver, symbol) {
+ override def bind(newReceiver: Any) = new JavaVanillaMethodMirror0(newReceiver, symbol)
+ override def jinvokeraw(jmeth: jMethod, receiver: Any, args: Seq[Any]) = jmeth.invoke(receiver)
+ }
+
+ private class JavaVanillaMethodMirror1(receiver: Any, symbol: MethodSymbol)
+ extends JavaVanillaMethodMirror(receiver, symbol) {
+ override def bind(newReceiver: Any) = new JavaVanillaMethodMirror1(newReceiver, symbol)
+ override def jinvokeraw(jmeth: jMethod, receiver: Any, args: Seq[Any]) = jmeth.invoke(receiver, args(0).asInstanceOf[AnyRef])
+ }
+
+ private class JavaVanillaMethodMirror2(receiver: Any, symbol: MethodSymbol)
+ extends JavaVanillaMethodMirror(receiver, symbol) {
+ override def bind(newReceiver: Any) = new JavaVanillaMethodMirror2(newReceiver, symbol)
+ override def jinvokeraw(jmeth: jMethod, receiver: Any, args: Seq[Any]) = jmeth.invoke(receiver, args(0).asInstanceOf[AnyRef], args(1).asInstanceOf[AnyRef])
+ }
+
+ private class JavaVanillaMethodMirror3(receiver: Any, symbol: MethodSymbol)
+ extends JavaVanillaMethodMirror(receiver, symbol) {
+ override def bind(newReceiver: Any) = new JavaVanillaMethodMirror3(newReceiver, symbol)
+ override def jinvokeraw(jmeth: jMethod, receiver: Any, args: Seq[Any]) = jmeth.invoke(receiver, args(0).asInstanceOf[AnyRef], args(1).asInstanceOf[AnyRef], args(2).asInstanceOf[AnyRef])
+ }
+
+ private class JavaVanillaMethodMirror4(receiver: Any, symbol: MethodSymbol)
+ extends JavaVanillaMethodMirror(receiver, symbol) {
+ override def bind(newReceiver: Any) = new JavaVanillaMethodMirror4(newReceiver, symbol)
+ override def jinvokeraw(jmeth: jMethod, receiver: Any, args: Seq[Any]) = jmeth.invoke(receiver, args(0).asInstanceOf[AnyRef], args(1).asInstanceOf[AnyRef], args(2).asInstanceOf[AnyRef], args(3).asInstanceOf[AnyRef])
+ }
+
private class JavaByNameMethodMirror(val receiver: Any, symbol: MethodSymbol)
extends JavaMethodMirror(symbol) {
+ def bind(newReceiver: Any) = new JavaByNameMethodMirror(newReceiver, symbol)
def apply(args: Any*): Any = {
val transformed = map2(args.toList, symbol.paramss.flatten)((arg, param) => if (isByNameParamType(param.info)) () => arg else arg)
jinvoke(jmeth, receiver, transformed)
@@ -372,6 +391,7 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
private class JavaBytecodelessMethodMirror[T: ClassTag](val receiver: T, symbol: MethodSymbol)
extends JavaMethodMirror(symbol) {
+ def bind(newReceiver: Any) = new JavaBytecodelessMethodMirror(newReceiver.asInstanceOf[T], symbol)
def apply(args: Any*): Any = {
// checking type conformance is too much of a hassle, so we don't do it here
// actually it's not even necessary, because we manually dispatch arguments below
@@ -383,14 +403,14 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
val varargMatch = args.length >= params.length - 1 && isVarArgsList(params)
if (!perfectMatch && !varargMatch) {
val n_arguments = if (isVarArgsList(params)) s"${params.length - 1} or more" else s"${params.length}"
- var s_arguments = if (params.length == 1 && !isVarArgsList(params)) "argument" else "arguments"
- throw new ScalaReflectionException(s"${showMethodSig(symbol)} takes $n_arguments $s_arguments")
+ val s_arguments = if (params.length == 1 && !isVarArgsList(params)) "argument" else "arguments"
+ abort(s"${showMethodSig(symbol)} takes $n_arguments $s_arguments")
}
def objReceiver = receiver.asInstanceOf[AnyRef]
def objArg0 = args(0).asInstanceOf[AnyRef]
def objArgs = args.asInstanceOf[Seq[AnyRef]]
- def fail(msg: String) = throw new ScalaReflectionException(msg + ", it cannot be invoked with mirrors")
+ def fail(msg: String) = abort(msg + ", it cannot be invoked with mirrors")
def invokePrimitiveMethod = {
val jmeths = classOf[BoxesRunTime].getDeclaredMethods.filter(_.getName == nme.primitiveMethodName(symbol.name).toString)
@@ -420,7 +440,7 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
case sym if isStringConcat(sym) => receiver.toString + objArg0
case sym if sym.owner.isPrimitiveValueClass => invokePrimitiveMethod
case sym if sym == Predef_classOf => fail("Predef.classOf is a compile-time function")
- case sym if sym.isTermMacro => fail(s"${symbol.fullName} is a macro, i.e. a compile-time function")
+ case sym if sym.isMacro => fail(s"${symbol.fullName} is a macro, i.e. a compile-time function")
case _ => abort(s"unsupported symbol $symbol when invoking $this")
}
}
@@ -428,15 +448,12 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
private class JavaConstructorMirror(val outer: AnyRef, val symbol: MethodSymbol)
extends MethodMirror {
+ def bind(newReceiver: Any) = new JavaConstructorMirror(newReceiver.asInstanceOf[AnyRef], symbol)
override val receiver = outer
- lazy val jconstr = {
- val jconstr = constructorToJava(symbol)
- if (!jconstr.isAccessible) jconstr.setAccessible(true)
- jconstr
- }
+ lazy val jconstr = ensureAccessible(constructorToJava(symbol))
def apply(args: Any*): Any = {
if (symbol.owner == ArrayClass)
- throw new ScalaReflectionException("Cannot instantiate arrays with mirrors. Consider using `scala.reflect.ClassTag(<class of element>).newArray(<length>)` instead")
+ abort("Cannot instantiate arrays with mirrors. Consider using `scala.reflect.ClassTag(<class of element>).newArray(<length>)` instead")
val effectiveArgs =
if (outer == null) args.asInstanceOf[Seq[AnyRef]]
@@ -469,11 +486,11 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
def erasure = symbol.moduleClass.asClass
def isStatic = true
def instance = {
- if (symbol.owner.isPackageClass)
+ if (symbol.isTopLevel)
staticSingletonInstance(classLoader, symbol.fullName)
else
if (outer == null) staticSingletonInstance(classToJava(symbol.moduleClass.asClass))
- else innerSingletonInstance(outer, symbol.name)
+ else innerSingletonInstance(outer, symbol.name.toString)
}
override def toString = s"module mirror for ${symbol.fullName} (bound to $outer)"
}
@@ -497,16 +514,13 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
}
def javaClass(path: String): jClass[_] =
- Class.forName(path, true, classLoader)
+ jClass.forName(path, true, classLoader)
/** Does `path` correspond to a Java class with that fully qualified name in the current class loader? */
- def tryJavaClass(path: String): Option[jClass[_]] =
- try {
- Some(javaClass(path))
- } catch {
- case (_: ClassNotFoundException) | (_: NoClassDefFoundError) | (_: IncompatibleClassChangeError) =>
- None
- }
+ def tryJavaClass(path: String): Option[jClass[_]] = (
+ try Some(javaClass(path))
+ catch { case ex @ (_: LinkageError | _: ClassNotFoundException) => None } // TODO - log
+ )
/** The mirror that corresponds to the classloader that originally defined the given Java class */
def mirrorDefining(jclazz: jClass[_]): JavaMirror = {
@@ -530,7 +544,7 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
def markAbsent(tpe: Type) = setAllInfos(clazz, module, tpe)
def handleError(ex: Exception) = {
markAbsent(ErrorType)
- if (settings.debug.value) ex.printStackTrace()
+ if (settings.debug) ex.printStackTrace()
val msg = ex.getMessage()
MissingRequirementError.signal(
(if (msg eq null) "reflection error while loading " + clazz.name
@@ -579,7 +593,7 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
case None =>
// class does not have a Scala signature; it's a Java class
info("translating reflection info for Java " + jclazz) //debug
- initClassModule(clazz, module, new FromJavaClassCompleter(clazz, module, jclazz))
+ initClassAndModule(clazz, module, new FromJavaClassCompleter(clazz, module, jclazz))
}
}
} catch {
@@ -623,13 +637,22 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
sym setAnnotations (jann.getAnnotations map JavaAnnotationProxy).toList
// SI-7065: we're not using getGenericExceptionTypes here to be consistent with ClassfileParser
val jexTpes = jann match {
- case jm: jMethod => jm.getExceptionTypes.toList
+ case jm: jMethod => jm.getExceptionTypes.toList
case jconstr: jConstructor[_] => jconstr.getExceptionTypes.toList
- case _ => Nil
+ case _ => Nil
}
jexTpes foreach (jexTpe => sym.addThrowsAnnotation(classSymbol(jexTpe)))
}
+ private implicit class jClassOps(val clazz: jClass[_]) {
+ def javaFlags: JavaAccFlags = JavaAccFlags(clazz)
+ def scalaFlags: Long = javaFlags.toScalaFlags
+ }
+ private implicit class jMemberOps(val member: jMember) {
+ def javaFlags: JavaAccFlags = JavaAccFlags(member)
+ def scalaFlags: Long = javaFlags.toScalaFlags
+ }
+
/**
* A completer that fills in the types of a Scala class and its companion object
* by copying corresponding type info from a Java class. This completer is used
@@ -649,14 +672,14 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
override def load(sym: Symbol): Unit = {
debugInfo("completing from Java " + sym + "/" + clazz.fullName)//debug
assert(sym == clazz || (module != NoSymbol && (sym == module || sym == module.moduleClass)), sym)
- val flags = toScalaClassFlags(jclazz.getModifiers)
+ val flags = jclazz.scalaFlags
clazz setFlag (flags | JAVA)
if (module != NoSymbol) {
module setFlag (flags & PRIVATE | JAVA)
module.moduleClass setFlag (flags & PRIVATE | JAVA)
}
- relatedSymbols foreach (importPrivateWithinFromJavaFlags(_, jclazz.getModifiers))
+ propagatePackageBoundary(jclazz, relatedSymbols: _*)
copyAnnotations(clazz, jclazz)
// to do: annotations to set also for module?
@@ -678,7 +701,7 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
val parents = try {
parentsLevel += 1
val jsuperclazz = jclazz.getGenericSuperclass
- val superclazz = if (jsuperclazz == null) AnyClass.tpe else typeToScala(jsuperclazz)
+ val superclazz = if (jsuperclazz == null) AnyTpe else typeToScala(jsuperclazz)
superclazz :: (jclazz.getGenericInterfaces.toList map typeToScala)
} finally {
parentsLevel -= 1
@@ -688,29 +711,21 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
module.moduleClass setInfo new ClassInfoType(List(), newScope, module.moduleClass)
}
- def enter(sym: Symbol, mods: Int) =
- (if (jModifier.isStatic(mods)) module.moduleClass else clazz).info.decls enter sym
+ def enter(sym: Symbol, mods: JavaAccFlags) =
+ ( if (mods.isStatic) module.moduleClass else clazz ).info.decls enter sym
- for (jinner <- jclazz.getDeclaredClasses) {
+ for (jinner <- jclazz.getDeclaredClasses)
jclassAsScala(jinner) // inner class is entered as a side-effect
// no need to call enter explicitly
- }
-
- pendingLoadActions = { () =>
-
- for (jfield <- jclazz.getDeclaredFields)
- enter(jfieldAsScala(jfield), jfield.getModifiers)
-
- for (jmeth <- jclazz.getDeclaredMethods)
- enter(jmethodAsScala(jmeth), jmeth.getModifiers)
-
- for (jconstr <- jclazz.getConstructors)
- enter(jconstrAsScala(jconstr), jconstr.getModifiers)
- } :: pendingLoadActions
+ pendingLoadActions ::= { () =>
+ jclazz.getDeclaredFields foreach (f => enter(jfieldAsScala(f), f.javaFlags))
+ jclazz.getDeclaredMethods foreach (m => enter(jmethodAsScala(m), m.javaFlags))
+ jclazz.getConstructors foreach (c => enter(jconstrAsScala(c), c.javaFlags))
+ }
if (parentsLevel == 0) {
- while (!pendingLoadActions.isEmpty) {
+ while (pendingLoadActions.nonEmpty) {
val item = pendingLoadActions.head
pendingLoadActions = pendingLoadActions.tail
item()
@@ -729,8 +744,8 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
* If Java modifiers `mods` contain STATIC, return the module class
* of the companion module of `clazz`, otherwise the class `clazz` itself.
*/
- private def followStatic(clazz: Symbol, mods: Int) =
- if (jModifier.isStatic(mods)) clazz.companionModule.moduleClass else clazz
+ private def followStatic(clazz: Symbol, mods: JavaAccFlags) =
+ if (mods.isStatic) clazz.companionModule.moduleClass else clazz
/** Methods which need to be treated with care
* because they either are getSimpleName or call getSimpleName:
@@ -766,7 +781,7 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
if (jclazz.isMemberClass) {
val jEnclosingClass = jclazz.getEnclosingClass
val sEnclosingClass = classToScala(jEnclosingClass)
- followStatic(sEnclosingClass, jclazz.getModifiers)
+ followStatic(sEnclosingClass, jclazz.javaFlags)
} else if (jclazz.isLocalClass0) {
val jEnclosingMethod = jclazz.getEnclosingMethod
if (jEnclosingMethod != null) {
@@ -794,7 +809,7 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
* The Scala owner of the Scala symbol corresponding to the Java member `jmember`
*/
private def sOwner(jmember: jMember): Symbol = {
- followStatic(classToScala(jmember.getDeclaringClass), jmember.getModifiers)
+ followStatic(classToScala(jmember.getDeclaringClass), jmember.javaFlags)
}
/**
@@ -810,9 +825,10 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
* that start with the given name are searched instead.
*/
private def lookup(clazz: Symbol, jname: String): Symbol = {
- def approximateMatch(sym: Symbol, jstr: String): Boolean =
- (sym.name.toString == jstr) ||
- sym.isPrivate && nme.expandedName(sym.name.toTermName, sym.owner).toString == jstr
+ def approximateMatch(sym: Symbol, jstr: String): Boolean = (
+ (sym.name string_== jstr)
+ || sym.isPrivate && (nme.expandedName(sym.name.toTermName, sym.owner) string_== jstr)
+ )
clazz.info.decl(newTermName(jname)) orElse {
(clazz.info.decls.iterator filter (approximateMatch(_, jname))).toList match {
@@ -834,7 +850,7 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
private def methodToScala1(jmeth: jMethod): MethodSymbol = {
val jOwner = jmeth.getDeclaringClass
val preOwner = classToScala(jOwner)
- val owner = followStatic(preOwner, jmeth.getModifiers)
+ val owner = followStatic(preOwner, jmeth.javaFlags)
(lookup(owner, jmeth.getName) suchThat (erasesTo(_, jmeth)) orElse jmethodAsScala(jmeth))
.asMethod
}
@@ -848,26 +864,12 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
toScala(constructorCache, jconstr)(_ constructorToScala1 _)
private def constructorToScala1(jconstr: jConstructor[_]): MethodSymbol = {
- val owner = followStatic(classToScala(jconstr.getDeclaringClass), jconstr.getModifiers)
+ val owner = followStatic(classToScala(jconstr.getDeclaringClass), jconstr.javaFlags)
(lookup(owner, jconstr.getName) suchThat (erasesTo(_, jconstr)) orElse jconstrAsScala(jconstr))
.asMethod
}
/**
- * The Scala field corresponding to given Java field.
- * @param jfield The Java field
- * @return A Scala field object that corresponds to `jfield`.
- * // ??? should we return the getter instead?
- */
- def fieldToScala(jfield: jField): TermSymbol =
- toScala(fieldCache, jfield)(_ fieldToScala1 _)
-
- private def fieldToScala1(jfield: jField): TermSymbol = {
- val owner = followStatic(classToScala(jfield.getDeclaringClass), jfield.getModifiers)
- (lookup(owner, jfield.getName) suchThat (!_.isMethod) orElse jfieldAsScala(jfield)).asTerm
- }
-
- /**
* The Scala package corresponding to given Java package
*/
def packageToScala(jpkg: jPackage): ModuleSymbol = packageCache.toScala(jpkg) {
@@ -970,8 +972,8 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
javaTypeToValueClass(jclazz) orElse lookupClass
assert (cls.isType,
- sm"""${if (cls == NoSymbol) "not a type: symbol" else "no symbol could be"}
- | loaded from $jclazz in $owner with name $simpleName and classloader $classLoader""")
+ (if (cls != NoSymbol) s"not a type: symbol $cls" else "no symbol could be") +
+ s" loaded from $jclazz in $owner with name $simpleName and classloader $classLoader")
cls.asClass
}
@@ -988,7 +990,7 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
private def typeParamToScala1(jparam: jTypeVariable[_ <: GenericDeclaration]): TypeSymbol = {
val owner = genericDeclarationToScala(jparam.getGenericDeclaration)
owner.info match {
- case PolyType(tparams, _) => tparams.find(_.name.toString == jparam.getName).get.asType
+ case PolyType(tparams, _) => tparams.find(_.name string_== jparam.getName).get.asType
}
}
@@ -1000,6 +1002,10 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
case jmeth: jMethod => methodToScala(jmeth)
case jconstr: jConstructor[_] => constructorToScala(jconstr)
}
+ def reflectMemberToScala(m: jMember): Symbol = m match {
+ case x: GenericDeclaration => genericDeclarationToScala(x)
+ case x: jField => jfieldAsScala(x)
+ }
/**
* Given some Java type arguments, a corresponding list of Scala types, plus potentially
@@ -1052,18 +1058,15 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
* @param jclazz The Java class
* @return A Scala class symbol that wraps all reflection info of `jclazz`
*/
- private def jclassAsScala(jclazz: jClass[_]): Symbol = {
- val clazz = sOwner(jclazz) // sOwner called outside of closure for binary compatibility
- toScala(classCache, jclazz){ (mirror, jclazz) =>
- mirror.jclassAsScala(jclazz, clazz)
- }
- }
+ private def jclassAsScala(jclazz: jClass[_]): ClassSymbol =
+ toScala(classCache, jclazz)(_ jclassAsScala1 _)
+
+ private def jclassAsScala1(jclazz: jClass[_]): ClassSymbol = {
+ val owner = sOwner(jclazz)
+ val name = scalaSimpleName(jclazz)
+ val completer = (clazz: Symbol, module: Symbol) => new FromJavaClassCompleter(clazz, module, jclazz)
- private def jclassAsScala(jclazz: jClass[_], owner: Symbol): ClassSymbol = {
- val name = scalaSimpleName(jclazz)
- val completer = (clazz: Symbol, module: Symbol) => new FromJavaClassCompleter(clazz, module, jclazz)
- val (clazz, _) = createClassModule(owner, name, completer)
- clazz
+ initAndEnterClassAndModule(owner, name, completer)._1
}
/**
@@ -1077,10 +1080,11 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
private def jfieldAsScala1(jfield: jField): TermSymbol = {
val field = sOwner(jfield)
- .newValue(newTermName(jfield.getName), NoPosition, toScalaFieldFlags(jfield.getModifiers))
+ .newValue(newTermName(jfield.getName), NoPosition, jfield.scalaFlags)
.setInfo(typeToScala(jfield.getGenericType))
- fieldCache enter (jfield, field)
- importPrivateWithinFromJavaFlags(field, jfield.getModifiers)
+
+ fieldCache.enter(jfield, field)
+ propagatePackageBoundary(jfield, field)
copyAnnotations(field, jfield)
field
}
@@ -1100,16 +1104,19 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
private def jmethodAsScala1(jmeth: jMethod): MethodSymbol = {
val clazz = sOwner(jmeth)
- val meth = clazz.newMethod(newTermName(jmeth.getName), NoPosition, toScalaMethodFlags(jmeth.getModifiers))
+ val meth = clazz.newMethod(newTermName(jmeth.getName), NoPosition, jmeth.scalaFlags)
methodCache enter (jmeth, meth)
val tparams = jmeth.getTypeParameters.toList map createTypeParameter
val paramtpes = jmeth.getGenericParameterTypes.toList map typeToScala
val resulttpe = typeToScala(jmeth.getGenericReturnType)
setMethType(meth, tparams, paramtpes, resulttpe)
- importPrivateWithinFromJavaFlags(meth, jmeth.getModifiers)
+ propagatePackageBoundary(jmeth.javaFlags, meth)
copyAnnotations(meth, jmeth)
- if ((jmeth.getModifiers & JAVA_ACC_VARARGS) != 0) meth.setInfo(arrayToRepeated(meth.info))
- meth
+
+ if (jmeth.javaFlags.isVarargs)
+ meth modifyInfo arrayToRepeated
+ else
+ meth
}
/**
@@ -1124,26 +1131,19 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
private def jconstrAsScala1(jconstr: jConstructor[_]): MethodSymbol = {
// [Martin] Note: I know there's a lot of duplication wrt jmethodAsScala, but don't think it's worth it to factor this out.
val clazz = sOwner(jconstr)
- val constr = clazz.newConstructor(NoPosition, toScalaMethodFlags(jconstr.getModifiers))
+ val constr = clazz.newConstructor(NoPosition, jconstr.scalaFlags)
constructorCache enter (jconstr, constr)
val tparams = jconstr.getTypeParameters.toList map createTypeParameter
val paramtpes = jconstr.getGenericParameterTypes.toList map typeToScala
- setMethType(constr, tparams, paramtpes, clazz.tpe)
+ setMethType(constr, tparams, paramtpes, clazz.tpe_*)
constr setInfo GenPolyType(tparams, MethodType(clazz.newSyntheticValueParams(paramtpes), clazz.tpe))
- importPrivateWithinFromJavaFlags(constr, jconstr.getModifiers)
+ propagatePackageBoundary(jconstr.javaFlags, constr)
copyAnnotations(constr, jconstr)
constr
}
// -------------------- Scala to Java -----------------------------------
- /** Optionally, the Java package corresponding to a given Scala package, or None if no such Java package exists.
- * @param pkg The Scala package
- */
- def packageToJavaOption(pkg: ModuleSymbol): Option[jPackage] = packageCache.toJavaOption(pkg) {
- Option(jPackage.getPackage(pkg.fullName.toString))
- }
-
/** The Java class corresponding to given Scala class.
* Note: This only works for
* - top-level classes
@@ -1159,16 +1159,18 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
valueClassToJavaType(clazz)
else if (clazz == ArrayClass)
noClass
- else if (clazz.owner.isPackageClass)
+ else if (clazz.isTopLevel)
javaClass(clazz.javaClassName)
else if (clazz.owner.isClass) {
- val childOfClass = !clazz.owner.isModuleClass
- val childOfTopLevel = clazz.owner.owner.isPackageClass
+ val childOfClass = !clazz.owner.isModuleClass
+ val childOfTopLevel = clazz.owner.isTopLevel
val childOfTopLevelObject = clazz.owner.isModuleClass && childOfTopLevel
// suggested in https://issues.scala-lang.org/browse/SI-4023?focusedCommentId=54759#comment-54759
var ownerClazz = classToJava(clazz.owner.asClass)
- if (childOfTopLevelObject) ownerClazz = Class.forName(ownerClazz.getName stripSuffix "$", true, ownerClazz.getClassLoader)
+ if (childOfTopLevelObject)
+ ownerClazz = jClass.forName(ownerClazz.getName stripSuffix "$", true, ownerClazz.getClassLoader)
+
val ownerChildren = ownerClazz.getDeclaredClasses
var fullNameOfJavaClass = ownerClazz.getName
@@ -1188,11 +1190,11 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
else sym.name.toString
/** The Java field corresponding to a given Scala field.
- * @param meth The Scala field.
+ * @param fld The Scala field.
*/
def fieldToJava(fld: TermSymbol): jField = fieldCache.toJava(fld) {
val jclazz = classToJava(fld.owner.asClass)
- val jname = nme.dropLocalSuffix(fld.name).toString
+ val jname = fld.name.dropLocal.toString
try jclazz getDeclaredField jname
catch {
case ex: NoSuchFieldException => jclazz getDeclaredField expandedName(fld)
@@ -1205,7 +1207,7 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
def methodToJava(meth: MethodSymbol): jMethod = methodCache.toJava(meth) {
val jclazz = classToJava(meth.owner.asClass)
val paramClasses = transformedType(meth).paramTypes map typeToJavaClass
- val jname = nme.dropLocalSuffix(meth.name).toString
+ val jname = meth.name.dropLocal.toString
try jclazz getDeclaredMethod (jname, paramClasses: _*)
catch {
case ex: NoSuchMethodException =>
@@ -1233,11 +1235,11 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
* Pre: Scala type is already transformed to Java level.
*/
def typeToJavaClass(tpe: Type): jClass[_] = tpe match {
- case ExistentialType(_, rtpe) => typeToJavaClass(rtpe)
- case TypeRef(_, ArrayClass, List(elemtpe)) => jArrayClass(typeToJavaClass(elemtpe))
- case TypeRef(_, sym: ClassSymbol, _) => classToJava(sym.asClass)
+ case ExistentialType(_, rtpe) => typeToJavaClass(rtpe)
+ case TypeRef(_, ArrayClass, List(elemtpe)) => jArrayClass(typeToJavaClass(elemtpe))
+ case TypeRef(_, sym: ClassSymbol, _) => classToJava(sym.asClass)
case tpe @ TypeRef(_, sym: AliasTypeSymbol, _) => typeToJavaClass(tpe.dealias)
- case _ => throw new NoClassDefFoundError("no Java class corresponding to "+tpe+" found")
+ case _ => throw new NoClassDefFoundError("no Java class corresponding to "+tpe+" found")
}
}
@@ -1277,14 +1279,12 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
if (name.isTermName && !owner.isEmptyPackageClass)
return mirror.makeScalaPackage(
if (owner.isRootSymbol) name.toString else owner.fullName+"."+name)
- syntheticCoreClasses get (owner.fullName, name) match {
- case Some(tsym) =>
- // synthetic core classes are only present in root mirrors
- // because Definitions.scala, which initializes and enters them, only affects rootMirror
- // therefore we need to enter them manually for non-root mirrors
- if (mirror ne thisUniverse.rootMirror) owner.info.decls enter tsym
- return tsym
- case None =>
+ syntheticCoreClasses get ((owner.fullName, name)) foreach { tsym =>
+ // synthetic core classes are only present in root mirrors
+ // because Definitions.scala, which initializes and enters them, only affects rootMirror
+ // therefore we need to enter them manually for non-root mirrors
+ if (mirror ne thisUniverse.rootMirror) owner.info.decls enter tsym
+ return tsym
}
}
info("*** missing: "+name+"/"+name.isTermName+"/"+owner+"/"+owner.hasPackageFlag+"/"+owner.info.decls.getClass)
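
The user-facing path that all of the mirror plumbing above serves is unchanged; a self-contained sketch (class and names invented):

    import scala.reflect.runtime.{universe => ru}

    class Greeter { def greet(name: String): String = "hello, " + name }

    object MirrorDemo extends App {
      val rm = ru.runtimeMirror(getClass.getClassLoader)
      val im = rm.reflect(new Greeter)                                 // a JavaInstanceMirror
      val greet = ru.typeOf[Greeter].member(ru.newTermName("greet")).asMethod
      val mm = im.reflectMethod(greet)                                 // a JavaMethodMirror
      println(mm("world"))                                             // one-argument fast path above
      // mm.bind(new Greeter) rebinds the mirror to another receiver, per the
      // bind methods added in this change.
    }
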
diff --git a/src/reflect/scala/reflect/runtime/JavaUniverse.scala b/src/reflect/scala/reflect/runtime/JavaUniverse.scala
index 1b69ca4e89..06a7db6289 100644
--- a/src/reflect/scala/reflect/runtime/JavaUniverse.scala
+++ b/src/reflect/scala/reflect/runtime/JavaUniverse.scala
@@ -1,8 +1,7 @@
-package scala.reflect
+package scala
+package reflect
package runtime
-import internal.{SomePhase, NoPhase, Phase, TreeGen}
-
/** An implementation of [[scala.reflect.api.Universe]] for runtime reflection using JVM classloaders.
*
* Should not be instantiated directly, use [[scala.reflect.runtime.universe]] instead.
@@ -11,18 +10,21 @@ import internal.{SomePhase, NoPhase, Phase, TreeGen}
*/
class JavaUniverse extends internal.SymbolTable with ReflectSetup with runtime.SymbolTable { self =>
- def picklerPhase = SomePhase
-
+ override def inform(msg: String): Unit = log(msg)
+ def picklerPhase = internal.SomePhase
lazy val settings = new Settings
- def forInteractive = false
- def forScaladoc = false
+ private val isLogging = sys.props contains "scala.debug.reflect"
- def log(msg: => AnyRef): Unit = if (settings.debug.value) println(" [] "+msg)
+ def log(msg: => AnyRef): Unit = if (isLogging) Console.err.println("[reflect] " + msg)
type TreeCopier = InternalTreeCopierOps
def newStrictTreeCopier: TreeCopier = new StrictTreeCopier
def newLazyTreeCopier: TreeCopier = new LazyTreeCopier
+ // can't put this in runtime.Trees since that's mixed with Global in ReflectGlobal, which has the definition from internal.Trees
+ object treeInfo extends {
+ val global: JavaUniverse.this.type = JavaUniverse.this
+ } with internal.TreeInfo
+
init()
}
-
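
Per the new `log` implementation, runtime-reflection logging is now keyed off a system property rather than `settings.debug`; only the presence of the property matters (`sys.props contains ...`), and it is sampled when the universe is initialized. For example (`MyApp` and the value are placeholders):

    // on the command line:
    //   scala -Dscala.debug.reflect=true MyApp
    // or programmatically, before scala.reflect.runtime.universe is first touched:
    sys.props("scala.debug.reflect") = "true"
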
diff --git a/src/reflect/scala/reflect/runtime/ReflectSetup.scala b/src/reflect/scala/reflect/runtime/ReflectSetup.scala
index 6e28fc8520..84f159be00 100644
--- a/src/reflect/scala/reflect/runtime/ReflectSetup.scala
+++ b/src/reflect/scala/reflect/runtime/ReflectSetup.scala
@@ -1,4 +1,5 @@
-package scala.reflect
+package scala
+package reflect
package runtime
import internal.{SomePhase, NoPhase, Phase, TreeGen}
diff --git a/src/reflect/scala/reflect/runtime/ReflectionUtils.scala b/src/reflect/scala/reflect/runtime/ReflectionUtils.scala
index 33ad6d2430..2db9706007 100644
--- a/src/reflect/scala/reflect/runtime/ReflectionUtils.scala
+++ b/src/reflect/scala/reflect/runtime/ReflectionUtils.scala
@@ -3,10 +3,12 @@
* @author Paul Phillips
*/
-package scala.reflect.runtime
+package scala
+package reflect.runtime
import java.lang.{Class => jClass}
import java.lang.reflect.{ Method, InvocationTargetException, UndeclaredThrowableException }
+import scala.reflect.internal.util.AbstractFileClassLoader
/** A few java-reflection oriented utility functions useful during reflection bootstrapping.
*/
@@ -33,8 +35,8 @@ private[scala] object ReflectionUtils {
def isAbstractFileClassLoader(clazz: Class[_]): Boolean = {
if (clazz == null) return false
- if (clazz.getName == "scala.tools.nsc.interpreter.AbstractFileClassLoader") return true
- return isAbstractFileClassLoader(clazz.getSuperclass)
+ if (clazz == classOf[AbstractFileClassLoader]) return true
+ isAbstractFileClassLoader(clazz.getSuperclass)
}
def inferClasspath(cl: ClassLoader): String = cl match {
case cl: java.net.URLClassLoader =>
diff --git a/src/reflect/scala/reflect/runtime/Settings.scala b/src/reflect/scala/reflect/runtime/Settings.scala
index 0e0cf3fc40..a14eafff24 100644
--- a/src/reflect/scala/reflect/runtime/Settings.scala
+++ b/src/reflect/scala/reflect/runtime/Settings.scala
@@ -1,4 +1,5 @@
-package scala.reflect
+package scala
+package reflect
package runtime
import scala.reflect.internal.settings.MutableSettings
@@ -32,10 +33,10 @@ private[reflect] class Settings extends MutableSettings {
val Xexperimental = new BooleanSetting(false)
val XfullLubs = new BooleanSetting(false)
val XnoPatmatAnalysis = new BooleanSetting(false)
- val XoldPatmat = new BooleanSetting(false)
val Xprintpos = new BooleanSetting(false)
- val Ynotnull = new BooleanSetting(false)
val Yshowsymkinds = new BooleanSetting(false)
+ val Yposdebug = new BooleanSetting(false)
+ val Yrangepos = new BooleanSetting(false)
val debug = new BooleanSetting(false)
val deepCloning = new BooleanSetting(false)
val explaintypes = new BooleanSetting(false)
@@ -43,6 +44,7 @@ private[reflect] class Settings extends MutableSettings {
val printtypes = new BooleanSetting(false)
val uniqid = new BooleanSetting(false)
val verbose = new BooleanSetting(false)
+ val breakCycles = new BooleanSetting(false)
val Yrecursion = new IntSetting(0)
val maxClassfileName = new IntSetting(255)
diff --git a/src/reflect/scala/reflect/runtime/SymbolLoaders.scala b/src/reflect/scala/reflect/runtime/SymbolLoaders.scala
index 61663f6181..815cc0c885 100644
--- a/src/reflect/scala/reflect/runtime/SymbolLoaders.scala
+++ b/src/reflect/scala/reflect/runtime/SymbolLoaders.scala
@@ -1,4 +1,5 @@
-package scala.reflect
+package scala
+package reflect
package runtime
import internal.Flags
@@ -28,7 +29,7 @@ private[reflect] trait SymbolLoaders { self: SymbolTable =>
debugInfo("completing "+sym+"/"+clazz.fullName)
assert(sym == clazz || sym == module || sym == module.moduleClass)
// try {
- atPhaseNotLaterThan(picklerPhase) {
+ enteringPhaseNotLaterThan(picklerPhase) {
val loadingMirror = mirrorThatLoaded(sym)
val javaClass = loadingMirror.javaClass(clazz.javaClassName)
loadingMirror.unpickleClass(clazz, module, javaClass)
@@ -57,7 +58,7 @@ private[reflect] trait SymbolLoaders { self: SymbolTable =>
* @param name The simple name of the newly created class
* @param completer The completer to be used to set the info of the class and the module
*/
- protected def createClassModule(owner: Symbol, name: TypeName, completer: (Symbol, Symbol) => LazyType) = {
+ protected def initAndEnterClassAndModule(owner: Symbol, name: TypeName, completer: (Symbol, Symbol) => LazyType) = {
assert(!(name.toString endsWith "[]"), name)
val clazz = owner.newClass(name)
val module = owner.newModule(name.toTermName)
@@ -67,7 +68,7 @@ private[reflect] trait SymbolLoaders { self: SymbolTable =>
owner.info.decls enter clazz
owner.info.decls enter module
}
- initClassModule(clazz, module, completer(clazz, module))
+ initClassAndModule(clazz, module, completer(clazz, module))
(clazz, module)
}
@@ -75,7 +76,7 @@ private[reflect] trait SymbolLoaders { self: SymbolTable =>
List(clazz, module, module.moduleClass) foreach (_ setInfo info)
}
- protected def initClassModule(clazz: Symbol, module: Symbol, completer: LazyType) =
+ protected def initClassAndModule(clazz: Symbol, module: Symbol, completer: LazyType) =
setAllInfos(clazz, module, completer)
/** The type completer for packages.
@@ -116,9 +117,9 @@ private[reflect] trait SymbolLoaders { self: SymbolTable =>
currentMirror.tryJavaClass(path) match {
case Some(cls) =>
val loadingMirror = currentMirror.mirrorDefining(cls)
- val (clazz, module) =
+ val (_, module) =
if (loadingMirror eq currentMirror) {
- createClassModule(pkgClass, name.toTypeName, new TopClassCompleter(_, _))
+ initAndEnterClassAndModule(pkgClass, name.toTypeName, new TopClassCompleter(_, _))
} else {
val origOwner = loadingMirror.packageNameToScala(pkgClass.fullName)
val clazz = origOwner.info decl name.toTypeName
diff --git a/src/reflect/scala/reflect/runtime/SymbolTable.scala b/src/reflect/scala/reflect/runtime/SymbolTable.scala
index 5c08e9a508..bcd4d16cde 100644
--- a/src/reflect/scala/reflect/runtime/SymbolTable.scala
+++ b/src/reflect/scala/reflect/runtime/SymbolTable.scala
@@ -1,4 +1,5 @@
-package scala.reflect
+package scala
+package reflect
package runtime
import scala.reflect.internal.Flags._
@@ -11,10 +12,10 @@ import scala.reflect.internal.Flags._
private[scala] trait SymbolTable extends internal.SymbolTable with JavaMirrors with SymbolLoaders with SynchronizedOps {
def info(msg: => String) =
- if (settings.verbose.value) println("[reflect-compiler] "+msg)
+ if (settings.verbose) println("[reflect-compiler] "+msg)
def debugInfo(msg: => String) =
- if (settings.debug.value) info(msg)
+ if (settings.debug) info(msg)
/** Declares that this is a runtime reflection universe.
*
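A side note for illustration (not in the patch): info and debugInfo above take their message by name (msg: => String), so the string is only built when the corresponding flag is actually on. A self-contained sketch of the idiom:

object ByNameLogging {
  var verbose = false

  // `msg` is a by-name parameter: it is evaluated only if we actually print it.
  def info(msg: => String): Unit =
    if (verbose) println("[reflect-compiler] " + msg)

  def main(args: Array[String]): Unit = {
    def expensive(): String = { println("building message..."); "details" }
    info(expensive())   // verbose is off: expensive() never runs
    verbose = true
    info(expensive())   // now it runs and the message is printed
  }
}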
diff --git a/src/reflect/scala/reflect/runtime/SynchronizedOps.scala b/src/reflect/scala/reflect/runtime/SynchronizedOps.scala
index 7b280e59b9..132470b2e7 100644
--- a/src/reflect/scala/reflect/runtime/SynchronizedOps.scala
+++ b/src/reflect/scala/reflect/runtime/SynchronizedOps.scala
@@ -1,4 +1,5 @@
-package scala.reflect
+package scala
+package reflect
package runtime
// SI-6240: test thread-safety, make trees synchronized as well
diff --git a/src/reflect/scala/reflect/runtime/SynchronizedSymbols.scala b/src/reflect/scala/reflect/runtime/SynchronizedSymbols.scala
index 00f6952dc1..98cad45db1 100644
--- a/src/reflect/scala/reflect/runtime/SynchronizedSymbols.scala
+++ b/src/reflect/scala/reflect/runtime/SynchronizedSymbols.scala
@@ -1,4 +1,5 @@
-package scala.reflect
+package scala
+package reflect
package runtime
import scala.reflect.io.AbstractFile
@@ -83,9 +84,6 @@ private[reflect] trait SynchronizedSymbols extends internal.Symbols { self: Symb
override protected def createPackageObjectClassSymbol(pos: Position, newFlags: Long): PackageObjectClassSymbol =
new PackageObjectClassSymbol(this, pos) with SynchronizedClassSymbol initFlags newFlags
- override protected def createTermSymbol(name: TermName, pos: Position, newFlags: Long): TermSymbol =
- new TermSymbol(this, pos, name) with SynchronizedTermSymbol initFlags newFlags
-
override protected def createMethodSymbol(name: TermName, pos: Position, newFlags: Long): MethodSymbol =
new MethodSymbol(this, pos, name) with SynchronizedMethodSymbol initFlags newFlags
@@ -118,7 +116,8 @@ private[reflect] trait SynchronizedSymbols extends internal.Symbols { self: Symb
override def name_=(x: Name) = synchronized { super.name_=(x) }
override def rawname = synchronized { super.rawname }
override def typeConstructor: Type = synchronized { super.typeConstructor }
- override def tpe: Type = synchronized { super.tpe }
+ override def tpe_* : Type = synchronized { super.tpe_* }
+ override def tpeHK : Type = synchronized { super.tpeHK }
}
trait SynchronizedClassSymbol extends ClassSymbol with SynchronizedTypeSymbol {
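For illustration only, not part of the patch: the Synchronized* traits above add thread safety by overriding selected members and wrapping the super call in synchronized, stacked onto the base class through mixin composition. A small self-contained sketch of the pattern, with hypothetical names:

object SyncMixin {
  class Counter {
    protected var n = 0
    def increment(): Unit = n += 1
    def value: Int = n
  }

  // Mixin that serializes access without touching the base implementation.
  trait SynchronizedCounter extends Counter {
    override def increment(): Unit = synchronized { super.increment() }
    override def value: Int        = synchronized { super.value }
  }

  def main(args: Array[String]): Unit = {
    val c = new Counter with SynchronizedCounter
    val threads = (1 to 4).map { _ =>
      new Thread(new Runnable { def run(): Unit = (1 to 1000).foreach(_ => c.increment()) })
    }
    threads.foreach(_.start())
    threads.foreach(_.join())
    println(c.value)  // 4000: every increment went through the synchronized override
  }
}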
diff --git a/src/reflect/scala/reflect/runtime/SynchronizedTypes.scala b/src/reflect/scala/reflect/runtime/SynchronizedTypes.scala
index a3e7c28ca4..f4b02c5bcd 100644
--- a/src/reflect/scala/reflect/runtime/SynchronizedTypes.scala
+++ b/src/reflect/scala/reflect/runtime/SynchronizedTypes.scala
@@ -1,4 +1,5 @@
-package scala.reflect
+package scala
+package reflect
package runtime
import scala.collection.mutable.WeakHashMap
diff --git a/src/reflect/scala/reflect/runtime/TwoWayCache.scala b/src/reflect/scala/reflect/runtime/TwoWayCache.scala
index 05debcba65..181ca6014a 100644
--- a/src/reflect/scala/reflect/runtime/TwoWayCache.scala
+++ b/src/reflect/scala/reflect/runtime/TwoWayCache.scala
@@ -1,4 +1,5 @@
-package scala.reflect
+package scala
+package reflect
package runtime
import scala.collection.mutable.WeakHashMap
diff --git a/src/reflect/scala/reflect/runtime/package.scala b/src/reflect/scala/reflect/runtime/package.scala
index b97913daf0..41c1310e17 100644
--- a/src/reflect/scala/reflect/runtime/package.scala
+++ b/src/reflect/scala/reflect/runtime/package.scala
@@ -1,4 +1,5 @@
-package scala.reflect
+package scala
+package reflect
/** Entry points into runtime reflection.
* See [[scala.reflect.api.package the overview page]] for details on how to use them.
@@ -6,7 +7,7 @@ package scala.reflect
package object runtime {
/** The entry point into Scala runtime reflection.
- *
+ *
* To use Scala runtime reflection, simply use or import `scala.reflect.runtime.universe._`
*
* See [[scala.reflect.api.Universe]] or the
@@ -20,7 +21,7 @@ package object runtime {
*/
// implementation hardwired to the `currentMirror` method below
// using the mechanism implemented in `scala.tools.reflect.FastTrack`
- def currentMirror: universe.Mirror = ??? // macro
+ def currentMirror: universe.Mirror = macro ???
}
package runtime {
diff --git a/src/compiler/scala/tools/nsc/Interpreter.scala b/src/repl/scala/tools/nsc/Interpreter.scala
index 434f19f21b..434f19f21b 100644
--- a/src/compiler/scala/tools/nsc/Interpreter.scala
+++ b/src/repl/scala/tools/nsc/Interpreter.scala
diff --git a/src/compiler/scala/tools/nsc/InterpreterLoop.scala b/src/repl/scala/tools/nsc/InterpreterLoop.scala
index a0be3f4fdb..a0be3f4fdb 100644
--- a/src/compiler/scala/tools/nsc/InterpreterLoop.scala
+++ b/src/repl/scala/tools/nsc/InterpreterLoop.scala
diff --git a/src/compiler/scala/tools/nsc/MainGenericRunner.scala b/src/repl/scala/tools/nsc/MainGenericRunner.scala
index e4a20b4a8c..43f0ea1256 100644
--- a/src/compiler/scala/tools/nsc/MainGenericRunner.scala
+++ b/src/repl/scala/tools/nsc/MainGenericRunner.scala
@@ -3,14 +3,12 @@
* @author Lex Spoon
*/
-package scala.tools.nsc
+package scala
+package tools.nsc
-import java.net.URL
-import scala.tools.util.PathResolver
import io.{ File }
import util.{ ClassPath, ScalaClassLoader }
import Properties.{ versionString, copyrightString }
-import interpreter.{ ILoop }
import GenericRunnerCommand._
object JarRunner extends CommonRunner {
@@ -20,7 +18,7 @@ object JarRunner extends CommonRunner {
val jarURLs = ClassPath expandManifestPath jarPath
val urls = if (jarURLs.isEmpty) File(jarPath).toURL +: settings.classpathURLs else jarURLs
- if (settings.Ylogcp.value) {
+ if (settings.Ylogcp) {
Console.err.println("Running jar with these URLs as the classpath:")
urls foreach println
}
@@ -49,7 +47,7 @@ class MainGenericRunner {
def sampleCompiler = new Global(settings) // def so its not created unless needed
if (!command.ok) return errorFn("\n" + command.shortUsageMsg)
- else if (settings.version.value) return errorFn("Scala code runner %s -- %s".format(versionString, copyrightString))
+ else if (settings.version) return errorFn("Scala code runner %s -- %s".format(versionString, copyrightString))
else if (command.shouldStopWithInfo) return errorFn(command getInfoMessage sampleCompiler)
def isE = !settings.execute.isDefault
@@ -80,7 +78,7 @@ class MainGenericRunner {
Right(false)
case _ =>
// We start the repl when no arguments are given.
- Right(new ILoop process settings)
+ Right(new interpreter.ILoop process settings)
}
/** If -e and -i were both given, we want to execute the -e code after the
diff --git a/src/repl/scala/tools/nsc/interpreter/AbstractFileClassLoader.scala b/src/repl/scala/tools/nsc/interpreter/AbstractFileClassLoader.scala
new file mode 100644
index 0000000000..712219533d
--- /dev/null
+++ b/src/repl/scala/tools/nsc/interpreter/AbstractFileClassLoader.scala
@@ -0,0 +1,7 @@
+package scala.tools.nsc
+package interpreter
+
+import scala.reflect.io.AbstractFile
+
+@deprecated("Use `scala.tools.nsc.util.AbstractFileClassLoader`", "2.11.0")
+class AbstractFileClassLoader(root: AbstractFile, parent: ClassLoader) extends util.AbstractFileClassLoader(root, parent)
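An illustrative aside, not part of the patch: the seven-line file above is a deprecation forwarder; it keeps the old scala.tools.nsc.interpreter.AbstractFileClassLoader name compiling while delegating everything to the class at its new location. The same pattern in miniature, with hypothetical Widget names:

object DeprecatedForwarder {
  class NewWidget(val label: String) {
    def render(): String = s"<$label>"
  }

  // Old name kept alive for source compatibility; callers get a deprecation warning.
  @deprecated("Use `NewWidget` instead", "2.0")
  class OldWidget(label: String) extends NewWidget(label)

  def main(args: Array[String]): Unit =
    println(new OldWidget("demo").render())
}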
diff --git a/src/compiler/scala/tools/nsc/interpreter/AbstractOrMissingHandler.scala b/src/repl/scala/tools/nsc/interpreter/AbstractOrMissingHandler.scala
index e66e4eff29..e66e4eff29 100644
--- a/src/compiler/scala/tools/nsc/interpreter/AbstractOrMissingHandler.scala
+++ b/src/repl/scala/tools/nsc/interpreter/AbstractOrMissingHandler.scala
diff --git a/src/compiler/scala/tools/nsc/interpreter/ByteCode.scala b/src/repl/scala/tools/nsc/interpreter/ByteCode.scala
index 40e9d3d600..e1e3678837 100644
--- a/src/compiler/scala/tools/nsc/interpreter/ByteCode.scala
+++ b/src/repl/scala/tools/nsc/interpreter/ByteCode.scala
@@ -7,7 +7,6 @@ package scala.tools.nsc
package interpreter
import java.lang.reflect
-import java.util.concurrent.ConcurrentHashMap
import util.ScalaClassLoader
import ScalaClassLoader.appLoader
import scala.reflect.NameTransformer._
@@ -29,35 +28,5 @@ object ByteCode {
method.invoke(module, _: String).asInstanceOf[Option[Map[String, String]]]
}
- /** Scala sig bytes.
- */
- def scalaSigBytesForPath(path: String) =
- for {
- module <- DECODER
- method <- decoderMethod("scalaSigAnnotationBytes", classOf[String])
- names <- method.invoke(module, path).asInstanceOf[Option[Array[Byte]]]
- }
- yield names
-
- /** Attempts to retrieve case parameter names for given class name.
- */
- def caseParamNamesForPath(path: String) =
- for {
- module <- DECODER
- method <- decoderMethod("caseParamNames", classOf[String])
- names <- method.invoke(module, path).asInstanceOf[Option[List[String]]]
- }
- yield names
-
def aliasesForPackage(pkg: String) = aliasMap flatMap (_(pkg))
-
- /** Attempts to find type aliases in package objects.
- */
- def aliasForType(path: String): Option[String] = {
- val (pkg, name) = (path lastIndexOf '.') match {
- case -1 => return None
- case idx => (path take idx, path drop (idx + 1))
- }
- aliasesForPackage(pkg) flatMap (_ get name)
- }
}
diff --git a/src/compiler/scala/tools/nsc/interpreter/CommandLine.scala b/src/repl/scala/tools/nsc/interpreter/CommandLine.scala
index 8042f0aee2..0ab92ab769 100644
--- a/src/compiler/scala/tools/nsc/interpreter/CommandLine.scala
+++ b/src/repl/scala/tools/nsc/interpreter/CommandLine.scala
@@ -10,5 +10,4 @@ package interpreter
*/
class CommandLine(arguments: List[String], error: String => Unit) extends CompilerCommand(arguments, error) {
override def cmdName = "scala"
- override lazy val fileEndings = List(".scalaint")
}
diff --git a/src/compiler/scala/tools/nsc/interpreter/Completion.scala b/src/repl/scala/tools/nsc/interpreter/Completion.scala
index 1dfccbfbf7..84a5cb49ae 100644
--- a/src/compiler/scala/tools/nsc/interpreter/Completion.scala
+++ b/src/repl/scala/tools/nsc/interpreter/Completion.scala
@@ -23,8 +23,6 @@ object NoCompletion extends Completion {
}
object Completion {
- def empty: Completion = NoCompletion
-
case class Candidates(cursor: Int, candidates: List[String]) { }
val NoCandidates = Candidates(-1, Nil)
diff --git a/src/compiler/scala/tools/nsc/interpreter/CompletionAware.scala b/src/repl/scala/tools/nsc/interpreter/CompletionAware.scala
index ab96f415db..3dd5d93390 100644
--- a/src/compiler/scala/tools/nsc/interpreter/CompletionAware.scala
+++ b/src/repl/scala/tools/nsc/interpreter/CompletionAware.scala
@@ -6,8 +6,6 @@
package scala.tools.nsc
package interpreter
-import scala.reflect.NameTransformer
-
/** An interface for objects which are aware of tab completion and
* will supply their own candidates and resolve their own paths.
*/
@@ -53,31 +51,3 @@ trait CompletionAware {
results.sorted
}
}
-
-object CompletionAware {
- val Empty = new CompletionAware { def completions(verbosity: Int) = Nil }
-
- def unapply(that: Any): Option[CompletionAware] = that match {
- case x: CompletionAware => Some((x))
- case _ => None
- }
-
- /** Create a CompletionAware object from the given functions.
- * The first should generate the list of completions whenever queried,
- * and the second should return Some(CompletionAware) object if
- * subcompletions are possible.
- */
- def apply(terms: () => List[String], followFunction: String => Option[CompletionAware]): CompletionAware =
- new CompletionAware {
- def completions = terms()
- def completions(verbosity: Int) = completions
- override def follow(id: String) = followFunction(id)
- }
-
- /** Convenience factories.
- */
- def apply(terms: () => List[String]): CompletionAware = apply(terms, _ => None)
- def apply(map: scala.collection.Map[String, CompletionAware]): CompletionAware =
- apply(() => map.keys.toList, map.get _)
-}
-
diff --git a/src/compiler/scala/tools/nsc/interpreter/CompletionOutput.scala b/src/repl/scala/tools/nsc/interpreter/CompletionOutput.scala
index d14b5c79e0..d24ad60974 100644
--- a/src/compiler/scala/tools/nsc/interpreter/CompletionOutput.scala
+++ b/src/repl/scala/tools/nsc/interpreter/CompletionOutput.scala
@@ -38,7 +38,6 @@ trait CompletionOutput {
def relativize(str: String): String = quietString(str stripPrefix (pkg + "."))
def relativize(tp: Type): String = relativize(tp.dealiasWiden.toString)
- def relativize(sym: Symbol): String = relativize(sym.info)
def braceList(tparams: List[String]) = if (tparams.isEmpty) "" else (tparams map relativize).mkString("[", ", ", "]")
def parenList(params: List[Any]) = params.mkString("(", ", ", ")")
@@ -76,7 +75,7 @@ trait CompletionOutput {
}
def methodString() =
- method.keyString + " " + method.nameString + (method.info.normalize match {
+ method.keyString + " " + method.nameString + (method.info.dealiasWiden match {
case NullaryMethodType(resType) => ": " + typeToString(resType)
case PolyType(tparams, resType) => tparamsString(tparams) + typeToString(resType)
case mt @ MethodType(_, _) => methodTypeToString(mt)
diff --git a/src/compiler/scala/tools/nsc/interpreter/ConsoleReaderHelper.scala b/src/repl/scala/tools/nsc/interpreter/ConsoleReaderHelper.scala
index 07e36f4f27..48af261937 100644
--- a/src/compiler/scala/tools/nsc/interpreter/ConsoleReaderHelper.scala
+++ b/src/repl/scala/tools/nsc/interpreter/ConsoleReaderHelper.scala
@@ -7,19 +7,12 @@ package scala.tools.nsc
package interpreter
import scala.tools.jline.console.{ ConsoleReader, CursorBuffer }
-import scala.tools.jline.console.completer.CompletionHandler
-import Completion._
trait ConsoleReaderHelper extends ConsoleReader {
- def currentLine = "" + getCursorBuffer.buffer
- def currentPos = getCursorBuffer.cursor
def terminal = getTerminal()
def width = terminal.getWidth()
def height = terminal.getHeight()
- def paginate = isPaginationEnabled()
- def paginate_=(value: Boolean) = setPaginationEnabled(value)
- def goBack(num: Int): Unit
def readOneKey(prompt: String): Int
def eraseLine(): Unit
diff --git a/src/compiler/scala/tools/nsc/interpreter/Delimited.scala b/src/repl/scala/tools/nsc/interpreter/Delimited.scala
index 80debfacb9..e88a044931 100644
--- a/src/compiler/scala/tools/nsc/interpreter/Delimited.scala
+++ b/src/repl/scala/tools/nsc/interpreter/Delimited.scala
@@ -26,7 +26,6 @@ trait Delimited {
def delimited: Char => Boolean
def escapeChars: List[Char] = List('\\')
- def quoteChars: List[(Char, Char)] = List(('\'', '\''), ('"', '"'))
/** Break String into args based on delimiting function.
*/
@@ -39,6 +38,4 @@ trait Delimited {
def isDelimiterChar(ch: Char) = delimited(ch)
def isEscapeChar(ch: Char): Boolean = escapeChars contains ch
- def isQuoteStart(ch: Char): Boolean = quoteChars map (_._1) contains ch
- def isQuoteEnd(ch: Char): Boolean = quoteChars map (_._2) contains ch
}
diff --git a/src/compiler/scala/tools/nsc/interpreter/ExprTyper.scala b/src/repl/scala/tools/nsc/interpreter/ExprTyper.scala
index 827ebe1678..9353215e1e 100644
--- a/src/compiler/scala/tools/nsc/interpreter/ExprTyper.scala
+++ b/src/repl/scala/tools/nsc/interpreter/ExprTyper.scala
@@ -6,7 +6,6 @@
package scala.tools.nsc
package interpreter
-import scala.reflect.internal.util.BatchSourceFile
import scala.tools.nsc.ast.parser.Tokens.EOF
trait ExprTyper {
@@ -14,11 +13,11 @@ trait ExprTyper {
import repl._
import global.{ reporter => _, Import => _, _ }
- import definitions._
- import syntaxAnalyzer.{ UnitParser, UnitScanner, token2name }
+ import syntaxAnalyzer.UnitParser
import naming.freshInternalVarName
- object codeParser extends { val global: repl.global.type = repl.global } with CodeHandlers[Tree] {
+ object codeParser {
+ val global: repl.global.type = repl.global
def applyRule[T](code: String, rule: UnitParser => T): T = {
reporter.reset()
val scanner = newUnitParser(code)
@@ -29,11 +28,7 @@ trait ExprTyper {
result
}
-
- def defns(code: String) = stmts(code) collect { case x: DefTree => x }
- def expr(code: String) = applyRule(code, _.expr())
def stmts(code: String) = applyRule(code, _.templateStats())
- def stmt(code: String) = stmts(code).last // guaranteed nonempty
}
/** Parse a line into a sequence of trees. Returns None if the input is incomplete. */
@@ -46,10 +41,6 @@ trait ExprTyper {
else Some(trees)
}
}
- // def parsesAsExpr(line: String) = {
- // import codeParser._
- // (opt expr line).isDefined
- // }
def symbolOfLine(code: String): Symbol = {
def asExpr(): Symbol = {
@@ -63,7 +54,7 @@ trait ExprTyper {
case IR.Success =>
val sym0 = symbolOfTerm(name)
// drop NullaryMethodType
- sym0.cloneSymbol setInfo afterTyper(sym0.info.finalResultType)
+ sym0.cloneSymbol setInfo exitingTyper(sym0.info.finalResultType)
case _ => NoSymbol
}
}
@@ -104,4 +95,19 @@ trait ExprTyper {
}
finally typeOfExpressionDepth -= 1
}
+
+ // This only works for proper types.
+ def typeOfTypeString(typeString: String): Type = {
+ def asProperType(): Option[Type] = {
+ val name = freshInternalVarName()
+ val line = "def %s: %s = ???" format (name, typeString)
+ interpretSynthetic(line) match {
+ case IR.Success =>
+ val sym0 = symbolOfTerm(name)
+ Some(sym0.asMethod.returnType)
+ case _ => None
+ }
+ }
+ beSilentDuring(asProperType()) getOrElse NoType
+ }
}
diff --git a/src/compiler/scala/tools/nsc/interpreter/Formatting.scala b/src/repl/scala/tools/nsc/interpreter/Formatting.scala
index 43e653edfd..43e653edfd 100644
--- a/src/compiler/scala/tools/nsc/interpreter/Formatting.scala
+++ b/src/repl/scala/tools/nsc/interpreter/Formatting.scala
diff --git a/src/repl/scala/tools/nsc/interpreter/IBindings.java b/src/repl/scala/tools/nsc/interpreter/IBindings.java
new file mode 100644
index 0000000000..b4cee4b957
--- /dev/null
+++ b/src/repl/scala/tools/nsc/interpreter/IBindings.java
@@ -0,0 +1,45 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author Raphael Jolly
+ */
+
+package scala.tools.nsc.interpreter;
+
+import java.util.Map;
+import java.util.AbstractMap;
+import java.util.Set;
+import java.util.AbstractSet;
+import java.util.Iterator;
+import java.util.NoSuchElementException;
+import javax.script.Bindings;
+
+abstract class IBindings extends AbstractMap<String, Object> implements Bindings {
+ public Set<Map.Entry<String, Object>> entrySet() {
+ return new AbstractSet<Map.Entry<String, Object>>() {
+ public int size() {
+ return 0;
+ }
+
+ public Iterator<Map.Entry<String, Object>> iterator() {
+ return new Iterator<Map.Entry<String, Object>>() {
+ public boolean hasNext() {
+ return false;
+ }
+
+ public Map.Entry<String, Object> next() {
+ throw new NoSuchElementException();
+ }
+
+ public void remove() {
+ throw new UnsupportedOperationException();
+ }
+ };
+ }
+
+ public boolean add(Map.Entry<String, Object> e) {
+ IBindings.this.put(e.getKey(), e.getValue());
+ return true;
+ }
+ };
+ }
+}
diff --git a/src/compiler/scala/tools/nsc/interpreter/ILoop.scala b/src/repl/scala/tools/nsc/interpreter/ILoop.scala
index b7e07ecdd6..9f841f2c44 100644
--- a/src/compiler/scala/tools/nsc/interpreter/ILoop.scala
+++ b/src/repl/scala/tools/nsc/interpreter/ILoop.scala
@@ -3,29 +3,25 @@
* @author Alexander Spoon
*/
-package scala.tools.nsc
+package scala
+package tools.nsc
package interpreter
import Predef.{ println => _, _ }
import java.io.{ BufferedReader, FileReader }
-import java.util.concurrent.locks.ReentrantLock
-import scala.sys.process.Process
import session._
-import scala.util.Properties.{ jdkHome, javaVersion }
-import scala.tools.util.{ Javap }
import scala.annotation.tailrec
-import scala.collection.mutable.ListBuffer
-import scala.concurrent.ops
+import scala.util.Properties.{ jdkHome, javaVersion, versionString, javaVmName }
import util.{ ClassPath, Exceptional, stringFromWriter, stringFromStream }
-import interpreter._
import io.{ File, Directory }
-import scala.reflect.NameTransformer._
import util.ScalaClassLoader
import ScalaClassLoader._
import scala.tools.util._
import scala.language.{implicitConversions, existentials}
-import scala.reflect.{ClassTag, classTag}
-import scala.tools.reflect.StdRuntimeTags._
+import scala.reflect.classTag
+import StdReplTags._
+import scala.concurrent.{ ExecutionContext, Await, Future, future }
+import ExecutionContext.Implicits._
/** The Scala interactive shell. It provides a read-eval-print loop
* around the Interpreter class.
@@ -42,77 +38,41 @@ import scala.tools.reflect.StdRuntimeTags._
class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
extends AnyRef
with LoopCommands
- with ILoopInit
{
def this(in0: BufferedReader, out: JPrintWriter) = this(Some(in0), out)
def this() = this(None, new JPrintWriter(Console.out, true))
- var in: InteractiveReader = _ // the input stream from which commands come
- var settings: Settings = _
- var intp: IMain = _
-
@deprecated("Use `intp` instead.", "2.9.0") def interpreter = intp
@deprecated("Use `intp` instead.", "2.9.0") def interpreter_= (i: Interpreter): Unit = intp = i
- /** Having inherited the difficult "var-ness" of the repl instance,
- * I'm trying to work around it by moving operations into a class from
- * which it will appear a stable prefix.
- */
- private def onIntp[T](f: IMain => T): T = f(intp)
-
- class IMainOps[T <: IMain](val intp: T) {
- import intp._
- import global._
-
- def printAfterTyper(msg: => String) =
- intp.reporter printUntruncatedMessage afterTyper(msg)
-
- /** Strip NullaryMethodType artifacts. */
- private def replInfo(sym: Symbol) = {
- sym.info match {
- case NullaryMethodType(restpe) if sym.isAccessor => restpe
- case info => info
- }
- }
- def echoTypeStructure(sym: Symbol) =
- printAfterTyper("" + deconstruct.show(replInfo(sym)))
+ var in: InteractiveReader = _ // the input stream from which commands come
+ var settings: Settings = _
+ var intp: IMain = _
- def echoTypeSignature(sym: Symbol, verbose: Boolean) = {
- if (verbose) ILoop.this.echo("// Type signature")
- printAfterTyper("" + replInfo(sym))
+ private var globalFuture: Future[Boolean] = _
- if (verbose) {
- ILoop.this.echo("\n// Internal Type structure")
- echoTypeStructure(sym)
- }
- }
+ /** Print a welcome message */
+ def printWelcome() {
+ echo(s"""
+ |Welcome to Scala $versionString ($javaVmName, Java $javaVersion).
+ |Type in expressions to have them evaluated.
+ |Type :help for more information.""".trim.stripMargin
+ )
+ replinfo("[info] started at " + new java.util.Date)
}
- implicit def stabilizeIMain(intp: IMain) = new IMainOps[intp.type](intp)
- /** TODO -
- * -n normalize
- * -l label with case class parameter names
- * -c complete - leave nothing out
- */
- private def typeCommandInternal(expr: String, verbose: Boolean): Result = {
- onIntp { intp =>
- val sym = intp.symbolOfLine(expr)
- if (sym.exists) intp.echoTypeSignature(sym, verbose)
- else ""
- }
+ protected def asyncMessage(msg: String) {
+ if (isReplInfo || isReplPower)
+ echoAndRefresh(msg)
}
override def echoCommandMessage(msg: String) {
intp.reporter printUntruncatedMessage msg
}
- def isAsync = !settings.Yreplsync.value
lazy val power = new Power(intp, new StdReplVals(this))(tagOfStdReplVals, classTag[StdReplVals])
def history = in.history
- /** The context class loader at the time this object was created */
- protected val originalClassLoader = Thread.currentThread.getContextClassLoader
-
// classpath entries added via :cp
var addedClasspath: String = ""
@@ -166,20 +126,18 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
def helpCommand(line: String): Result = {
if (line == "") helpSummary()
else uniqueCommand(line) match {
- case Some(lc) => echo("\n" + lc.longHelp)
+ case Some(lc) => echo("\n" + lc.help)
case _ => ambiguousError(line)
}
}
private def helpSummary() = {
val usageWidth = commands map (_.usageMsg.length) max
- val formatStr = "%-" + usageWidth + "s %s %s"
+ val formatStr = "%-" + usageWidth + "s %s"
echo("All commands can be abbreviated, e.g. :he instead of :help.")
- echo("Those marked with a * have more detailed help, e.g. :help imports.\n")
commands foreach { cmd =>
- val star = if (cmd.hasLongHelp) "*" else " "
- echo(formatStr.format(cmd.usageMsg, star, cmd.help))
+ echo(formatStr.format(cmd.usageMsg, cmd.help))
}
}
private def ambiguousError(cmd: String): Result = {
@@ -187,7 +145,7 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
case Nil => echo(cmd + ": no such command. Type :help for help.")
case xs => echo(cmd + " is ambiguous: did you mean " + xs.map(":" + _.name).mkString(" or ") + "?")
}
- Result(true, None)
+ Result(keepRunning = true, None)
}
private def matchingCommands(cmd: String) = commands filter (_.name startsWith cmd)
private def uniqueCommand(cmd: String): Option[LoopCommand] = {
@@ -229,10 +187,6 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
out println msg
out.flush()
}
- protected def echoNoNL(msg: String) = {
- out print msg
- out.flush()
- }
/** Search the history */
def searchHistory(_cmdline: String) {
@@ -243,8 +197,8 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
echo("%d %s".format(index + offset, line))
}
- private var currentPrompt = Properties.shellPromptString
- def setPrompt(prompt: String) = currentPrompt = prompt
+ private val currentPrompt = Properties.shellPromptString
+
/** Prompt to print when awaiting input */
def prompt = currentPrompt
@@ -257,17 +211,18 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
historyCommand,
cmd("h?", "<string>", "search the history", searchHistory),
cmd("imports", "[name name ...]", "show import history, identifying sources of names", importsCommand),
- cmd("implicits", "[-v]", "show the implicits in scope", implicitsCommand),
+ cmd("implicits", "[-v]", "show the implicits in scope", intp.implicitsCommand),
cmd("javap", "<path|class>", "disassemble a file or class name", javapCommand),
cmd("load", "<path>", "load and interpret a Scala file", loadCommand),
nullary("paste", "enter paste mode: all input up to ctrl-D compiled together", pasteCommand),
nullary("power", "enable power user mode", powerCmd),
- nullary("quit", "exit the interpreter", () => Result(false, None)),
+ nullary("quit", "exit the interpreter", () => Result(keepRunning = false, None)),
nullary("replay", "reset execution and replay all previous commands", replay),
nullary("reset", "reset the repl to its initial state, forgetting all session entries", resetCommand),
shCommand,
nullary("silent", "disable/enable automatic printing of results", verbosity),
cmd("type", "[-v] <expr>", "display the type of an expression without evaluating it", typeCommand),
+ cmd("kind", "[-v] <expr>", "display the kind of expression's type", kindCommand),
nullary("warnings", "show the suppressed warnings from the most recent line which had any", warningsCommand)
)
@@ -276,25 +231,9 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
cmd("phase", "<phase>", "set the implicit phase for power commands", phaseCommand)
)
- private def dumpCommand(): Result = {
- echo("" + power)
- history.asStrings takeRight 30 foreach echo
- in.redrawLine()
- }
- private def valsCommand(): Result = power.valsDescription
-
- private val typeTransforms = List(
- "scala.collection.immutable." -> "immutable.",
- "scala.collection.mutable." -> "mutable.",
- "scala.collection.generic." -> "generic.",
- "java.lang." -> "jl.",
- "scala.runtime." -> "runtime."
- )
-
private def importsCommand(line: String): Result = {
val tokens = words(line)
val handlers = intp.languageWildcardHandlers ++ intp.importHandlers
- val isVerbose = tokens contains "-v"
handlers.filterNot(_.importedSymbols.isEmpty).zipWithIndex foreach {
case (handler, idx) =>
@@ -316,75 +255,10 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
}
}
- private def implicitsCommand(line: String): Result = onIntp { intp =>
- import intp._
- import global._
-
- def p(x: Any) = intp.reporter.printMessage("" + x)
+ private def findToolsJar() = PathResolver.SupplementalLocations.platformTools
- // If an argument is given, only show a source with that
- // in its name somewhere.
- val args = line split "\\s+"
- val filtered = intp.implicitSymbolsBySource filter {
- case (source, syms) =>
- (args contains "-v") || {
- if (line == "") (source.fullName.toString != "scala.Predef")
- else (args exists (source.name.toString contains _))
- }
- }
-
- if (filtered.isEmpty)
- return "No implicits have been imported other than those in Predef."
-
- filtered foreach {
- case (source, syms) =>
- p("/* " + syms.size + " implicit members imported from " + source.fullName + " */")
-
- // This groups the members by where the symbol is defined
- val byOwner = syms groupBy (_.owner)
- val sortedOwners = byOwner.toList sortBy { case (owner, _) => afterTyper(source.info.baseClasses indexOf owner) }
-
- sortedOwners foreach {
- case (owner, members) =>
- // Within each owner, we cluster results based on the final result type
- // if there are more than a couple, and sort each cluster based on name.
- // This is really just trying to make the 100 or so implicits imported
- // by default into something readable.
- val memberGroups: List[List[Symbol]] = {
- val groups = members groupBy (_.tpe.finalResultType) toList
- val (big, small) = groups partition (_._2.size > 3)
- val xss = (
- (big sortBy (_._1.toString) map (_._2)) :+
- (small flatMap (_._2))
- )
-
- xss map (xs => xs sortBy (_.name.toString))
- }
-
- val ownerMessage = if (owner == source) " defined in " else " inherited from "
- p(" /* " + members.size + ownerMessage + owner.fullName + " */")
-
- memberGroups foreach { group =>
- group foreach (s => p(" " + intp.symbolDefString(s)))
- p("")
- }
- }
- p("")
- }
- }
-
- private def findToolsJar() = {
- val jdkPath = Directory(jdkHome)
- val jar = jdkPath / "lib" / "tools.jar" toFile;
-
- if (jar isFile)
- Some(jar)
- else if (jdkPath.isDirectory)
- jdkPath.deepFiles find (_.name == "tools.jar")
- else None
- }
private def addToolsJarToLoader() = {
- val cl = findToolsJar match {
+ val cl = findToolsJar() match {
case Some(tools) => ScalaClassLoader.fromURLs(Seq(tools.toURL), intp.classLoader)
case _ => intp.classLoader
}
@@ -398,42 +272,23 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
}
}
- protected def newJavap() = new JavapClass(addToolsJarToLoader(), new IMain.ReplStrippingWriter(intp)) {
- override def tryClass(path: String): Array[Byte] = {
- val hd :: rest = path split '.' toList;
- // If there are dots in the name, the first segment is the
- // key to finding it.
- if (rest.nonEmpty) {
- intp optFlatName hd match {
- case Some(flat) =>
- val clazz = flat :: rest mkString NAME_JOIN_STRING
- val bytes = super.tryClass(clazz)
- if (bytes.nonEmpty) bytes
- else super.tryClass(clazz + MODULE_SUFFIX_STRING)
- case _ => super.tryClass(path)
- }
- }
- else {
- // Look for Foo first, then Foo$, but if Foo$ is given explicitly,
- // we have to drop the $ to find object Foo, then tack it back onto
- // the end of the flattened name.
- def className = intp flatName path
- def moduleName = (intp flatName path.stripSuffix(MODULE_SUFFIX_STRING)) + MODULE_SUFFIX_STRING
-
- val bytes = super.tryClass(className)
- if (bytes.nonEmpty) bytes
- else super.tryClass(moduleName)
- }
- }
- }
+ protected def newJavap() =
+ JavapClass(addToolsJarToLoader(), new IMain.ReplStrippingWriter(intp), Some(intp))
+
private lazy val javap = substituteAndLog[Javap]("javap", NoJavap)(newJavap())
// Still todo: modules.
private def typeCommand(line0: String): Result = {
line0.trim match {
- case "" => ":type [-v] <expression>"
- case s if s startsWith "-v " => typeCommandInternal(s stripPrefix "-v " trim, true)
- case s => typeCommandInternal(s, false)
+ case "" => ":type [-v] <expression>"
+ case s => intp.typeCommandInternal(s stripPrefix "-v " trim, verbose = s startsWith "-v ")
+ }
+ }
+
+ private def kindCommand(expr: String): Result = {
+ expr.trim match {
+ case "" => ":kind [-v] <expression>"
+ case s => intp.kindCommandInternal(s stripPrefix "-v " trim, verbose = s startsWith "-v ")
}
}
@@ -447,8 +302,6 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
private def javapCommand(line: String): Result = {
if (javap == null)
":javap unavailable, no tools.jar at %s. Set JDK_HOME.".format(jdkHome)
- else if (javaVersion startsWith "1.7")
- ":javap not yet working with java 1.7"
else if (line == "")
":javap [-lcsvp] [path1 path2 ...]"
else
@@ -458,37 +311,8 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
}
}
- private def wrapCommand(line: String): Result = {
- def failMsg = "Argument to :wrap must be the name of a method with signature [T](=> T): T"
- onIntp { intp =>
- import intp._
- import global._
-
- words(line) match {
- case Nil =>
- intp.executionWrapper match {
- case "" => "No execution wrapper is set."
- case s => "Current execution wrapper: " + s
- }
- case "clear" :: Nil =>
- intp.executionWrapper match {
- case "" => "No execution wrapper is set."
- case s => intp.clearExecutionWrapper() ; "Cleared execution wrapper."
- }
- case wrapper :: Nil =>
- intp.typeOfExpression(wrapper) match {
- case PolyType(List(targ), MethodType(List(arg), restpe)) =>
- intp setExecutionWrapper intp.pathToTerm(wrapper)
- "Set wrapper to '" + wrapper + "'"
- case tp =>
- failMsg + "\nFound: <unknown>"
- }
- case _ => failMsg
- }
- }
- }
+ private def pathToPhaseWrapper = intp.originalPath("$r") + ".phased.atCurrent"
- private def pathToPhaseWrapper = intp.pathToTerm("$r") + ".phased.atCurrent"
private def phaseCommand(name: String): Result = {
val phased: Phased = power.phased
import phased.NoPhaseName
@@ -547,33 +371,30 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
true
}
+ // return false if repl should exit
+ def processLine(line: String): Boolean = {
+ import scala.concurrent.duration._
+ Await.ready(globalFuture, 60.seconds)
+
+ (line ne null) && (command(line) match {
+ case Result(false, _) => false
+ case Result(_, Some(line)) => addReplay(line) ; true
+ case _ => true
+ })
+ }
+
+ private def readOneLine() = {
+ out.flush()
+ in readLine prompt
+ }
+
/** The main read-eval-print loop for the repl. It calls
* command() for each line of input, and stops when
* command() returns false.
*/
- def loop() {
- def readOneLine() = {
- out.flush()
- in readLine prompt
- }
- // return false if repl should exit
- def processLine(line: String): Boolean = {
- if (isAsync) {
- if (!awaitInitialized()) return false
- runThunks()
- }
- if (line eq null) false // assume null means EOF
- else command(line) match {
- case Result(false, _) => false
- case Result(_, Some(finalLine)) => addReplay(finalLine) ; true
- case _ => true
- }
- }
- def innerLoop() {
- if ( try processLine(readOneLine()) catch crashRecovery )
- innerLoop()
- }
- innerLoop()
+ @tailrec final def loop() {
+ if ( try processLine(readOneLine()) catch crashRecovery )
+ loop()
}
/** interpret all lines from a specified file */
@@ -581,7 +402,7 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
savingReader {
savingReplayStack {
file applyReader { reader =>
- in = SimpleReader(reader, out, false)
+ in = SimpleReader(reader, out, interactive = false)
echo("Loading " + file + "...")
loop()
}
@@ -645,7 +466,7 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
interpretAllFrom(f)
shouldReplay = Some(":load " + arg)
})
- Result(true, shouldReplay)
+ Result(keepRunning = true, shouldReplay)
}
def addClasspath(arg: String): Unit = {
@@ -661,7 +482,7 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
def powerCmd(): Result = {
if (isReplPower) "Already in power mode."
- else enablePowerMode(false)
+ else enablePowerMode(isDuringInit = false)
}
def enablePowerMode(isDuringInit: Boolean) = {
replProps.power setValue true
@@ -698,8 +519,8 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
case _ => ambiguousError(cmd)
}
}
- else if (intp.global == null) Result(false, None) // Notice failure to create compiler
- else Result(true, interpretStartingWith(line))
+ else if (intp.global == null) Result(keepRunning = false, None) // Notice failure to create compiler
+ else Result(keepRunning = true, interpretStartingWith(line))
}
private def readWhile(cond: String => Boolean) = {
@@ -807,10 +628,10 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
* with SimpleReader.
*/
def chooseReader(settings: Settings): InteractiveReader = {
- if (settings.Xnojline.value || Properties.isEmacsShell)
+ if (settings.Xnojline || Properties.isEmacsShell)
SimpleReader()
else try new JLineReader(
- if (settings.noCompletion.value) NoCompletion
+ if (settings.noCompletion) NoCompletion
else new JLineCompletion(intp)
)
catch {
@@ -819,48 +640,39 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
SimpleReader()
}
}
- def process(settings: Settings): Boolean = savingContextLoader {
- this.settings = settings
- createInterpreter()
- // sets in to some kind of reader depending on environmental cues
- in = in0 match {
- case Some(reader) => SimpleReader(reader, out, true)
- case None =>
- // some post-initialization
- chooseReader(settings) match {
- case x: JLineReader => addThunk(x.consoleReader.postInit) ; x
- case x => x
- }
+ private def loopPostInit() {
+ in match {
+ case x: JLineReader => x.consoleReader.postInit
+ case _ =>
}
// Bind intp somewhere out of the regular namespace where
// we can get at it in generated code.
- addThunk(intp.quietBind(NamedParam[IMain]("$intp", intp)(tagOfIMain, classTag[IMain])))
- addThunk({
- import scala.tools.nsc.io._
- import Properties.userHome
- import scala.compat.Platform.EOL
- val autorun = replProps.replAutorunCode.option flatMap (f => io.File(f).safeSlurp())
- if (autorun.isDefined) intp.quietRun(autorun.get)
- })
-
- loadFiles(settings)
- // it is broken on startup; go ahead and exit
- if (intp.reporter.hasErrors)
- return false
-
- // This is about the illusion of snappiness. We call initialize()
- // which spins off a separate thread, then print the prompt and try
- // our best to look ready. The interlocking lazy vals tend to
- // inter-deadlock, so we break the cycle with a single asynchronous
- // message to an actor.
- if (isAsync) {
- intp initialize initializedCallback()
- createAsyncListener() // listens for signal to run postInitialization
+ intp.quietBind(NamedParam[IMain]("$intp", intp)(tagOfIMain, classTag[IMain]))
+ // Auto-run code via some setting.
+ ( replProps.replAutorunCode.option
+ flatMap (f => io.File(f).safeSlurp())
+ foreach (intp quietRun _)
+ )
+ // classloader and power mode setup
+ intp.setContextClassLoader()
+ if (isReplPower) {
+ replProps.power setValue true
+ unleashAndSetPhase()
+ asyncMessage(power.banner)
}
- else {
+ }
+ def process(settings: Settings): Boolean = savingContextLoader {
+ this.settings = settings
+ createInterpreter()
+
+ // sets in to some kind of reader depending on environmental cues
+ in = in0.fold(chooseReader(settings))(r => SimpleReader(r, out, interactive = true))
+ globalFuture = future {
intp.initializeSynchronous()
- postInitialization()
+ loopPostInit()
+ loadFiles(settings)
+ !intp.reporter.hasErrors
}
printWelcome()
@@ -871,27 +683,12 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
true
}
- /** process command-line arguments and do as they request */
- def process(args: Array[String]): Boolean = {
- val command = new CommandLine(args.toList, echo)
- def neededHelp(): String =
- (if (command.settings.help.value) command.usageMsg + "\n" else "") +
- (if (command.settings.Xhelp.value) command.xusageMsg + "\n" else "")
-
- // if they asked for no help and command is valid, we call the real main
- neededHelp() match {
- case "" => command.ok && process(command.settings)
- case help => echoNoNL(help) ; true
- }
- }
-
@deprecated("Use `process` instead", "2.9.0")
- def main(settings: Settings): Unit = process(settings)
+ def main(settings: Settings): Unit = process(settings) //used by sbt
}
object ILoop {
implicit def loopToInterpreter(repl: ILoop): IMain = repl.intp
- private def echo(msg: String) = Console println msg
// Designed primarily for use by test code: take a String with a
// bunch of code, and prints out a transcript of what it would look
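An illustrative aside, not part of the patch: in the reworked ILoop above, initialization runs in a Future, processLine awaits that future before doing real work, and the read-eval loop becomes a @tailrec method that returns once command reports keepRunning = false. A toy, stdin-based sketch of that control flow (all names hypothetical):

import scala.annotation.tailrec
import scala.concurrent.{ Await, Future }
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.duration._

object MiniLoop {
  case class Result(keepRunning: Boolean, lineToRecord: Option[String])

  // Pretend startup work, kicked off eagerly so the prompt can appear immediately.
  val initFuture: Future[Boolean] = Future { Thread.sleep(100); true }

  def command(line: String): Result =
    if (line == ":quit") Result(keepRunning = false, None)
    else { println(s"echo: $line"); Result(keepRunning = true, Some(line)) }

  // Returns false when the loop should exit; a null line is treated as EOF.
  def processLine(line: String): Boolean = {
    Await.ready(initFuture, 60.seconds)
    (line ne null) && command(line).keepRunning
  }

  @tailrec def loop(): Unit =
    if (processLine(scala.io.StdIn.readLine("mini> "))) loop()

  def main(args: Array[String]): Unit = loop()
}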
diff --git a/src/compiler/scala/tools/nsc/interpreter/IMain.scala b/src/repl/scala/tools/nsc/interpreter/IMain.scala
index bed8570bd0..3a71930383 100644
--- a/src/compiler/scala/tools/nsc/interpreter/IMain.scala
+++ b/src/repl/scala/tools/nsc/interpreter/IMain.scala
@@ -3,7 +3,8 @@
* @author Martin Odersky
*/
-package scala.tools.nsc
+package scala
+package tools.nsc
package interpreter
import Predef.{ println => _, _ }
@@ -11,36 +12,25 @@ import util.stringFromWriter
import scala.reflect.internal.util._
import java.net.URL
import scala.sys.BooleanProp
-import io.VirtualDirectory
import scala.tools.nsc.io.AbstractFile
import reporters._
-import symtab.Flags
-import scala.reflect.internal.Names
import scala.tools.util.PathResolver
import scala.tools.nsc.util.ScalaClassLoader
+import scala.tools.nsc.typechecker.{ TypeStrings, StructuredTypeStrings }
import ScalaClassLoader.URLClassLoader
import scala.tools.nsc.util.Exceptional.unwrap
import scala.collection.{ mutable, immutable }
-import scala.util.control.Exception.{ ultimately }
+import scala.reflect.BeanProperty
+import scala.util.Properties.versionString
+import javax.script.{AbstractScriptEngine, Bindings, ScriptContext, ScriptEngine, ScriptEngineFactory, ScriptException, CompiledScript, Compilable}
+import java.io.{ StringWriter, Reader }
+import java.util.Arrays
import IMain._
import java.util.concurrent.Future
-import typechecker.Analyzer
-import scala.language.implicitConversions
import scala.reflect.runtime.{ universe => ru }
import scala.reflect.{ ClassTag, classTag }
-import scala.tools.reflect.StdRuntimeTags._
-
-/** directory to save .class files to */
-private class ReplVirtualDirectory(out: JPrintWriter) extends VirtualDirectory("(memory)", None) {
- private def pp(root: AbstractFile, indentLevel: Int) {
- val spaces = " " * indentLevel
- out.println(spaces + root.name)
- if (root.isDirectory)
- root.toList sortBy (_.name) foreach (x => pp(x, indentLevel + 1))
- }
- // print the contents hierarchically
- def show() = pp(this, 0)
-}
+import StdReplTags._
+import scala.language.implicitConversions
/** An interpreter for Scala code.
*
@@ -74,19 +64,23 @@ private class ReplVirtualDirectory(out: JPrintWriter) extends VirtualDirectory("
* @author Moez A. Abdel-Gawad
* @author Lex Spoon
*/
-class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends Imports {
+class IMain(@BeanProperty val factory: ScriptEngineFactory, initialSettings: Settings, protected val out: JPrintWriter) extends AbstractScriptEngine with Compilable with Imports {
imain =>
- /** Leading with the eagerly evaluated.
- */
- val virtualDirectory: VirtualDirectory = new ReplVirtualDirectory(out) // "directory" for classfiles
- private var currentSettings: Settings = initialSettings
- private[nsc] var printResults = true // whether to print result lines
- private[nsc] var totalSilence = false // whether to print anything
- private var _initializeComplete = false // compiler is initialized
- private var _isInitialized: Future[Boolean] = null // set up initialization future
- private var bindExceptions = true // whether to bind the lastException variable
- private var _executionWrapper = "" // code to be wrapped around all lines
+ setBindings(createBindings, ScriptContext.ENGINE_SCOPE)
+ object replOutput extends ReplOutput(settings.Yreploutdir) { }
+
+ @deprecated("Use replOutput.dir instead", "2.11.0")
+ def virtualDirectory = replOutput.dir
+ // Used in a test case.
+ def showDirectory() = replOutput.show(out)
+
+ private[nsc] var printResults = true // whether to print result lines
+ private[nsc] var totalSilence = false // whether to print anything
+ private var _initializeComplete = false // compiler is initialized
+ private var _isInitialized: Future[Boolean] = null // set up initialization future
+ private var bindExceptions = true // whether to bind the lastException variable
+ private var _executionWrapper = "" // code to be wrapped around all lines
/** We're going to go to some trouble to initialize the compiler asynchronously.
* It's critical that nothing call into it until it's been initialized or we will
@@ -95,23 +89,14 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends
* use a lazy val to ensure that any attempt to use the compiler object waits
* on the future.
*/
- private var _classLoader: AbstractFileClassLoader = null // active classloader
- private val _compiler: Global = newCompiler(settings, reporter) // our private compiler
-
- private val nextReqId = {
- var counter = 0
- () => { counter += 1 ; counter }
- }
+ private var _classLoader: util.AbstractFileClassLoader = null // active classloader
+ private val _compiler: ReplGlobal = newCompiler(settings, reporter) // our private compiler
def compilerClasspath: Seq[URL] = (
if (isInitializeComplete) global.classPath.asURLs
else new PathResolver(settings).result.asURLs // the compiler's classpath
)
- def settings = currentSettings
- def mostRecentLine = prevRequestList match {
- case Nil => ""
- case req :: _ => req.originalLine
- }
+ def settings = initialSettings
// Run the code body with the given boolean settings flipped to true.
def withoutWarnings[T](body: => T): T = beQuietDuring {
val saved = settings.nowarn.value
@@ -123,15 +108,12 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends
}
/** construct an interpreter that reports to Console */
+ def this(settings: Settings, out: JPrintWriter) = this(null, settings, out)
+ def this(factory: ScriptEngineFactory, settings: Settings) = this(factory, settings, new NewLinePrintWriter(new ConsoleWriter, true))
def this(settings: Settings) = this(settings, new NewLinePrintWriter(new ConsoleWriter, true))
+ def this(factory: ScriptEngineFactory) = this(factory, new Settings())
def this() = this(new Settings())
- lazy val repllog: Logger = new Logger {
- val out: JPrintWriter = imain.out
- val isInfo: Boolean = BooleanProp keyExists "scala.repl.info"
- val isDebug: Boolean = BooleanProp keyExists "scala.repl.debug"
- val isTrace: Boolean = BooleanProp keyExists "scala.repl.trace"
- }
lazy val formatting: Formatting = new Formatting {
val prompt = Properties.shellPromptString
}
@@ -153,6 +135,8 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends
catch AbstractOrMissingHandler()
}
private def tquoted(s: String) = "\"\"\"" + s + "\"\"\""
+ private val logScope = scala.sys.props contains "scala.repl.scope"
+ private def scopelog(msg: String) = if (logScope) Console.err.println(msg)
// argument is a thunk to execute after init is done
def initialize(postInitSignal: => Unit) {
@@ -173,29 +157,28 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends
}
def isInitializeComplete = _initializeComplete
- /** the public, go through the future compiler */
lazy val global: Global = {
- if (isInitializeComplete) _compiler
- else {
- // If init hasn't been called yet you're on your own.
- if (_isInitialized == null) {
- repldbg("Warning: compiler accessed before init set up. Assuming no postInit code.")
- initialize(())
- }
- // blocks until it is ; false means catastrophic failure
- if (_isInitialized.get()) _compiler
- else null
- }
+ if (!isInitializeComplete) _initialize()
+ _compiler
}
- @deprecated("Use `global` for access to the compiler instance.", "2.9.0")
- lazy val compiler: global.type = global
import global._
- import definitions.{ScalaPackage, JavaLangPackage, termMember, typeMember}
- import rootMirror.{RootClass, getClassIfDefined, getModuleIfDefined, getRequiredModule, getRequiredClass}
+ import definitions.{ ObjectClass, termMember, dropNullaryMethod}
+
+ lazy val runtimeMirror = ru.runtimeMirror(classLoader)
+
+ private def noFatal(body: => Symbol): Symbol = try body catch { case _: FatalError => NoSymbol }
+
+ def getClassIfDefined(path: String) = (
+ noFatal(runtimeMirror staticClass path)
+ orElse noFatal(rootMirror staticClass path)
+ )
+ def getModuleIfDefined(path: String) = (
+ noFatal(runtimeMirror staticModule path)
+ orElse noFatal(rootMirror staticModule path)
+ )
implicit class ReplTypeOps(tp: Type) {
- def orElse(other: => Type): Type = if (tp ne NoType) tp else other
def andAlso(fn: Type => Type): Type = if (tp eq NoType) tp else fn(tp)
}
@@ -208,10 +191,9 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends
// make sure we don't overwrite their unwisely named res3 etc.
def freshUserTermName(): TermName = {
val name = newTermName(freshUserVarName())
- if (definedNameMap contains name) freshUserTermName()
+ if (replScope containsName name) freshUserTermName()
else name
}
- def isUserTermName(name: Name) = isUserVarName("" + name)
def isInternalTermName(name: Name) = isInternalVarName("" + name)
}
import naming._
@@ -260,11 +242,9 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends
/** Instantiate a compiler. Overridable. */
protected def newCompiler(settings: Settings, reporter: Reporter): ReplGlobal = {
- settings.outputDirs setSingleOutput virtualDirectory
+ settings.outputDirs setSingleOutput replOutput.dir
settings.exposeEmptyPackage.value = true
- new Global(settings, reporter) with ReplGlobal {
- override def toString: String = "<global>"
- }
+ new Global(settings, reporter) with ReplGlobal { override def toString: String = "<global>" }
}
/** Parent classloader. Overridable. */
@@ -293,57 +273,75 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends
if (_classLoader == null)
_classLoader = makeClassLoader()
}
- def classLoader: AbstractFileClassLoader = {
+ def classLoader: util.AbstractFileClassLoader = {
ensureClassLoader()
_classLoader
}
- private class TranslatingClassLoader(parent: ClassLoader) extends AbstractFileClassLoader(virtualDirectory, parent) {
+
+ def backticked(s: String): String = (
+ (s split '.').toList map {
+ case "_" => "_"
+ case s if nme.keywords(newTermName(s)) => s"`$s`"
+ case s => s
+ } mkString "."
+ )
+
+ abstract class PhaseDependentOps {
+ def shift[T](op: => T): T
+
+ def path(name: => Name): String = shift(path(symbolOfName(name)))
+ def path(sym: Symbol): String = backticked(shift(sym.fullName))
+ def sig(sym: Symbol): String = shift(sym.defString)
+ }
+ object typerOp extends PhaseDependentOps {
+ def shift[T](op: => T): T = exitingTyper(op)
+ }
+ object flatOp extends PhaseDependentOps {
+ def shift[T](op: => T): T = exitingFlatten(op)
+ }
+
+ def originalPath(name: String): String = originalPath(name: TermName)
+ def originalPath(name: Name): String = typerOp path name
+ def originalPath(sym: Symbol): String = typerOp path sym
+ def flatPath(sym: Symbol): String = flatOp shift sym.javaClassName
+ def translatePath(path: String) = {
+ val sym = if (path endsWith "$") symbolOfTerm(path.init) else symbolOfIdent(path)
+ sym match {
+ case NoSymbol => None
+ case _ => Some(flatPath(sym))
+ }
+ }
+ def translateEnclosingClass(n: String) = {
+ def enclosingClass(s: Symbol): Symbol =
+ if (s == NoSymbol || s.isClass) s else enclosingClass(s.owner)
+ enclosingClass(symbolOfTerm(n)) match {
+ case NoSymbol => None
+ case c => Some(flatPath(c))
+ }
+ }
+
+ private class TranslatingClassLoader(parent: ClassLoader) extends util.AbstractFileClassLoader(replOutput.dir, parent) {
/** Overridden here to try translating a simple name to the generated
* class name if the original attempt fails. This method is used by
* getResourceAsStream as well as findClass.
*/
- override protected def findAbstractFile(name: String): AbstractFile = {
+ override protected def findAbstractFile(name: String): AbstractFile =
super.findAbstractFile(name) match {
- // deadlocks on startup if we try to translate names too early
- case null if isInitializeComplete =>
- generatedName(name) map (x => super.findAbstractFile(x)) orNull
- case file =>
- file
+ case null => translatePath(name) map (super.findAbstractFile(_)) orNull
+ case file => file
}
- }
}
- private def makeClassLoader(): AbstractFileClassLoader =
+ private def makeClassLoader(): util.AbstractFileClassLoader =
new TranslatingClassLoader(parentClassLoader match {
case null => ScalaClassLoader fromURLs compilerClasspath
case p => new URLClassLoader(compilerClasspath, p)
})
- def getInterpreterClassLoader() = classLoader
-
// Set the current Java "context" class loader to this interpreter's class loader
def setContextClassLoader() = classLoader.setAsContext()
- /** Given a simple repl-defined name, returns the real name of
- * the class representing it, e.g. for "Bippy" it may return
- * {{{
- * $line19.$read$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$Bippy
- * }}}
- */
- def generatedName(simpleName: String): Option[String] = {
- if (simpleName endsWith nme.MODULE_SUFFIX_STRING) optFlatName(simpleName.init) map (_ + nme.MODULE_SUFFIX_STRING)
- else optFlatName(simpleName)
- }
- def flatName(id: String) = optFlatName(id) getOrElse id
- def optFlatName(id: String) = requestForIdent(id) map (_ fullFlatName id)
-
- def allDefinedNames = definedNameMap.keys.toList.sorted
- def pathToType(id: String): String = pathToName(newTypeName(id))
- def pathToTerm(id: String): String = pathToName(newTermName(id))
- def pathToName(name: Name): String = {
- if (definedNameMap contains name)
- definedNameMap(name) fullPath name
- else name.toString
- }
+ def allDefinedNames: List[Name] = exitingTyper(replScope.toList.map(_.name).sorted)
+ def unqualifiedIds: List[String] = allDefinedNames map (_.decode) sorted
/** Most recent tree handled which wasn't wholly synthetic. */
private def mostRecentlyHandledTree: Option[Tree] = {
@@ -356,72 +354,55 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends
None
}
- /** Stubs for work in progress. */
- def handleTypeRedefinition(name: TypeName, old: Request, req: Request) = {
- for (t1 <- old.simpleNameOfType(name) ; t2 <- req.simpleNameOfType(name)) {
- repldbg("Redefining type '%s'\n %s -> %s".format(name, t1, t2))
- }
- }
+ private def updateReplScope(sym: Symbol, isDefined: Boolean) {
+ def log(what: String) {
+ val mark = if (sym.isType) "t " else "v "
+ val name = exitingTyper(sym.nameString)
+ val info = cleanTypeAfterTyper(sym)
+ val defn = sym defStringSeenAs info
- def handleTermRedefinition(name: TermName, old: Request, req: Request) = {
- for (t1 <- old.compilerTypeOf get name ; t2 <- req.compilerTypeOf get name) {
- // Printing the types here has a tendency to cause assertion errors, like
- // assertion failed: fatal: <refinement> has owner value x, but a class owner is required
- // so DBG is by-name now to keep it in the family. (It also traps the assertion error,
- // but we don't want to unnecessarily risk hosing the compiler's internal state.)
- repldbg("Redefining term '%s'\n %s -> %s".format(name, t1, t2))
+ scopelog(f"[$mark$what%6s] $name%-25s $defn%s")
}
+ if (ObjectClass isSubClass sym.owner) return
+ // unlink previous
+ replScope lookupAll sym.name foreach { sym =>
+ log("unlink")
+ replScope unlink sym
+ }
+ val what = if (isDefined) "define" else "import"
+ log(what)
+ replScope enter sym
}
def recordRequest(req: Request) {
- if (req == null || referencedNameMap == null)
+ if (req == null)
return
prevRequests += req
- req.referencedNames foreach (x => referencedNameMap(x) = req)
// warning about serially defining companions. It'd be easy
// enough to just redefine them together but that may not always
// be what people want so I'm waiting until I can do it better.
- for {
- name <- req.definedNames filterNot (x => req.definedNames contains x.companionName)
- oldReq <- definedNameMap get name.companionName
- newSym <- req.definedSymbols get name
- oldSym <- oldReq.definedSymbols get name.companionName
- if Seq(oldSym, newSym).permutations exists { case Seq(s1, s2) => s1.isClass && s2.isModule }
- } {
- afterTyper(replwarn(s"warning: previously defined $oldSym is not a companion to $newSym."))
- replwarn("Companions must be defined together; you may wish to use :paste mode for this.")
- }
-
- // Updating the defined name map
- req.definedNames foreach { name =>
- if (definedNameMap contains name) {
- if (name.isTypeName) handleTypeRedefinition(name.toTypeName, definedNameMap(name), req)
- else handleTermRedefinition(name.toTermName, definedNameMap(name), req)
+ exitingTyper {
+ req.defines filterNot (s => req.defines contains s.companionSymbol) foreach { newSym =>
+ val oldSym = replScope lookup newSym.name.companionName
+ if (Seq(oldSym, newSym).permutations exists { case Seq(s1, s2) => s1.isClass && s2.isModule }) {
+ replwarn(s"warning: previously defined $oldSym is not a companion to $newSym.")
+ replwarn("Companions must be defined together; you may wish to use :paste mode for this.")
+ }
}
- definedNameMap(name) = req
+ }
+ exitingTyper {
+ req.imports foreach (sym => updateReplScope(sym, isDefined = false))
+ req.defines foreach (sym => updateReplScope(sym, isDefined = true))
}
}
private[nsc] def replwarn(msg: => String) {
- if (!settings.nowarnings.value)
+ if (!settings.nowarnings)
printMessage(msg)
}
- def isParseable(line: String): Boolean = {
- beSilentDuring {
- try parse(line) match {
- case Some(xs) => xs.nonEmpty // parses as-is
- case None => true // incomplete
- }
- catch { case x: Exception => // crashed the compiler
- replwarn("Exception in isParseable(\"" + line + "\"): " + x)
- false
- }
- }
- }
-
def compileSourcesKeepingRun(sources: SourceFile*) = {
val run = new Run()
reporter.reset()
@@ -448,18 +429,6 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends
executingRequest
}
- // rewriting "5 // foo" to "val x = { 5 // foo }" creates broken code because
- // the close brace is commented out. Strip single-line comments.
- // ... but for error message output reasons this is not used, and rather than
- // enclosing in braces it is constructed like "val x =\n5 // foo".
- private def removeComments(line: String): String = {
- showCodeIfDebugging(line) // as we're about to lose our // show
- line.lines map (s => s indexOf "//" match {
- case -1 => s
- case idx => s take idx
- }) mkString "\n"
- }
-
private def safePos(t: Tree, alt: Int): Int =
try t.pos.startOrPoint
catch { case _: UnsupportedOperationException => alt }
@@ -551,8 +520,8 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends
Right(buildRequest(line, trees))
}
- // normalize non-public types so we don't see protected aliases like Self
- def normalizeNonPublic(tp: Type) = tp match {
+ // dealias non-public types so we don't see protected aliases like Self
+ def dealiasNonPublic(tp: Type) = tp match {
case TypeRef(_, sym, _) if sym.isAliasType && !sym.isPublic => tp.dealias
case _ => tp
}
@@ -565,11 +534,92 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends
* The return value is whether the line was interpreted successfully,
* e.g. that there were no parse errors.
*/
- def interpret(line: String): IR.Result = interpret(line, false)
- def interpretSynthetic(line: String): IR.Result = interpret(line, true)
- def interpret(line: String, synthetic: Boolean): IR.Result = {
- def loadAndRunReq(req: Request) = {
- classLoader.setAsContext()
+ def interpret(line: String): IR.Result = interpret(line, synthetic = false)
+ def interpretSynthetic(line: String): IR.Result = interpret(line, synthetic = true)
+ def interpret(line: String, synthetic: Boolean): IR.Result = compile(line, synthetic) match {
+ case Left(result) => result
+ case Right(req) => new WrappedRequest(req).loadAndRunReq
+ }
+
+ private def compile(line: String, synthetic: Boolean): Either[IR.Result, Request] = {
+ if (global == null) Left(IR.Error)
+ else requestFromLine(line, synthetic) match {
+ case Left(result) => Left(result)
+ case Right(req) =>
+ // null indicates a disallowed statement type; otherwise compile and
+ // fail if false (implying e.g. a type error)
+ if (req == null || !req.compile) Left(IR.Error) else Right(req)
+ }
+ }
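
A minimal caller-side sketch of the interpret/compile split, assuming the
single-Settings convenience constructor and a usejavacp classpath; the names and
inputs below are illustrative, not taken from this patch:

    import scala.tools.nsc.Settings
    import scala.tools.nsc.interpreter.{ IMain, Results => IR }

    val settings = new Settings
    settings.usejavacp.value = true
    val intp = new IMain(settings)

    intp.interpret("val x = 1")   // expected IR.Success; the reporter echoes "x: Int = 1"
    intp.interpret("val y = ")    // expected IR.Incomplete; nothing is compiled
    intp.interpret("zzz")         // expected IR.Error; "not found: value zzz" is reported
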
+
+ var code = ""
+ var bound = false
+ @throws(classOf[ScriptException])
+ def compile(script: String): CompiledScript = {
+ if (!bound) {
+ quietBind("engine" -> this.asInstanceOf[ScriptEngine])
+ bound = true
+ }
+ val cat = code + script
+ compile(cat, false) match {
+ case Left(result) => result match {
+ case IR.Incomplete => {
+ code = cat + "\n"
+ new CompiledScript {
+ def eval(context: ScriptContext): Object = null
+ def getEngine: ScriptEngine = IMain.this
+ }
+ }
+ case _ => {
+ code = ""
+ throw new ScriptException("compile-time error")
+ }
+ }
+ case Right(req) => {
+ code = ""
+ new WrappedRequest(req)
+ }
+ }
+ }
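
Continuing that sketch on the javax.script side: an IR.Incomplete result hands back a
placeholder CompiledScript and the text is buffered, so a later call can complete it;
the class body here is illustrative:

    val half  = intp.compile("class C {")  // incomplete: eval of this placeholder yields null
    val whole = intp.compile("}")          // compiles the buffered "class C {" plus "}"
    whole.eval()                           // runs the request; a pure definition evaluates to null
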
+
+ @throws(classOf[ScriptException])
+ def compile(reader: Reader): CompiledScript = {
+ val writer = new StringWriter()
+ var c = reader.read()
+ while(c != -1) {
+ writer.write(c)
+ c = reader.read()
+ }
+ reader.close()
+ compile(writer.toString())
+ }
+
+ private class WrappedRequest(val req: Request) extends CompiledScript {
+ var recorded = false
+
+ /** In Java we would have to wrap any checked exception in the declared
+ * ScriptException. Runtime exceptions and errors would be ok and would
+ * not need to be caught. So let us do the same in Scala: catch and
+ * wrap any checked exception, and let runtime exceptions and errors
+ * escape. We could have wrapped runtime exceptions in ScriptException
+ * just like other exceptions; leaving them unwrapped is a deliberate choice.
+ */
+ @throws(classOf[ScriptException])
+ def eval(context: ScriptContext): Object = {
+ val result = req.lineRep.evalEither match {
+ case Left(e: RuntimeException) => throw e
+ case Left(e: Exception) => throw new ScriptException(e)
+ case Left(e) => throw e
+ case Right(result) => result.asInstanceOf[Object]
+ }
+ if (!recorded) {
+ recordRequest(req)
+ recorded = true
+ }
+ result
+ }
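
A hedged sketch of what that contract means for a caller, where compiledScript and
context stand in for any CompiledScript and ScriptContext:

    try compiledScript.eval(context)
    catch {
      case se: javax.script.ScriptException =>
        println("checked exception from the script, wrapped: " + se.getCause)
      // RuntimeExceptions and Errors are not wrapped here and simply propagate
    }
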
+
+ def loadAndRunReq = classLoader.asContext {
val (result, succeeded) = req.loadAndRun
/** To our displeasure, ConsoleReporter offers only printMessage,
@@ -594,15 +644,7 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends
}
}
- if (global == null) IR.Error
- else requestFromLine(line, synthetic) match {
- case Left(result) => result
- case Right(req) =>
- // null indicates a disallowed statement type; otherwise compile and
- // fail if false (implying e.g. a type error)
- if (req == null || !req.compile) IR.Error
- else loadAndRunReq(req)
- }
+ def getEngine: ScriptEngine = IMain.this
}
/** Bind a specified name to a specified value. The name may
@@ -615,7 +657,7 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends
*/
def bind(name: String, boundType: String, value: Any, modifiers: List[String] = Nil): IR.Result = {
val bindRep = new ReadEvalPrint()
- val run = bindRep.compile("""
+ bindRep.compile("""
|object %s {
| var value: %s = _
| def set(x: Any) = value = x.asInstanceOf[%s]
@@ -645,24 +687,15 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends
def rebind(p: NamedParam): IR.Result = {
val name = p.name
- val oldType = typeOfTerm(name) orElse { return IR.Error }
val newType = p.tpe
val tempName = freshInternalVarName()
quietRun("val %s = %s".format(tempName, name))
quietRun("val %s = %s.asInstanceOf[%s]".format(name, tempName, newType))
}
- def quietImport(ids: String*): IR.Result = beQuietDuring(addImports(ids: _*))
- def addImports(ids: String*): IR.Result =
- if (ids.isEmpty) IR.Success
- else interpret("import " + ids.mkString(", "))
-
def quietBind(p: NamedParam): IR.Result = beQuietDuring(bind(p))
def bind(p: NamedParam): IR.Result = bind(p.name, p.tpe, p.value)
def bind[T: ru.TypeTag : ClassTag](name: String, value: T): IR.Result = bind((name, value))
- def bindSyntheticValue(x: Any): IR.Result = bindValue(freshInternalVarName(), x)
- def bindValue(x: Any): IR.Result = bindValue(freshUserVarName(), x)
- def bindValue(name: String, x: Any): IR.Result = bind(name, TypeStrings.fromValue(x), x)
/** Reset this interpreter, forgetting all user-specified requests. */
def reset() {
@@ -670,9 +703,8 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends
resetClassLoader()
resetAllCreators()
prevRequests.clear()
- referencedNameMap.clear()
- definedNameMap.clear()
- virtualDirectory.clear()
+ resetReplScope()
+ replOutput.dir.clear()
}
/** This instance is no longer needed, so release any resources
@@ -693,10 +725,6 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends
class ReadEvalPrint(lineId: Int) {
def this() = this(freshLineId())
- private var lastRun: Run = _
- private var evalCaught: Option[Throwable] = None
- private var conditionalWarnings: List[ConditionalWarning] = Nil
-
val packageName = sessionNames.line + lineId
val readName = sessionNames.read
val evalName = sessionNames.eval
@@ -723,7 +751,6 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends
def readPath = pathTo(readName)
def evalPath = pathTo(evalName)
- def printPath = pathTo(printName)
def call(name: String, args: Any*): AnyRef = {
val m = evalMethod(name)
@@ -738,10 +765,6 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends
try Right(call(name, args: _*))
catch { case ex: Throwable => Left(ex) }
- def callOpt(name: String, args: Any*): Option[AnyRef] =
- try Some(call(name, args: _*))
- catch { case ex: Throwable => bindError(ex) ; None }
-
class EvalException(msg: String, cause: Throwable) extends RuntimeException(msg, cause) { }
private def evalError(path: String, ex: Throwable) =
@@ -753,9 +776,13 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends
}
lazy val evalClass = load(evalPath)
- lazy val evalValue = callEither(resultName) match {
- case Left(ex) => evalCaught = Some(ex) ; None
- case Right(result) => Some(result)
+
+ def evalEither = callEither(resultName) match {
+ case Left(ex) => ex match {
+ case ex: NullPointerException => Right(null)
+ case ex => Left(unwrap(ex))
+ }
+ case Right(result) => Right(result)
}
def compile(source: String): Boolean = compileAndSaveRun("<console>", source)
@@ -764,10 +791,10 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends
* following accessPath into the outer one.
*/
def resolvePathToSymbol(accessPath: String): Symbol = {
- val readRoot = getRequiredModule(readPath) // the outermost wrapper
+ val readRoot = getModuleIfDefined(readPath) // the outermost wrapper
(accessPath split '.').foldLeft(readRoot: Symbol) {
case (sym, "") => sym
- case (sym, name) => afterTyper(termMember(sym, name))
+ case (sym, name) => exitingTyper(termMember(sym, name))
}
}
/** We get a bunch of repeated warnings for reasons I haven't
@@ -793,6 +820,7 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends
mostRecentWarnings = warnings
}
private def evalMethod(name: String) = evalClass.getMethods filter (_.getName == name) match {
+ case Array() => null
case Array(method) => method
case xs => sys.error("Internal error: eval object " + evalClass + ", " + xs.mkString("\n", "\n", ""))
}
@@ -800,15 +828,16 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends
showCodeIfDebugging(code)
val (success, run) = compileSourcesKeepingRun(new BatchSourceFile(label, packaged(code)))
updateRecentWarnings(run)
- lastRun = run
success
}
}
/** One line of code submitted by the user for interpretation */
- // private
class Request(val line: String, val trees: List[Tree]) {
- val reqId = nextReqId()
+ def defines = defHandlers flatMap (_.definedSymbols)
+ def imports = importedSymbols
+ def value = Some(handlers.last) filter (h => h.definesValue) map (h => definedSymbols(h.definesTerm.get)) getOrElse NoSymbol
+
val lineRep = new ReadEvalPrint()
private var _originalLine: String = null
@@ -819,50 +848,31 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends
val handlers: List[MemberHandler] = trees map (memberHandlers chooseHandler _)
def defHandlers = handlers collect { case x: MemberDefHandler => x }
- /** all (public) names defined by these statements */
- val definedNames = handlers flatMap (_.definedNames)
-
/** list of names used by this expression */
val referencedNames: List[Name] = handlers flatMap (_.referencedNames)
/** def and val names */
def termNames = handlers flatMap (_.definesTerm)
def typeNames = handlers flatMap (_.definesType)
- def definedOrImported = handlers flatMap (_.definedOrImported)
- def definedSymbolList = defHandlers flatMap (_.definedSymbols)
-
- def definedTypeSymbol(name: String) = definedSymbols(newTypeName(name))
- def definedTermSymbol(name: String) = definedSymbols(newTermName(name))
+ def importedSymbols = handlers flatMap {
+ case x: ImportHandler => x.importedSymbols
+ case _ => Nil
+ }
/** Code to import bound names from previous lines - accessPath is code to
* append to objectName to access anything bound by request.
*/
val ComputedImports(importsPreamble, importsTrailer, accessPath) =
- importsCode(referencedNames.toSet)
-
- /** Code to access a variable with the specified name */
- def fullPath(vname: String) = (
- lineRep.readPath + accessPath + ".`%s`".format(vname)
- )
- /** Same as fullpath, but after it has been flattened, so:
- * $line5.$iw.$iw.$iw.Bippy // fullPath
- * $line5.$iw$$iw$$iw$Bippy // fullFlatName
- */
- def fullFlatName(name: String) =
- lineRep.readPath + accessPath.replace('.', '$') + nme.NAME_JOIN_STRING + name
-
- /** The unmangled symbol name, but supplemented with line info. */
- def disambiguated(name: Name): String = name + " (in " + lineRep + ")"
-
- /** Code to access a variable with the specified name */
- def fullPath(vname: Name): String = fullPath(vname.toString)
+ exitingTyper(importsCode(referencedNames.toSet))
/** the line of code to compute */
def toCompute = line
+ def fullPath(vname: String) = s"${lineRep.readPath}$accessPath.`$vname`"
+
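
A hedged illustration of the path shapes involved; the line number and wrapper depth
are made up:

    // lineRep.readPath  ==  "$line5.$read"
    // accessPath        ==  ".$iw.$iw"
    // fullPath("x")     ==  "$line5.$read.$iw.$iw.`x`"
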
/** generate the source code for the object that computes this request */
private object ObjectSourceCode extends CodeAssembler[MemberHandler] {
- def path = pathToTerm("$intp")
+ def path = originalPath("$intp")
def envLines = {
if (!isReplPower) Nil // power mode only for now
// $intp is not bound; punt, but include the line.
@@ -872,8 +882,7 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends
)
else List(
"def $line = " + tquoted(originalLine),
- "def $req = %s.requestForReqId(%s).orNull".format(path, reqId),
- "def $trees = if ($req eq null) Nil else $req.trees".format(lineRep.readName, path, reqId)
+ "def $trees = Nil"
)
}
@@ -889,19 +898,16 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends
/** We only want to generate this code when the result
* is a value which can be referred to as-is.
*/
- val evalResult =
- if (!handlers.last.definesValue) ""
- else handlers.last.definesTerm match {
- case Some(vname) if typeOf contains vname =>
- "lazy val %s = %s".format(lineRep.resultName, fullPath(vname))
- case _ => ""
- }
+ val evalResult = Request.this.value match {
+ case NoSymbol => ""
+ case sym => "lazy val %s = %s".format(lineRep.resultName, originalPath(sym))
+ }
// first line evaluates object to make sure constructor is run
// initial "" so later code can uniformly be: + etc
val preamble = """
|object %s {
| %s
- | val %s: String = %s {
+ | lazy val %s: String = %s {
| %s
| (""
""".stripMargin.format(
@@ -917,15 +923,6 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends
val generate = (m: MemberHandler) => m resultExtractionCode Request.this
}
- // get it
- def getEvalTyped[T] : Option[T] = getEval map (_.asInstanceOf[T])
- def getEval: Option[AnyRef] = {
- // ensure it has been compiled
- compile
- // try to load it and call the value method
- lineRep.evalValue filterNot (_ == null)
- }
-
/** Compile the object file. Returns whether the compilation succeeded.
* If all goes well, the "types" map is computed. */
lazy val compile: Boolean = {
@@ -944,7 +941,7 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends
val name = dh.member.name
definedSymbols get name foreach { sym =>
dh.member setSymbol sym
- repldbg("Set symbol of " + name + " to " + sym.defString)
+ repldbg("Set symbol of " + name + " to " + symbolDefString(sym))
}
}
@@ -954,11 +951,10 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends
}
lazy val resultSymbol = lineRep.resolvePathToSymbol(accessPath)
- def applyToResultMember[T](name: Name, f: Symbol => T) = afterTyper(f(resultSymbol.info.nonPrivateDecl(name)))
+ def applyToResultMember[T](name: Name, f: Symbol => T) = exitingTyper(f(resultSymbol.info.nonPrivateDecl(name)))
/* typeOf lookup with encoding */
def lookupTypeOf(name: Name) = typeOf.getOrElse(name, typeOf(global.encode(name.toString)))
- def simpleNameOfType(name: TypeName) = (compilerTypeOf get name) map (_.typeSymbol.simpleName)
private def typeMap[T](f: Type => T) =
mapFrom[Name, Name, T](termNames ++ typeNames)(x => f(cleanMemberDecl(resultSymbol, x)))
@@ -966,11 +962,8 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends
/** Types of variables defined by this request. */
lazy val compilerTypeOf = typeMap[Type](x => x) withDefaultValue NoType
/** String representations of same. */
- lazy val typeOf = typeMap[String](tp => afterTyper(tp.toString))
+ lazy val typeOf = typeMap[String](tp => exitingTyper(tp.toString))
- // lazy val definedTypes: Map[Name, Type] = {
- // typeNames map (x => x -> afterTyper(resultSymbol.info.nonPrivateDecl(x).tpe)) toMap
- // }
lazy val definedSymbols = (
termNames.map(x => x -> applyToResultMember(x, x => x)) ++
typeNames.map(x => x -> compilerTypeOf(x).typeSymbolDirect)
@@ -987,6 +980,27 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends
override def toString = "Request(line=%s, %s trees)".format(line, trees.size)
}
+ def createBindings: Bindings = new IBindings {
+ override def put(name: String, value: Object): Object = {
+ val n = name.indexOf(":")
+ val p: NamedParam = if (n < 0) (name, value) else {
+ val nme = name.substring(0, n).trim
+ val tpe = name.substring(n + 1).trim
+ NamedParamClass(nme, tpe, value)
+ }
+ if (!p.name.startsWith("javax.script")) bind(p)
+ null
+ }
+ }
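
A hedged usage sketch of the "name: Type" convention parsed by put; the binding names
are illustrative:

    val bindings = intp.createBindings
    bindings.put("answer: Int", Int.box(42))  // typed: parsed into NamedParamClass("answer", "Int", 42)
    bindings.put("label", "hello")            // no colon: the NamedParam infers a type from the value
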
+
+ @throws(classOf[ScriptException])
+ def eval(script: String, context: ScriptContext): Object = compile(script).eval(context)
+
+ @throws(classOf[ScriptException])
+ def eval(reader: Reader, context: ScriptContext): Object = compile(reader).eval(context)
+
+ override def finalize = close
+
/** Returns the name of the most recent interpreter result.
* Mostly this exists so you can conveniently invoke methods on
* the previous result.
@@ -1003,45 +1017,48 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends
private var mostRecentWarnings: List[(global.Position, String)] = Nil
def lastWarnings = mostRecentWarnings
- def treesForRequestId(id: Int): List[Tree] =
- requestForReqId(id).toList flatMap (_.trees)
-
- def requestForReqId(id: Int): Option[Request] =
- if (executingRequest != null && executingRequest.reqId == id) Some(executingRequest)
- else prevRequests find (_.reqId == id)
+ private lazy val importToGlobal = global mkImporter ru
+ private lazy val importToRuntime = ru mkImporter global
+ private lazy val javaMirror = ru.rootMirror match {
+ case x: ru.JavaMirror => x
+ case _ => null
+ }
+ private implicit def importFromRu(sym: ru.Symbol): Symbol = importToGlobal importSymbol sym
+ private implicit def importToRu(sym: Symbol): ru.Symbol = importToRuntime importSymbol sym
- def requestForName(name: Name): Option[Request] = {
- assert(definedNameMap != null, "definedNameMap is null")
- definedNameMap get name
+ def classOfTerm(id: String): Option[JClass] = symbolOfTerm(id) match {
+ case NoSymbol => None
+ case sym => Some(javaMirror runtimeClass importToRu(sym).asClass)
}
- def requestForIdent(line: String): Option[Request] =
- requestForName(newTermName(line)) orElse requestForName(newTypeName(line))
+ def typeOfTerm(id: String): Type = symbolOfTerm(id).tpe
- def requestHistoryForName(name: Name): List[Request] =
- prevRequests.toList.reverse filter (_.definedNames contains name)
+ def valueOfTerm(id: String): Option[Any] = exitingTyper {
+ def value() = {
+ val sym0 = symbolOfTerm(id)
+ val sym = (importToRuntime importSymbol sym0).asTerm
+ val module = runtimeMirror.reflectModule(sym.owner.companionSymbol.asModule).instance
+ val module1 = runtimeMirror.reflect(module)
+ val invoker = module1.reflectField(sym)
- def definitionForName(name: Name): Option[MemberHandler] =
- requestForName(name) flatMap { req =>
- req.handlers find (_.definedNames contains name)
+ invoker.get
}
- def valueOfTerm(id: String): Option[AnyRef] =
- requestForName(newTermName(id)) flatMap (_.getEval)
-
- def classOfTerm(id: String): Option[JClass] =
- valueOfTerm(id) map (_.getClass)
-
- def typeOfTerm(id: String): Type = newTermName(id) match {
- case nme.ROOTPKG => RootClass.tpe
- case name => requestForName(name).fold(NoType: Type)(_ compilerTypeOf name)
+ try Some(value()) catch { case _: Exception => None }
}
- def symbolOfType(id: String): Symbol =
- requestForName(newTypeName(id)).fold(NoSymbol: Symbol)(_ definedTypeSymbol id)
+ /** It's a bit of a shotgun approach, but for now we will gain in
+ * robustness. Try a symbol-producing operation at phase typer, and
+ * if that is NoSymbol, try again at phase flatten. I'll be able to
+ * lose this and run only from exitingTyper as soon as I figure out
+ * exactly where a flat name is sneaking in when calculating imports.
+ */
+ def tryTwice(op: => Symbol): Symbol = exitingTyper(op) orElse exitingFlatten(op)
- def symbolOfTerm(id: String): Symbol =
- requestForIdent(newTermName(id)).fold(NoSymbol: Symbol)(_ definedTermSymbol id)
+ def symbolOfIdent(id: String): Symbol = symbolOfType(id) orElse symbolOfTerm(id)
+ def symbolOfType(id: String): Symbol = tryTwice(replScope lookup (id: TypeName))
+ def symbolOfTerm(id: String): Symbol = tryTwice(replScope lookup (id: TermName))
+ def symbolOfName(id: Name): Symbol = replScope lookup id
def runtimeClassAndTypeOfTerm(id: String): Option[(JClass, Type)] = {
classOfTerm(id) flatMap { clazz =>
@@ -1062,14 +1079,18 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends
else NoType
}
}
- def cleanMemberDecl(owner: Symbol, member: Name): Type = afterTyper {
- normalizeNonPublic {
- owner.info.nonPrivateDecl(member).tpe match {
- case NullaryMethodType(tp) => tp
- case tp => tp
- }
- }
+
+ def cleanTypeAfterTyper(sym: => Symbol): Type = {
+ exitingTyper(
+ dealiasNonPublic(
+ dropNullaryMethod(
+ sym.tpe_*
+ )
+ )
+ )
}
+ def cleanMemberDecl(owner: Symbol, member: Name): Type =
+ cleanTypeAfterTyper(owner.info nonPrivateDecl member)
object exprTyper extends {
val repl: IMain.this.type = imain
@@ -1083,64 +1104,35 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends
def typeOfExpression(expr: String, silent: Boolean = true): Type =
exprTyper.typeOfExpression(expr, silent)
- protected def onlyTerms(xs: List[Name]) = xs collect { case x: TermName => x }
- protected def onlyTypes(xs: List[Name]) = xs collect { case x: TypeName => x }
+ protected def onlyTerms(xs: List[Name]): List[TermName] = xs collect { case x: TermName => x }
+ protected def onlyTypes(xs: List[Name]): List[TypeName] = xs collect { case x: TypeName => x }
def definedTerms = onlyTerms(allDefinedNames) filterNot isInternalTermName
def definedTypes = onlyTypes(allDefinedNames)
- def definedSymbols = prevRequestList.flatMap(_.definedSymbols.values).toSet[Symbol]
- def definedSymbolList = prevRequestList flatMap (_.definedSymbolList) filterNot (s => isInternalTermName(s.name))
+ def definedSymbolList = prevRequestList flatMap (_.defines) filterNot (s => isInternalTermName(s.name))
// Terms with user-given names (i.e. not res0 and not synthetic)
def namedDefinedTerms = definedTerms filterNot (x => isUserVarName("" + x) || directlyBoundNames(x))
- private def findName(name: Name) = definedSymbols find (_.name == name) getOrElse NoSymbol
-
- /** Translate a repl-defined identifier into a Symbol.
- */
- def apply(name: String): Symbol =
- types(name) orElse terms(name)
-
- def types(name: String): Symbol = {
- val tpname = newTypeName(name)
- findName(tpname) orElse getClassIfDefined(tpname)
- }
- def terms(name: String): Symbol = {
- val termname = newTypeName(name)
- findName(termname) orElse getModuleIfDefined(termname)
+ private var _replScope: Scope = _
+ private def resetReplScope() {
+ _replScope = newScope
}
- // [Eugene to Paul] possibly you could make use of TypeTags here
- def types[T: ClassTag] : Symbol = types(classTag[T].runtimeClass.getName)
- def terms[T: ClassTag] : Symbol = terms(classTag[T].runtimeClass.getName)
- def apply[T: ClassTag] : Symbol = apply(classTag[T].runtimeClass.getName)
+ def replScope = {
+ if (_replScope eq null)
+ _replScope = newScope
- def classSymbols = allDefSymbols collect { case x: ClassSymbol => x }
- def methodSymbols = allDefSymbols collect { case x: MethodSymbol => x }
+ _replScope
+ }
- /** the previous requests this interpreter has processed */
private var executingRequest: Request = _
private val prevRequests = mutable.ListBuffer[Request]()
- private val referencedNameMap = mutable.Map[Name, Request]()
- private val definedNameMap = mutable.Map[Name, Request]()
private val directlyBoundNames = mutable.Set[Name]()
- def allHandlers = prevRequestList flatMap (_.handlers)
- def allDefHandlers = allHandlers collect { case x: MemberDefHandler => x }
- def allDefSymbols = allDefHandlers map (_.symbol) filter (_ ne NoSymbol)
-
- def lastRequest = if (prevRequests.isEmpty) null else prevRequests.last
- def prevRequestList = prevRequests.toList
- def allSeenTypes = prevRequestList flatMap (_.typeOf.values.toList) distinct
- def allImplicits = allHandlers filter (_.definesImplicit) flatMap (_.definedNames)
- def importHandlers = allHandlers collect { case x: ImportHandler => x }
-
- def visibleTermNames: List[Name] = definedTerms ++ importedTerms distinct
-
- /** Another entry point for tab-completion, ids in scope */
- def unqualifiedIds = visibleTermNames map (_.toString) filterNot (_ contains "$") sorted
-
- /** Parse the ScalaSig to find type aliases */
- def aliasForType(path: String) = ByteCode.aliasForType(path)
+ def allHandlers = prevRequestList flatMap (_.handlers)
+ def lastRequest = if (prevRequests.isEmpty) null else prevRequests.last
+ def prevRequestList = prevRequests.toList
+ def importHandlers = allHandlers collect { case x: ImportHandler => x }
def withoutUnwrapping(op: => Unit): Unit = {
val saved = isettings.unwrapStrings
@@ -1151,7 +1143,7 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends
def symbolDefString(sym: Symbol) = {
TypeStrings.quieter(
- afterTyper(sym.defString),
+ exitingTyper(sym.defString),
sym.owner.name + ".this.",
sym.owner.fullName + "."
)
@@ -1161,13 +1153,12 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends
/** Secret bookcase entrance for repl debuggers: end the line
* with "// show" and see what's going on.
*/
- def isShow = code.lines exists (_.trim endsWith "// show")
- def isShowRaw = code.lines exists (_.trim endsWith "// raw")
-
- // old style
- beSilentDuring(parse(code)) foreach { ts =>
- ts foreach { t =>
- withoutUnwrapping(repldbg(asCompactString(t)))
+ def isShow = code.lines exists (_.trim endsWith "// show")
+ if (isReplDebug || isShow) {
+ beSilentDuring(parse(code)) foreach { ts =>
+ ts foreach { t =>
+ withoutUnwrapping(echo(asCompactString(t)))
+ }
}
}
}
@@ -1181,6 +1172,48 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends
/** Utility methods for the Interpreter. */
object IMain {
+ class Factory extends ScriptEngineFactory {
+ @BeanProperty
+ val engineName = "Scala Interpreter"
+
+ @BeanProperty
+ val engineVersion = "1.0"
+
+ @BeanProperty
+ val extensions: JList[String] = Arrays.asList("scala")
+
+ @BeanProperty
+ val languageName = "Scala"
+
+ @BeanProperty
+ val languageVersion = versionString
+
+ def getMethodCallSyntax(obj: String, m: String, args: String*): String = null
+
+ @BeanProperty
+ val mimeTypes: JList[String] = Arrays.asList("application/x-scala")
+
+ @BeanProperty
+ val names: JList[String] = Arrays.asList("scala")
+
+ def getOutputStatement(toDisplay: String): String = null
+
+ def getParameter(key: String): Object = key match {
+ case ScriptEngine.ENGINE => engineName
+ case ScriptEngine.ENGINE_VERSION => engineVersion
+ case ScriptEngine.LANGUAGE => languageName
+ case ScriptEngine.LANGUAGE_VERSION => languageVersion
+ case ScriptEngine.NAME => names.get(0)
+ case _ => null
+ }
+
+ def getProgram(statements: String*): String = null
+
+ def getScriptEngine: ScriptEngine = new IMain(this, new Settings() {
+ usemanifestcp.value = true
+ })
+ }
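
A hedged sketch of reaching the engine through the standard javax.script machinery;
lookup by the "scala" short name assumes the factory is registered as a service
provider, and the factory can also be used directly:

    import javax.script._
    val engine = new ScriptEngineManager().getEngineByName("scala")
    // or: val engine = (new IMain.Factory).getScriptEngine
    engine.put("n", Int.box(10))
    engine.eval("1 to n.asInstanceOf[Int] foreach println")
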
+
// The two name forms this is catching are the two sides of this assignment:
//
// $line3.$read.$iw.$iw.Bippy =
diff --git a/src/compiler/scala/tools/nsc/interpreter/ISettings.scala b/src/repl/scala/tools/nsc/interpreter/ISettings.scala
index a8f77afcdf..9541d08db1 100644
--- a/src/compiler/scala/tools/nsc/interpreter/ISettings.scala
+++ b/src/repl/scala/tools/nsc/interpreter/ISettings.scala
@@ -12,13 +12,6 @@ package interpreter
* @author Lex Spoon, 2007/3/24
**/
class ISettings(intp: IMain) {
- /** A list of paths where :load should look */
- var loadPath = List(".")
-
- /** Set this to true to see repl machinery under -Yrich-exceptions.
- */
- var showInternalStackTraces = false
-
/** The maximum length of toString to use when printing the result
* of an evaluation. 0 means no maximum. If a printout requires
* more than this number of characters, then the printout is
@@ -32,7 +25,7 @@ class ISettings(intp: IMain) {
var maxAutoprintCompletion = 250
/** String unwrapping can be disabled if it is causing issues.
- * Settings this to false means you will see Strings like "$iw.$iw.".
+ * Setting this to false means you will see Strings like "$iw.$iw.".
*/
var unwrapStrings = true
@@ -44,7 +37,7 @@ class ISettings(intp: IMain) {
}
def deprecation: Boolean = intp.settings.deprecation.value
- def allSettings = Map(
+ def allSettings = Map[String, Any](
"maxPrintString" -> maxPrintString,
"maxAutoprintCompletion" -> maxAutoprintCompletion,
"unwrapStrings" -> unwrapStrings,
diff --git a/src/compiler/scala/tools/nsc/interpreter/Imports.scala b/src/repl/scala/tools/nsc/interpreter/Imports.scala
index 73d962b5b0..ff7bfd432c 100644
--- a/src/compiler/scala/tools/nsc/interpreter/Imports.scala
+++ b/src/repl/scala/tools/nsc/interpreter/Imports.scala
@@ -12,12 +12,9 @@ trait Imports {
self: IMain =>
import global._
- import definitions.{ ScalaPackage, JavaLangPackage, PredefModule }
+ import definitions.{ ObjectClass, ScalaPackage, JavaLangPackage, PredefModule }
import memberHandlers._
- def isNoImports = settings.noimports.value
- def isNoPredef = settings.nopredef.value
-
/** Synthetic import handlers for the language defined imports. */
private def makeWildcardImportHandler(sym: Symbol): ImportHandler = {
val hd :: tl = sym.fullName.split('.').toList map newTermName
@@ -31,12 +28,9 @@ trait Imports {
/** Symbols whose contents are language-defined to be imported. */
def languageWildcardSyms: List[Symbol] = List(JavaLangPackage, ScalaPackage, PredefModule)
- def languageWildcards: List[Type] = languageWildcardSyms map (_.tpe)
def languageWildcardHandlers = languageWildcardSyms map makeWildcardImportHandler
def allImportedNames = importHandlers flatMap (_.importedNames)
- def importedTerms = onlyTerms(allImportedNames)
- def importedTypes = onlyTypes(allImportedNames)
/** Types which have been wildcard imported, such as:
* val x = "abc" ; import x._ // type java.lang.String
@@ -52,17 +46,11 @@ trait Imports {
def sessionWildcards: List[Type] = {
importHandlers filter (_.importsWildcard) map (_.targetType) distinct
}
- def wildcardTypes = languageWildcards ++ sessionWildcards
def languageSymbols = languageWildcardSyms flatMap membersAtPickler
def sessionImportedSymbols = importHandlers flatMap (_.importedSymbols)
def importedSymbols = languageSymbols ++ sessionImportedSymbols
def importedTermSymbols = importedSymbols collect { case x: TermSymbol => x }
- def importedTypeSymbols = importedSymbols collect { case x: TypeSymbol => x }
- def implicitSymbols = importedSymbols filter (_.isImplicit)
-
- def importedTermNamed(name: String): Symbol =
- importedTermSymbols find (_.name.toString == name) getOrElse NoSymbol
/** Tuples of (source, imported symbols) in the order they were imported.
*/
@@ -146,44 +134,42 @@ trait Imports {
code append "object %s {\n".format(impname)
trailingBraces append "}\n"
accessPath append ("." + impname)
-
- currentImps.clear
+ currentImps.clear()
+ }
+ def maybeWrap(names: Name*) = if (names exists currentImps) addWrapper()
+ def wrapBeforeAndAfter[T](op: => T): T = {
+ addWrapper()
+ try op finally addWrapper()
}
-
- addWrapper()
// loop through previous requests, adding imports for each one
- for (ReqAndHandler(req, handler) <- reqsToUse) {
- handler match {
- // If the user entered an import, then just use it; add an import wrapping
- // level if the import might conflict with some other import
- case x: ImportHandler =>
- if (x.importsWildcard || currentImps.exists(x.importedNames contains _))
- addWrapper()
-
- code append (x.member + "\n")
-
- // give wildcard imports a import wrapper all to their own
- if (x.importsWildcard) addWrapper()
- else currentImps ++= x.importedNames
-
- // For other requests, import each defined name.
- // import them explicitly instead of with _, so that
- // ambiguity errors will not be generated. Also, quote
- // the name of the variable, so that we don't need to
- // handle quoting keywords separately.
- case x =>
- for (imv <- x.definedNames) {
- if (currentImps contains imv) addWrapper()
-
- code append ("import " + (req fullPath imv) + "\n")
- currentImps += imv
- }
+ wrapBeforeAndAfter {
+ for (ReqAndHandler(req, handler) <- reqsToUse) {
+ handler match {
+ // If the user entered an import, then just use it; add an import wrapping
+ // level if the import might conflict with some other import
+ case x: ImportHandler if x.importsWildcard =>
+ wrapBeforeAndAfter(code append (x.member + "\n"))
+ case x: ImportHandler =>
+ maybeWrap(x.importedNames: _*)
+ code append (x.member + "\n")
+ currentImps ++= x.importedNames
+
+ // For other requests, import each defined name.
+ // import them explicitly instead of with _, so that
+ // ambiguity errors will not be generated. Also, quote
+ // the name of the variable, so that we don't need to
+ // handle quoting keywords separately.
+ case x =>
+ for (sym <- x.definedSymbols) {
+ maybeWrap(sym.name)
+ code append s"import ${x.path}\n"
+ currentImps += sym.name
+ }
+ }
}
}
- // add one extra wrapper, to prevent warnings in the common case of
- // redefining the value bound in the last interpreter request.
- addWrapper()
+
ComputedImports(code.toString, trailingBraces.toString, accessPath.toString)
}
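
A hedged sketch of the wrapping this produces; the wrapper and member names are
illustrative, the real ones come from impname and the handlers' paths:

    object $iw {                             // importsPreamble opens one object per wrapper level
      import $line1.$read.$iw.`x`            // a name defined by an earlier request
      object $iw {
        import scala.collection.mutable._    // a user-entered wildcard import gets a level of its own
        // ...the current request's code is spliced in here; accessPath becomes ".$iw.$iw"
      }                                      // importsTrailer closes the braces in reverse
    }
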
@@ -191,5 +177,5 @@ trait Imports {
prevRequestList flatMap (req => req.handlers map (req -> _))
private def membersAtPickler(sym: Symbol): List[Symbol] =
- beforePickler(sym.info.nonPrivateMembers.toList)
+ enteringPickler(sym.info.nonPrivateMembers.toList)
}
diff --git a/src/compiler/scala/tools/nsc/interpreter/InteractiveReader.scala b/src/repl/scala/tools/nsc/interpreter/InteractiveReader.scala
index 8331fddca6..28ddf2939c 100644
--- a/src/compiler/scala/tools/nsc/interpreter/InteractiveReader.scala
+++ b/src/repl/scala/tools/nsc/interpreter/InteractiveReader.scala
@@ -7,8 +7,6 @@ package scala.tools.nsc
package interpreter
import java.io.IOException
-import java.nio.channels.ClosedByInterruptException
-import scala.util.control.Exception._
import session.History
import InteractiveReader._
import Properties.isMac
@@ -17,22 +15,16 @@ import Properties.isMac
trait InteractiveReader {
val interactive: Boolean
- def init(): Unit
def reset(): Unit
-
def history: History
def completion: Completion
- def eraseLine(): Unit
def redrawLine(): Unit
- def currentLine: String
def readYesOrNo(prompt: String, alt: => Boolean): Boolean = readOneKey(prompt) match {
case 'y' => true
case 'n' => false
case _ => alt
}
- def readAssumingNo(prompt: String) = readYesOrNo(prompt, false)
- def readAssumingYes(prompt: String) = readYesOrNo(prompt, true)
protected def readOneLine(prompt: String): String
protected def readOneKey(prompt: String): Int
@@ -52,6 +44,6 @@ object InteractiveReader {
def apply(): InteractiveReader = SimpleReader()
@deprecated("Use `apply` instead.", "2.9.0")
- def createDefault(): InteractiveReader = apply()
+ def createDefault(): InteractiveReader = apply() // used by sbt
}
diff --git a/src/compiler/scala/tools/nsc/interpreter/JLineCompletion.scala b/src/repl/scala/tools/nsc/interpreter/JLineCompletion.scala
index 219cb35242..8b8b668c9f 100644
--- a/src/compiler/scala/tools/nsc/interpreter/JLineCompletion.scala
+++ b/src/repl/scala/tools/nsc/interpreter/JLineCompletion.scala
@@ -6,17 +6,16 @@
package scala.tools.nsc
package interpreter
-import scala.tools.jline._
-import scala.tools.jline.console.completer._
import Completion._
import scala.collection.mutable.ListBuffer
+import scala.reflect.internal.util.StringOps.longestCommonPrefix
// REPL completor - queries supplied interpreter for valid
// completions based on current contents of buffer.
class JLineCompletion(val intp: IMain) extends Completion with CompletionOutput {
val global: intp.global.type = intp.global
import global._
- import definitions.{ PredefModule, AnyClass, AnyRefClass, ScalaPackage, JavaLangPackage }
+ import definitions._
import rootMirror.{ RootClass, getModuleIfDefined }
type ExecResult = Any
import intp.{ debugging }
@@ -29,9 +28,6 @@ class JLineCompletion(val intp: IMain) extends Completion with CompletionOutput
if (isModule) getModuleIfDefined(name)
else getModuleIfDefined(name)
)
- def getType(name: String, isModule: Boolean) = getSymbol(name, isModule).tpe
- def typeOf(name: String) = getType(name, false)
- def moduleOf(name: String) = getType(name, true)
trait CompilerCompletion {
def tp: Type
@@ -43,16 +39,15 @@ class JLineCompletion(val intp: IMain) extends Completion with CompletionOutput
// for some reason any's members don't show up in subclasses, which
// we need so 5.<tab> offers asInstanceOf etc.
- private def anyMembers = AnyClass.tpe.nonPrivateMembers
+ private def anyMembers = AnyTpe.nonPrivateMembers
def anyRefMethodsToShow = Set("isInstanceOf", "asInstanceOf", "toString")
def tos(sym: Symbol): String = sym.decodedName
- def memberNamed(s: String) = afterTyper(effectiveTp member newTermName(s))
- def hasMethod(s: String) = memberNamed(s).isMethod
+ def memberNamed(s: String) = exitingTyper(effectiveTp member newTermName(s))
// XXX we'd like to say "filterNot (_.isDeprecated)" but this causes the
// compiler to crash for reasons not yet known.
- def members = afterTyper((effectiveTp.nonPrivateMembers.toList ++ anyMembers) filter (_.isPublic))
+ def members = exitingTyper((effectiveTp.nonPrivateMembers.toList ++ anyMembers) filter (_.isPublic))
def methods = members.toList filter (_.isMethod)
def packages = members.toList filter (_.isPackage)
def aliases = members.toList filter (_.isAliasType)
@@ -111,7 +106,7 @@ class JLineCompletion(val intp: IMain) extends Completion with CompletionOutput
def excludeNames: List[String] = (anyref.methodNames filterNot anyRefMethodsToShow) :+ "_root_"
def methodSignatureString(sym: Symbol) = {
- IMain stripString afterTyper(new MethodSymbolOutput(sym).methodString())
+ IMain stripString exitingTyper(new MethodSymbolOutput(sym).methodString())
}
def exclude(name: String): Boolean = (
@@ -157,7 +152,7 @@ class JLineCompletion(val intp: IMain) extends Completion with CompletionOutput
}
// not for completion but for excluding
- object anyref extends TypeMemberCompletion(AnyRefClass.tpe) { }
+ object anyref extends TypeMemberCompletion(AnyRefTpe) { }
// the unqualified vals/defs/etc visible in the repl
object ids extends CompletionAware {
@@ -280,10 +275,6 @@ class JLineCompletion(val intp: IMain) extends Completion with CompletionOutput
if (parsed.isEmpty) xs map ("." + _) else xs
}
- // generic interface for querying (e.g. interpreter loop, testing)
- def completions(buf: String): List[String] =
- topLevelFor(Parsed.dotted(buf + ".", buf.length + 1))
-
def completer(): ScalaCompleter = new JLineTabCompletion
/** This gets a little bit hairy. It's no small feat delegating everything
@@ -301,16 +292,6 @@ class JLineCompletion(val intp: IMain) extends Completion with CompletionOutput
def isConsecutiveTabs(buf: String, cursor: Int) =
cursor == lastCursor && buf == lastBuf
- // Longest common prefix
- def commonPrefix(xs: List[String]): String = {
- if (xs.isEmpty || xs.contains("")) ""
- else xs.head.head match {
- case ch =>
- if (xs.tail forall (_.head == ch)) "" + ch + commonPrefix(xs map (_.tail))
- else ""
- }
- }
-
// This is jline's entry point for completion.
override def complete(buf: String, cursor: Int): Candidates = {
verbosity = if (isConsecutiveTabs(buf, cursor)) verbosity + 1 else 0
@@ -324,7 +305,7 @@ class JLineCompletion(val intp: IMain) extends Completion with CompletionOutput
val newCursor =
if (winners contains "") p.cursor
else {
- val advance = commonPrefix(winners)
+ val advance = longestCommonPrefix(winners)
lastCursor = p.position + advance.length
lastBuf = (buf take p.position) + advance
repldbg("tryCompletion(%s, _) lastBuf = %s, lastCursor = %s, p.position = %s".format(
@@ -335,8 +316,7 @@ class JLineCompletion(val intp: IMain) extends Completion with CompletionOutput
Some(Candidates(newCursor, winners))
}
- def mkDotted = Parsed.dotted(buf, cursor) withVerbosity verbosity
- def mkUndelimited = Parsed.undelimited(buf, cursor) withVerbosity verbosity
+ def mkDotted = Parsed.dotted(buf, cursor) withVerbosity verbosity
// a single dot is special cased to completion on the previous result
def lastResultCompletion =
diff --git a/src/compiler/scala/tools/nsc/interpreter/JLineReader.scala b/src/repl/scala/tools/nsc/interpreter/JLineReader.scala
index 5fd5b41625..5d41f1bbb4 100644
--- a/src/compiler/scala/tools/nsc/interpreter/JLineReader.scala
+++ b/src/repl/scala/tools/nsc/interpreter/JLineReader.scala
@@ -9,9 +9,7 @@ package interpreter
import scala.tools.jline.console.ConsoleReader
import scala.tools.jline.console.completer._
import session._
-import scala.collection.JavaConverters._
import Completion._
-import io.Streamable.slurp
/**
* Reads from the console using JLine.
@@ -25,7 +23,6 @@ class JLineReader(_completion: => Completion) extends InteractiveReader {
private def term = consoleReader.getTerminal()
def reset() = term.reset()
- def init() = term.init()
def scalaToJline(tc: ScalaCompleter): Completer = new Completer {
def complete(_buf: String, cursor: Int, candidates: JList[CharSequence]): Int = {
@@ -37,11 +34,11 @@ class JLineReader(_completion: => Completion) extends InteractiveReader {
}
class JLineConsoleReader extends ConsoleReader with ConsoleReaderHelper {
+ // working around protected/trait/java insufficiencies.
+ def goBack(num: Int): Unit = back(num)
if ((history: History) ne NoHistory)
this setHistory history
- // working around protected/trait/java insufficiencies.
- def goBack(num: Int): Unit = back(num)
def readOneKey(prompt: String) = {
this.print(prompt)
this.flush()
@@ -49,7 +46,6 @@ class JLineReader(_completion: => Completion) extends InteractiveReader {
}
def eraseLine() = consoleReader.resetPromptLine("", "", 0)
def redrawLineAndFlush(): Unit = { flush() ; drawLine() ; flush() }
- // override def readLine(prompt: String): String
// A hook for running code after the repl is done initializing.
lazy val postInit: Unit = {
@@ -66,11 +62,7 @@ class JLineReader(_completion: => Completion) extends InteractiveReader {
}
}
- def currentLine = consoleReader.getCursorBuffer.buffer.toString
def redrawLine() = consoleReader.redrawLineAndFlush()
- def eraseLine() = consoleReader.eraseLine()
- // Alternate implementation, not sure if/when I need this.
- // def eraseLine() = while (consoleReader.delete()) { }
def readOneLine(prompt: String) = consoleReader readLine prompt
def readOneKey(prompt: String) = consoleReader readOneKey prompt
}
diff --git a/src/repl/scala/tools/nsc/interpreter/JavapClass.scala b/src/repl/scala/tools/nsc/interpreter/JavapClass.scala
new file mode 100644
index 0000000000..ef6f4c2920
--- /dev/null
+++ b/src/repl/scala/tools/nsc/interpreter/JavapClass.scala
@@ -0,0 +1,692 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author Paul Phillips
+ */
+
+package scala
+package tools.nsc
+package interpreter
+
+import java.lang.{ ClassLoader => JavaClassLoader, Iterable => JIterable }
+import scala.tools.nsc.util.ScalaClassLoader
+import java.io.{ ByteArrayInputStream, CharArrayWriter, FileNotFoundException, PrintWriter, Writer }
+import java.util.{ Locale }
+import java.util.regex.Pattern
+import javax.tools.{ Diagnostic, DiagnosticCollector, DiagnosticListener,
+ ForwardingJavaFileManager, JavaFileManager, JavaFileObject,
+ SimpleJavaFileObject, StandardLocation }
+import scala.reflect.io.{ AbstractFile, Directory, File, Path }
+import scala.io.Source
+import scala.util.{ Try, Success, Failure }
+import scala.util.Properties.lineSeparator
+import scala.collection.JavaConverters
+import scala.collection.generic.Clearable
+import java.net.URL
+import scala.language.reflectiveCalls
+import Javap._
+
+class JavapClass(
+ val loader: ScalaClassLoader,
+ val printWriter: PrintWriter,
+ intp: Option[IMain] = None
+) extends scala.tools.util.Javap {
+ import JavapTool.ToolArgs
+ import JavapClass._
+
+ lazy val tool = JavapTool()
+
+ /** Run the tool. Option args start with "-".
+ * The default options are "-protected -verbose".
+ * Byte data for filename args is retrieved with findBytes.
+ */
+ def apply(args: Seq[String]): List[JpResult] = {
+ val (options, claases) = args partition (s => (s startsWith "-") && s.length > 1)
+ val (flags, upgraded) = upgrade(options)
+ import flags.{ app, fun, help, raw }
+ val targets = if (fun && !help) FunFinder(loader, intp).funs(claases) else claases
+ if (help || claases.isEmpty) List(JpResult(JavapTool.helper(printWriter)))
+ else if (targets.isEmpty) List(JpResult("No anonfuns found."))
+ else tool(raw, upgraded)(targets map (claas => claas -> bytesFor(claas, app)))
+ }
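
A hedged sketch of the argument shapes accepted here, assuming a JavapClass instance
named javap such as the one behind the repl's :javap command:

    javap(Seq("Foo"))             // defaults to -protected -verbose
    javap(Seq("-public", "Foo"))  // explicit javap options pass straight through
    javap(Seq("-fun", "Foo"))     // look for anonfun classes associated with Foo
    javap(Seq("Foo#bar"))         // bytes come from Foo; the member is only used to filter output
    javap(Seq("-"))               // the most recent repl result (requires an attached IMain)
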
+
+ /** Cull our tool options. */
+ private def upgrade(options: Seq[String]): (ToolArgs, Seq[String]) = ToolArgs fromArgs options match {
+ case (t,s) if s.nonEmpty => (t,s)
+ case (t,s) => (t, JavapTool.DefaultOptions)
+ }
+
+ /** Find bytes. Handle "-", "-app", "Foo#bar" (by ignoring member), "#bar" (by taking "bar"). */
+ private def bytesFor(path: String, app: Boolean) = Try {
+ def last = intp.get.mostRecentVar // fail if no intp
+ def req = if (path == "-") last else {
+ val s = path.splitHashMember
+ if (s._1.nonEmpty) s._1
+ else s._2 getOrElse "#"
+ }
+ def asAppBody(s: String) = {
+ val (cls, fix) = s.splitSuffix
+ s"${cls}$$delayedInit$$body${fix}"
+ }
+ def todo = if (app) asAppBody(req) else req
+ val bytes = findBytes(todo)
+ if (bytes.isEmpty) throw new FileNotFoundException(s"Could not find class bytes for '${path}'")
+ else bytes
+ }
+
+ def findBytes(path: String): Array[Byte] = tryFile(path) getOrElse tryClass(path)
+
+ /** Assume the string is a path and try to find the classfile
+ * it represents.
+ */
+ def tryFile(path: String): Option[Array[Byte]] =
+ (Try (File(path.asClassResource)) filter (_.exists) map (_.toByteArray())).toOption
+
+ /** Assume the string is a fully qualified class name and try to
+ * find the class object it represents.
+ * There are other symbols of interest, too:
+ * - a definition that is wrapped in an enclosing class
+ * - a synthetic that is not in scope but its associated class is
+ */
+ def tryClass(path: String): Array[Byte] = {
+ def load(name: String) = loader classBytes name
+ def loadable(name: String) = loader resourceable name
+ // if path has an interior dollar, take it as a synthetic
+ // if the prefix up to the dollar is a symbol in scope,
+ // result is the translated prefix + suffix
+ def desynthesize(s: String) = {
+ val i = s indexOf '$'
+ if (0 until s.length - 1 contains i) {
+ val name = s substring (0, i)
+ val sufx = s substring i
+ val tran = intp flatMap (_ translatePath name)
+ def loadableOrNone(strip: Boolean) = {
+ def suffix(strip: Boolean)(x: String) =
+ (if (strip && (x endsWith "$")) x.init else x) + sufx
+ val res = tran map (suffix(strip) _)
+ if (res.isDefined && loadable(res.get)) res else None
+ }
+ // try loading translated+suffix
+ val res = loadableOrNone(strip = false)
+ // some synthetics lack a dollar (e.g., suffix = delayedInit$body)
+ // so as a hack, if prefix$$suffix fails, also try prefix$suffix
+ if (res.isDefined) res else loadableOrNone(strip = true)
+ } else None
+ }
+ val p = path.asClassName // scrub any suffix
+ // if repl, translate the name to something replish
+ // (for translate, would be nicer to get the sym and ask .isClass,
+ // instead of translatePath and then asking did I get a class back)
+ val q = if (intp.isEmpty) p else (
+ // only simple names get the scope treatment
+ Some(p) filter (_ contains '.')
+ // take path as a Name in scope
+ orElse (intp flatMap (_ translatePath p) filter loadable)
+ // take path as a Name in scope and find its enclosing class
+ orElse (intp flatMap (_ translateEnclosingClass p) filter loadable)
+ // take path as a synthetic derived from some Name in scope
+ orElse desynthesize(p)
+ // just try it plain
+ getOrElse p
+ )
+ load(q)
+ }
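
A hedged illustration of the fallback chain for repl-defined names; the mangled form
is indicative only:

    // For a class Bippy defined on repl line 3, tryClass("Bippy") goes through
    // translatePath and loads something like "$line3.$read$$iw$$iw$Bippy";
    // a synthetic such as "Bippy$$anonfun$1" falls back to desynthesize, which
    // translates the "Bippy" prefix and re-appends the "$$anonfun$1" suffix.
    val bytes = javap.tryClass("Bippy")
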
+
+ /** Base class for javap tool adapters for java 6 and 7. */
+ abstract class JavapTool {
+ type ByteAry = Array[Byte]
+ type Input = Pair[String, Try[ByteAry]]
+
+ /** Run the tool. */
+ def apply(raw: Boolean, options: Seq[String])(inputs: Seq[Input]): List[JpResult]
+
+ // Since the tool is loaded by reflection, check for catastrophic failure.
+ protected def failed: Boolean
+ implicit protected class Failer[A](a: =>A) {
+ def orFailed[B >: A](b: =>B) = if (failed) b else a
+ }
+ protected def noToolError = new JpError(s"No javap tool available: ${getClass.getName} failed to initialize.")
+
+ // output filtering support
+ val writer = new CharArrayWriter
+ def written = {
+ writer.flush()
+ val w = writer.toString
+ writer.reset()
+ w
+ }
+
+ /** Create a Showable with output massage.
+ * @param raw show ugly repl names
+ * @param target attempt to filter output to show region of interest
+ * @param preamble other messages to output
+ */
+ def showWithPreamble(raw: Boolean, target: String, preamble: String = ""): Showable = new Showable {
+ // ReplStrippingWriter clips and scrubs on write(String)
+ // circumvent it by write(mw, 0, mw.length) or wrap it in withoutUnwrapping
+ def show() =
+ if (raw && intp.isDefined) intp.get withoutUnwrapping { writeLines() }
+ else writeLines()
+ private def writeLines() {
+ // take Foo# as Foo#apply for purposes of filtering. Useful for -fun Foo#;
+ // if apply is added here, it's for other than -fun: javap Foo#, perhaps m#?
+ val filterOn = target.splitHashMember._2 map { s => if (s.isEmpty) "apply" else s }
+ var filtering = false // true if in region matching filter
+ // true to output
+ def checkFilter(line: String) = if (filterOn.isEmpty) true else {
+ // cheap heuristic, todo maybe parse for the java sig.
+ // method sigs end in paren semi
+ def isAnyMethod = line.endsWith(");")
+ def isOurMethod = {
+ val lparen = line.lastIndexOf('(')
+ val blank = line.lastIndexOf(' ', lparen)
+ (blank >= 0 && line.substring(blank+1, lparen) == filterOn.get)
+ }
+ filtering = if (filtering) {
+ // next blank line terminates section
+ // for -public, next line is next method, more or less
+ line.trim.nonEmpty && !isAnyMethod
+ } else {
+ isAnyMethod && isOurMethod
+ }
+ filtering
+ }
+ for (line <- Source.fromString(preamble + written).getLines(); if checkFilter(line))
+ printWriter write line+lineSeparator
+ printWriter.flush()
+ }
+ }
+ }
+
+ class JavapTool6 extends JavapTool {
+ import JavapTool._
+ val EnvClass = loader.tryToInitializeClass[FakeEnvironment](Env).orNull
+ val PrinterClass = loader.tryToInitializeClass[FakePrinter](Printer).orNull
+ override protected def failed = (EnvClass eq null) || (PrinterClass eq null)
+
+ val PrinterCtr = PrinterClass.getConstructor(classOf[InputStream], classOf[PrintWriter], EnvClass) orFailed null
+ val printWrapper = new PrintWriter(writer)
+ def newPrinter(in: InputStream, env: FakeEnvironment): FakePrinter =
+ PrinterCtr.newInstance(in, printWrapper, env) orFailed null
+ def showable(raw: Boolean, target: String, fp: FakePrinter): Showable = {
+ fp.asInstanceOf[{ def print(): Unit }].print() // run tool and flush to buffer
+ printWrapper.flush() // just in case
+ showWithPreamble(raw, target)
+ }
+
+ lazy val parser = new JpOptions
+ def newEnv(opts: Seq[String]): FakeEnvironment = {
+ def result = {
+ val env: FakeEnvironment = EnvClass.newInstance()
+ parser(opts) foreach { case (name, value) =>
+ val field = EnvClass getDeclaredField name
+ field setAccessible true
+ field.set(env, value.asInstanceOf[AnyRef])
+ }
+ env
+ }
+ result orFailed null
+ }
+
+ override def apply(raw: Boolean, options: Seq[String])(inputs: Seq[Input]): List[JpResult] =
+ (inputs map {
+ case (claas, Success(ba)) => JpResult(showable(raw, claas, newPrinter(new ByteArrayInputStream(ba), newEnv(options))))
+ case (_, Failure(e)) => JpResult(e.toString)
+ }).toList orFailed List(noToolError)
+ }
+
+ class JavapTool7 extends JavapTool {
+ type Task = {
+ def call(): Boolean // true = ok
+ //def run(args: Array[String]): Int // all args
+ //def handleOptions(args: Array[String]): Unit // options, then run() or call()
+ }
+ // result of Task.run
+ //object TaskResult extends Enumeration {
+ // val Ok, Error, CmdErr, SysErr, Abnormal = Value
+ //}
+ val TaskClaas = loader.tryToInitializeClass[Task](JavapTool.Tool).orNull
+ override protected def failed = TaskClaas eq null
+
+ val TaskCtor = TaskClaas.getConstructor(
+ classOf[Writer],
+ classOf[JavaFileManager],
+ classOf[DiagnosticListener[_]],
+ classOf[JIterable[String]],
+ classOf[JIterable[String]]
+ ) orFailed null
+
+ class JavaReporter extends DiagnosticListener[JavaFileObject] with Clearable {
+ import scala.collection.mutable.{ ArrayBuffer, SynchronizedBuffer }
+ type D = Diagnostic[_ <: JavaFileObject]
+ val diagnostics = new ArrayBuffer[D] with SynchronizedBuffer[D]
+ override def report(d: Diagnostic[_ <: JavaFileObject]) {
+ diagnostics += d
+ }
+ override def clear() = diagnostics.clear()
+ /** All diagnostic messages.
+ * @param locale Locale for diagnostic messages, null by default.
+ */
+ def messages(implicit locale: Locale = null) = (diagnostics map (_ getMessage locale)).toList
+
+ def reportable(raw: Boolean): String = {
+ // don't filter this message if raw, since the names are likely to differ
+ val container = "Binary file .* contains .*".r
+ val m = if (raw) messages
+ else messages filter (_ match { case container() => false case _ => true })
+ clear()
+ if (m.nonEmpty) m mkString ("", lineSeparator, lineSeparator)
+ else ""
+ }
+ }
+ val reporter = new JavaReporter
+
+ // DisassemblerTool.getStandardFileManager(reporter,locale,charset)
+ val defaultFileManager: JavaFileManager =
+ (loader.tryToLoadClass[JavaFileManager]("com.sun.tools.javap.JavapFileManager").get getMethod (
+ "create",
+ classOf[DiagnosticListener[_]],
+ classOf[PrintWriter]
+ ) invoke (null, reporter, new PrintWriter(System.err, true))).asInstanceOf[JavaFileManager] orFailed null
+
+ // manages named arrays of bytes, which might have failed to load
+ class JavapFileManager(val managed: Seq[Input])(delegate: JavaFileManager = defaultFileManager)
+ extends ForwardingJavaFileManager[JavaFileManager](delegate) {
+ import JavaFileObject.Kind
+ import Kind._
+ import StandardLocation._
+ import JavaFileManager.Location
+ import java.net.URI
+ def uri(name: String): URI = new URI(name) // new URI("jfo:" + name)
+
+ def inputNamed(name: String): Try[ByteAry] = (managed find (_._1 == name)).get._2
+ def managedFile(name: String, kind: Kind) = kind match {
+ case CLASS => fileObjectForInput(name, inputNamed(name), kind)
+ case _ => null
+ }
+ // todo: just wrap it as scala abstractfile and adapt it uniformly
+ def fileObjectForInput(name: String, bytes: Try[ByteAry], kind: Kind): JavaFileObject =
+ new SimpleJavaFileObject(uri(name), kind) {
+ override def openInputStream(): InputStream = new ByteArrayInputStream(bytes.get)
+ // if non-null, ClassWriter wrongly requires scheme non-null
+ override def toUri: URI = null
+ override def getName: String = name
+ // suppress
+ override def getLastModified: Long = -1L
+ }
+ override def getJavaFileForInput(location: Location, className: String, kind: Kind): JavaFileObject =
+ location match {
+ case CLASS_PATH => managedFile(className, kind)
+ case _ => null
+ }
+ override def hasLocation(location: Location): Boolean =
+ location match {
+ case CLASS_PATH => true
+ case _ => false
+ }
+ }
+ def fileManager(inputs: Seq[Input]) = new JavapFileManager(inputs)()
+
+ // show tool messages and tool output, with output massage
+ def showable(raw: Boolean, target: String): Showable = showWithPreamble(raw, target, reporter.reportable(raw))
+
+ // eventually, use the tool interface
+ def task(options: Seq[String], claases: Seq[String], inputs: Seq[Input]): Task = {
+ //ServiceLoader.load(classOf[javax.tools.DisassemblerTool]).
+ //getTask(writer, fileManager, reporter, options.asJava, claases.asJava)
+ import JavaConverters.asJavaIterableConverter
+ TaskCtor.newInstance(writer, fileManager(inputs), reporter, options.asJava, claases.asJava)
+ .orFailed (throw new IllegalStateException)
+ }
+ // a result per input
+ private def applyOne(raw: Boolean, options: Seq[String], claas: String, inputs: Seq[Input]): Try[JpResult] =
+ Try {
+ task(options, Seq(claas), inputs).call()
+ } map {
+ case true => JpResult(showable(raw, claas))
+ case _ => JpResult(reporter.reportable(raw))
+ } recoverWith {
+ case e: java.lang.reflect.InvocationTargetException => e.getCause match {
+ case t: IllegalArgumentException => Success(JpResult(t.getMessage)) // bad option
+ case x => Failure(x)
+ }
+ } lastly {
+ reporter.clear()
+ }
+ override def apply(raw: Boolean, options: Seq[String])(inputs: Seq[Input]): List[JpResult] = (inputs map {
+ case (claas, Success(_)) => applyOne(raw, options, claas, inputs).get
+ case (_, Failure(e)) => JpResult(e.toString)
+ }).toList orFailed List(noToolError)
+ }
+
+ object JavapTool {
+ // >= 1.7
+ val Tool = "com.sun.tools.javap.JavapTask"
+
+ // < 1.7
+ val Env = "sun.tools.javap.JavapEnvironment"
+ val Printer = "sun.tools.javap.JavapPrinter"
+ // "documentation"
+ type FakeEnvironment = AnyRef
+ type FakePrinter = AnyRef
+
+ // support JavapEnvironment
+ class JpOptions {
+ private object Access {
+ final val PRIVATE = 0
+ final val PROTECTED = 1
+ final val PACKAGE = 2
+ final val PUBLIC = 3
+ }
+ private val envActionMap: Map[String, (String, Any)] = {
+ val map = Map(
+ "-l" -> (("showLineAndLocal", true)),
+ "-c" -> (("showDisassembled", true)),
+ "-s" -> (("showInternalSigs", true)),
+ "-verbose" -> (("showVerbose", true)),
+ "-private" -> (("showAccess", Access.PRIVATE)),
+ "-package" -> (("showAccess", Access.PACKAGE)),
+ "-protected" -> (("showAccess", Access.PROTECTED)),
+ "-public" -> (("showAccess", Access.PUBLIC)),
+ "-all" -> (("showallAttr", true))
+ )
+ map ++ List(
+ "-v" -> map("-verbose"),
+ "-p" -> map("-private")
+ )
+ }
+ def apply(opts: Seq[String]): Seq[(String, Any)] = {
+ opts flatMap { opt =>
+ envActionMap get opt match {
+ case Some(pair) => List(pair)
+ case _ =>
+ val charOpts = opt.tail.toSeq map ("-" + _)
+ if (charOpts forall (envActionMap contains _))
+ charOpts map envActionMap
+ else Nil
+ }
+ }
+ }
+ }
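
JpOptions maps javap-style flags onto (field, value) pairs that newEnv later pushes into the pre-JDK7 JavapEnvironment by reflection; an unrecognized flag is retried as packed single-character options, so -pv expands to -private -verbose. A standalone sketch of that expansion rule over an abbreviated map (field names as above; 0 stands in for Access.PRIVATE):

object JpOptionsSketch extends App {
  // abbreviated version of envActionMap above
  val envActionMap: Map[String, (String, Any)] = Map(
    "-l"       -> (("showLineAndLocal", true)),
    "-c"       -> (("showDisassembled", true)),
    "-verbose" -> (("showVerbose", true)),
    "-private" -> (("showAccess", 0)),
    "-v"       -> (("showVerbose", true)),
    "-p"       -> (("showAccess", 0))
  )
  def expand(opts: Seq[String]): Seq[(String, Any)] = opts flatMap { opt =>
    envActionMap get opt match {
      case Some(pair) => List(pair)
      case _ =>
        val charOpts = opt.tail.toSeq map ("-" + _)   // "-pv" => Seq("-p", "-v")
        if (charOpts forall (envActionMap contains _)) charOpts map envActionMap
        else Nil
    }
  }
  println(expand(Seq("-pv")))       // List((showAccess,0), (showVerbose,true))
  println(expand(Seq("-verbose")))  // List((showVerbose,true))
  println(expand(Seq("-bogus")))    // List()   unknown and not unpackable, silently dropped
}
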
+
+ case class ToolArgs(raw: Boolean = false, help: Boolean = false, app: Boolean = false, fun: Boolean = false)
+
+ object ToolArgs {
+ def fromArgs(args: Seq[String]): (ToolArgs, Seq[String]) = ((ToolArgs(), Seq[String]()) /: (args flatMap massage)) {
+ case ((t,others), s) => s match {
+ case "-fun" => (t copy (fun=true), others)
+ case "-app" => (t copy (app=true), others)
+ case "-help" => (t copy (help=true), others)
+ case "-raw" => (t copy (raw=true), others)
+ case _ => (t, others :+ s)
+ }
+ }
+ }
+
+ val helps = List(
+ "usage" -> ":javap [opts] [path or class or -]...",
+ "-help" -> "Prints this help message",
+ "-raw" -> "Don't unmangle REPL names",
+ "-app" -> "Show the DelayedInit body of Apps",
+ "-fun" -> "Show anonfuns for class or Class#method",
+ "-verbose/-v" -> "Stack size, number of locals, method args",
+ "-private/-p" -> "Private classes and members",
+ "-package" -> "Package-private classes and members",
+ "-protected" -> "Protected classes and members",
+ "-public" -> "Public classes and members",
+ "-l" -> "Line and local variable tables",
+ "-c" -> "Disassembled code",
+ "-s" -> "Internal type signatures",
+ "-sysinfo" -> "System info of class",
+ "-constants" -> "Static final constants"
+ )
+
+ // match prefixes and unpack opts, or -help on failure
+ def massage(arg: String): Seq[String] = {
+ require(arg startsWith "-")
+ // arg matches opt "-foo/-f" if prefix of -foo or exactly -f
+ val r = """(-[^/]*)(/(-.))?""".r
+ def maybe(opt: String, s: String): Option[String] = opt match {
+ // disambiguate by preferring short form
+ case r(lf,_,sf) if s == sf => Some(sf)
+ case r(lf,_,sf) if lf startsWith s => Some(lf)
+ case _ => None
+ }
+ def candidates(s: String) = (helps map (h => maybe(h._1, s))).flatten
+ // one candidate or one single-char candidate
+ def uniqueOf(maybes: Seq[String]) = {
+ def single(s: String) = s.length == 2
+ if (maybes.length == 1) maybes
+ else if ((maybes count single) == 1) maybes filter single
+ else Nil
+ }
+ // each optchar must decode to exactly one option
+ def unpacked(s: String): Try[Seq[String]] = {
+ val ones = (s drop 1) map { c =>
+ val maybes = uniqueOf(candidates(s"-$c"))
+ if (maybes.length == 1) Some(maybes.head) else None
+ }
+ Try(ones) filter (_ forall (_.isDefined)) map (_.flatten)
+ }
+ val res = uniqueOf(candidates(arg))
+ if (res.nonEmpty) res
+ else (unpacked(arg)
+ getOrElse (Seq("-help"))) // or else someone needs help
+ }
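
massage resolves an abbreviated flag against the helps table: an exact short form wins, a unique long-form prefix is accepted, and anything still ambiguous is retried as packed single characters before falling back to -help. The candidate and disambiguation steps in isolation, as a standalone sketch over a trimmed table:

object MassageSketch extends App {
  // trimmed help table: long form, optionally "/-x" short form
  val opts = List("-help", "-verbose/-v", "-private/-p", "-protected", "-public")
  val r = """(-[^/]*)(/(-.))?""".r
  def maybe(opt: String, s: String): Option[String] = opt match {
    case r(lf, _, sf) if s == sf         => Some(sf)   // exact short form
    case r(lf, _, sf) if lf startsWith s => Some(lf)   // prefix of the long form
    case _                               => None
  }
  def candidates(s: String) = opts flatMap (maybe(_, s))
  def uniqueOf(maybes: Seq[String]) = {                // one candidate, or one short-form candidate
    def single(s: String) = s.length == 2
    if (maybes.length == 1) maybes
    else if ((maybes count single) == 1) maybes filter single
    else Nil
  }
  println(uniqueOf(candidates("-prot")))  // List(-protected)   unique long-form prefix
  println(uniqueOf(candidates("-p")))     // List(-p)           short form beats -protected and -public
  println(uniqueOf(candidates("-pr")))    // List()             ambiguous, caller falls back to unpacking
}
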
+
+ def helper(pw: PrintWriter) = new Showable {
+ def show() = helps foreach (p => pw write "%-12.12s%s%n".format(p._1,p._2))
+ }
+
+ val DefaultOptions = List("-protected", "-verbose")
+
+ def isAvailable = Seq(Env, Tool) exists (cn => hasClass(loader, cn))
+
+ private def hasClass(cl: ScalaClassLoader, cn: String) = cl.tryToInitializeClass[AnyRef](cn).isDefined
+
+ private def isTaskable(cl: ScalaClassLoader) = hasClass(cl, Tool)
+
+ def apply() = if (isTaskable(loader)) new JavapTool7 else new JavapTool6
+ }
+}
+
+object JavapClass {
+ def apply(
+ loader: ScalaClassLoader = ScalaClassLoader.appLoader,
+ printWriter: PrintWriter = new PrintWriter(System.out, true),
+ intp: Option[IMain] = None
+ ) = new JavapClass(loader, printWriter, intp)
+
+ // We enjoy flexibility in specifying either a fully-qualified class name com.acme.Widget
+ // or a resource path com/acme/Widget.class; but not widget.out
+ implicit class MaybeClassLike(val s: String) extends AnyVal {
+ /* private[this] final val suffix = ".class" */
+ private def suffix = ".class"
+ def asClassName = (s stripSuffix suffix).replace('/', '.')
+ def asClassResource = if (s endsWith suffix) s else s.replace('.', '/') + suffix
+ def splitSuffix: (String, String) = if (s endsWith suffix) (s dropRight suffix.length, suffix) else (s, "")
+ def strippingSuffix(f: String => String): String =
+ if (s endsWith suffix) f(s dropRight suffix.length) else s
+ // e.g. Foo#bar. Foo# yields zero-length member part.
+ def splitHashMember: (String, Option[String]) = {
+ val i = s lastIndexOf '#'
+ if (i < 0) (s, None)
+ //else if (i >= s.length - 1) (s.init, None)
+ else (s take i, Some(s drop i+1))
+ }
+ }
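
In other words, MaybeClassLike lets the command accept either spelling of a class plus an optional #member suffix. The expected conversions, restated as a small self-contained check (mirroring the implicit class above rather than depending on it):

object NameSketch extends App {
  val suffix = ".class"
  def asClassName(s: String)     = (s stripSuffix suffix).replace('/', '.')
  def asClassResource(s: String) = if (s endsWith suffix) s else s.replace('.', '/') + suffix
  def splitHashMember(s: String): (String, Option[String]) = {
    val i = s lastIndexOf '#'
    if (i < 0) (s, None) else (s take i, Some(s drop i + 1))
  }
  println(asClassName("com/acme/Widget.class"))   // com.acme.Widget
  println(asClassResource("com.acme.Widget"))     // com/acme/Widget.class
  println(splitHashMember("Foo#bar"))             // (Foo,Some(bar))
  println(splitHashMember("Foo#"))                // (Foo,Some())   empty member, later treated as a filter on apply
}
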
+ implicit class ClassLoaderOps(val cl: ClassLoader) extends AnyVal {
+ private def parentsOf(x: ClassLoader): List[ClassLoader] = if (x == null) Nil else x :: parentsOf(x.getParent)
+ def parents: List[ClassLoader] = parentsOf(cl)
+ /* all file locations */
+ def locations = {
+ def alldirs = parents flatMap (_ match {
+ case ucl: ScalaClassLoader.URLClassLoader => ucl.classPathURLs
+ case jcl: java.net.URLClassLoader => jcl.getURLs
+ case _ => Nil
+ })
+ val dirs = for (d <- alldirs; if d.getProtocol == "file") yield Path(new JFile(d.toURI))
+ dirs
+ }
+ /* only the file location from which the given class is loaded */
+ def locate(k: String): Option[Path] = {
+ Try {
+ val claas = try cl loadClass k catch {
+ case _: NoClassDefFoundError => null // let it snow
+ }
+ // cf ScalaClassLoader.originOfClass
+ claas.getProtectionDomain.getCodeSource.getLocation
+ } match {
+ case Success(null) => None
+ case Success(loc) if loc.isFile => Some(Path(new JFile(loc.toURI)))
+ case _ => None
+ }
+ }
+ /* would classBytes succeed with a nonempty array */
+ def resourceable(className: String): Boolean = cl.getResource(className.asClassResource) != null
+ }
+ implicit class PathOps(val p: Path) extends AnyVal {
+ import scala.tools.nsc.io.Jar
+ def isJar = Jar isJarOrZip p
+ }
+ implicit class URLOps(val url: URL) extends AnyVal {
+ def isFile: Boolean = url.getProtocol == "file"
+ }
+ object FunFinder {
+ def apply(loader: ScalaClassLoader, intp: Option[IMain]) = new FunFinder(loader, intp)
+ }
+ class FunFinder(loader: ScalaClassLoader, intp: Option[IMain]) {
+
+ // class k, candidate f without prefix
+ def isFunOfClass(k: String, f: String) = {
+ val p = (s"${Pattern quote k}\\$$+anonfun").r
+ (p findPrefixOf f).nonEmpty
+ }
+ // class k, candidate f without prefix, method m
+ def isFunOfMethod(k: String, m: String, f: String) = {
+ val p = (s"${Pattern quote k}\\$$+anonfun\\$$${Pattern quote m}\\$$").r
+ (p findPrefixOf f).nonEmpty
+ }
+ def isFunOfTarget(k: String, m: Option[String], f: String) =
+ if (m.isEmpty) isFunOfClass(k, f)
+ else isFunOfMethod(k, m.get, f)
+ def listFunsInAbsFile(k: String, m: Option[String], d: AbstractFile) = {
+ for (f <- d; if !f.isDirectory && isFunOfTarget(k, m, f.name)) yield f.name
+ }
+ // path prefix p, class k, dir d
+ def listFunsInDir(p: String, k: String, m: Option[String])(d: Directory) = {
+ val subdir = Path(p)
+ for (f <- (d / subdir).toDirectory.list; if f.isFile && isFunOfTarget(k, m, f.name))
+ yield f.name
+ }
+ // path prefix p, class k, jar file f
+ def listFunsInJar(p: String, k: String, m: Option[String])(f: File) = {
+ import java.util.jar.JarEntry
+ import scala.tools.nsc.io.Jar
+ def maybe(e: JarEntry) = {
+ val (path, name) = {
+ val parts = e.getName split "/"
+ if (parts.length < 2) ("", e.getName)
+ else (parts.init mkString "/", parts.last)
+ }
+ if (path == p && isFunOfTarget(k, m, name)) Some(name) else None
+ }
+ (new Jar(f) map maybe).flatten
+ }
+ def loadable(name: String) = loader resourceable name
+ // translated class, optional member, opt member to filter on, whether it is repl output
+ def translate(s: String): (String, Option[String], Option[String], Boolean) = {
+ val (k0, m0) = s.splitHashMember
+ val k = k0.asClassName
+ val member = m0 filter (_.nonEmpty) // take Foo# as no member, not ""
+ val filter = m0 flatMap { case "" => Some("apply") case _ => None } // take Foo# as filter on apply
+ // class is either something replish or available to loader
+ // $line.$read$$etc$Foo#member
+ ((intp flatMap (_ translatePath k) filter (loadable) map ((_, member, filter, true)))
+ // s = "f" and $line.$read$$etc$#f is what we're after,
+ // ignoring any #member (except take # as filter on #apply)
+ orElse (intp flatMap (_ translateEnclosingClass k) map ((_, Some(k), filter, true)))
+ getOrElse ((k, member, filter, false)))
+ }
+ /** Find the classnames of anonfuns associated with k,
+ * where k may be an available class or a symbol in scope.
+ */
+ def funsOf(k0: String): Seq[String] = {
+ // class is either something replish or available to loader
+ val (k, member, filter, isReplish) = translate(k0)
+ val splat = k split "\\."
+ val name = splat.last
+ val prefix = if (splat.length > 1) splat.init mkString "/" else ""
+ val pkg = if (splat.length > 1) splat.init mkString "." else ""
+ // reconstitute an anonfun with a package
+ // if filtered, add the hash back, e.g. pkg.Foo#bar, pkg.Foo$anon$1#apply
+ def packaged(s: String) = {
+ val p = if (pkg.isEmpty) s else s"$pkg.$s"
+ val pm = filter map (p + "#" + _)
+ pm getOrElse p
+ }
+ // is this translated path in (usually virtual) repl outdir? or loadable from filesystem?
+ val fs = if (isReplish) {
+ def outed(d: AbstractFile, p: Seq[String]): Option[AbstractFile] = {
+ if (p.isEmpty) Option(d)
+ else Option(d.lookupName(p.head, directory = true)) flatMap (f => outed(f, p.tail))
+ }
+ outed(intp.get.replOutput.dir, splat.init) map { d =>
+ listFunsInAbsFile(name, member, d) map packaged
+ }
+ } else {
+ loader locate k map { w =>
+ if (w.isDirectory) listFunsInDir(prefix, name, member)(w.toDirectory) map packaged
+ else if (w.isJar) listFunsInJar(prefix, name, member)(w.toFile) map packaged
+ else Nil
+ }
+ }
+ fs match {
+ case Some(xs) => xs.to[Seq] // maybe empty
+ case None => Seq() // nothing found, e.g., junk input
+ }
+ }
+ def funs(ks: Seq[String]) = ks flatMap funsOf _
+ }
+}
+
+object Javap {
+ def isAvailable(cl: ScalaClassLoader = ScalaClassLoader.appLoader) = JavapClass(cl).JavapTool.isAvailable
+
+ def apply(path: String): Unit = apply(Seq(path))
+ def apply(args: Seq[String]): Unit = JavapClass() apply args foreach (_.show())
+
+ trait Showable {
+ def show(): Unit
+ }
+
+ sealed trait JpResult extends scala.tools.util.JpResult {
+ type ResultType
+ def isError: Boolean
+ def value: ResultType
+ def show(): Unit
+ // todo
+ // def header(): String
+ // def fields(): List[String]
+ // def methods(): List[String]
+ // def signatures(): List[String]
+ }
+ object JpResult {
+ def apply(msg: String) = new JpError(msg)
+ def apply(res: Showable) = new JpSuccess(res)
+ }
+ class JpError(msg: String) extends JpResult {
+ type ResultType = String
+ def isError = true
+ def value = msg
+ def show() = println(msg) // makes sense for :javap, less for -Ygen-javap
+ }
+ class JpSuccess(val value: Showable) extends JpResult {
+ type ResultType = AnyRef
+ def isError = false
+ def show() = value.show() // output to tool's PrintWriter
+ }
+ implicit class Lastly[A](val t: Try[A]) extends AnyVal {
+ private def effect[X](last: =>Unit)(a: X): Try[A] = { last; t }
+ def lastly(last: =>Unit): Try[A] = t transform (effect(last) _, effect(last) _)
+ }
+}
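
The Lastly enrichment used by JavapTool7.applyOne gives Try a finally-like hook: the side effect runs whether the computation succeeded or failed, and the original result is passed through unchanged. A standalone sketch of the combinator and its behaviour:

import scala.util.Try

object LastlySketch extends App {
  implicit class Lastly[A](val t: Try[A]) extends AnyVal {
    private def effect[X](last: => Unit)(a: X): Try[A] = { last; t }
    def lastly(last: => Unit): Try[A] = t transform (effect(last) _, effect(last) _)
  }
  println(Try(6 * 7) lastly println("cleanup ran"))  // cleanup ran, then Success(42)
  println(Try(1 / 0) lastly println("cleanup ran"))  // cleanup ran, then Failure(ArithmeticException)
}
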
diff --git a/src/compiler/scala/tools/nsc/interpreter/Logger.scala b/src/repl/scala/tools/nsc/interpreter/Logger.scala
index aeb25fc688..7407daf8d0 100644
--- a/src/compiler/scala/tools/nsc/interpreter/Logger.scala
+++ b/src/repl/scala/tools/nsc/interpreter/Logger.scala
@@ -11,8 +11,4 @@ trait Logger {
def isDebug: Boolean
def isTrace: Boolean
def out: JPrintWriter
-
- def info(msg: => Any): Unit = if (isInfo) out println msg
- def debug(msg: => Any): Unit = if (isDebug) out println msg
- def trace(msg: => Any): Unit = if (isTrace) out println msg
}
diff --git a/src/compiler/scala/tools/nsc/interpreter/LoopCommands.scala b/src/repl/scala/tools/nsc/interpreter/LoopCommands.scala
index 60325ece30..12d6ee5112 100644
--- a/src/compiler/scala/tools/nsc/interpreter/LoopCommands.scala
+++ b/src/repl/scala/tools/nsc/interpreter/LoopCommands.scala
@@ -3,7 +3,9 @@
* @author Paul Phillips
*/
-package scala.tools.nsc
+package scala
+package tools
+package nsc
package interpreter
import scala.collection.{ mutable, immutable }
@@ -19,13 +21,8 @@ class ProcessResult(val line: String) {
val exitCode = builder ! logger
def lines = buffer.toList
- def show() = lines foreach println
override def toString = "`%s` (%d lines, exit %d)".format(line, buffer.size, exitCode)
}
-object ProcessResult {
- implicit def processResultToOutputLines(pr: ProcessResult): List[String] = pr.lines
- def apply(line: String): ProcessResult = new ProcessResult(line)
-}
trait LoopCommands {
protected def out: JPrintWriter
@@ -35,14 +32,6 @@ trait LoopCommands {
// a single interpreter command
abstract class LoopCommand(val name: String, val help: String) extends (String => Result) {
- private var _longHelp: String = null
- final def defaultHelp = usageMsg + " (no extended help available.)"
- def hasLongHelp = _longHelp != null || longHelp != defaultHelp
- def withLongHelp(text: String): this.type = { _longHelp = text ; this }
- def longHelp = _longHelp match {
- case null => defaultHelp
- case text => text
- }
def usage: String = ""
def usageMsg: String = ":" + name + (
if (usage == "") "" else " " + usage
@@ -52,12 +41,7 @@ trait LoopCommands {
// called if no args are given
def showUsage(): Result = {
"usage is " + usageMsg
- Result(true, None)
- }
-
- def onError(msg: String) = {
- out.println("error: " + msg)
- showUsage()
+ Result(keepRunning = true, None)
}
}
object LoopCommand {
@@ -67,9 +51,6 @@ trait LoopCommands {
def cmd(name: String, usage: String, help: String, f: String => Result): LoopCommand =
if (usage == "") new NullaryCmd(name, help, f)
else new LineCmd(name, usage, help, f)
-
- def varargs(name: String, usage: String, help: String, f: List[String] => Result): LoopCommand =
- new VarArgsCmd(name, usage, help, f)
}
class NullaryCmd(name: String, help: String, f: String => Result) extends LoopCommand(name, help) {
@@ -89,11 +70,11 @@ trait LoopCommands {
}
// the result of a single command
- case class Result(val keepRunning: Boolean, val lineToRecord: Option[String])
+ case class Result(keepRunning: Boolean, lineToRecord: Option[String])
object Result {
// the default result means "keep running, and don't record that line"
- val default = Result(true, None)
+ val default = Result(keepRunning = true, None)
// most commands do not want to micromanage the Result, but they might want
    // to print something to the console, so we accommodate Unit and String returns.
diff --git a/src/compiler/scala/tools/nsc/interpreter/MemberHandlers.scala b/src/repl/scala/tools/nsc/interpreter/MemberHandlers.scala
index 67519cf90c..c6f0cca481 100644
--- a/src/compiler/scala/tools/nsc/interpreter/MemberHandlers.scala
+++ b/src/repl/scala/tools/nsc/interpreter/MemberHandlers.scala
@@ -7,9 +7,6 @@ package scala.tools.nsc
package interpreter
import scala.collection.{ mutable, immutable }
-import scala.PartialFunction.cond
-import scala.reflect.internal.Chars
-import scala.reflect.internal.Flags._
import scala.language.implicitConversions
trait MemberHandlers {
@@ -21,8 +18,6 @@ trait MemberHandlers {
private def codegenln(leadingPlus: Boolean, xs: String*): String = codegen(leadingPlus, (xs ++ Array("\n")): _*)
private def codegenln(xs: String*): String = codegenln(true, xs: _*)
-
- private def codegen(xs: String*): String = codegen(true, xs: _*)
private def codegen(leadingPlus: Boolean, xs: String*): String = {
val front = if (leadingPlus) "+ " else ""
front + (xs map string2codeQuoted mkString " + ")
@@ -52,24 +47,26 @@ trait MemberHandlers {
}
}
+ private def isTermMacro(ddef: DefDef): Boolean = ddef.mods.isMacro
+
def chooseHandler(member: Tree): MemberHandler = member match {
- case member: DefDef => new DefHandler(member)
- case member: ValDef => new ValHandler(member)
- case member: Assign => new AssignHandler(member)
- case member: ModuleDef => new ModuleHandler(member)
- case member: ClassDef => new ClassHandler(member)
- case member: TypeDef => new TypeAliasHandler(member)
- case member: Import => new ImportHandler(member)
- case DocDef(_, documented) => chooseHandler(documented)
- case member => new GenericHandler(member)
+ case member: DefDef if isTermMacro(member) => new TermMacroHandler(member)
+ case member: DefDef => new DefHandler(member)
+ case member: ValDef => new ValHandler(member)
+ case member: ModuleDef => new ModuleHandler(member)
+ case member: ClassDef => new ClassHandler(member)
+ case member: TypeDef => new TypeAliasHandler(member)
+ case member: Assign => new AssignHandler(member)
+ case member: Import => new ImportHandler(member)
+ case DocDef(_, documented) => chooseHandler(documented)
+ case member => new GenericHandler(member)
}
sealed abstract class MemberDefHandler(override val member: MemberDef) extends MemberHandler(member) {
- def symbol = if (member.symbol eq null) NoSymbol else member.symbol
- def name: Name = member.name
- def mods: Modifiers = member.mods
- def keyword = member.keyword
- def prettyName = name.decode
+ override def name: Name = member.name
+ def mods: Modifiers = member.mods
+ def keyword = member.keyword
+ def prettyName = name.decode
override def definesImplicit = member.mods.isImplicit
override def definesTerm: Option[TermName] = Some(name.toTermName) filter (_ => name.isTermName)
@@ -81,9 +78,11 @@ trait MemberHandlers {
* in a single interpreter request.
*/
sealed abstract class MemberHandler(val member: Tree) {
+ def name: Name = nme.NO_NAME
+ def path = intp.originalPath(symbol)
+ def symbol = if (member.symbol eq null) NoSymbol else member.symbol
def definesImplicit = false
def definesValue = false
- def isLegalTopLevel = false
def definesTerm = Option.empty[TermName]
def definesType = Option.empty[TypeName]
@@ -91,7 +90,6 @@ trait MemberHandlers {
lazy val referencedNames = ImportVarsTraverser(member)
def importedNames = List[Name]()
def definedNames = definesTerm.toList ++ definesType.toList
- def definedOrImported = definedNames ++ importedNames
def definedSymbols = List[Symbol]()
def extraCodeToEvaluate(req: Request): String = ""
@@ -114,10 +112,10 @@ trait MemberHandlers {
// if this is a lazy val we avoid evaluating it here
val resultString =
if (mods.isLazy) codegenln(false, "<lazy>")
- else any2stringOf(req fullPath name, maxStringElements)
+ else any2stringOf(path, maxStringElements)
val vidString =
- if (replProps.vids) """" + " @ " + "%%8x".format(System.identityHashCode(%s)) + " """.trim.format(req fullPath name)
+ if (replProps.vids) s"""" + " @ " + "%%8x".format(System.identityHashCode($path)) + " """.trim
else ""
""" + "%s%s: %s = " + %s""".format(string2code(prettyName), vidString, string2code(req typeOf name), resultString)
@@ -126,17 +124,26 @@ trait MemberHandlers {
}
class DefHandler(member: DefDef) extends MemberDefHandler(member) {
- private def vparamss = member.vparamss
- private def isMacro = member.symbol hasFlag MACRO
- // true if not a macro and 0-arity
- override def definesValue = !isMacro && flattensToEmpty(vparamss)
+ override def definesValue = flattensToEmpty(member.vparamss) // true if 0-arity
override def resultExtractionCode(req: Request) =
if (mods.isPublic) codegenln(name, ": ", req.typeOf(name)) else ""
}
+ abstract class MacroHandler(member: DefDef) extends MemberDefHandler(member) {
+ override def definesValue = false
+ override def definesTerm: Option[TermName] = Some(name.toTermName)
+ override def definesType: Option[TypeName] = None
+ override def resultExtractionCode(req: Request) = if (mods.isPublic) codegenln(notification(req)) else ""
+ def notification(req: Request): String
+ }
+
+ class TermMacroHandler(member: DefDef) extends MacroHandler(member) {
+ def notification(req: Request) = s"defined term macro $name: ${req.typeOf(name)}"
+ }
+
class AssignHandler(member: Assign) extends MemberHandler(member) {
val Assign(lhs, rhs) = member
- val name = newTermName(freshInternalVarName())
+ override lazy val name = newTermName(freshInternalVarName())
override def definesTerm = Some(name)
override def definesValue = true
@@ -152,17 +159,16 @@ trait MemberHandlers {
}
class ModuleHandler(module: ModuleDef) extends MemberDefHandler(module) {
- override def definesTerm = Some(name)
+ override def definesTerm = Some(name.toTermName)
override def definesValue = true
- override def isLegalTopLevel = true
- override def resultExtractionCode(req: Request) = codegenln("defined module ", name)
+ override def resultExtractionCode(req: Request) = codegenln("defined object ", name)
}
class ClassHandler(member: ClassDef) extends MemberDefHandler(member) {
+ override def definedSymbols = List(symbol, symbol.companionSymbol) filterNot (_ == NoSymbol)
override def definesType = Some(name.toTypeName)
override def definesTerm = Some(name.toTermName) filter (_ => mods.isCase)
- override def isLegalTopLevel = true
override def resultExtractionCode(req: Request) =
codegenln("defined %s %s".format(keyword, name))
@@ -178,21 +184,11 @@ trait MemberHandlers {
class ImportHandler(imp: Import) extends MemberHandler(imp) {
val Import(expr, selectors) = imp
- def targetType: Type = intp.typeOfExpression("" + expr)
- override def isLegalTopLevel = true
-
- def createImportForName(name: Name): String = {
- selectors foreach {
- case sel @ ImportSelector(old, _, `name`, _) => return "import %s.{ %s }".format(expr, sel)
- case _ => ()
- }
- "import %s.%s".format(expr, name)
+ def targetType = intp.global.rootMirror.getModuleIfDefined("" + expr) match {
+ case NoSymbol => intp.typeOfExpression("" + expr)
+ case sym => sym.thisType
}
- // TODO: Need to track these specially to honor Predef masking attempts,
- // because they must be the leading imports in the code generated for each
- // line. We can use the same machinery as Contexts now, anyway.
- def isPredefImport = isReferenceToPredef(expr)
-
+ private def importableTargetMembers = importableMembers(targetType).toList
// wildcard imports, e.g. import foo._
private def selectorWild = selectors filter (_.name == nme.USCOREkw)
// renamed imports, e.g. import foo.{ bar => baz }
@@ -201,22 +197,16 @@ trait MemberHandlers {
/** Whether this import includes a wildcard import */
val importsWildcard = selectorWild.nonEmpty
- /** Whether anything imported is implicit .*/
- def importsImplicit = implicitSymbols.nonEmpty
-
def implicitSymbols = importedSymbols filter (_.isImplicit)
def importedSymbols = individualSymbols ++ wildcardSymbols
- lazy val individualSymbols: List[Symbol] =
- beforePickler(individualNames map (targetType nonPrivateMember _))
-
- lazy val wildcardSymbols: List[Symbol] =
- if (importsWildcard) beforePickler(targetType.nonPrivateMembers.toList)
- else Nil
+ private val selectorNames = selectorRenames filterNot (_ == nme.USCOREkw) flatMap (_.bothNames) toSet
+ lazy val individualSymbols: List[Symbol] = exitingTyper(importableTargetMembers filter (m => selectorNames(m.name)))
+ lazy val wildcardSymbols: List[Symbol] = exitingTyper(if (importsWildcard) importableTargetMembers else Nil)
/** Complete list of names imported by a wildcard */
lazy val wildcardNames: List[Name] = wildcardSymbols map (_.name)
- lazy val individualNames: List[Name] = selectorRenames filterNot (_ == nme.USCOREkw) flatMap (_.bothNames)
+ lazy val individualNames: List[Name] = individualSymbols map (_.name)
/** The names imported by this statement */
override lazy val importedNames: List[Name] = wildcardNames ++ individualNames
diff --git a/src/compiler/scala/tools/nsc/interpreter/NamedParam.scala b/src/repl/scala/tools/nsc/interpreter/NamedParam.scala
index eff0ef59c5..a0af72940a 100644
--- a/src/compiler/scala/tools/nsc/interpreter/NamedParam.scala
+++ b/src/repl/scala/tools/nsc/interpreter/NamedParam.scala
@@ -10,18 +10,15 @@ import NamedParam._
import scala.language.implicitConversions
import scala.reflect.runtime.{universe => ru}
import scala.reflect.{ClassTag, classTag}
+import scala.tools.nsc.typechecker.{ TypeStrings }
trait NamedParamCreator {
protected def freshName: () => String
- def apply(name: String, tpe: String, value: Any): NamedParam = NamedParamClass(name, tpe, value)
def apply[T: ru.TypeTag : ClassTag](name: String, x: T): NamedParam = new Typed[T](name, x)
def apply[T: ru.TypeTag : ClassTag](x: T): NamedParam = apply(freshName(), x)
-
def clazz(name: String, x: Any): NamedParam = new Untyped(name, x)
- def clazz(x: Any): NamedParam = clazz(freshName(), x)
- implicit def namedValue[T: ru.TypeTag : ClassTag](name: String, x: T): NamedParam = apply(name, x)
implicit def tuple[T: ru.TypeTag : ClassTag](pair: (String, T)): NamedParam = apply(pair._1, pair._2)
}
diff --git a/src/compiler/scala/tools/nsc/interpreter/Naming.scala b/src/repl/scala/tools/nsc/interpreter/Naming.scala
index 0d03a8669a..7f577b3a8b 100644
--- a/src/compiler/scala/tools/nsc/interpreter/Naming.scala
+++ b/src/repl/scala/tools/nsc/interpreter/Naming.scala
@@ -3,9 +3,12 @@
* @author Paul Phillips
*/
-package scala.tools.nsc
+package scala
+package tools.nsc
package interpreter
+import scala.util.Properties.lineSeparator
+
/** This is for name logic which is independent of the compiler (notice there's no Global.)
* That includes at least generating, metaquoting, mangling, and unmangling.
*/
@@ -18,8 +21,14 @@ trait Naming {
// <ESC> for ansi codes.
val binaryChars = cleaned count (ch => ch < 32 && !ch.isWhitespace && ch != ESC)
// Lots of binary chars - translate all supposed whitespace into spaces
- if (binaryChars > 5)
- cleaned map (ch => if (ch.isWhitespace) ' ' else if (ch < 32) '?' else ch)
+ // except supposed line endings, otherwise scrubbed lines run together
+ if (binaryChars > 5) // more than one can count while holding a hamburger
+ cleaned map {
+ case c if lineSeparator contains c => c
+ case c if c.isWhitespace => ' '
+ case c if c < 32 => '?'
+ case c => c
+ }
// Not lots - preserve whitespace and ESC
else
cleaned map (ch => if (ch.isWhitespace || ch == ESC) ch else if (ch < 32) '?' else ch)
@@ -78,7 +87,6 @@ trait Naming {
private lazy val userVar = new NameCreator(sessionNames.res) // var name, like res0
private lazy val internalVar = new NameCreator(sessionNames.ires) // internal var name, like $ires0
- def isLineName(name: String) = (name startsWith sessionNames.line) && (name stripPrefix sessionNames.line forall (_.isDigit))
def isUserVarName(name: String) = userVar didGenerate name
def isInternalVarName(name: String) = internalVar didGenerate name
diff --git a/src/compiler/scala/tools/nsc/interpreter/Parsed.scala b/src/repl/scala/tools/nsc/interpreter/Parsed.scala
index b0be956df8..672a6fd28f 100644
--- a/src/compiler/scala/tools/nsc/interpreter/Parsed.scala
+++ b/src/repl/scala/tools/nsc/interpreter/Parsed.scala
@@ -6,7 +6,6 @@
package scala.tools.nsc
package interpreter
-import scala.tools.jline.console.completer.ArgumentCompleter.{ ArgumentDelimiter, ArgumentList }
import util.returning
/** One instance of a command buffer.
@@ -18,7 +17,6 @@ class Parsed private (
) extends Delimited {
def isEmpty = args.isEmpty
def isUnqualified = args.size == 1
- def isQualified = args.size > 1
def isAtStart = cursor <= 0
private var _verbosity = 0
@@ -32,7 +30,6 @@ class Parsed private (
def bufferTail = new Parsed(buffer drop headLength, cursor - headLength, delimited) withVerbosity verbosity
def prev = new Parsed(buffer, cursor - 1, delimited) withVerbosity verbosity
- def next = new Parsed(buffer, cursor + 1, delimited) withVerbosity verbosity
def currentChar = buffer(cursor)
def currentArg = args.last
def position =
@@ -42,8 +39,6 @@ class Parsed private (
def isFirstDelimiter = !isEmpty && isDelimiterChar(buffer.head)
def isLastDelimiter = !isEmpty && isDelimiterChar(buffer.last)
- def firstIfDelimiter = if (isFirstDelimiter) buffer.head.toString else ""
- def lastIfDelimiter = if (isLastDelimiter) buffer.last.toString else ""
def isQuoted = false // TODO
def isEscaped = !isAtStart && isEscapeChar(currentChar) && !isEscapeChar(prev.currentChar)
@@ -57,13 +52,9 @@ object Parsed {
private def onull(s: String) = if (s == null) "" else s
- def apply(s: String): Parsed = apply(onull(s), onull(s).length)
def apply(s: String, cursor: Int): Parsed = apply(onull(s), cursor, DefaultDelimiters)
def apply(s: String, cursor: Int, delimited: Char => Boolean): Parsed =
new Parsed(onull(s), cursor, delimited)
- def dotted(s: String): Parsed = dotted(onull(s), onull(s).length)
def dotted(s: String, cursor: Int): Parsed = new Parsed(onull(s), cursor, _ == '.')
-
- def undelimited(s: String, cursor: Int): Parsed = new Parsed(onull(s), cursor, _ => false)
}
diff --git a/src/compiler/scala/tools/nsc/interpreter/Pasted.scala b/src/repl/scala/tools/nsc/interpreter/Pasted.scala
index f5db3d9e3a..f5db3d9e3a 100644
--- a/src/compiler/scala/tools/nsc/interpreter/Pasted.scala
+++ b/src/repl/scala/tools/nsc/interpreter/Pasted.scala
diff --git a/src/compiler/scala/tools/nsc/interpreter/Phased.scala b/src/repl/scala/tools/nsc/interpreter/Phased.scala
index 638944713a..f625124e70 100644
--- a/src/compiler/scala/tools/nsc/interpreter/Phased.scala
+++ b/src/repl/scala/tools/nsc/interpreter/Phased.scala
@@ -6,7 +6,7 @@
package scala.tools.nsc
package interpreter
-import scala.collection.{ mutable, immutable }
+import scala.collection.immutable
import scala.language.implicitConversions
/** Mix this into an object and use it as a phasing
@@ -24,7 +24,6 @@ trait Phased {
case NoPhaseName => false
case name => active = name ; true
}
- def getMulti = multi
def setMulti(phases: Seq[PhaseName]): Boolean = {
if (phases contains NoPhaseName) false
else {
@@ -66,16 +65,8 @@ trait Phased {
try parseInternal(str)
catch { case _: Exception => NoPhaseName }
- def apply[T](body: => T) = immutable.SortedMap[PhaseName, T](atMap(PhaseName.all)(body): _*)
-
- def atCurrent[T](body: => T): T = atPhase(get)(body)
+ def atCurrent[T](body: => T): T = enteringPhase(get)(body)
def multi[T](body: => T): Seq[T] = multi map (ph => at(ph)(body))
- def all[T](body: => T): Seq[T] = atMulti(PhaseName.all)(body)
- def show[T](body: => T): Seq[T] = {
- val pairs = atMap(PhaseName.all)(body)
- pairs foreach { case (ph, op) => Console.println("%15s -> %s".format(ph, op.toString take 240)) }
- pairs map (_._2)
- }
def at[T](ph: PhaseName)(body: => T): T = {
val saved = get
@@ -90,11 +81,6 @@ trait Phased {
finally setMulti(saved)
}
- def showAt[T](phs: Seq[PhaseName])(body: => T): Unit =
- atMap[T](phs)(body) foreach {
- case (ph, op) => Console.println("%15s -> %s".format(ph, op.toString take 240))
- }
-
def atMap[T](phs: Seq[PhaseName])(body: => T): Seq[(PhaseName, T)] =
phs zip atMulti(phs)(body)
@@ -112,16 +98,12 @@ trait Phased {
def apply(id: Int): PhaseName = all find (_.id == id) getOrElse NoPhaseName
implicit def apply(s: String): PhaseName = nameMap(s)
- implicit def defaultPhaseName: PhaseName = active
}
sealed abstract class PhaseName {
lazy val id = phase.id
lazy val name = toString.toLowerCase
def phase = currentRun.phaseNamed(name)
def isEmpty = this eq NoPhaseName
-
- // Execute some code during this phase.
- def apply[T](body: => T): T = atPhase(phase)(body)
}
case object Parser extends PhaseName
@@ -158,5 +140,4 @@ trait Phased {
}
implicit def phaseEnumToPhase(name: PhaseName): Phase = name.phase
- implicit def phaseNameToPhase(name: String): Phase = currentRun.phaseNamed(name)
}
diff --git a/src/compiler/scala/tools/nsc/interpreter/Power.scala b/src/repl/scala/tools/nsc/interpreter/Power.scala
index 5e6bf8824d..da6d271a68 100644
--- a/src/compiler/scala/tools/nsc/interpreter/Power.scala
+++ b/src/repl/scala/tools/nsc/interpreter/Power.scala
@@ -8,8 +8,6 @@ package interpreter
import scala.collection.{ mutable, immutable }
import scala.util.matching.Regex
-import scala.reflect.internal.util.{ BatchSourceFile }
-import session.{ History }
import scala.io.Codec
import java.net.{ URL, MalformedURLException }
import io.{ Path }
@@ -48,7 +46,6 @@ class Power[ReplValsImpl <: ReplVals : ru.TypeTag: ClassTag](val intp: IMain, re
import intp.{ beQuietDuring, typeOfExpression, interpret, parse }
import intp.global._
import definitions.{ compilerTypeFromTag, compilerSymbolFromTag}
- import rootMirror.{ getClassIfDefined, getModuleIfDefined }
abstract class SymSlurper {
def isKeep(sym: Symbol): Boolean
@@ -73,7 +70,7 @@ class Power[ReplValsImpl <: ReplVals : ru.TypeTag: ClassTag](val intp: IMain, re
pass += 1
val (repeats, unseen) = todo partition seen
unseenHistory += unseen.size
- if (opt.verbose) {
+ if (settings.verbose) {
println("%3d %s accumulated, %s discarded. This pass: %s unseen, %s repeats".format(
pass, keep.size, discarded, unseen.size, repeats.size))
}
@@ -148,21 +145,10 @@ class Power[ReplValsImpl <: ReplVals : ru.TypeTag: ClassTag](val intp: IMain, re
// First we create the ReplVals instance and bind it to $r
intp.bind("$r", replVals)
// Then we import everything from $r.
- intp interpret ("import " + intp.pathToTerm("$r") + "._")
+ intp interpret ("import " + intp.originalPath("$r") + "._")
// And whatever else there is to do.
init.lines foreach (intp interpret _)
}
- def valsDescription: String = {
- def to_str(m: Symbol) = "%12s %s".format(
- m.decodedName, "" + elimRefinement(m.accessedOrSelf.tpe) stripPrefix "scala.tools.nsc.")
-
- ( rutil.info[ReplValsImpl].membersDeclared
- filter (m => m.isPublic && !m.hasModuleFlag && !m.isConstructor)
- sortBy (_.decodedName)
- map to_str
- mkString ("Name and type of values imported into the repl in power mode.\n\n", "\n", "")
- )
- }
trait LowPriorityInternalInfo {
implicit def apply[T: ru.TypeTag : ClassTag] : InternalInfo[T] = new InternalInfo[T](None)
@@ -175,12 +161,7 @@ class Power[ReplValsImpl <: ReplVals : ru.TypeTag: ClassTag](val intp: IMain, re
* symbol, by only implicitly installing one method, "?", and the rest
* of the conveniences exist on that wrapper.
*/
- trait LowPriorityInternalInfoWrapper {
- implicit def apply[T: ru.TypeTag : ClassTag] : InternalInfoWrapper[T] = new InternalInfoWrapper[T](None)
- }
- object InternalInfoWrapper extends LowPriorityInternalInfoWrapper {
-
- }
+ trait LowPriorityInternalInfoWrapper { }
class InternalInfoWrapper[T: ru.TypeTag : ClassTag](value: Option[T] = None) {
def ? : InternalInfo[T] = new InternalInfo[T](value)
}
@@ -190,7 +171,6 @@ class Power[ReplValsImpl <: ReplVals : ru.TypeTag: ClassTag](val intp: IMain, re
* customizable symbol filter (had to hardcode no-spec to reduce noise)
*/
class InternalInfo[T](value: Option[T] = None)(implicit typeEvidence: ru.TypeTag[T], runtimeClassEvidence: ClassTag[T]) {
- private def newInfo[U: ru.TypeTag : ClassTag](value: U): InternalInfo[U] = new InternalInfo[U](Some(value))
private def isSpecialized(s: Symbol) = s.name.toString contains "$mc"
private def isImplClass(s: Symbol) = s.name.toString endsWith "$class"
@@ -201,47 +181,15 @@ class Power[ReplValsImpl <: ReplVals : ru.TypeTag: ClassTag](val intp: IMain, re
|| s.isAnonOrRefinementClass
|| s.isAnonymousFunction
)
- def symbol = compilerSymbolFromTag(tag)
- def tpe = compilerTypeFromTag(tag)
- def name = symbol.name
- def companion = symbol.companionSymbol
- def info = symbol.info
- def moduleClass = symbol.moduleClass
- def owner = symbol.owner
- def owners = symbol.ownerChain drop 1
- def signature = symbol.defString
-
- def decls = info.decls
- def declsOverride = membersDeclared filter (_.isOverride)
- def declsOriginal = membersDeclared filterNot (_.isOverride)
-
+ def symbol = compilerSymbolFromTag(tag)
+ def tpe = compilerTypeFromTag(tag)
def members = membersUnabridged filterNot excludeMember
def membersUnabridged = tpe.members.toList
- def membersDeclared = members filterNot excludeMember
- def membersInherited = members filterNot (membersDeclared contains _)
- def memberTypes = members filter (_.name.isTypeName)
- def memberMethods = members filter (_.isMethod)
-
- def pkg = symbol.enclosingPackage
- def pkgName = pkg.fullName
- def pkgClass = symbol.enclosingPackageClass
- def pkgMembers = pkg.info.members filterNot excludeMember
- def pkgClasses = pkgMembers filter (s => s.isClass && s.isDefinedInPackage)
- def pkgSymbols = new PackageSlurper(pkgClass).slurp() filterNot excludeMember
-
- def tag = typeEvidence
- def runtimeClass = runtimeClassEvidence.runtimeClass
- def shortClass = runtimeClass.getName split "[$.]" last
-
- def baseClasses = tpe.baseClasses
- def baseClassDecls = mapFrom(baseClasses)(_.info.decls.toList.sortBy(_.name))
- def ancestors = baseClasses drop 1
- def ancestorDeclares(name: String) = ancestors filter (_.info member newTermName(name) ne NoSymbol)
- def baseTypes = tpe.baseTypeSeq.toList
-
- def <:<[U: ru.TypeTag : ClassTag](other: U) = tpe <:< newInfo(other).tpe
- def lub[U: ru.TypeTag : ClassTag](other: U) = intp.global.lub(List(tpe, newInfo(other).tpe))
- def glb[U: ru.TypeTag : ClassTag](other: U) = intp.global.glb(List(tpe, newInfo(other).tpe))
+ def pkg = symbol.enclosingPackage
+ def tag = typeEvidence
+ def runtimeClass = runtimeClassEvidence.runtimeClass
+ def shortClass = runtimeClass.getName split "[$.]" last
+ def baseClasses = tpe.baseClasses
override def toString = value match {
case Some(x) => "%s (%s)".format(x, shortClass)
@@ -267,7 +215,6 @@ class Power[ReplValsImpl <: ReplVals : ru.TypeTag: ClassTag](val intp: IMain, re
}
object Prettifier extends LowPriorityPrettifier {
def stringOf(x: Any): String = scala.runtime.ScalaRunTime.stringOf(x)
- def prettify[T](value: T): TraversableOnce[String] = default[T] prettify value
def default[T] = new Prettifier[T] {
def prettify(x: T): TraversableOnce[String] = AnyPrettifier prettify x
def show(x: T): Unit = AnyPrettifier show x
@@ -277,45 +224,21 @@ class Power[ReplValsImpl <: ReplVals : ru.TypeTag: ClassTag](val intp: IMain, re
def show(x: T): Unit
def prettify(x: T): TraversableOnce[String]
- def show(xs: TraversableOnce[T]): Unit = prettify(xs) foreach println
def prettify(xs: TraversableOnce[T]): TraversableOnce[String] = xs flatMap (x => prettify(x))
}
abstract class PrettifierClass[T: Prettifier]() {
val pretty = implicitly[Prettifier[T]]
- import pretty._
-
def value: Seq[T]
def pp(f: Seq[T] => Seq[T]): Unit =
pretty prettify f(value) foreach (StringPrettifier show _)
def freq[U](p: T => U) = (value.toSeq groupBy p mapValues (_.size)).toList sortBy (-_._2) map (_.swap)
- def ppfreq[U](p: T => U): Unit = freq(p) foreach { case (count, key) => println("%5d %s".format(count, key)) }
-
- def |[U](f: Seq[T] => Seq[U]): Seq[U] = f(value)
- def ^^[U](f: T => U): Seq[U] = value map f
- def ^?[U](pf: PartialFunction[T, U]): Seq[U] = value collect pf
- def >>!(implicit ord: Ordering[T]): Unit = pp(_.sorted.distinct)
def >>(implicit ord: Ordering[T]): Unit = pp(_.sorted)
def >!(): Unit = pp(_.distinct)
def >(): Unit = pp(identity)
-
- def >#(): Unit = this ># (identity[T] _)
- def >#[U](p: T => U): Unit = this ppfreq p
-
- def >?(p: T => Boolean): Unit = pp(_ filter p)
- def >?(s: String): Unit = pp(_ filter (_.toString contains s))
- def >?(r: Regex): Unit = pp(_ filter (_.toString matches fixRegex(r)))
-
- private def fixRegex(r: scala.util.matching.Regex): String = {
- val s = r.pattern.toString
- val prefix = if (s startsWith "^") "" else """^.*?"""
- val suffix = if (s endsWith "$") "" else """.*$"""
-
- prefix + s + suffix
- }
}
class MultiPrettifierClass[T: Prettifier](val value: Seq[T]) extends PrettifierClass[T]() { }
@@ -339,17 +262,11 @@ class Power[ReplValsImpl <: ReplVals : ru.TypeTag: ClassTag](val intp: IMain, re
class RichReplURL(url: URL)(implicit codec: Codec) {
def slurp(): String = io.Streamable.slurp(url)
}
- class RichSymbolList(syms: List[Symbol]) {
- def sigs = syms map (_.defString)
- def infos = syms map (_.info)
- }
trait Implicits1 {
// fallback
implicit def replPrinting[T](x: T)(implicit pretty: Prettifier[T] = Prettifier.default[T]) =
new SinglePrettifierClass[T](x)
-
- implicit def liftToTypeName(s: String): TypeName = newTypeName(s)
}
trait Implicits2 extends Implicits1 {
class RichSymbol(sym: Symbol) {
@@ -374,26 +291,13 @@ class Power[ReplValsImpl <: ReplVals : ru.TypeTag: ClassTag](val intp: IMain, re
implicit def replInputStream(in: InputStream)(implicit codec: Codec) = new RichInputStream(in)
implicit def replEnhancedURLs(url: URL)(implicit codec: Codec): RichReplURL = new RichReplURL(url)(codec)
-
- implicit def liftToTermName(s: String): TermName = newTermName(s)
- implicit def replListOfSymbols(xs: List[Symbol]) = new RichSymbolList(xs)
}
trait ReplUtilities {
- // [Eugene to Paul] needs review!
- // def module[T: Manifest] = getModuleIfDefined(manifest[T].erasure.getName stripSuffix nme.MODULE_SUFFIX_STRING)
- // def clazz[T: Manifest] = getClassIfDefined(manifest[T].erasure.getName)
def module[T: ru.TypeTag] = ru.typeOf[T].typeSymbol.suchThat(_.isPackage)
def clazz[T: ru.TypeTag] = ru.typeOf[T].typeSymbol.suchThat(_.isClass)
def info[T: ru.TypeTag : ClassTag] = InternalInfo[T]
def ?[T: ru.TypeTag : ClassTag] = InternalInfo[T]
- def url(s: String) = {
- try new URL(s)
- catch { case _: MalformedURLException =>
- if (Path(s).exists) Path(s).toURL
- else new URL("http://" + s)
- }
- }
def sanitize(s: String): String = sanitize(s.getBytes())
def sanitize(s: Array[Byte]): String = (s map {
case x if x.toChar.isControl => '?'
@@ -411,20 +315,12 @@ class Power[ReplValsImpl <: ReplVals : ru.TypeTag: ClassTag](val intp: IMain, re
lazy val rutil: ReplUtilities = new ReplUtilities { }
lazy val phased: Phased = new { val global: intp.global.type = intp.global } with Phased { }
- def context(code: String) = analyzer.rootContext(unit(code))
- def source(code: String) = newSourceFile(code)
- def unit(code: String) = newCompilationUnit(code)
- def trees(code: String) = parse(code) getOrElse Nil
- def typeOf(id: String) = intp.typeOfExpression(id)
+ def unit(code: String) = newCompilationUnit(code)
+ def trees(code: String) = parse(code) getOrElse Nil
- override def toString = """
+ override def toString = s"""
|** Power mode status **
- |Default phase: %s
- |Names: %s
- |Identifiers: %s
- """.stripMargin.format(
- phased.get,
- intp.allDefinedNames mkString " ",
- intp.unqualifiedIds mkString " "
- )
+ |Default phase: ${phased.get}
+ |Names: ${intp.unqualifiedIds mkString " "}
+ """.stripMargin
}
diff --git a/src/compiler/scala/tools/nsc/interpreter/ReplConfig.scala b/src/repl/scala/tools/nsc/interpreter/ReplConfig.scala
index 7cd0f436c4..3392ea0b5e 100644
--- a/src/compiler/scala/tools/nsc/interpreter/ReplConfig.scala
+++ b/src/repl/scala/tools/nsc/interpreter/ReplConfig.scala
@@ -14,9 +14,7 @@ trait ReplConfig {
lazy val replProps = new ReplProps
class TapMaker[T](x: T) {
- def tapInfo(msg: => String): T = tap(x => replinfo(parens(x)))
def tapDebug(msg: => String): T = tap(x => repldbg(parens(x)))
- def tapTrace(msg: => String): T = tap(x => repltrace(parens(x)))
def tap[U](f: T => U): T = {
f(x)
x
@@ -28,12 +26,6 @@ trait ReplConfig {
try Console println msg
catch { case x: AssertionError => Console.println("Assertion error printing debugging output: " + x) }
- private[nsc] def repldbgex(ex: Throwable): Unit = {
- if (isReplDebug) {
- echo("Caught/suppressing: " + ex)
- ex.printStackTrace
- }
- }
private[nsc] def repldbg(msg: => String) = if (isReplDebug) echo(msg)
private[nsc] def repltrace(msg: => String) = if (isReplTrace) echo(msg)
private[nsc] def replinfo(msg: => String) = if (isReplInfo) echo(msg)
@@ -45,14 +37,10 @@ trait ReplConfig {
repltrace(stackTraceString(unwrap(t)))
alt
}
- private[nsc] def substituteAndLog[T](alt: => T)(body: => T): T =
- substituteAndLog("" + alt, alt)(body)
private[nsc] def substituteAndLog[T](label: String, alt: => T)(body: => T): T = {
try body
catch logAndDiscard(label, alt)
}
- private[nsc] def squashAndLog(label: String)(body: => Unit): Unit =
- substituteAndLog(label, ())(body)
def isReplTrace: Boolean = replProps.trace
def isReplDebug: Boolean = replProps.debug || isReplTrace
diff --git a/src/repl/scala/tools/nsc/interpreter/ReplDir.scala b/src/repl/scala/tools/nsc/interpreter/ReplDir.scala
new file mode 100644
index 0000000000..5d386b47b7
--- /dev/null
+++ b/src/repl/scala/tools/nsc/interpreter/ReplDir.scala
@@ -0,0 +1,48 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author Paul Phillips
+ */
+
+package scala.tools.nsc
+package interpreter
+
+import io.VirtualDirectory
+import settings.MutableSettings
+import scala.reflect.io.{ AbstractFile, PlainDirectory, Directory }
+import scala.collection.generic.Clearable
+
+/** Directory to save .class files to. */
+trait ReplDir extends AbstractFile with Clearable { }
+
+private class ReplVirtualDir() extends VirtualDirectory("(memory)", None) with ReplDir { }
+private class ReplRealDir(dir: Directory) extends PlainDirectory(dir) with ReplDir {
+ def clear() = {
+ dir.deleteRecursively()
+ dir.createDirectory()
+ }
+}
+
+class ReplOutput(val dirSetting: MutableSettings#StringSetting) {
+ // outdir for generated classfiles - may be in-memory (the default),
+ // a generated temporary directory, or a specified outdir.
+ val dir: ReplDir = (
+ if (dirSetting.isDefault)
+ new ReplVirtualDir()
+ else if (dirSetting.value == "")
+ new ReplRealDir(Directory.makeTemp("repl"))
+ else
+ new ReplRealDir(Directory(dirSetting.value))
+ )
+
+ // print the contents hierarchically
+ def show(out: JPrintWriter) = {
+ def pp(root: AbstractFile, indentLevel: Int) {
+ val label = root.name
+ val spaces = " " * indentLevel
+ out.println(spaces + label)
+ if (root.isDirectory)
+ root.toList sortBy (_.name) foreach (x => pp(x, indentLevel + 1))
+ }
+ pp(dir, 0)
+ }
+}
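
So ReplOutput derives the REPL's class-file sink from a single string setting: the default yields the in-memory virtual directory, an explicitly empty value requests a throwaway temporary directory, and any other value names a real directory on disk. The decision in isolation, as a minimal sketch (the OutDir ADT is illustrative, not part of the patch):

object OutDirSketch extends App {
  sealed trait OutDir
  case object InMemory              extends OutDir
  case object FreshTempDir          extends OutDir
  case class  RealDir(path: String) extends OutDir

  // mirrors ReplOutput: default setting => virtual dir, "" => temp dir, otherwise a plain directory
  def choose(isDefault: Boolean, value: String): OutDir =
    if (isDefault) InMemory
    else if (value == "") FreshTempDir
    else RealDir(value)

  println(choose(isDefault = true,  value = ""))           // InMemory
  println(choose(isDefault = false, value = ""))           // FreshTempDir
  println(choose(isDefault = false, value = "/tmp/repl"))  // RealDir(/tmp/repl)
}
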
diff --git a/src/compiler/scala/tools/nsc/interpreter/ReplGlobal.scala b/src/repl/scala/tools/nsc/interpreter/ReplGlobal.scala
index 7c698a2f3e..51fab3082e 100644
--- a/src/compiler/scala/tools/nsc/interpreter/ReplGlobal.scala
+++ b/src/repl/scala/tools/nsc/interpreter/ReplGlobal.scala
@@ -6,7 +6,6 @@
package scala.tools.nsc
package interpreter
-import reporters._
import typechecker.Analyzer
/** A layer on top of Global so I can guarantee some extra
@@ -24,8 +23,16 @@ trait ReplGlobal extends Global {
override lazy val analyzer = new {
val global: ReplGlobal.this.type = ReplGlobal.this
} with Analyzer {
+
+ override protected def findMacroClassLoader(): ClassLoader = {
+ val loader = super.findMacroClassLoader
+ macroLogVerbose("macro classloader: initializing from a REPL classloader: %s".format(global.classPath.asURLs))
+ val virtualDirectory = globalSettings.outputDirs.getSingleOutput.get
+ new util.AbstractFileClassLoader(virtualDirectory, loader) {}
+ }
+
override def newTyper(context: Context): Typer = new Typer(context) {
- override def typed(tree: Tree, mode: Int, pt: Type): Tree = {
+ override def typed(tree: Tree, mode: Mode, pt: Type): Tree = {
val res = super.typed(tree, mode, pt)
tree match {
case Ident(name) if !tree.symbol.hasPackageFlag && !name.toString.startsWith("$") =>
diff --git a/src/compiler/scala/tools/nsc/interpreter/ReplProps.scala b/src/repl/scala/tools/nsc/interpreter/ReplProps.scala
index bc3e7a10d7..2364918494 100644
--- a/src/compiler/scala/tools/nsc/interpreter/ReplProps.scala
+++ b/src/repl/scala/tools/nsc/interpreter/ReplProps.scala
@@ -13,15 +13,11 @@ class ReplProps {
private def bool(name: String) = BooleanProp.keyExists(name)
private def int(name: String) = IntProp(name)
- val jlineDebug = bool("scala.tools.jline.internal.Log.debug")
- val jlineTrace = bool("scala.tools.jline.internal.Log.trace")
-
val info = bool("scala.repl.info")
val debug = bool("scala.repl.debug")
val trace = bool("scala.repl.trace")
val power = bool("scala.repl.power")
- val replInitCode = Prop[JFile]("scala.repl.initcode")
val replAutorunCode = Prop[JFile]("scala.repl.autoruncode")
val powerInitCode = Prop[JFile]("scala.repl.power.initcode")
val powerBanner = Prop[JFile]("scala.repl.power.banner")
diff --git a/src/compiler/scala/tools/nsc/interpreter/ReplReporter.scala b/src/repl/scala/tools/nsc/interpreter/ReplReporter.scala
index b20166d070..b20166d070 100644
--- a/src/compiler/scala/tools/nsc/interpreter/ReplReporter.scala
+++ b/src/repl/scala/tools/nsc/interpreter/ReplReporter.scala
diff --git a/src/compiler/scala/tools/nsc/interpreter/ReplStrings.scala b/src/repl/scala/tools/nsc/interpreter/ReplStrings.scala
index f8ecc6c6fe..08472bbc64 100644
--- a/src/compiler/scala/tools/nsc/interpreter/ReplStrings.scala
+++ b/src/repl/scala/tools/nsc/interpreter/ReplStrings.scala
@@ -6,8 +6,6 @@
package scala.tools.nsc
package interpreter
-import scala.collection.{ mutable, immutable }
-import scala.PartialFunction.cond
import scala.reflect.internal.Chars
trait ReplStrings {
@@ -31,5 +29,4 @@ trait ReplStrings {
"scala.runtime.ScalaRunTime.replStringOf(%s, %s)".format(x, maxlen)
def words(s: String) = s.trim split "\\s+" filterNot (_ == "") toList
- def isQuoted(s: String) = (s.length >= 2) && (s.head == s.last) && ("\"'" contains s.head)
}
diff --git a/src/compiler/scala/tools/nsc/interpreter/ReplVals.scala b/src/repl/scala/tools/nsc/interpreter/ReplVals.scala
index 53478bdc5d..ea100b25f2 100644
--- a/src/compiler/scala/tools/nsc/interpreter/ReplVals.scala
+++ b/src/repl/scala/tools/nsc/interpreter/ReplVals.scala
@@ -57,7 +57,6 @@ object ReplVals {
*/
def mkCompilerTypeFromTag[T <: Global](global: T) = {
import global._
- import definitions._
/** We can't use definitions.compilerTypeFromTag directly because we're passing
* it to map and the compiler refuses to perform eta expansion on a method
diff --git a/src/compiler/scala/tools/nsc/interpreter/Results.scala b/src/repl/scala/tools/nsc/interpreter/Results.scala
index e400906a58..e400906a58 100644
--- a/src/compiler/scala/tools/nsc/interpreter/Results.scala
+++ b/src/repl/scala/tools/nsc/interpreter/Results.scala
diff --git a/src/compiler/scala/tools/nsc/interpreter/RichClass.scala b/src/repl/scala/tools/nsc/interpreter/RichClass.scala
index 4371f7fe05..36cdf65510 100644
--- a/src/compiler/scala/tools/nsc/interpreter/RichClass.scala
+++ b/src/repl/scala/tools/nsc/interpreter/RichClass.scala
@@ -10,7 +10,6 @@ import scala.reflect.{ ClassTag, classTag }
class RichClass[T](val clazz: Class[T]) {
def toTag: ClassTag[T] = ClassTag[T](clazz)
- def toTypeString: String = TypeStrings.fromClazz(clazz)
// Sadly isAnonymousClass does not return true for scala anonymous
// classes because our naming scheme is not doing well against the
@@ -20,14 +19,12 @@ class RichClass[T](val clazz: Class[T]) {
catch { case _: java.lang.InternalError => false } // good ol' "Malformed class name"
)
- /** It's not easy... to be... me... */
- def supermans: List[ClassTag[_]] = supers map (_.toTag)
+ def supertags: List[ClassTag[_]] = supers map (_.toTag)
def superNames: List[String] = supers map (_.getName)
def interfaces: List[JClass] = supers filter (_.isInterface)
def hasAncestorName(f: String => Boolean) = superNames exists f
def hasAncestor(f: JClass => Boolean) = supers exists f
- def hasAncestorInPackage(pkg: String) = hasAncestorName(_ startsWith (pkg + "."))
def supers: List[JClass] = {
def loop(x: JClass): List[JClass] = x.getSuperclass match {
diff --git a/src/compiler/scala/tools/nsc/interpreter/SimpleReader.scala b/src/repl/scala/tools/nsc/interpreter/SimpleReader.scala
index bccd8158ec..6634dc6944 100644
--- a/src/compiler/scala/tools/nsc/interpreter/SimpleReader.scala
+++ b/src/repl/scala/tools/nsc/interpreter/SimpleReader.scala
@@ -3,7 +3,8 @@
* @author Stepan Koltsov
*/
-package scala.tools.nsc
+package scala
+package tools.nsc
package interpreter
import java.io.{ BufferedReader }
@@ -19,11 +20,8 @@ extends InteractiveReader
val history = NoHistory
val completion = NoCompletion
- def init() = ()
def reset() = ()
- def eraseLine() = ()
def redrawLine() = ()
- def currentLine = ""
def readOneLine(prompt: String): String = {
if (interactive) {
out.print(prompt)
@@ -40,4 +38,4 @@ object SimpleReader {
def apply(in: BufferedReader = defaultIn, out: JPrintWriter = defaultOut, interactive: Boolean = true): SimpleReader =
new SimpleReader(in, out, interactive)
-}
\ No newline at end of file
+}
diff --git a/src/repl/scala/tools/nsc/interpreter/StdReplTags.scala b/src/repl/scala/tools/nsc/interpreter/StdReplTags.scala
new file mode 100644
index 0000000000..ebbb397a0c
--- /dev/null
+++ b/src/repl/scala/tools/nsc/interpreter/StdReplTags.scala
@@ -0,0 +1,15 @@
+package scala.tools.nsc
+package interpreter
+
+import scala.tools.reflect.StdTags
+import scala.reflect.runtime.{ universe => ru }
+
+trait StdReplTags extends StdTags {
+ lazy val tagOfStdReplVals = tagOfStaticClass[StdReplVals]
+ lazy val tagOfIMain = tagOfStaticClass[IMain]
+}
+
+object StdReplTags extends StdTags with StdReplTags {
+ val u: ru.type = ru
+ val m = u.runtimeMirror(getClass.getClassLoader)
+}
diff --git a/src/repl/scala/tools/nsc/interpreter/package.scala b/src/repl/scala/tools/nsc/interpreter/package.scala
new file mode 100644
index 0000000000..f82c38f5e7
--- /dev/null
+++ b/src/repl/scala/tools/nsc/interpreter/package.scala
@@ -0,0 +1,199 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author Paul Phillips
+ */
+
+package scala.tools.nsc
+
+import scala.language.implicitConversions
+import scala.reflect.{ classTag, ClassTag }
+import scala.reflect.runtime.{ universe => ru }
+import scala.reflect.api.{Mirror, TypeCreator, Universe => ApiUniverse}
+import scala.util.control.Exception.catching
+
+/** The main REPL related classes and values are as follows.
+ * In addition to standard compiler classes Global and Settings, there are:
+ *
+ * ILoop (formerly InterpreterLoop): The umbrella class for a session.
+ * IMain (formerly Interpreter): Handles the evolving state of the session
+ * and handles submitting code to the compiler and handling the output.
+ * InteractiveReader: how ILoop obtains input.
+ * History: an interface for session history.
+ * Completion: an interface for tab completion.
+ * Power: a repository for more advanced/experimental features.
+ *
+ * ILoop contains { in: InteractiveReader, intp: IMain, settings: Settings, power: Power }
+ * InteractiveReader contains { history: History, completion: Completion }
+ * IMain contains { global: Global }
+ */
+package object interpreter extends ReplConfig with ReplStrings {
+ type JFile = java.io.File
+ type JClass = java.lang.Class[_]
+ type JList[T] = java.util.List[T]
+ type JCollection[T] = java.util.Collection[T]
+ type JPrintWriter = java.io.PrintWriter
+ type InputStream = java.io.InputStream
+ type OutputStream = java.io.OutputStream
+
+ val IR = Results
+
+ implicit def postfixOps = scala.language.postfixOps // make all postfix ops in this package compile without warning
+
+ private[interpreter] implicit def javaCharSeqCollectionToScala(xs: JCollection[_ <: CharSequence]): List[String] = {
+ import scala.collection.JavaConverters._
+ xs.asScala.toList map ("" + _)
+ }
+
+ private[nsc] implicit def enrichClass[T](clazz: Class[T]) = new RichClass[T](clazz)
+ private[nsc] implicit def enrichAnyRefWithTap[T](x: T) = new TapMaker(x)
+ private[nsc] def debugging[T](msg: String)(x: T) = x.tapDebug(msg)
+
+ private val ourClassloader = getClass.getClassLoader
+
+ def staticTypeTag[T: ClassTag]: ru.TypeTag[T] = ru.TypeTag[T](
+ ru.runtimeMirror(ourClassloader),
+ new TypeCreator {
+ def apply[U <: ApiUniverse with Singleton](m: Mirror[U]): U # Type =
+ m.staticClass(classTag[T].runtimeClass.getName).toTypeConstructor.asInstanceOf[U # Type]
+ })
+
+ /** This class serves to trick the compiler into treating a var
+ * (intp, in ILoop) as a stable identifier.
+ */
+ implicit class IMainOps(val intp: IMain) {
+ import intp._
+ import global.{ reporter => _, _ }
+ import definitions._
+
+ protected def echo(msg: String) = {
+ Console.out println msg
+ Console.out.flush()
+ }
+
+ def implicitsCommand(line: String): String = {
+ def p(x: Any) = intp.reporter.printMessage("" + x)
+
+ // If an argument is given, only show a source with that
+ // in its name somewhere.
+ val args = line split "\\s+"
+ val filtered = intp.implicitSymbolsBySource filter {
+ case (source, syms) =>
+ (args contains "-v") || {
+ if (line == "") (source.fullName.toString != "scala.Predef")
+ else (args exists (source.name.toString contains _))
+ }
+ }
+
+ if (filtered.isEmpty)
+ return "No implicits have been imported other than those in Predef."
+
+ filtered foreach {
+ case (source, syms) =>
+ p("/* " + syms.size + " implicit members imported from " + source.fullName + " */")
+
+ // This groups the members by where the symbol is defined
+ val byOwner = syms groupBy (_.owner)
+ val sortedOwners = byOwner.toList sortBy { case (owner, _) => exitingTyper(source.info.baseClasses indexOf owner) }
+
+ sortedOwners foreach {
+ case (owner, members) =>
+ // Within each owner, we cluster results based on the final result type
+ // if there are more than a couple, and sort each cluster based on name.
+ // This is really just trying to make the 100 or so implicits imported
+ // by default into something readable.
+ val memberGroups: List[List[Symbol]] = {
+ val groups = members groupBy (_.tpe.finalResultType) toList
+ val (big, small) = groups partition (_._2.size > 3)
+ val xss = (
+ (big sortBy (_._1.toString) map (_._2)) :+
+ (small flatMap (_._2))
+ )
+
+ xss map (xs => xs sortBy (_.name.toString))
+ }
+
+ val ownerMessage = if (owner == source) " defined in " else " inherited from "
+ p(" /* " + members.size + ownerMessage + owner.fullName + " */")
+
+ memberGroups foreach { group =>
+ group foreach (s => p(" " + intp.symbolDefString(s)))
+ p("")
+ }
+ }
+ p("")
+ }
+ ""
+ }
+
+ def kindCommandInternal(expr: String, verbose: Boolean): Unit = {
+ val catcher = catching(classOf[MissingRequirementError],
+ classOf[ScalaReflectionException])
+ def typeFromTypeString: Option[ClassSymbol] = catcher opt {
+ exprTyper.typeOfTypeString(expr).typeSymbol.asClass
+ }
+ def typeFromNameTreatedAsTerm: Option[ClassSymbol] = catcher opt {
+ val moduleClass = exprTyper.typeOfExpression(expr).typeSymbol
+ moduleClass.linkedClassOfClass.asClass
+ }
+ def typeFromFullName: Option[ClassSymbol] = catcher opt {
+ intp.global.rootMirror.staticClass(expr)
+ }
+ def typeOfTerm: Option[TypeSymbol] = replInfo(symbolOfLine(expr)).typeSymbol match {
+ case sym: TypeSymbol => Some(sym)
+ case _ => None
+ }
+ (typeFromTypeString orElse typeFromNameTreatedAsTerm orElse typeFromFullName orElse typeOfTerm) foreach { sym =>
+ val (kind, tpe) = exitingTyper {
+ val tpe = sym.tpeHK
+ (intp.global.inferKind(NoPrefix)(tpe, sym.owner), tpe)
+ }
+ echoKind(tpe, kind, verbose)
+ }
+ }
+
+ def echoKind(tpe: Type, kind: Kind, verbose: Boolean) {
+ def typeString(tpe: Type): String = {
+ tpe match {
+ case TypeRef(_, sym, _) => typeString(sym.typeSignature)
+ case RefinedType(_, _) => tpe.toString
+ case _ => tpe.typeSymbol.fullName
+ }
+ }
+ printAfterTyper(typeString(tpe) + "'s kind is " + kind.scalaNotation)
+ if (verbose) {
+ echo(kind.starNotation)
+ echo(kind.description)
+ }
+ }
+
+ /** TODO -
+ * -n normalize
+ * -l label with case class parameter names
+ * -c complete - leave nothing out
+ */
+ def typeCommandInternal(expr: String, verbose: Boolean): Unit =
+ symbolOfLine(expr) andAlso (echoTypeSignature(_, verbose))
+
+ def printAfterTyper(msg: => String) =
+ reporter printUntruncatedMessage exitingTyper(msg)
+
+ private def replInfo(sym: Symbol) =
+ if (sym.isAccessor) dropNullaryMethod(sym.info) else sym.info
+
+ def echoTypeStructure(sym: Symbol) =
+ printAfterTyper("" + deconstruct.show(replInfo(sym)))
+
+ def echoTypeSignature(sym: Symbol, verbose: Boolean) = {
+ if (verbose) echo("// Type signature")
+ printAfterTyper("" + replInfo(sym))
+
+ if (verbose) {
+ echo("\n// Internal Type structure")
+ echoTypeStructure(sym)
+ }
+ }
+ }
+}
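As an aside on the `staticTypeTag` helper introduced in the package object above: it manufactures a runtime `ru.TypeTag` from a `ClassTag` by resolving the class name through a runtime mirror. A minimal standalone sketch of the same pattern, assuming only a statically known class (the object name and the printed example are illustrative):

{{{
import scala.reflect.{ classTag, ClassTag }
import scala.reflect.runtime.{ universe => ru }
import scala.reflect.api.{ Mirror, TypeCreator, Universe => ApiUniverse }

object StaticTagSketch {
  // Build a TypeTag[T] for a statically known class, mirroring the helper above.
  def staticTypeTag[T: ClassTag]: ru.TypeTag[T] = ru.TypeTag[T](
    ru.runtimeMirror(getClass.getClassLoader),
    new TypeCreator {
      def apply[U <: ApiUniverse with Singleton](m: Mirror[U]): U # Type =
        m.staticClass(classTag[T].runtimeClass.getName).toTypeConstructor.asInstanceOf[U # Type]
    })

  def main(args: Array[String]): Unit =
    println(staticTypeTag[java.io.File].tpe)   // prints java.io.File
}
}}}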
diff --git a/src/compiler/scala/tools/nsc/interpreter/session/FileBackedHistory.scala b/src/repl/scala/tools/nsc/interpreter/session/FileBackedHistory.scala
index dddfb1b8f6..dddfb1b8f6 100644
--- a/src/compiler/scala/tools/nsc/interpreter/session/FileBackedHistory.scala
+++ b/src/repl/scala/tools/nsc/interpreter/session/FileBackedHistory.scala
diff --git a/src/compiler/scala/tools/nsc/interpreter/session/History.scala b/src/repl/scala/tools/nsc/interpreter/session/History.scala
index daa05b86db..794d41adc7 100644
--- a/src/compiler/scala/tools/nsc/interpreter/session/History.scala
+++ b/src/repl/scala/tools/nsc/interpreter/session/History.scala
@@ -14,15 +14,9 @@ trait History {
def asStrings: List[String]
def index: Int
def size: Int
- def grep(s: String): List[String]
}
object NoHistory extends History {
def asStrings = Nil
- def grep(s: String) = Nil
def index = 0
def size = 0
}
-
-object History {
- def empty: History = NoHistory
-}
diff --git a/src/compiler/scala/tools/nsc/interpreter/session/JLineHistory.scala b/src/repl/scala/tools/nsc/interpreter/session/JLineHistory.scala
index 18e0ee7c85..18e0ee7c85 100644
--- a/src/compiler/scala/tools/nsc/interpreter/session/JLineHistory.scala
+++ b/src/repl/scala/tools/nsc/interpreter/session/JLineHistory.scala
diff --git a/src/compiler/scala/tools/nsc/interpreter/session/SimpleHistory.scala b/src/repl/scala/tools/nsc/interpreter/session/SimpleHistory.scala
index 9f4e2b9df3..89998e438a 100644
--- a/src/compiler/scala/tools/nsc/interpreter/session/SimpleHistory.scala
+++ b/src/repl/scala/tools/nsc/interpreter/session/SimpleHistory.scala
@@ -54,9 +54,5 @@ class SimpleHistory extends JLineHistory {
def moveTo(idx: Int) = (idx > 0) && (idx <= lastIndex) && setTo(idx)
def moveToEnd(): Unit = setTo(size)
- // scala legacy interface
- def asList: List[JEntry] = toEntries().toList
- def asJavaList = entries()
- def asStrings = buf.toList
- def grep(s: String) = buf.toList filter (_ contains s)
+ def asStrings = buf.toList
}
diff --git a/src/compiler/scala/tools/nsc/interpreter/session/package.scala b/src/repl/scala/tools/nsc/interpreter/session/package.scala
index c62cf21151..c62cf21151 100644
--- a/src/compiler/scala/tools/nsc/interpreter/session/package.scala
+++ b/src/repl/scala/tools/nsc/interpreter/session/package.scala
diff --git a/src/scalacheck/org/scalacheck/Arbitrary.scala b/src/scalacheck/org/scalacheck/Arbitrary.scala
index 8c43cdaafe..db4163c8af 100644
--- a/src/scalacheck/org/scalacheck/Arbitrary.scala
+++ b/src/scalacheck/org/scalacheck/Arbitrary.scala
@@ -1,6 +1,6 @@
/*-------------------------------------------------------------------------*\
** ScalaCheck **
-** Copyright (c) 2007-2011 Rickard Nilsson. All rights reserved. **
+** Copyright (c) 2007-2013 Rickard Nilsson. All rights reserved. **
** http://www.scalacheck.org **
** **
** This software is released under the terms of the Revised BSD License. **
@@ -10,49 +10,44 @@
package org.scalacheck
import util.{FreqMap,Buildable}
-import scala.reflect.ClassTag
sealed abstract class Arbitrary[T] {
val arbitrary: Gen[T]
}
-/** Defines implicit <code>Arbitrary</code> instances for common types.
+/** Defines implicit [[org.scalacheck.Arbitrary]] instances for common types.
* <p>
* ScalaCheck
- * uses implicit <code>Arbitrary</code> instances when creating properties
- * out of functions with the <code>Prop.property</code> method, and when
- * the <code>Arbitrary.arbitrary</code> method is used. For example, the
+ * uses implicit [[org.scalacheck.Arbitrary]] instances when creating properties
+ * out of functions with the `Prop.property` method, and when
+ * the `Arbitrary.arbitrary` method is used. For example, the
* following code requires that there exists an implicit
- * <code>Arbitrary[MyClass]</code> instance:
+ * `Arbitrary[MyClass]` instance:
* </p>
*
- * <p>
- * <code>
- * val myProp = Prop.forAll { myClass: MyClass =&gt;<br />
- * ...<br />
- * }<br />
+ * {{{
+ * val myProp = Prop.forAll { myClass: MyClass =>
+ * ...
+ * }
*
* val myGen = Arbitrary.arbitrary[MyClass]
- * </code>
- * </p>
+ * }}}
*
* <p>
* The required implicit definition could look like this:
* </p>
*
- * <p>
- * <code>
+ * {{{
* implicit val arbMyClass: Arbitrary[MyClass] = Arbitrary(...)
- * </code>
- * </p>
+ * }}}
*
* <p>
- * The factory method <code>Arbitrary(...)</code> takes a generator of type
- * <code>Gen[T]</code> and returns an instance of <code>Arbitrary[T]</code>.
+ * The factory method `Arbitrary(...)` takes a generator of type
+ * `Gen[T]` and returns an instance of `Arbitrary[T]`.
* </p>
*
* <p>
- * The <code>Arbitrary</code> module defines implicit <code>Arbitrary</code>
+ * The `Arbitrary` module defines implicit [[org.scalacheck.Arbitrary]]
* instances for common types, for convenient use in your properties and
* generators.
* </p>
@@ -93,7 +88,7 @@ object Arbitrary {
/** Arbitrary instance of Long */
implicit lazy val arbLong: Arbitrary[Long] = Arbitrary(
- Gen.chooseNum(Long.MinValue / 2, Long.MaxValue / 2)
+ Gen.chooseNum(Long.MinValue, Long.MaxValue)
)
/** Arbitrary instance of Float */
@@ -182,7 +177,13 @@ object Arbitrary {
mc <- mcGen
limit <- value(if(mc == UNLIMITED) 0 else math.max(x.abs.toString.length - mc.getPrecision, 0))
scale <- Gen.chooseNum(Int.MinValue + limit , Int.MaxValue)
- } yield BigDecimal(x, scale, mc)
+ } yield {
+ try {
+ BigDecimal(x, scale, mc)
+ } catch {
+ case ae: java.lang.ArithmeticException => BigDecimal(x, scale, UNLIMITED) // Handle the case where scale/precision conflict
+ }
+ }
Arbitrary(bdGen)
}
@@ -213,23 +214,43 @@ object Arbitrary {
))
}
- /** Arbitrary instance of test params */
+ /** Arbitrary instance of test params
+ * @deprecated (in 1.10.0) Use `arbTestParameters` instead.
+ */
+ @deprecated("Use 'arbTestParameters' instead", "1.10.0")
implicit lazy val arbTestParams: Arbitrary[Test.Params] =
Arbitrary(for {
minSuccTests <- choose(10,200)
- maxDiscardRatio <- choose(0.2f,10f)
- minSize <- choose(0,500)
+ maxDiscTests <- choose(100,500)
+ mnSize <- choose(0,500)
sizeDiff <- choose(0,500)
- maxSize <- choose(minSize, minSize + sizeDiff)
+ mxSize <- choose(mnSize, mnSize + sizeDiff)
ws <- choose(1,4)
} yield Test.Params(
minSuccessfulTests = minSuccTests,
- maxDiscardRatio = maxDiscardRatio,
- minSize = minSize,
- maxSize = maxSize,
+ maxDiscardedTests = maxDiscTests,
+ minSize = mnSize,
+ maxSize = mxSize,
workers = ws
))
+ /** Arbitrary instance of test parameters */
+ implicit lazy val arbTestParameters: Arbitrary[Test.Parameters] =
+ Arbitrary(for {
+ _minSuccTests <- choose(10,200)
+ _maxDiscardRatio <- choose(0.2f,10f)
+ _minSize <- choose(0,500)
+ sizeDiff <- choose(0,500)
+ _maxSize <- choose(_minSize, _minSize + sizeDiff)
+ _workers <- choose(1,4)
+ } yield new Test.Parameters.Default {
+ override val minSuccessfulTests = _minSuccTests
+ override val maxDiscardRatio = _maxDiscardRatio
+ override val minSize = _minSize
+ override val maxSize = _maxSize
+ override val workers = _workers
+ })
+
/** Arbitrary instance of gen params */
implicit lazy val arbGenParams: Arbitrary[Gen.Params] =
Arbitrary(for {
@@ -278,7 +299,7 @@ object Arbitrary {
): Arbitrary[C[T]] = Arbitrary(containerOf[C,T](arbitrary[T]))
/** Arbitrary instance of any array. */
- implicit def arbArray[T](implicit a: Arbitrary[T], c: ClassTag[T]
+ implicit def arbArray[T](implicit a: Arbitrary[T], c: ClassManifest[T]
): Arbitrary[Array[T]] = Arbitrary(containerOf[Array,T](arbitrary[T]))
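The reworked Arbitrary scaladoc above describes supplying an implicit instance for your own type; a small hedged sketch of that pattern follows (MyClass and its generator are invented for illustration):

{{{
import org.scalacheck.{ Arbitrary, Gen, Prop }

case class MyClass(n: Int)

object ArbitrarySketch {
  // The implicit instance Prop.forAll picks up for MyClass arguments.
  implicit val arbMyClass: Arbitrary[MyClass] =
    Arbitrary(Gen.choose(0, 100) map (MyClass(_)))

  val myProp = Prop.forAll { c: MyClass => c.n >= 0 && c.n <= 100 }
}
}}}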
diff --git a/src/scalacheck/org/scalacheck/Arg.scala b/src/scalacheck/org/scalacheck/Arg.scala
index 8959211f09..4961c78a26 100644
--- a/src/scalacheck/org/scalacheck/Arg.scala
+++ b/src/scalacheck/org/scalacheck/Arg.scala
@@ -1,6 +1,6 @@
/*-------------------------------------------------------------------------*\
** ScalaCheck **
-** Copyright (c) 2007-2011 Rickard Nilsson. All rights reserved. **
+** Copyright (c) 2007-2013 Rickard Nilsson. All rights reserved. **
** http://www.scalacheck.org **
** **
** This software is released under the terms of the Revised BSD License. **
diff --git a/src/scalacheck/org/scalacheck/Commands.scala b/src/scalacheck/org/scalacheck/Commands.scala
index 88ef8ae2a1..604b68cb36 100644
--- a/src/scalacheck/org/scalacheck/Commands.scala
+++ b/src/scalacheck/org/scalacheck/Commands.scala
@@ -1,6 +1,6 @@
/*-------------------------------------------------------------------------*\
** ScalaCheck **
-** Copyright (c) 2007-2011 Rickard Nilsson. All rights reserved. **
+** Copyright (c) 2007-2013 Rickard Nilsson. All rights reserved. **
** http://www.scalacheck.org **
** **
** This software is released under the terms of the Revised BSD License. **
@@ -53,7 +53,7 @@ trait Commands extends Prop {
* takes the current abstract state as parameter and returns a boolean
* that says if the precondition is fulfilled or not. You can add several
* conditions to the precondition list */
- val preConditions = new scala.collection.mutable.ListBuffer[State => Boolean]
+ val preConditions = new collection.mutable.ListBuffer[State => Boolean]
/** Returns all postconditions merged into a single function */
def postCondition: (State,State,Any) => Prop = (s0,s1,r) => all(postConditions.map(_.apply(s0,s1,r)): _*)
@@ -65,7 +65,7 @@ trait Commands extends Prop {
* method. The postcondition function should return a Boolean (or
* a Prop instance) that says if the condition holds or not. You can add several
* conditions to the postConditions list. */
- val postConditions = new scala.collection.mutable.ListBuffer[(State,State,Any) => Prop]
+ val postConditions = new collection.mutable.ListBuffer[(State,State,Any) => Prop]
}
/** A command that binds its result for later use */
diff --git a/src/scalacheck/org/scalacheck/ConsoleReporter.scala b/src/scalacheck/org/scalacheck/ConsoleReporter.scala
index 93f1dc222e..d565322d99 100644
--- a/src/scalacheck/org/scalacheck/ConsoleReporter.scala
+++ b/src/scalacheck/org/scalacheck/ConsoleReporter.scala
@@ -1,6 +1,6 @@
/*-------------------------------------------------------------------------*\
** ScalaCheck **
-** Copyright (c) 2007-2011 Rickard Nilsson. All rights reserved. **
+** Copyright (c) 2007-2013 Rickard Nilsson. All rights reserved. **
** http://www.scalacheck.org **
** **
** This software is released under the terms of the Revised BSD License. **
diff --git a/src/scalacheck/org/scalacheck/Gen.scala b/src/scalacheck/org/scalacheck/Gen.scala
index 64bb61c2d3..aec67159f1 100644
--- a/src/scalacheck/org/scalacheck/Gen.scala
+++ b/src/scalacheck/org/scalacheck/Gen.scala
@@ -1,6 +1,6 @@
/*-------------------------------------------------------------------------*\
** ScalaCheck **
-** Copyright (c) 2007-2011 Rickard Nilsson. All rights reserved. **
+** Copyright (c) 2007-2013 Rickard Nilsson. All rights reserved. **
** http://www.scalacheck.org **
** **
** This software is released under the terms of the Revised BSD License. **
@@ -23,7 +23,7 @@ object Choose {
implicit val chooseLong: Choose[Long] = new Choose[Long] {
def choose(low: Long, high: Long) =
- if(low > high || (high-low < 0)) fail
+ if (low > high) fail
else parameterized(prms => value(prms.choose(low,high)))
}
@@ -204,9 +204,17 @@ object Gen {
/** @throws IllegalArgumentException if l is greater than h, or if
* the range between l and h doesn't fit in a Long. */
def choose(l: Long, h: Long): Long = {
- val d = h-l
- if (d < 0) throw new IllegalArgumentException("Invalid range")
- else l + math.abs(rng.nextLong % (d+1))
+ if (h < l) throw new IllegalArgumentException("Invalid range")
+ val d = h - l + 1
+ if (d <= 0) {
+ var n = rng.nextLong
+ while (n < l || n > h) {
+ n = rng.nextLong
+ }
+ n
+ } else {
+ l + math.abs(rng.nextLong % d)
+ }
}
/** @throws IllegalArgumentException if l is greater than h, or if
@@ -225,8 +233,11 @@ object Gen {
def apply(p: Gen.Params) = g(p)
}
- /* Convenience method for using the <code>frequency</code> method like this:
- * <code>frequency((1, "foo"), (3, "bar"))</code> */
+ /* Convenience method for using the `frequency` method like this:
+ * {{{
+ * frequency((1, "foo"), (3, "bar"))
+ * }}}
+ */
implicit def freqTuple[T](t: (Int, T)): (Int, Gen[T]) = (t._1, value(t._2))
@@ -308,9 +319,9 @@ object Gen {
//// List Generators ////
/** Generates a container of any type for which there exists an implicit
- * <code>Buildable</code> instance. The elements in the container will
+ * [[org.scalacheck.util.Buildable]] instance. The elements in the container will
* be generated by the given generator. The size of the generated container
- * is given by <code>n</code>. */
+ * is given by `n`. */
def containerOfN[C[_],T](n: Int, g: Gen[T])(implicit b: Buildable[T,C]
): Gen[C[T]] = sequence[C,T](new Iterable[Gen[T]] {
def iterator = new Iterator[Gen[T]] {
@@ -321,14 +332,14 @@ object Gen {
})
/** Generates a container of any type for which there exists an implicit
- * <code>Buildable</code> instance. The elements in the container will
- * be generated by the given generator. The size of the container is
+ * [[org.scalacheck.util.Buildable]] instance. The elements in the container
+ * will be generated by the given generator. The size of the container is
* bounded by the size parameter used when generating values. */
def containerOf[C[_],T](g: Gen[T])(implicit b: Buildable[T,C]): Gen[C[T]] =
sized(size => for(n <- choose(0,size); c <- containerOfN[C,T](n,g)) yield c)
/** Generates a non-empty container of any type for which there exists an
- * implicit <code>Buildable</code> instance. The elements in the container
+ * implicit [[org.scalacheck.util.Buildable]] instance. The elements in the container
* will be generated by the given generator. The size of the container is
* bounded by the size parameter used when generating values. */
def containerOf1[C[_],T](g: Gen[T])(implicit b: Buildable[T,C]): Gen[C[T]] =
@@ -336,16 +347,16 @@ object Gen {
/** Generates a list of random length. The maximum length depends on the
* size parameter. This method is equal to calling
- * <code>containerOf[List,T](g)</code>. */
+ * `containerOf[List,T](g)`. */
def listOf[T](g: => Gen[T]) = containerOf[List,T](g)
/** Generates a non-empty list of random length. The maximum length depends
* on the size parameter. This method is equal to calling
- * <code>containerOf1[List,T](g)</code>. */
+ * `containerOf1[List,T](g)`. */
def listOf1[T](g: => Gen[T]) = containerOf1[List,T](g)
/** Generates a list of the given length. This method is equal to calling
- * <code>containerOfN[List,T](n,g)</code>. */
+ * `containerOfN[List,T](n,g)`. */
def listOfN[T](n: Int, g: Gen[T]) = containerOfN[List,T](n,g)
/** A generator that picks a random number of elements from a list */
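One behavioural change above is in `choose(Long, Long)`: instead of failing on ranges whose width overflows a Long, it now falls back to rejection sampling. A standalone sketch of that logic (names are my own; the arithmetic mirrors the patch):

{{{
object ChooseSketch {
  private val rng = new java.util.Random

  def chooseLong(l: Long, h: Long): Long = {
    if (h < l) throw new IllegalArgumentException("Invalid range")
    val d = h - l + 1
    if (d <= 0) {                         // width overflowed: sample until a draw lands in [l, h]
      var n = rng.nextLong
      while (n < l || n > h) n = rng.nextLong
      n
    } else l + math.abs(rng.nextLong % d) // width fits: reduce modulo the width
  }
}
}}}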
diff --git a/src/scalacheck/org/scalacheck/Pretty.scala b/src/scalacheck/org/scalacheck/Pretty.scala
index eeb5936086..3e8f6de5f6 100644
--- a/src/scalacheck/org/scalacheck/Pretty.scala
+++ b/src/scalacheck/org/scalacheck/Pretty.scala
@@ -1,6 +1,6 @@
/*-------------------------------------------------------------------------*\
** ScalaCheck **
-** Copyright (c) 2007-2011 Rickard Nilsson. All rights reserved. **
+** Copyright (c) 2007-2013 Rickard Nilsson. All rights reserved. **
** http://www.scalacheck.org **
** **
** This software is released under the terms of the Revised BSD License. **
@@ -96,7 +96,7 @@ object Pretty {
}
implicit def prettyTestRes(res: Test.Result) = Pretty { prms =>
- def labels(ls: scala.collection.immutable.Set[String]) =
+ def labels(ls: collection.immutable.Set[String]) =
if(ls.isEmpty) ""
else "> Labels of failing property: " / ls.mkString("\n")
val s = res.status match {
diff --git a/src/scalacheck/org/scalacheck/Prop.scala b/src/scalacheck/org/scalacheck/Prop.scala
index dfd85a832a..38e00f260f 100644
--- a/src/scalacheck/org/scalacheck/Prop.scala
+++ b/src/scalacheck/org/scalacheck/Prop.scala
@@ -1,6 +1,6 @@
/*-------------------------------------------------------------------------*\
** ScalaCheck **
-** Copyright (c) 2007-2011 Rickard Nilsson. All rights reserved. **
+** Copyright (c) 2007-2013 Rickard Nilsson. All rights reserved. **
** http://www.scalacheck.org **
** **
** This software is released under the terms of the Revised BSD License. **
@@ -31,16 +31,27 @@ trait Prop {
/** Convenience method that checks this property with the given parameters
* and reports the result on the console. If you need to get the results
- * from the test use the <code>check</code> methods in <code>Test</code>
- * instead. */
+ * from the test use the `check` methods in [[org.scalacheck.Test]]
+ * instead.
+ * @deprecated (in 1.10.0) Use `check(Test.Parameters)` instead.
+ */
+ @deprecated("Use 'check(Test.Parameters)' instead", "1.10.0")
def check(prms: Test.Params): Unit = Test.check(
prms copy (testCallback = ConsoleReporter(1) chain prms.testCallback), this
)
+ /** Convenience method that checks this property with the given parameters
+ * and reports the result on the console. If you need to get the results
+ * from the test use the `check` methods in [[org.scalacheck.Test]]
+ * instead. */
+ def check(prms: Test.Parameters): Unit = Test.check(
+ prms copy (_testCallback = ConsoleReporter(1) chain prms.testCallback), this
+ )
+
/** Convenience method that checks this property and reports the
* result on the console. If you need to get the results from the test use
- * the <code>check</code> methods in <code>Test</code> instead. */
- def check: Unit = check(Test.Params())
+ * the `check` methods in [[org.scalacheck.Test]] instead. */
+ def check: Unit = check(Test.Parameters.default)
/** The logic for main, separated out to make it easier to
* avoid System.exit calls. Returns exit code.
@@ -60,7 +71,7 @@ trait Prop {
/** Whether main should call System.exit with an exit code.
* Defaults to true; override to change.
*/
- def mainCallsExit = false
+ def mainCallsExit = true
/** Convenience method that makes it possible to use this property
* as an application that checks itself on execution */
@@ -274,20 +285,51 @@ object Prop {
def apply(r: Result): Prop = Prop(prms => r)
+ def apply(b: Boolean): Prop = if(b) proved else falsified
- // Implicit defs
+ // Implicits
+
+ /** A collection of property operators on [[Any]] values.
+ * Import [[Prop.AnyOperators]] to make the operators available. */
class ExtendedAny[T <% Pretty](x: => T) {
+ /** See [[Prop.imply]] */
def imply(f: PartialFunction[T,Prop]) = Prop.imply(x,f)
+ /** See [[Prop.iff]] */
def iff(f: PartialFunction[T,Prop]) = Prop.iff(x,f)
- def throws[U <: Throwable](c: Class[U]) = Prop.throws(x, c)
+ @deprecated("Use 'Prop.throws' instead", "1.10.1")
+ def throws[U <: Throwable](c: Class[U]): Prop = Prop.throws(c)(x)
+ /** See [[Prop.?=]] */
def ?=(y: T) = Prop.?=(x, y)
+ /** See [[Prop.=?]] */
def =?(y: T) = Prop.=?(x, y)
}
+ /** A collection of property operators on [[Boolean]] values.
+ * Import [[Prop.BooleanOperators]] to make the operators available. */
+ class ExtendedBoolean(b: => Boolean) {
+ /** See [[Prop.==>]] */
+ def ==>(p: => Prop) = Prop(b) ==> p
+ }
+
+ /** Implicit method that makes a number of property operators on values of
+ * type [[Any]] available in the current scope. See [[Prop.ExtendedAny]] for
+ * documentation on the operators. */
+ @deprecated("Use 'Prop.AnyOperators' instead", "1.10.1")
implicit def extendedAny[T <% Pretty](x: => T) = new ExtendedAny[T](x)
- implicit def propBoolean(b: Boolean): Prop = if(b) proved else falsified
+ /** Implicit method that makes a number of property operators on values of
+ * type [[Any]] available in the current scope. See [[Prop.ExtendedAny]] for
+ * documentation on the operators. */
+ implicit def AnyOperators[T <% Pretty](x: => T) = new ExtendedAny[T](x)
+
+ /** Implicit method that makes a number of property operators on boolean
+ * values available in the current scope. See [[Prop.ExtendedBoolean]] for
+ * documentation on the operators. */
+ implicit def BooleanOperators(b: => Boolean) = new ExtendedBoolean(b)
+
+ /** Implicit conversion of Boolean values to Prop values. */
+ implicit def propBoolean(b: Boolean): Prop = Prop(b)
// Private support functions
@@ -318,6 +360,9 @@ object Prop {
/** A property that denotes an exception */
lazy val exception: Prop = exception(null)
+ /** Create a property that compares two values. If the values aren't equal,
+ * the property will fail and report that the first value doesn't match the
+ * expected (second) value. */
def ?=[T](x: T, y: T)(implicit pp: T => Pretty): Prop =
if(x == y) proved else falsified :| {
val exp = Pretty.pretty[T](y, Pretty.Params(0))
@@ -325,6 +370,9 @@ object Prop {
"Expected "+exp+" but got "+act
}
+ /** Create a property that compares two values. If the values aren't equal,
+ * the property will fail and report that the second value doesn't match the
+ * expected (first) value. */
def =?[T](x: T, y: T)(implicit pp: T => Pretty): Prop = ?=(y, x)
/** A property that depends on the generator size */
@@ -340,7 +388,7 @@ object Prop {
secure(if(f.isDefinedAt(x)) f(x) else undecided)
/** Property holds only if the given partial function is defined at
- * <code>x</code>, and returns a property that holds */
+ * `x`, and returns a property that holds */
def iff[T](x: T, f: PartialFunction[T,Prop]): Prop =
secure(if(f.isDefinedAt(x)) f(x) else falsified)
@@ -365,9 +413,16 @@ object Prop {
def noneFailing[T](gs: Seq[Gen[T]]) = all(gs.map(_ !== fail):_*)
/** A property that holds if the given statement throws an exception
+ * of the specified type
+ * @deprecated (in 1.10.1) Use `throws(...): Boolean` instead.
+ */
+ @deprecated("Use 'throws(...): Boolean' instead", "1.10.1")
+ def throws[T <: Throwable](x: => Any, c: Class[T]): Prop = throws(c)(x)
+
+ /** Returns true if the given statement throws an exception
* of the specified type */
- def throws[T <: Throwable](x: => Any, c: Class[T]) =
- try { x; falsified } catch { case e if c.isInstance(e) => proved }
+ def throws[T <: Throwable](c: Class[T])(x: => Any): Boolean =
+ try { x; false } catch { case e if c.isInstance(e) => true }
/** Collect data for presentation in test report */
def collect[T, P <% Prop](f: T => P): T => Prop = t => Prop { prms =>
@@ -390,7 +445,7 @@ object Prop {
/** Wraps and protects a property */
def secure[P <% Prop](p: => P): Prop =
- try { p: Prop } catch { case e => exception(e) }
+ try { p: Prop } catch { case e: Throwable => exception(e) }
/** Existential quantifier for an explicit generator. */
def exists[A,P](f: A => P)(implicit
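Since `throws` now returns a Boolean rather than a Prop, call sites lift the result back into a property via `Prop(b)` or the implicit `propBoolean`. A hedged usage sketch:

{{{
import org.scalacheck.Prop
import org.scalacheck.Prop.propBoolean

object ThrowsSketch {
  // true, because the by-name block throws an ArithmeticException
  val divByZero: Boolean = Prop.throws(classOf[ArithmeticException]) { 1 / 0 }

  // lifted back into a Prop by propBoolean (equivalently: Prop(divByZero))
  val prop: Prop = divByZero
}
}}}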
diff --git a/src/scalacheck/org/scalacheck/Properties.scala b/src/scalacheck/org/scalacheck/Properties.scala
index 26059231d6..d4836d7420 100644
--- a/src/scalacheck/org/scalacheck/Properties.scala
+++ b/src/scalacheck/org/scalacheck/Properties.scala
@@ -1,6 +1,6 @@
/*-------------------------------------------------------------------------*\
** ScalaCheck **
-** Copyright (c) 2007-2011 Rickard Nilsson. All rights reserved. **
+** Copyright (c) 2007-2013 Rickard Nilsson. All rights reserved. **
** http://www.scalacheck.org **
** **
** This software is released under the terms of the Revised BSD License. **
@@ -14,15 +14,15 @@ package org.scalacheck
* holds if and only if all of the contained properties hold.
* <p>Properties are added in the following way:</p>
*
- * <p>
- * <code>
+ * {{{
* object MyProps extends Properties("MyProps") {
- * property("myProp1") = forAll { (n:Int, m:Int) =&gt;
+ * property("myProp1") = forAll { (n:Int, m:Int) =>
* n+m == m+n
* }
*
* property("myProp2") = ((0/1) throws classOf[ArithmeticException])
* }
+ * }}}
*/
class Properties(val name: String) extends Prop {
@@ -42,16 +42,27 @@ class Properties(val name: String) extends Prop {
/** Convenience method that checks the properties with the given parameters
* and reports the result on the console. If you need to get the results
- * from the test use the <code>check</code> methods in <code>Test</code>
+ * from the test use the `check` methods in [[org.scalacheck.Test]]
* instead. */
+ override def check(prms: Test.Parameters): Unit = Test.checkProperties(
+ prms copy (_testCallback = ConsoleReporter(1) chain prms.testCallback), this
+ )
+
+ /** Convenience method that checks the properties with the given parameters
+ * and reports the result on the console. If you need to get the results
+ * from the test use the `check` methods in [[org.scalacheck.Test]]
+ * instead.
+ * @deprecated (in 1.10.0) Use `check(Test.Parameters)` instead.
+ */
+ @deprecated("Use 'check(Test.Parameters)' instead", "1.10.0")
override def check(prms: Test.Params): Unit = Test.checkProperties(
prms copy (testCallback = ConsoleReporter(1) chain prms.testCallback), this
)
/** Convenience method that checks the properties and reports the
* result on the console. If you need to get the results from the test use
- * the <code>check</code> methods in <code>Test</code> instead. */
- override def check: Unit = check(Test.Params())
+ * the `check` methods in [[org.scalacheck.Test]] instead. */
+ override def check: Unit = check(Test.Parameters.default)
/** The logic for main, separated out to make it easier to
* avoid System.exit calls. Returns exit code.
@@ -73,7 +84,10 @@ class Properties(val name: String) extends Prop {
def include(ps: Properties) = for((n,p) <- ps.properties) property(n) = p
/** Used for specifying properties. Usage:
- * <code>property("myProp") = ...</code> */
+ * {{{
+ * property("myProp") = ...
+ * }}}
+ */
class PropertySpecifier() {
def update(propName: String, p: Prop) = props += ((name+"."+propName, p))
}
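For completeness, a small sketch of the specification style documented above, including `include` for composing groups of properties (the spec names are invented):

{{{
import org.scalacheck.{ Prop, Properties }

object StringSpec extends Properties("String") {
  property("concat length") = Prop.forAll { (a: String, b: String) =>
    (a + b).length == a.length + b.length
  }
}

object AllSpecs extends Properties("All") {
  include(StringSpec)   // pulls StringSpec's properties in under this umbrella
}
}}}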
diff --git a/src/scalacheck/org/scalacheck/ScalaCheckFramework.scala b/src/scalacheck/org/scalacheck/ScalaCheckFramework.scala
new file mode 100644
index 0000000000..7764101844
--- /dev/null
+++ b/src/scalacheck/org/scalacheck/ScalaCheckFramework.scala
@@ -0,0 +1,92 @@
+/*-------------------------------------------------------------------------*\
+** ScalaCheck **
+** Copyright (c) 2007-2013 Rickard Nilsson. All rights reserved. **
+** http://www.scalacheck.org **
+** **
+** This software is released under the terms of the Revised BSD License. **
+** There is NO WARRANTY. See the file LICENSE for the full text. **
+\*------------------------------------------------------------------------ */
+
+// vim: set ts=2 sw=2 et:
+
+package org.scalacheck
+
+import org.scalatools.testing._
+
+class ScalaCheckFramework extends Framework {
+
+ private case object PropFingerprint extends TestFingerprint {
+ val superClassName = "org.scalacheck.Prop"
+ val isModule = false
+ }
+
+ private case object PropsFingerprint extends TestFingerprint {
+ val superClassName = "org.scalacheck.Properties"
+ val isModule = true
+ }
+
+ val name = "ScalaCheck"
+
+ val tests = Array[Fingerprint](PropsFingerprint, PropFingerprint)
+
+ def testRunner(loader: ClassLoader, loggers: Array[Logger]) = new Runner2 {
+
+ private def asEvent(nr: (String, Test.Result)) = nr match {
+ case (n: String, r: Test.Result) => new Event {
+ val testName = n
+ val description = n
+ val result = r.status match {
+ case Test.Passed => Result.Success
+ case _:Test.Proved => Result.Success
+ case _:Test.Failed => Result.Failure
+ case Test.Exhausted => Result.Skipped
+ case _:Test.PropException | _:Test.GenException => Result.Error
+ }
+ val error = r.status match {
+ case Test.PropException(_, e, _) => e
+ case _:Test.Failed => new Exception(Pretty.pretty(r,Pretty.Params(0)))
+ case _ => null
+ }
+ }
+ }
+
+ def run(testClassName: String, fingerprint: Fingerprint, handler: EventHandler, args: Array[String]) {
+
+ val testCallback = new Test.TestCallback {
+ override def onPropEval(n: String, w: Int, s: Int, d: Int) = {}
+
+ override def onTestResult(n: String, r: Test.Result) = {
+ for (l <- loggers) {
+ import Pretty._
+ l.info(
+ (if (r.passed) "+ " else "! ") + n + ": " + pretty(r, Params(0))
+ )
+ }
+ handler.handle(asEvent((n,r)))
+ }
+ }
+
+ import Test.cmdLineParser.{Success, NoSuccess}
+ val prms = Test.cmdLineParser.parseParams(args) match {
+ case Success(params, _) =>
+ params.copy(_testCallback = testCallback, _customClassLoader = Some(loader))
+ // TODO: Maybe handle this a bit better than throwing exception?
+ case e: NoSuccess => throw new Exception(e.toString)
+ }
+
+ fingerprint match {
+ case fp: SubclassFingerprint =>
+ if(fp.isModule) {
+ val obj = Class.forName(testClassName + "$", true, loader)
+ val ps = obj.getField("MODULE$").get(null).asInstanceOf[Properties]
+ Test.checkProperties(prms, ps)
+ } else {
+ val p = Class.forName(testClassName, true, loader).newInstance.asInstanceOf[Prop]
+ handler.handle(asEvent((testClassName, Test.check(prms, p))))
+ }
+ }
+ }
+
+ }
+
+}
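The new `ScalaCheckFramework` implements the `org.scalatools.testing` interface, so any build tool that speaks that interface can discover Prop and Properties classes as tests. Purely as an assumption about a typical sbt setup, registering it would look something like:

{{{
// build.sbt sketch (assumption: an sbt build using the standard test-interface)
testFrameworks += new TestFramework("org.scalacheck.ScalaCheckFramework")
}}}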
diff --git a/src/scalacheck/org/scalacheck/Shrink.scala b/src/scalacheck/org/scalacheck/Shrink.scala
index ae15bd9616..4895171a35 100644
--- a/src/scalacheck/org/scalacheck/Shrink.scala
+++ b/src/scalacheck/org/scalacheck/Shrink.scala
@@ -1,6 +1,6 @@
/*-------------------------------------------------------------------------*\
** ScalaCheck **
-** Copyright (c) 2007-2011 Rickard Nilsson. All rights reserved. **
+** Copyright (c) 2007-2013 Rickard Nilsson. All rights reserved. **
** http://www.scalacheck.org **
** **
** This software is released under the terms of the Revised BSD License. **
diff --git a/src/scalacheck/org/scalacheck/Test.scala b/src/scalacheck/org/scalacheck/Test.scala
index 4368184823..6e9b6b88fd 100644
--- a/src/scalacheck/org/scalacheck/Test.scala
+++ b/src/scalacheck/org/scalacheck/Test.scala
@@ -1,6 +1,6 @@
/*-------------------------------------------------------------------------*\
** ScalaCheck **
-** Copyright (c) 2007-2011 Rickard Nilsson. All rights reserved. **
+** Copyright (c) 2007-2013 Rickard Nilsson. All rights reserved. **
** http://www.scalacheck.org **
** **
** This software is released under the terms of the Revised BSD License. **
@@ -16,23 +16,120 @@ object Test {
import Prop.FM
import util.CmdLineParser
- /** Test parameters */
+ /** Test parameters used by the `Test.check` method.
+ */
+ trait Parameters {
+ /** The minimum number of tests that must succeed for ScalaCheck to
+ * consider a property passed. */
+ def minSuccessfulTests: Int
+
+ /** The starting size given as parameter to the generators. */
+ def minSize: Int
+
+ /** The maximum size given as parameter to the generators. */
+ def maxSize: Int
+
+ /** The random number generator used. */
+ def rng: java.util.Random
+
+ /** The number of tests run in parallel. */
+ def workers: Int
+
+ /** A callback that ScalaCheck calls each time a test is executed. */
+ def testCallback: TestCallback
+
+ /** The maximum ratio between discarded and passed tests allowed before
+ * ScalaCheck gives up on the property. At least
+ * `minSuccessfulTests` will always be run, though. */
+ def maxDiscardRatio: Float
+
+ /** A custom class loader that should be used during test execution. */
+ def customClassLoader: Option[ClassLoader]
+
+ // private since we can't guarantee binary compatibility for this one
+ private[scalacheck] def copy(
+ _minSuccessfulTests: Int = Parameters.this.minSuccessfulTests,
+ _minSize: Int = Parameters.this.minSize,
+ _maxSize: Int = Parameters.this.maxSize,
+ _rng: java.util.Random = Parameters.this.rng,
+ _workers: Int = Parameters.this.workers,
+ _testCallback: TestCallback = Parameters.this.testCallback,
+ _maxDiscardRatio: Float = Parameters.this.maxDiscardRatio,
+ _customClassLoader: Option[ClassLoader] = Parameters.this.customClassLoader
+ ): Parameters = new Parameters {
+ val minSuccessfulTests: Int = _minSuccessfulTests
+ val minSize: Int = _minSize
+ val maxSize: Int = _maxSize
+ val rng: java.util.Random = _rng
+ val workers: Int = _workers
+ val testCallback: TestCallback = _testCallback
+ val maxDiscardRatio: Float = _maxDiscardRatio
+ val customClassLoader: Option[ClassLoader] = _customClassLoader
+ }
+ }
+
+ /** Test parameters used by the `Test.check` method.
+ *
+ * To override default values, extend the
+ * [[org.scalacheck.Test.Parameters.Default]] trait:
+ *
+ * {{{
+ * val myParams = new Parameters.Default {
+ * override val minSuccessfulTests = 600
+ * override val maxDiscardRatio = 8
+ * }
+ * }}}
+ */
+ object Parameters {
+ /** Default test parameters trait. This can be overriden if you need to
+ * tweak the parameters. */
+ trait Default extends Parameters {
+ val minSuccessfulTests: Int = 100
+ val minSize: Int = 0
+ val maxSize: Int = Gen.Params().size
+ val rng: java.util.Random = Gen.Params().rng
+ val workers: Int = 1
+ val testCallback: TestCallback = new TestCallback {}
+ val maxDiscardRatio: Float = 5
+ val customClassLoader: Option[ClassLoader] = None
+ }
+
+ /** Default test parameters instance. */
+ val default: Parameters = new Default {}
+ }
+
+ /** Test parameters
+ * @deprecated (in 1.10.0) Use [[org.scalacheck.Test.Parameters]] instead.
+ */
+ @deprecated("Use [[org.scalacheck.Test.Parameters]] instead", "1.10.0")
case class Params(
minSuccessfulTests: Int = 100,
-
- /** @deprecated Use maxDiscardRatio instead. */
- @deprecated("Use maxDiscardRatio instead.", "1.10")
maxDiscardedTests: Int = -1,
-
minSize: Int = 0,
maxSize: Int = Gen.Params().size,
rng: java.util.Random = Gen.Params().rng,
workers: Int = 1,
- testCallback: TestCallback = new TestCallback {},
- maxDiscardRatio: Float = 5,
- customClassLoader: Option[ClassLoader] = None
+ testCallback: TestCallback = new TestCallback {}
)
+ @deprecated("Use [[org.scalacheck.Test.Parameters]] instead", "1.10.0")
+ private def paramsToParameters(params: Params) = new Parameters {
+ val minSuccessfulTests = params.minSuccessfulTests
+ val minSize = params.minSize
+ val maxSize = params.maxSize
+ val rng = params.rng
+ val workers = params.workers
+ val testCallback = params.testCallback
+
+ // maxDiscardedTests is deprecated, but if someone
+ // uses it let it override maxDiscardRatio
+ val maxDiscardRatio =
+ if(params.maxDiscardedTests < 0) Parameters.default.maxDiscardRatio
+ else (params.maxDiscardedTests: Float)/(params.minSuccessfulTests: Float)
+
+ val customClassLoader = Parameters.default.customClassLoader
+ }
+
/** Test statistics */
case class Result(status: Status, succeeded: Int, discarded: Int, freqMap: FM, time: Long = 0) {
def passed = status match {
@@ -92,7 +189,7 @@ object Test {
}
}
- private def assertParams(prms: Params) = {
+ private def assertParams(prms: Parameters) = {
import prms._
if(
minSuccessfulTests <= 0 ||
@@ -104,16 +201,24 @@ object Test {
}
private def secure[T](x: => T): Either[T,Throwable] =
- try { Left(x) } catch { case e => Right(e) }
+ try { Left(x) } catch { case e: Throwable => Right(e) }
private[scalacheck] lazy val cmdLineParser = new CmdLineParser {
object OptMinSuccess extends IntOpt {
- val default = Test.Params().minSuccessfulTests
+ val default = Parameters.default.minSuccessfulTests
val names = Set("minSuccessfulTests", "s")
val help = "Number of tests that must succeed in order to pass a property"
}
+ object OptMaxDiscarded extends IntOpt {
+ val default = -1
+ val names = Set("maxDiscardedTests", "d")
+ val help =
+ "Number of tests that can be discarded before ScalaCheck stops " +
+ "testing a property. NOTE: this option is deprecated, please use " +
+ "the option maxDiscardRatio (-r) instead."
+ }
object OptMaxDiscardRatio extends FloatOpt {
- val default = Test.Params().maxDiscardRatio
+ val default = Parameters.default.maxDiscardRatio
val names = Set("maxDiscardRatio", "r")
val help =
"The maximum ratio between discarded and succeeded tests " +
@@ -121,17 +226,17 @@ object Test {
"least minSuccessfulTests will always be tested, though."
}
object OptMinSize extends IntOpt {
- val default = Test.Params().minSize
+ val default = Parameters.default.minSize
val names = Set("minSize", "n")
val help = "Minimum data generation size"
}
object OptMaxSize extends IntOpt {
- val default = Test.Params().maxSize
+ val default = Parameters.default.maxSize
val names = Set("maxSize", "x")
val help = "Maximum data generation size"
}
object OptWorkers extends IntOpt {
- val default = Test.Params().workers
+ val default = Parameters.default.workers
val names = Set("workers", "w")
val help = "Number of threads to execute in parallel for testing"
}
@@ -142,54 +247,63 @@ object Test {
}
val opts = Set[Opt[_]](
- OptMinSuccess, OptMaxDiscardRatio, OptMinSize,
+ OptMinSuccess, OptMaxDiscarded, OptMaxDiscardRatio, OptMinSize,
OptMaxSize, OptWorkers, OptVerbosity
)
def parseParams(args: Array[String]) = parseArgs(args) {
- optMap => Test.Params(
- minSuccessfulTests = optMap(OptMinSuccess),
- maxDiscardRatio = optMap(OptMaxDiscardRatio),
- minSize = optMap(OptMinSize),
- maxSize = optMap(OptMaxSize),
- rng = Test.Params().rng,
- workers = optMap(OptWorkers),
- testCallback = ConsoleReporter(optMap(OptVerbosity))
+ optMap => Parameters.default.copy(
+ _minSuccessfulTests = optMap(OptMinSuccess),
+ _maxDiscardRatio =
+ if (optMap(OptMaxDiscarded) < 0) optMap(OptMaxDiscardRatio)
+ else optMap(OptMaxDiscarded).toFloat / optMap(OptMinSuccess),
+ _minSize = optMap(OptMinSize),
+ _maxSize = optMap(OptMaxSize),
+ _workers = optMap(OptWorkers),
+ _testCallback = ConsoleReporter(optMap(OptVerbosity))
)
}
}
/** Tests a property with the given testing parameters, and returns
- * the test results. */
+ * the test results.
+ * @deprecated (in 1.10.0) Use
+ * `check(Parameters, Prop)` instead.
+ */
+ @deprecated("Use 'check(Parameters, Prop)' instead", "1.10.0")
def check(params: Params, p: Prop): Result = {
+ check(paramsToParameters(params), p)
+ }
- // maxDiscardedTests is deprecated, but if someone
- // uses it let it override maxDiscardRatio
- val mdr =
- if(params.maxDiscardedTests < 0) params.maxDiscardRatio
- else (params.maxDiscardedTests: Float)/(params.minSuccessfulTests: Float)
- val prms = params.copy( maxDiscardRatio = mdr)
-
- import prms._
- import scala.actors.Futures.future
+ /** Tests a property with the given testing parameters, and returns
+ * the test results. */
+ def check(params: Parameters, p: Prop): Result = {
+ import params._
- assertParams(prms)
- if(workers > 1)
+ assertParams(params)
+ if(workers > 1) {
assert(!p.isInstanceOf[Commands], "Commands cannot be checked multi-threaded")
+ }
val iterations = math.ceil(minSuccessfulTests / (workers: Double))
val sizeStep = (maxSize-minSize) / (iterations*workers)
var stop = false
- def worker(workerIdx: Int) = future {
- params.customClassLoader.map(Thread.currentThread.setContextClassLoader(_))
+ def worker(workerIdx: Int) =
+ if (workers < 2) () => workerFun(workerIdx)
+ else actors.Futures.future {
+ params.customClassLoader.map(Thread.currentThread.setContextClassLoader(_))
+ workerFun(workerIdx)
+ }
+
+ def workerFun(workerIdx: Int) = {
var n = 0 // passed tests
var d = 0 // discarded tests
var res: Result = null
var fm = FreqMap.empty[immutable.Set[Any]]
while(!stop && res == null && n < iterations) {
val size = (minSize: Double) + (sizeStep * (workerIdx + (workers*(n+d))))
- val propPrms = Prop.Params(Gen.Params(size.round.toInt, prms.rng), fm)
+ val propPrms = Prop.Params(Gen.Params(size.round.toInt, params.rng), fm)
secure(p(propPrms)) match {
case Right(e) => res =
Result(GenException(e), n, d, FreqMap.empty[immutable.Set[Any]])
@@ -250,11 +364,20 @@ object Test {
stop = true
results foreach (_.apply())
val timedRes = r.copy(time = System.currentTimeMillis-start)
- prms.testCallback.onTestResult("", timedRes)
+ params.testCallback.onTestResult("", timedRes)
timedRes
}
+ /** Check a set of properties.
+ * @deprecated (in 1.10.0) Use
+ * `checkProperties(Parameters, Properties)` instead.
+ */
+ @deprecated("Use 'checkProperties(Parameters, Properties)' instead", "1.10.0")
def checkProperties(prms: Params, ps: Properties): Seq[(String,Result)] =
+ checkProperties(paramsToParameters(prms), ps)
+
+ /** Check a set of properties. */
+ def checkProperties(prms: Parameters, ps: Properties): Seq[(String,Result)] =
ps.properties.map { case (name,p) =>
val testCallback = new TestCallback {
override def onPropEval(n: String, t: Int, s: Int, d: Int) =
@@ -262,7 +385,7 @@ object Test {
override def onTestResult(n: String, r: Result) =
prms.testCallback.onTestResult(name,r)
}
- val res = check(prms copy (testCallback = testCallback), p)
+ val res = check(prms copy (_testCallback = testCallback), p)
(name,res)
}
diff --git a/src/scalacheck/org/scalacheck/util/Buildable.scala b/src/scalacheck/org/scalacheck/util/Buildable.scala
index 221b8a61c3..140c541a95 100644
--- a/src/scalacheck/org/scalacheck/util/Buildable.scala
+++ b/src/scalacheck/org/scalacheck/util/Buildable.scala
@@ -1,6 +1,6 @@
/*-------------------------------------------------------------------------*\
** ScalaCheck **
-** Copyright (c) 2007-2011 Rickard Nilsson. All rights reserved. **
+** Copyright (c) 2007-2013 Rickard Nilsson. All rights reserved. **
** http://www.scalacheck.org **
** **
** This software is released under the terms of the Revised BSD License. **
@@ -10,7 +10,6 @@
package org.scalacheck.util
import scala.collection._
-import scala.reflect.ClassTag
trait Buildable[T,C[_]] {
def builder: mutable.Builder[T,C[T]]
@@ -31,7 +30,7 @@ object Buildable {
def builder = (new mutable.ListBuffer[T]).mapResult(_.toStream)
}
- implicit def buildableArray[T](implicit cm: ClassTag[T]) =
+ implicit def buildableArray[T](implicit cm: ClassManifest[T]) =
new Buildable[T,Array] {
def builder = mutable.ArrayBuilder.make[T]
}
diff --git a/src/scalacheck/org/scalacheck/util/CmdLineParser.scala b/src/scalacheck/org/scalacheck/util/CmdLineParser.scala
index 4683c34a65..eb3a91fe59 100644
--- a/src/scalacheck/org/scalacheck/util/CmdLineParser.scala
+++ b/src/scalacheck/org/scalacheck/util/CmdLineParser.scala
@@ -1,6 +1,6 @@
/*-------------------------------------------------------------------------*\
** ScalaCheck **
-** Copyright (c) 2007-2011 Rickard Nilsson. All rights reserved. **
+** Copyright (c) 2007-2013 Rickard Nilsson. All rights reserved. **
** http://www.scalacheck.org **
** **
** This software is released under the terms of the Revised BSD License. **
@@ -30,7 +30,7 @@ trait CmdLineParser extends Parsers {
trait StrOpt extends Opt[String]
class OptMap {
- private val opts = new scala.collection.mutable.HashMap[Opt[_], Any]
+ private val opts = new collection.mutable.HashMap[Opt[_], Any]
def apply(flag: Flag): Boolean = opts.contains(flag)
def apply[T](opt: Opt[T]): T = opts.get(opt) match {
case None => opt.default
diff --git a/src/scalacheck/org/scalacheck/util/FreqMap.scala b/src/scalacheck/org/scalacheck/util/FreqMap.scala
index c7474d3b87..d0686aec72 100644
--- a/src/scalacheck/org/scalacheck/util/FreqMap.scala
+++ b/src/scalacheck/org/scalacheck/util/FreqMap.scala
@@ -1,6 +1,6 @@
/*-------------------------------------------------------------------------*\
** ScalaCheck **
-** Copyright (c) 2007-2011 Rickard Nilsson. All rights reserved. **
+** Copyright (c) 2007-2013 Rickard Nilsson. All rights reserved. **
** http://www.scalacheck.org **
** **
** This software is released under the terms of the Revised BSD License. **
diff --git a/src/scalacheck/org/scalacheck/util/StdRand.scala b/src/scalacheck/org/scalacheck/util/StdRand.scala
index 317b0ccd10..7c1dc8dcc4 100644
--- a/src/scalacheck/org/scalacheck/util/StdRand.scala
+++ b/src/scalacheck/org/scalacheck/util/StdRand.scala
@@ -1,6 +1,6 @@
/*-------------------------------------------------------------------------*\
** ScalaCheck **
-** Copyright (c) 2007-2011 Rickard Nilsson. All rights reserved. **
+** Copyright (c) 2007-2013 Rickard Nilsson. All rights reserved. **
** http://www.scalacheck.org **
** **
** This software is released under the terms of the Revised BSD License. **
diff --git a/src/compiler/scala/tools/ant/Scaladoc.scala b/src/scaladoc/scala/tools/ant/Scaladoc.scala
index 7fc811788e..fd6d637212 100644
--- a/src/compiler/scala/tools/ant/Scaladoc.scala
+++ b/src/scaladoc/scala/tools/ant/Scaladoc.scala
@@ -78,7 +78,7 @@ class Scaladoc extends ScalaMatchingTask {
val values = List("yes", "no", "on", "off")
def getBooleanValue(value: String, flagName: String): Boolean =
if (Flag.isPermissible(value))
- return ("yes".equals(value) || "on".equals(value))
+ ("yes".equals(value) || "on".equals(value))
else
buildError("Unknown " + flagName + " flag '" + value + "'")
}
@@ -563,7 +563,7 @@ class Scaladoc extends ScalaMatchingTask {
/** Transforms a file into a Scalac-readable string.
*
- * @param path A file to convert.
+ * @param file A file to convert.
* @return A string-representation of the file like `/x/k/a.scala`.
*/
private def asString(file: File): String =
diff --git a/src/compiler/scala/tools/nsc/ScalaDoc.scala b/src/scaladoc/scala/tools/nsc/ScalaDoc.scala
index ba434bc797..52a0c20a11 100644
--- a/src/compiler/scala/tools/nsc/ScalaDoc.scala
+++ b/src/scaladoc/scala/tools/nsc/ScalaDoc.scala
@@ -10,7 +10,6 @@ import java.io.File.pathSeparator
import scala.tools.nsc.doc.DocFactory
import scala.tools.nsc.reporters.ConsoleReporter
import scala.reflect.internal.util.FakePos
-import Properties.msilLibPath
/** The main class for scaladoc, a front-end for the Scala compiler
* that generates documentation from source files.
@@ -42,12 +41,8 @@ class ScalaDoc {
reporter.warning(null, "Phases are restricted when using Scaladoc")
else if (docSettings.help.value || !hasFiles)
reporter.echo(command.usageMsg)
- else try {
- if (docSettings.target.value == "msil")
- msilLibPath foreach (x => docSettings.assemrefs.value += (pathSeparator + x))
-
- new DocFactory(reporter, docSettings) document command.files
- }
+ else
+ try { new DocFactory(reporter, docSettings) document command.files }
catch {
case ex @ FatalError(msg) =>
if (docSettings.debug.value) ex.printStackTrace()
@@ -65,7 +60,7 @@ object ScalaDoc extends ScalaDoc {
class Command(arguments: List[String], settings: doc.Settings) extends CompilerCommand(arguments, settings) {
override def cmdName = "scaladoc"
override def usageMsg = (
- createUsageMsg("where possible scaladoc", false, x => x.isStandard && settings.isScaladocSpecific(x.name)) +
+ createUsageMsg("where possible scaladoc", shouldExplain = false, x => x.isStandard && settings.isScaladocSpecific(x.name)) +
"\n\nStandard scalac options also available:" +
createUsageMsg(x => x.isStandard && !settings.isScaladocSpecific(x.name))
)
diff --git a/src/compiler/scala/tools/nsc/doc/DocFactory.scala b/src/scaladoc/scala/tools/nsc/doc/DocFactory.scala
index a091b04993..4607684c0d 100644
--- a/src/compiler/scala/tools/nsc/doc/DocFactory.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/DocFactory.scala
@@ -8,15 +8,13 @@ package doc
import scala.util.control.ControlThrowable
import reporters.Reporter
-import scala.reflect.internal.util.{ NoPosition, BatchSourceFile}
-import io.{ File, Directory }
-import DocParser.Parsed
+import scala.reflect.internal.util.BatchSourceFile
/** A documentation processor controls the process of generating Scala
* documentation, which is as follows.
*
* * A simplified compiler instance (with only the front-end phases enabled)
- * * is created, and additional ''sourceless'' comments are registered.
+ * * is created, and additional `sourceless` comments are registered.
* * Documentable files are compiled, thereby filling the compiler's symbol table.
* * A documentation model is extracted from the post-compilation symbol table.
* * A generator is used to transform the model into the correct final format (HTML).
@@ -33,15 +31,7 @@ import DocParser.Parsed
* @author Gilles Dubochet */
class DocFactory(val reporter: Reporter, val settings: doc.Settings) { processor =>
/** The unique compiler instance used by this processor and constructed from its `settings`. */
- object compiler extends Global(settings, reporter) with interactive.RangePositions {
- override protected def computeInternalPhases() {
- phasesSet += syntaxAnalyzer
- phasesSet += analyzer.namerFactory
- phasesSet += analyzer.packageObjects
- phasesSet += analyzer.typerFactory
- }
- override def forScaladoc = true
- }
+ object compiler extends ScaladocGlobal(settings, reporter)
/** Creates a scaladoc site for all symbols defined in this call's `source`,
* as well as those defined in `sources` of previous calls to the same processor.
@@ -104,7 +94,7 @@ class DocFactory(val reporter: Reporter, val settings: doc.Settings) { processor
val documentError: PartialFunction[Throwable, Unit] = {
case NoCompilerRunException =>
- reporter.info(null, "No documentation generated with unsucessful compiler run", false)
+ reporter.info(null, "No documentation generated with unsucessful compiler run", force = false)
case _: ClassNotFoundException =>
()
}
@@ -128,7 +118,7 @@ class DocFactory(val reporter: Reporter, val settings: doc.Settings) { processor
}
case _ => ()
}
- docletInstance.generate
+ docletInstance.generate()
}
try generate()
@@ -136,7 +126,7 @@ class DocFactory(val reporter: Reporter, val settings: doc.Settings) { processor
}
private[doc] def docdbg(msg: String) {
- if (settings.Ydocdebug.value)
+ if (settings.Ydocdebug)
println(msg)
}
}
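DocFactory remains the programmatic entry point; mirroring the simplified call in ScalaDoc above, a rough sketch of invoking it directly (the source path is a placeholder, and the `doc.Settings(error)` constructor is assumed):

{{{
import scala.tools.nsc.doc.{ DocFactory, Settings => DocSettings }
import scala.tools.nsc.reporters.ConsoleReporter

object ScaladocSketch {
  def main(args: Array[String]): Unit = {
    val docSettings = new DocSettings(msg => Console.err.println(msg))
    val reporter    = new ConsoleReporter(docSettings)
    // compile the listed sources with the doc phases enabled and generate the documentation
    new DocFactory(reporter, docSettings) document List("src/Demo.scala")
  }
}
}}}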
diff --git a/src/compiler/scala/tools/nsc/doc/DocParser.scala b/src/scaladoc/scala/tools/nsc/doc/DocParser.scala
index 27c995e1c3..6dc3e5a62b 100644
--- a/src/compiler/scala/tools/nsc/doc/DocParser.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/DocParser.scala
@@ -9,24 +9,19 @@ package doc
import reporters._
import scala.reflect.internal.util._
-import interactive.RangePositions
import DocParser.Parsed
/** A very minimal global customized for extracting `DocDefs`. It stops
* right after parsing so it can read `DocDefs` from source code which would
* otherwise cause the compiler to go haywire.
*/
-class DocParser(settings: nsc.Settings, reporter: Reporter)
- extends Global(settings, reporter)
- with RangePositions {
-
+class DocParser(settings: nsc.Settings, reporter: Reporter) extends Global(settings, reporter) {
def this(settings: Settings) = this(settings, new ConsoleReporter(settings))
def this() = this(new Settings(Console println _))
// the usual global initialization
locally { new Run() }
- override def forScaladoc = true
override protected def computeInternalPhases() {
phasesSet += syntaxAnalyzer
}
@@ -47,7 +42,7 @@ class DocParser(settings: nsc.Settings, reporter: Reporter)
*/
def docUnit(code: String) = {
val unit = new CompilationUnit(new BatchSourceFile("<console>", code))
- val scanner = new syntaxAnalyzer.UnitParser(unit)
+ val scanner = newUnitParser(unit)
scanner.compilationUnit()
}
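
DocParser stops right after parsing, so it can be exercised standalone. A usage sketch relying only on the no-argument constructor and docUnit shown in this hunk; the sample source and the println are illustrative.

    import scala.tools.nsc.doc.DocParser

    object DocParserSketch {
      def main(args: Array[String]): Unit = {
        val parser = new DocParser()             // ConsoleReporter + default Settings, per the aux constructors above
        val tree = parser.docUnit(
          """/** A tiny example type. */
            |class Example {
            |  /** Greets the caller. */
            |  def hello: String = "hi"
            |}""".stripMargin)
        println(tree)                            // parsed tree with DocDefs attached; no later phases run
      }
    }
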
diff --git a/src/compiler/scala/tools/nsc/doc/Index.scala b/src/scaladoc/scala/tools/nsc/doc/Index.scala
index f9b9eecdb3..f9b9eecdb3 100644
--- a/src/compiler/scala/tools/nsc/doc/Index.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/Index.scala
diff --git a/src/scaladoc/scala/tools/nsc/doc/ScaladocAnalyzer.scala b/src/scaladoc/scala/tools/nsc/doc/ScaladocAnalyzer.scala
new file mode 100644
index 0000000000..d407b93a4b
--- /dev/null
+++ b/src/scaladoc/scala/tools/nsc/doc/ScaladocAnalyzer.scala
@@ -0,0 +1,263 @@
+/* NSC -- new Scala compiler
+ * Copyright 2007-2013 LAMP/EPFL
+ * @author Paul Phillips
+ */
+
+package scala.tools.nsc
+package doc
+
+import scala.tools.nsc.ast.parser.{ SyntaxAnalyzer, BracePatch }
+import scala.reflect.internal.Chars._
+import symtab._
+import typechecker.Analyzer
+import scala.reflect.internal.util.{ BatchSourceFile, RangePosition }
+import scala.tools.nsc.doc.base.{ CommentFactoryBase, MemberLookupBase, LinkTo, LinkToExternal }
+import scala.language.postfixOps
+
+trait ScaladocAnalyzer extends Analyzer {
+ val global : Global // generally, a ScaladocGlobal
+ import global._
+
+ override def newTyper(context: Context): ScaladocTyper = new Typer(context) with ScaladocTyper
+
+ trait ScaladocTyper extends Typer {
+ private def unit = context.unit
+
+ override def canAdaptConstantTypeToLiteral = false
+
+ override protected def macroImplementationNotFoundMessage(name: Name): String = (
+ super.macroImplementationNotFoundMessage(name)
+ + "\nWhen generating scaladocs for multiple projects at once, consider using -Ymacro-no-expand to disable macro expansions altogether."
+ )
+
+ override def typedDocDef(docDef: DocDef, mode: Mode, pt: Type): Tree = {
+ val sym = docDef.symbol
+
+ if ((sym ne null) && (sym ne NoSymbol)) {
+ val comment = docDef.comment
+ docComments(sym) = comment
+ comment.defineVariables(sym)
+ val typer1 = newTyper(context.makeNewScope(docDef, context.owner))
+ for (useCase <- comment.useCases) {
+ typer1.silent(_ => typer1 defineUseCases useCase) match {
+ case SilentTypeError(err) =>
+ unit.warning(useCase.pos, err.errMsg)
+ case _ =>
+ }
+ for (useCaseSym <- useCase.defined) {
+ if (sym.name != useCaseSym.name)
+ unit.warning(useCase.pos, "@usecase " + useCaseSym.name.decode + " does not match commented symbol: " + sym.name.decode)
+ }
+ }
+ }
+
+ super.typedDocDef(docDef, mode, pt)
+ }
+
+ def defineUseCases(useCase: UseCase): List[Symbol] = {
+ def stringParser(str: String): syntaxAnalyzer.Parser = {
+ val file = new BatchSourceFile(context.unit.source.file, str) {
+ override def positionInUltimateSource(pos: Position) = {
+ pos.withSource(context.unit.source, useCase.pos.start)
+ }
+ }
+ newUnitParser(new CompilationUnit(file))
+ }
+
+ val trees = stringParser(useCase.body+";").nonLocalDefOrDcl
+ val enclClass = context.enclClass.owner
+
+ def defineAlias(name: Name) = (
+ if (context.scope.lookup(name) == NoSymbol) {
+ lookupVariable(name.toString.substring(1), enclClass) foreach { repl =>
+ silent(_.typedTypeConstructor(stringParser(repl).typ())) map { tpt =>
+ val alias = enclClass.newAliasType(name.toTypeName, useCase.pos)
+ val tparams = cloneSymbolsAtOwner(tpt.tpe.typeSymbol.typeParams, alias)
+ val newInfo = genPolyType(tparams, appliedType(tpt.tpe, tparams map (_.tpe)))
+ alias setInfo newInfo
+ context.scope.enter(alias)
+ }
+ }
+ }
+ )
+
+ for (tree <- trees; t <- tree)
+ t match {
+ case Ident(name) if name startsWith '$' => defineAlias(name)
+ case _ =>
+ }
+
+ useCase.aliases = context.scope.toList
+ namer.enterSyms(trees)
+ typedStats(trees, NoSymbol)
+ useCase.defined = context.scope.toList filterNot (useCase.aliases contains _)
+
+ if (settings.debug)
+ useCase.defined foreach (sym => println("defined use cases: %s:%s".format(sym, sym.tpe)))
+
+ useCase.defined
+ }
+ }
+}
+
+abstract class ScaladocSyntaxAnalyzer[G <: Global](val global: G) extends SyntaxAnalyzer {
+ import global._
+
+ class ScaladocJavaUnitParser(unit: CompilationUnit) extends {
+ override val in = new ScaladocJavaUnitScanner(unit)
+ } with JavaUnitParser(unit) { }
+
+ class ScaladocJavaUnitScanner(unit: CompilationUnit) extends JavaUnitScanner(unit) {
+ /** buffer for the documentation comment
+ */
+ var docBuffer: StringBuilder = null
+
+ /** add the given character to the documentation buffer
+ */
+ protected def putDocChar(c: Char) {
+ if (docBuffer ne null) docBuffer.append(c)
+ }
+
+ override protected def skipComment(): Boolean = {
+ if (in.ch == '/') {
+ do {
+ in.next
+ } while ((in.ch != CR) && (in.ch != LF) && (in.ch != SU))
+ true
+ } else if (in.ch == '*') {
+ docBuffer = null
+ in.next
+ val scalaDoc = ("/**", "*/")
+ if (in.ch == '*')
+ docBuffer = new StringBuilder(scalaDoc._1)
+ do {
+ do {
+ if (in.ch != '*' && in.ch != SU) {
+ in.next; putDocChar(in.ch)
+ }
+ } while (in.ch != '*' && in.ch != SU)
+ while (in.ch == '*') {
+ in.next; putDocChar(in.ch)
+ }
+ } while (in.ch != '/' && in.ch != SU)
+ if (in.ch == '/') in.next
+ else incompleteInputError("unclosed comment")
+ true
+ } else {
+ false
+ }
+ }
+ }
+
+ class ScaladocUnitScanner(unit0: CompilationUnit, patches0: List[BracePatch]) extends UnitScanner(unit0, patches0) {
+
+ private var docBuffer: StringBuilder = null // buffer for comments (non-null while scanning)
+ private var inDocComment = false // if buffer contains double-star doc comment
+ private var lastDoc: DocComment = null // last comment if it was double-star doc
+
+ private object unmooredParser extends { // minimalist comment parser
+ val global: Global = ScaladocSyntaxAnalyzer.this.global
+ }
+ with CommentFactoryBase with MemberLookupBase {
+ import global.{ settings, Symbol }
+ def parseComment(comment: DocComment) = {
+ val nowarnings = settings.nowarn.value
+ settings.nowarn.value = true
+ try parseAtSymbol(comment.raw, comment.raw, comment.pos)
+ finally settings.nowarn.value = nowarnings
+ }
+
+ override def internalLink(sym: Symbol, site: Symbol): Option[LinkTo] = None
+ override def chooseLink(links: List[LinkTo]): LinkTo = links.headOption orNull
+ override def toString(link: LinkTo): String = "No link"
+ override def findExternalLink(sym: Symbol, name: String): Option[LinkToExternal] = None
+ override def warnNoLink: Boolean = false
+ }
+
+ /**
+ * Warn when discarding buffered doc at the end of a block.
+ * This mechanism doesn't warn about arbitrary unmoored doc.
+ * Also warn under -Xlint, but otherwise only warn in the presence of suspicious
+ * tags that appear to be documenting API. Warnings are suppressed while parsing
+ * the local comment so that comments of the form `[at] Martin` will not trigger a warning.
+ * By omission, tags for `see`, `todo`, `note` and `example` are ignored.
+ */
+ override def discardDocBuffer() = {
+ import scala.tools.nsc.doc.base.comment.Comment
+ val doc = flushDoc
+ // tags that make a local double-star comment look unclean, as though it were API
+ def unclean(comment: Comment): Boolean = {
+ import comment._
+ authors.nonEmpty || result.nonEmpty || throws.nonEmpty || valueParams.nonEmpty ||
+ typeParams.nonEmpty || version.nonEmpty || since.nonEmpty
+ }
+ def isDirty = unclean(unmooredParser parseComment doc)
+ if ((doc ne null) && (settings.lint || isDirty))
+ unit.warning(doc.pos, "discarding unmoored doc comment")
+ }
+
+ override def flushDoc(): DocComment = (try lastDoc finally lastDoc = null)
+
+ override protected def putCommentChar() {
+ if (inDocComment)
+ docBuffer append ch
+
+ nextChar()
+ }
+ override def skipDocComment(): Unit = {
+ inDocComment = true
+ docBuffer = new StringBuilder("/**")
+ super.skipDocComment()
+ }
+ override def skipBlockComment(): Unit = {
+ inDocComment = false
+ docBuffer = new StringBuilder("/*")
+ super.skipBlockComment()
+ }
+ override def skipComment(): Boolean = {
+ // emit a block comment; if it's double-star, make Doc at this pos
+ def foundStarComment(start: Int, end: Int) = try {
+ val str = docBuffer.toString
+ val pos = new RangePosition(unit.source, start, start, end)
+ unit.comment(pos, str)
+ if (inDocComment)
+ lastDoc = DocComment(str, pos)
+ true
+ } finally {
+ docBuffer = null
+ inDocComment = false
+ }
+ super.skipComment() && ((docBuffer eq null) || foundStarComment(offset, charOffset - 2))
+ }
+ }
+ class ScaladocUnitParser(unit: CompilationUnit, patches: List[BracePatch]) extends UnitParser(unit, patches) {
+ override def newScanner() = new ScaladocUnitScanner(unit, patches)
+ override def withPatches(patches: List[BracePatch]) = new ScaladocUnitParser(unit, patches)
+
+ override def joinComment(trees: => List[Tree]): List[Tree] = {
+ val doc = in.flushDoc
+ if ((doc ne null) && doc.raw.length > 0) {
+ log(s"joinComment(doc=$doc)")
+ val joined = trees map {
+ t =>
+ DocDef(doc, t) setPos {
+ if (t.pos.isDefined) {
+ val pos = doc.pos.withEnd(t.pos.endOrPoint)
+ // always make the position transparent
+ pos.makeTransparent
+ } else {
+ t.pos
+ }
+ }
+ }
+ joined.find(_.pos.isOpaqueRange) foreach {
+ main =>
+ val mains = List(main)
+ joined foreach { t => if (t ne main) ensureNonOverlapping(t, mains) }
+ }
+ joined
+ }
+ else trees
+ }
+ }
+}
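
To make the "unmoored doc comment" warning implemented by discardDocBuffer above concrete, this is the kind of fragment it targets (illustrative source, not part of the patch): a double-star comment inside a block, carrying an API-looking tag, attached to nothing documentable.

    class Example {
      def compute(): Int = {
        /** Computes the answer.
          * @return the meaning of life
          */
        // The comment above documents no definition. The @return tag makes it "unclean"
        // (see unclean(...) above), so scaladoc reports "discarding unmoored doc comment"
        // here even without -Xlint.
        42
      }
    }
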
diff --git a/src/scaladoc/scala/tools/nsc/doc/ScaladocGlobal.scala b/src/scaladoc/scala/tools/nsc/doc/ScaladocGlobal.scala
new file mode 100644
index 0000000000..20f24dc753
--- /dev/null
+++ b/src/scaladoc/scala/tools/nsc/doc/ScaladocGlobal.scala
@@ -0,0 +1,49 @@
+/* NSC -- new Scala compiler
+ * Copyright 2007-2013 LAMP/EPFL
+ * @author Paul Phillips
+ */
+
+package scala.tools.nsc
+package doc
+
+import scala.tools.nsc.ast.parser.{ SyntaxAnalyzer, BracePatch }
+import scala.reflect.internal.Chars._
+import symtab._
+import reporters.Reporter
+import typechecker.Analyzer
+import scala.reflect.internal.util.{ BatchSourceFile, RangePosition }
+
+trait ScaladocGlobalTrait extends Global {
+ outer =>
+
+ override val useOffsetPositions = false
+ override def newUnitParser(unit: CompilationUnit) = new syntaxAnalyzer.ScaladocUnitParser(unit, Nil)
+
+ override lazy val syntaxAnalyzer = new ScaladocSyntaxAnalyzer[outer.type](outer) {
+ val runsAfter = List[String]()
+ val runsRightAfter = None
+ }
+ override lazy val loaders = new SymbolLoaders {
+ val global: outer.type = outer
+
+ // SI-5593 Scaladoc's current strategy is to visit all packages in search of user code that can be documented
+ // therefore, it will rummage through the classpath triggering errors whenever it encounters package objects
+ // that are not in their correct place (see bug for details)
+ override protected def signalError(root: Symbol, ex: Throwable) {
+ log(s"Suppressing error involving $root: $ex")
+ }
+ }
+}
+
+class ScaladocGlobal(settings: doc.Settings, reporter: Reporter) extends Global(settings, reporter) with ScaladocGlobalTrait {
+ override protected def computeInternalPhases() {
+ phasesSet += syntaxAnalyzer
+ phasesSet += analyzer.namerFactory
+ phasesSet += analyzer.packageObjects
+ phasesSet += analyzer.typerFactory
+ }
+ override def forScaladoc = true
+ override lazy val analyzer = new {
+ val global: ScaladocGlobal.this.type = ScaladocGlobal.this
+ } with ScaladocAnalyzer
+}
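
DocFactory earlier in this patch is the supported way in, but the restricted phase set is easy to observe by driving ScaladocGlobal directly. A REPL-style sketch, assuming the standard nsc Run and ConsoleReporter plumbing and a hypothetical source path:

    import scala.tools.nsc.doc
    import scala.tools.nsc.reporters.ConsoleReporter

    val settings = new doc.Settings(msg => Console.err.println(msg))
    val reporter = new ConsoleReporter(settings)
    val compiler = new doc.ScaladocGlobal(settings, reporter)
    // Only parser, namer, packageobjects and typer run (computeInternalPhases above):
    // the symbol table and doc comments are populated, but no bytecode is produced.
    new compiler.Run() compile List("Example.scala")   // hypothetical source path
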
diff --git a/src/compiler/scala/tools/nsc/doc/Settings.scala b/src/scaladoc/scala/tools/nsc/doc/Settings.scala
index 8c0628c073..e5dbaa3fd5 100644
--- a/src/compiler/scala/tools/nsc/doc/Settings.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/Settings.scala
@@ -11,7 +11,7 @@ import scala.language.postfixOps
/** An extended version of compiler settings, with additional Scaladoc-specific options.
* @param error A function that prints a string to the appropriate error stream
- * @param print A function that prints the string, without any extra boilerplate of error */
+ * @param printMsg A function that prints the string, without any extra boilerplate of error */
class Settings(error: String => Unit, val printMsg: String => Unit = println(_)) extends scala.tools.nsc.Settings(error) {
/** A setting that defines in which format the documentation is output. ''Note:'' this setting is currently always
@@ -199,22 +199,11 @@ class Settings(error: String => Unit, val printMsg: String => Unit = println(_))
"Expand all type aliases and abstract types into full template pages. (locally this can be done with the @template annotation)"
)
- val docExternalUrls = MultiStringSetting (
- "-external-urls",
- "externalUrl(s)",
- "(deprecated) comma-separated list of package_names=doc_URL for external dependencies, where package names are ':'-separated"
- )
-
val docGroups = BooleanSetting (
"-groups",
"Group similar functions together (based on the @group annotation)"
)
- // Somewhere slightly before r18708 scaladoc stopped building unless the
- // self-type check was suppressed. I hijacked the slotted-for-removal-anyway
- // suppress-vt-warnings option and renamed it for this purpose.
- noSelfCheck.value = true
-
// For improved help output.
def scaladocSpecific = Set[Settings#Setting](
docformat, doctitle, docfooter, docversion, docUncompilable, docsourceurl, docgenerator, docRootContent, useStupidTypes,
@@ -251,15 +240,6 @@ class Settings(error: String => Unit, val printMsg: String => Unit = println(_))
def appendIndex(url: String): String = url.stripSuffix("index.html").stripSuffix("/") + "/index.html"
- // Deprecated together with 'docExternalUrls' option.
- lazy val extUrlPackageMapping: Map[String, String] = (Map.empty[String, String] /: docExternalUrls.value) {
- case (map, binding) =>
- val idx = binding indexOf "="
- val pkgs = binding substring (0, idx) split ":"
- val url = appendIndex(binding substring (idx + 1))
- map ++ (pkgs map (_ -> url))
- }
-
lazy val extUrlMapping: Map[String, String] = docExternalDoc.value flatMap { s =>
val idx = s.indexOf("#")
if (idx > 0) {
@@ -312,10 +292,10 @@ class Settings(error: String => Unit, val printMsg: String => Unit = println(_))
/** Common conversion targets that affect any class in Scala */
val commonConversionTargets = Set(
- "scala.Predef.any2stringfmt",
- "scala.Predef.any2stringadd",
- "scala.Predef.any2ArrowAssoc",
- "scala.Predef.any2Ensuring",
+ "scala.Predef.StringFormat",
+ "scala.Predef.StringAdd",
+ "scala.Predef.ArrowAssoc",
+ "scala.Predef.Ensuring",
"scala.collection.TraversableOnce.alternateImplicit")
/** There's a reason all these are specialized by hand but documenting each of them is beyond the point */
diff --git a/src/compiler/scala/tools/nsc/doc/Uncompilable.scala b/src/scaladoc/scala/tools/nsc/doc/Uncompilable.scala
index d3e5c869e0..ea45ca1a56 100644
--- a/src/compiler/scala/tools/nsc/doc/Uncompilable.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/Uncompilable.scala
@@ -15,7 +15,7 @@ trait Uncompilable {
val global: Global
val settings: Settings
- import global.{ reporter, inform, warning, newTypeName, newTermName, Symbol, Name, DocComment, NoSymbol }
+ import global.{ reporter, inform, warning, newTypeName, newTermName, Symbol, DocComment, NoSymbol }
import global.definitions.AnyRefClass
import global.rootMirror.RootClass
@@ -28,7 +28,7 @@ trait Uncompilable {
lazy val pairs = files flatMap { f =>
val comments = docPairs(f.slurp())
- if (settings.verbose.value)
+ if (settings.verbose)
inform("Found %d doc comments in parse-only file %s: %s".format(comments.size, f, comments.map(_._1).mkString(", ")))
comments
@@ -37,7 +37,7 @@ trait Uncompilable {
def symbols = pairs map (_._1)
def templates = symbols filter (x => x.isClass || x.isTrait || x == AnyRefClass/* which is now a type alias */) toSet
def comments = {
- if (settings.debug.value || settings.verbose.value)
+ if (settings.debug || settings.verbose)
inform("Found %d uncompilable files: %s".format(files.size, files mkString ", "))
if (pairs.isEmpty)
diff --git a/src/compiler/scala/tools/nsc/doc/Universe.scala b/src/scaladoc/scala/tools/nsc/doc/Universe.scala
index 11520c810e..11520c810e 100644
--- a/src/compiler/scala/tools/nsc/doc/Universe.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/Universe.scala
diff --git a/src/compiler/scala/tools/nsc/doc/base/CommentFactoryBase.scala b/src/scaladoc/scala/tools/nsc/doc/base/CommentFactoryBase.scala
index f509c63ba0..cd1d604843 100755
--- a/src/compiler/scala/tools/nsc/doc/base/CommentFactoryBase.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/base/CommentFactoryBase.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2007-2012 LAMP/EPFL
+ * Copyright 2007-2013 LAMP/EPFL
* @author Manohar Jonnalagedda
*/
@@ -8,25 +8,21 @@ package doc
package base
import base.comment._
-import reporters.Reporter
import scala.collection._
import scala.util.matching.Regex
-import scala.annotation.switch
-import scala.reflect.internal.util.{NoPosition, Position}
+import scala.reflect.internal.util.Position
import scala.language.postfixOps
/** The comment parser transforms raw comment strings into `Comment` objects.
* Call `parse` to run the parser. Note that the parser is stateless and
* should only be built once for a given Scaladoc run.
*
- * @param reporter The reporter on which user messages (error, warnings) should be printed.
- *
* @author Manohar Jonnalagedda
* @author Gilles Dubochet */
trait CommentFactoryBase { this: MemberLookupBase =>
val global: Global
- import global.{ reporter, definitions, Symbol }
+ import global.{ reporter, Symbol, NoSymbol }
/* Creates comments with necessary arguments */
def createComment (
@@ -66,7 +62,6 @@ trait CommentFactoryBase { this: MemberLookupBase =>
val note = note0
val example = example0
val constructor = constructor0
- val source = source0
val inheritDiagram = inheritDiagram0
val contentDiagram = contentDiagram0
val groupDesc = groupDesc0
@@ -191,7 +186,7 @@ trait CommentFactoryBase { this: MemberLookupBase =>
* @param comment The expanded comment string (including start and end markers) to be parsed.
* @param src The raw comment source string.
* @param pos The position of the comment in source. */
- protected def parseAtSymbol(comment: String, src: String, pos: Position, siteOpt: Option[Symbol] = None): Comment = {
+ protected def parseAtSymbol(comment: String, src: String, pos: Position, site: Symbol = NoSymbol): Comment = {
/** The cleaned raw comment as a list of lines. Cleaning removes comment
* start and end markers, line start markers and unnecessary whitespace. */
def clean(comment: String): List[String] = {
@@ -233,11 +228,11 @@ trait CommentFactoryBase { this: MemberLookupBase =>
case CodeBlockStartRegex(before, marker, after) :: ls if (!inCodeBlock) =>
if (!before.trim.isEmpty && !after.trim.isEmpty)
- parse0(docBody, tags, lastTagKey, before :: marker :: after :: ls, false)
+ parse0(docBody, tags, lastTagKey, before :: marker :: after :: ls, inCodeBlock = false)
else if (!before.trim.isEmpty)
- parse0(docBody, tags, lastTagKey, before :: marker :: ls, false)
+ parse0(docBody, tags, lastTagKey, before :: marker :: ls, inCodeBlock = false)
else if (!after.trim.isEmpty)
- parse0(docBody, tags, lastTagKey, marker :: after :: ls, true)
+ parse0(docBody, tags, lastTagKey, marker :: after :: ls, inCodeBlock = true)
else lastTagKey match {
case Some(key) =>
val value =
@@ -245,18 +240,18 @@ trait CommentFactoryBase { this: MemberLookupBase =>
case Some(b :: bs) => (b + endOfLine + marker) :: bs
case None => oops("lastTagKey set when no tag exists for key")
}
- parse0(docBody, tags + (key -> value), lastTagKey, ls, true)
+ parse0(docBody, tags + (key -> value), lastTagKey, ls, inCodeBlock = true)
case None =>
- parse0(docBody append endOfLine append marker, tags, lastTagKey, ls, true)
+ parse0(docBody append endOfLine append marker, tags, lastTagKey, ls, inCodeBlock = true)
}
case CodeBlockEndRegex(before, marker, after) :: ls =>
if (!before.trim.isEmpty && !after.trim.isEmpty)
- parse0(docBody, tags, lastTagKey, before :: marker :: after :: ls, true)
+ parse0(docBody, tags, lastTagKey, before :: marker :: after :: ls, inCodeBlock = true)
if (!before.trim.isEmpty)
- parse0(docBody, tags, lastTagKey, before :: marker :: ls, true)
+ parse0(docBody, tags, lastTagKey, before :: marker :: ls, inCodeBlock = true)
else if (!after.trim.isEmpty)
- parse0(docBody, tags, lastTagKey, marker :: after :: ls, false)
+ parse0(docBody, tags, lastTagKey, marker :: after :: ls, inCodeBlock = false)
else lastTagKey match {
case Some(key) =>
val value =
@@ -264,9 +259,9 @@ trait CommentFactoryBase { this: MemberLookupBase =>
case Some(b :: bs) => (b + endOfLine + marker) :: bs
case None => oops("lastTagKey set when no tag exists for key")
}
- parse0(docBody, tags + (key -> value), lastTagKey, ls, false)
+ parse0(docBody, tags + (key -> value), lastTagKey, ls, inCodeBlock = false)
case None =>
- parse0(docBody append endOfLine append marker, tags, lastTagKey, ls, false)
+ parse0(docBody append endOfLine append marker, tags, lastTagKey, ls, inCodeBlock = false)
}
case SymbolTagRegex(name, sym, body) :: ls if (!inCodeBlock) =>
@@ -317,7 +312,7 @@ trait CommentFactoryBase { this: MemberLookupBase =>
val tagsWithoutDiagram = tags.filterNot(pair => stripTags.contains(pair._1))
val bodyTags: mutable.Map[TagKey, List[Body]] =
- mutable.Map(tagsWithoutDiagram mapValues {tag => tag map (parseWikiAtSymbol(_, pos, siteOpt))} toSeq: _*)
+ mutable.Map(tagsWithoutDiagram mapValues {tag => tag map (parseWikiAtSymbol(_, pos, site))} toSeq: _*)
def oneTag(key: SimpleTagKey): Option[Body] =
((bodyTags remove key): @unchecked) match {
@@ -350,7 +345,7 @@ trait CommentFactoryBase { this: MemberLookupBase =>
}
val com = createComment (
- body0 = Some(parseWikiAtSymbol(docBody.toString, pos, siteOpt)),
+ body0 = Some(parseWikiAtSymbol(docBody.toString, pos, site)),
authors0 = allTags(SimpleTagKey("author")),
see0 = allTags(SimpleTagKey("see")),
result0 = oneTag(SimpleTagKey("return")),
@@ -380,7 +375,7 @@ trait CommentFactoryBase { this: MemberLookupBase =>
}
- parse0(new StringBuilder(comment.size), Map.empty, None, clean(comment), false)
+ parse0(new StringBuilder(comment.size), Map.empty, None, clean(comment), inCodeBlock = false)
}
@@ -390,14 +385,14 @@ trait CommentFactoryBase { this: MemberLookupBase =>
* - Removed start-of-line star and one whitespace afterwards (if present).
* - Removed all end-of-line whitespace.
* - Only `endOfLine` is used to mark line endings. */
- def parseWikiAtSymbol(string: String, pos: Position, siteOpt: Option[Symbol]): Body = new WikiParser(string, pos, siteOpt).document()
+ def parseWikiAtSymbol(string: String, pos: Position, site: Symbol): Body = new WikiParser(string, pos, site).document()
/** TODO
*
* @author Ingo Maier
* @author Manohar Jonnalagedda
* @author Gilles Dubochet */
- protected final class WikiParser(val buffer: String, pos: Position, siteOpt: Option[Symbol]) extends CharReader(buffer) { wiki =>
+ protected final class WikiParser(val buffer: String, pos: Position, site: Symbol) extends CharReader(buffer) { wiki =>
var summaryParsed = false
def document(): Body = {
@@ -456,7 +451,7 @@ trait CommentFactoryBase { this: MemberLookupBase =>
else {
jumpWhitespace()
jump(style)
- val p = Paragraph(inline(false))
+ val p = Paragraph(inline(isInlineEnd = false))
blockEnded("end of list line ")
Some(p)
}
@@ -515,11 +510,11 @@ trait CommentFactoryBase { this: MemberLookupBase =>
def para(): Block = {
val p =
if (summaryParsed)
- Paragraph(inline(false))
+ Paragraph(inline(isInlineEnd = false))
else {
val s = summary()
val r =
- if (checkParaEnded) List(s) else List(s, inline(false))
+ if (checkParaEnded()) List(s) else List(s, inline(isInlineEnd = false))
summaryParsed = true
Paragraph(Chain(r))
}
@@ -683,8 +678,7 @@ trait CommentFactoryBase { this: MemberLookupBase =>
def link(): Inline = {
val SchemeUri = """([a-z]+:.*)""".r
jump("[[")
- var parens = 2 + repeatJump('[')
- val start = "[" * parens
+ val parens = 2 + repeatJump('[')
val stop = "]" * parens
//println("link with " + parens + " matching parens")
val target = readUntil { check(stop) || check(" ") }
@@ -700,7 +694,7 @@ trait CommentFactoryBase { this: MemberLookupBase =>
case (SchemeUri(uri), optTitle) =>
Link(uri, optTitle getOrElse Text(uri))
case (qualName, optTitle) =>
- makeEntityLink(optTitle getOrElse Text(target), pos, target, siteOpt)
+ makeEntityLink(optTitle getOrElse Text(target), pos, target, site)
}
}
@@ -729,7 +723,7 @@ trait CommentFactoryBase { this: MemberLookupBase =>
*/
def normalizeIndentation(_code: String): String = {
- var code = _code.trim
+ val code = _code.trim
var maxSkip = Integer.MAX_VALUE
var crtSkip = 0
var wsArea = true
@@ -884,20 +878,6 @@ trait CommentFactoryBase { this: MemberLookupBase =>
count
}
- final def jumpUntil(chars: String): Int = {
- assert(chars.length > 0)
- var count = 0
- val c = chars.charAt(0)
- while (!check(chars) && char != endOfText) {
- nextChar()
- while (char != c && char != endOfText) {
- nextChar()
- count += 1
- }
- }
- count
- }
-
final def jumpUntil(pred: => Boolean): Int = {
var count = 0
while (!pred && char != endOfText) {
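
For reference, here is one doc comment (illustrative, not from the patch) exercising the constructs these hunks parse: a triple-brace code block, a URL link, and an entity link that now flows through makeEntityLink with the enclosing symbol as the site.

    object CsvUtil {
      /** Splits a string on commas.
        *
        *  See [[scala.collection.immutable.List]] or [[http://docs.scala-lang.org/ the online docs]].
        *
        *  {{{
        *  CsvUtil.split("a,b,c")   // List("a", "b", "c")
        *  }}}
        *
        *  @param s the input string
        *  @return the comma-separated fields
        */
      def split(s: String): List[String] = s.split(',').toList
    }
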
diff --git a/src/compiler/scala/tools/nsc/doc/base/LinkTo.scala b/src/scaladoc/scala/tools/nsc/doc/base/LinkTo.scala
index c11179800c..3d80f9da52 100755
--- a/src/compiler/scala/tools/nsc/doc/base/LinkTo.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/base/LinkTo.scala
@@ -6,8 +6,6 @@ package scala.tools.nsc
package doc
package base
-import scala.collection._
-
sealed trait LinkTo
final case class LinkToMember[Mbr, Tpl](mbr: Mbr, tpl: Tpl) extends LinkTo
final case class LinkToTpl[Tpl](tpl: Tpl) extends LinkTo
diff --git a/src/compiler/scala/tools/nsc/doc/base/MemberLookupBase.scala b/src/scaladoc/scala/tools/nsc/doc/base/MemberLookupBase.scala
index cdcfeaae81..cc217d2f80 100755
--- a/src/compiler/scala/tools/nsc/doc/base/MemberLookupBase.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/base/MemberLookupBase.scala
@@ -21,10 +21,10 @@ trait MemberLookupBase {
import global._
import rootMirror.{RootPackage, EmptyPackage}
- private def isRoot(s: Symbol) = s.isRootSymbol || s.isEmptyPackage || s.isEmptyPackageClass
+ private def isRoot(s: Symbol) = (s eq NoSymbol) || s.isRootSymbol || s.isEmptyPackage || s.isEmptyPackageClass
- def makeEntityLink(title: Inline, pos: Position, query: String, siteOpt: Option[Symbol]) =
- new EntityLink(title) { lazy val link = memberLookup(pos, query, siteOpt) }
+ def makeEntityLink(title: Inline, pos: Position, query: String, site: Symbol) =
+ new EntityLink(title) { lazy val link = memberLookup(pos, query, site) }
private var showExplanation = true
private def explanation: String =
@@ -45,18 +45,14 @@ trait MemberLookupBase {
| - you can use \\# to escape hashes, otherwise they will be considered as delimiters, like dots.""".stripMargin
} else ""
- def memberLookup(pos: Position, query: String, siteOpt: Option[Symbol]): LinkTo = {
- var members = breakMembers(query)
+ def memberLookup(pos: Position, query: String, site: Symbol): LinkTo = {
+ val members = breakMembers(query)
// (1) First look in the root package, as most of the links are qualified
val fromRoot = lookupInRootPackage(pos, members)
// (2) Or recursively go into each containing template.
- val fromParents = siteOpt.fold(Stream.empty[Symbol]) { s =>
- Stream.iterate(s)(_.owner)
- }.takeWhile (!isRoot(_)).map {
- lookupInTemplate(pos, members, _)
- }
+ val fromParents = Stream.iterate(site)(_.owner) takeWhile (!isRoot(_)) map (lookupInTemplate(pos, members, _))
val syms = (fromRoot +: fromParents) find (!_.isEmpty) getOrElse Nil
@@ -88,7 +84,7 @@ trait MemberLookupBase {
// (4) if we still haven't found anything, create a tooltip
Tooltip(query)
case List(l) => l
- case links =>
+ case links =>
val chosen = chooseLink(links)
def linkToString(link: LinkTo) = {
val chosenInfo =
diff --git a/src/compiler/scala/tools/nsc/doc/base/comment/Body.scala b/src/scaladoc/scala/tools/nsc/doc/base/comment/Body.scala
index eb0d751f3e..ac5fec80b3 100755
--- a/src/compiler/scala/tools/nsc/doc/base/comment/Body.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/base/comment/Body.scala
@@ -10,8 +10,6 @@ package comment
import scala.collection._
-import java.net.URL
-
/** A body of text. A comment has a single body, which is composed of
* at least one block. Inside every body is exactly one summary (see
* [[scala.tools.nsc.doc.model.comment.Summary]]). */
diff --git a/src/compiler/scala/tools/nsc/doc/base/comment/Comment.scala b/src/scaladoc/scala/tools/nsc/doc/base/comment/Comment.scala
index 2b28164ca4..a3d05ae50b 100644
--- a/src/compiler/scala/tools/nsc/doc/base/comment/Comment.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/base/comment/Comment.scala
@@ -102,9 +102,6 @@ abstract class Comment {
/** A usage example related to the entity. */
def example: List[Body]
- /** The comment as it appears in the source text. */
- def source: Option[String]
-
/** A description for the primary constructor */
def constructor: Option[Body]
diff --git a/src/compiler/scala/tools/nsc/doc/doclet/Generator.scala b/src/scaladoc/scala/tools/nsc/doc/doclet/Generator.scala
index 735b79c336..42b56aa927 100644
--- a/src/compiler/scala/tools/nsc/doc/doclet/Generator.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/doclet/Generator.scala
@@ -21,7 +21,7 @@ abstract class Generator {
/** Outputs documentation (as a side effect). */
def generate(): Unit = {
assert(checks forall { check => check() })
- generateImpl
+ generateImpl()
}
/** Outputs documentation (as a side effect). This method is called only if all `checks` are true. */
diff --git a/src/compiler/scala/tools/nsc/doc/doclet/Indexer.scala b/src/scaladoc/scala/tools/nsc/doc/doclet/Indexer.scala
index 0cdd47182f..0cdd47182f 100644
--- a/src/compiler/scala/tools/nsc/doc/doclet/Indexer.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/doclet/Indexer.scala
diff --git a/src/compiler/scala/tools/nsc/doc/doclet/Universer.scala b/src/scaladoc/scala/tools/nsc/doc/doclet/Universer.scala
index ee8b7809e5..ee8b7809e5 100644
--- a/src/compiler/scala/tools/nsc/doc/doclet/Universer.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/doclet/Universer.scala
diff --git a/src/compiler/scala/tools/nsc/doc/html/Doclet.scala b/src/scaladoc/scala/tools/nsc/doc/html/Doclet.scala
index 3aa3e87554..21c5f6bb67 100644
--- a/src/compiler/scala/tools/nsc/doc/html/Doclet.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/html/Doclet.scala
@@ -13,7 +13,7 @@ import doclet._
class Doclet extends Generator with Universer with Indexer {
def generateImpl() {
- new html.HtmlFactory(universe, index).generate
+ new html.HtmlFactory(universe, index).generate()
}
}
diff --git a/src/compiler/scala/tools/nsc/doc/html/HtmlFactory.scala b/src/scaladoc/scala/tools/nsc/doc/html/HtmlFactory.scala
index 4630c3dda8..d721a96ad7 100644
--- a/src/compiler/scala/tools/nsc/doc/html/HtmlFactory.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/html/HtmlFactory.scala
@@ -103,7 +103,7 @@ class HtmlFactory(val universe: doc.Universe, index: doc.Index) {
/** Generates the Scaladoc site for a model into the site root.
* A scaladoc site is a set of HTML and related files
* that document a model extracted from a compiler run.
- * @param model The model to generate in the form of a sequence of packages. */
+ */
def generate() {
def copyResource(subPath: String) {
@@ -111,7 +111,7 @@ class HtmlFactory(val universe: doc.Universe, index: doc.Index) {
val p = "/scala/tools/nsc/doc/html/resource/" + subPath
val inputStream = getClass.getResourceAsStream(p)
assert(inputStream != null, p)
- }.toByteArray
+ }.toByteArray()
val dest = Directory(siteRoot) / subPath
dest.parent.createDirectory()
val out = dest.toFile.bufferedOutput()
diff --git a/src/compiler/scala/tools/nsc/doc/html/HtmlPage.scala b/src/scaladoc/scala/tools/nsc/doc/html/HtmlPage.scala
index 69da322418..159e16375c 100644
--- a/src/compiler/scala/tools/nsc/doc/html/HtmlPage.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/html/HtmlPage.scala
@@ -3,7 +3,9 @@
* @author David Bernard, Manohar Jonnalagedda
*/
-package scala.tools.nsc
+package scala
+package tools
+package nsc
package doc
package html
@@ -11,7 +13,7 @@ import base._
import base.comment._
import model._
-import scala.xml.{XML, NodeSeq}
+import scala.xml.NodeSeq
import scala.xml.dtd.{DocType, PublicID}
import scala.collection._
import java.io.Writer
@@ -60,7 +62,7 @@ abstract class HtmlPage extends Page { thisPage =>
w.write(xml.Xhtml.toXhtml(html))
}
- if (site.universe.settings.docRawOutput.value)
+ if (site.universe.settings.docRawOutput)
writeFile(site, ".raw") {
// we're only interested in the body, as this will go into the diff
_.write(body.text)
@@ -123,7 +125,7 @@ abstract class HtmlPage extends Page { thisPage =>
case Text(text) => scala.xml.Text(text)
case Summary(in) => inlineToHtml(in)
case HtmlTag(tag) => scala.xml.Unparsed(tag)
- case EntityLink(target, link) => linkToHtml(target, link, true)
+ case EntityLink(target, link) => linkToHtml(target, link, hasLinks = true)
}
def linkToHtml(text: Inline, link: LinkTo, hasLinks: Boolean) = link match {
diff --git a/src/compiler/scala/tools/nsc/doc/html/Page.scala b/src/scaladoc/scala/tools/nsc/doc/html/Page.scala
index 62166f7def..93950fd0a7 100644
--- a/src/compiler/scala/tools/nsc/doc/html/Page.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/html/Page.scala
@@ -3,7 +3,8 @@
* @author David Bernard, Manohar Jonnalagedda
*/
-package scala.tools.nsc.doc.html
+package scala
+package tools.nsc.doc.html
import scala.tools.nsc.doc.model._
import java.io.{FileOutputStream, File}
@@ -45,7 +46,7 @@ abstract class Page {
/** Writes this page as a file. The file's location is relative to the
* generator's site root, and the encoding is also defined by the generator.
- * @param generator The generator that is writing this page. */
+ * @param site The generator that is writing this page. */
def writeFor(site: HtmlFactory): Unit
def kindToString(mbr: MemberEntity) =
@@ -84,16 +85,10 @@ abstract class Page {
}
/** A relative link from this page to some destination class entity.
- * @param destEntity The class or object entity that the link will point to. */
+ * @param destClass The class or object entity that the link will point to. */
def relativeLinkTo(destClass: TemplateEntity): String =
relativeLinkTo(templateToPath(destClass))
- /** A relative link from this page to some destination page in the Scaladoc site.
- * @param destPage The page that the link will point to. */
- def relativeLinkTo(destPage: HtmlPage): String = {
- relativeLinkTo(destPage.path)
- }
-
/** A relative link from this page to some destination path.
* @param destPath The path that the link will point to. */
def relativeLinkTo(destPath: List[String]): String = {
diff --git a/src/compiler/scala/tools/nsc/doc/html/SyntaxHigh.scala b/src/scaladoc/scala/tools/nsc/doc/html/SyntaxHigh.scala
index 6fdaaed75f..910148532d 100644
--- a/src/compiler/scala/tools/nsc/doc/html/SyntaxHigh.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/html/SyntaxHigh.scala
@@ -3,7 +3,8 @@
* @author Stephane Micheloud
*/
-package scala.tools.nsc.doc.html
+package scala
+package tools.nsc.doc.html
import scala.xml.NodeSeq
import scala.annotation.tailrec
@@ -30,8 +31,8 @@ private[html] object SyntaxHigh {
/** Annotations, sorted alphabetically */
val annotations = Array(
"BeanProperty", "SerialVersionUID",
- "beanGetter", "beanSetter", "bridge", "cloneable",
- "deprecated", "deprecatedName",
+ "beanGetter", "beanSetter", "bridge",
+ "deprecated", "deprecatedName", "deprecatedOverriding", "deprecatedInheritance",
"elidable", "field", "getter", "inline",
"migration", "native", "noinline", "param",
"remote", "setter", "specialized", "strictfp", "switch",
@@ -41,7 +42,7 @@ private[html] object SyntaxHigh {
/** Standard library classes/objects, sorted alphabetically */
val standards = Array (
- "WeakTypeTag", "Any", "AnyRef", "AnyVal", "App", "Application", "Array",
+ "WeakTypeTag", "Any", "AnyRef", "AnyVal", "App", "Array",
"Boolean", "Byte", "Char", "Class", "ClassTag", "ClassManifest",
"Console", "Double", "Enumeration", "Float", "Function", "Int",
"List", "Long", "Manifest", "Map",
@@ -108,14 +109,14 @@ private[html] object SyntaxHigh {
case '/' =>
if (star) {
if (level > 0) level -= 1
- if (level == 0) i else multiline(i+1, true)
+ if (level == 0) i else multiline(i+1, star = true)
} else
- multiline(i+1, false)
+ multiline(i+1, star = false)
case _ =>
- multiline(i+1, false)
+ multiline(i+1, star = false)
}
}
- if (buf(i) == '/') line(i) else multiline(i, true)
+ if (buf(i) == '/') line(i) else multiline(i, star = true)
out.toString
}
@@ -130,16 +131,16 @@ private[html] object SyntaxHigh {
out append ch
ch match {
case '\\' =>
- charlit0(i+1, true)
+ charlit0(i+1, bslash = true)
case '\'' if !bslash =>
i
case _ =>
- if (bslash && '0' <= ch && ch <= '9') charlit0(i+1, true)
- else charlit0(i+1, false)
+ if (bslash && '0' <= ch && ch <= '9') charlit0(i+1, bslash = true)
+ else charlit0(i+1, bslash = false)
}
}
}
- charlit0(j, false)
+ charlit0(j, bslash = false)
out.toString
}
@@ -151,14 +152,14 @@ private[html] object SyntaxHigh {
out append ch
ch match {
case '\\' =>
- strlit0(i+1, true)
+ strlit0(i+1, bslash = true)
case '"' if !bslash =>
i
case _ =>
- strlit0(i+1, false)
+ strlit0(i+1, bslash = false)
}
}
- strlit0(i, false)
+ strlit0(i, bslash = false)
out.toString
}
@@ -184,7 +185,7 @@ private[html] object SyntaxHigh {
ch match {
case 'e' | 'E' =>
out append ch
- expo(i+1, false)
+ expo(i+1, signed = false)
case _ =>
if (Character.isDigit(ch)) {
out append ch
@@ -198,7 +199,7 @@ private[html] object SyntaxHigh {
ch match {
case '+' | '-' if !signed =>
out append ch
- expo(i+1, true)
+ expo(i+1, signed = true)
case _ =>
if (Character.isDigit(ch)) {
out append ch
@@ -259,8 +260,8 @@ private[html] object SyntaxHigh {
parse(buf(i).toChar.toString, i+1)
case _ =>
if (i == 0 || (i >= 1 && !Character.isJavaIdentifierPart(buf(i-1).toChar))) {
- if (Character.isDigit(buf(i)) ||
- (buf(i) == '.' && i + 1 < buf.length && Character.isDigit(buf(i+1)))) {
+ if (Character.isDigit(buf(i).toInt) ||
+ (buf(i) == '.' && i + 1 < buf.length && Character.isDigit(buf(i+1).toInt))) {
val s = numlit(i)
parse("<span class=\"num\">"+s+"</span>", i+s.length)
} else {
diff --git a/src/compiler/scala/tools/nsc/doc/html/page/Index.scala b/src/scaladoc/scala/tools/nsc/doc/html/page/Index.scala
index 8802d7c35c..c034647320 100644
--- a/src/compiler/scala/tools/nsc/doc/html/page/Index.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/html/page/Index.scala
@@ -9,10 +9,8 @@ package html
package page
import model._
-
import scala.collection._
import scala.xml._
-import scala.util.parsing.json.{JSONObject, JSONArray}
class Index(universe: doc.Universe, val index: doc.Index) extends HtmlPage {
@@ -27,19 +25,13 @@ class Index(universe: doc.Universe, val index: doc.Index) extends HtmlPage {
val headers =
<xml:group>
<link href={ relativeLinkTo{List("index.css", "lib")} } media="screen" type="text/css" rel="stylesheet"/>
+ <script type="text/javascript" src={ relativeLinkTo{List("jquery.js", "lib")} }></script>
+ <script type="text/javascript" src={ relativeLinkTo{List("jquery-ui.js", "lib")} }></script>
+ <script type="text/javascript" src={ relativeLinkTo{List("jquery.layout.js", "lib")} }></script>
+ <script type="text/javascript" src={ relativeLinkTo{List("index.js", "lib")} }></script>
+ <script type="text/javascript" src={ relativeLinkTo{List("scheduler.js", "lib")} }></script>
</xml:group>
- private val scripts = {
- val sources =
- (List("jquery.js", "jquery-ui.js", "jquery.layout.js", "scheduler.js", "index.js").map {
- x => relativeLinkTo(List(x, "lib"))
- }) :+ "index.js"
-
- sources map {
- src => <script defer="defer" type="text/javascript" src={src}></script>
- }
- }
-
val body =
<body>
<div id="library">
@@ -52,7 +44,6 @@ class Index(universe: doc.Universe, val index: doc.Index) extends HtmlPage {
<div id="content" class="ui-layout-center">
<iframe id="template" name="template" src={ relativeLinkTo{List("package.html")} }/>
</div>
- { scripts }
</body>
def letters: NodeSeq =
@@ -132,7 +123,7 @@ class Index(universe: doc.Universe, val index: doc.Index) extends HtmlPage {
</xml:group>
}
packageElem(universe.rootPackage)
- }</div></div>
+ }</div></div><script src="index.js"></script>
</div>
def packageQualifiedName(ety: DocTemplateEntity): String =
diff --git a/src/compiler/scala/tools/nsc/doc/html/page/IndexScript.scala b/src/scaladoc/scala/tools/nsc/doc/html/page/IndexScript.scala
index a205e02533..e3c94505ab 100644
--- a/src/compiler/scala/tools/nsc/doc/html/page/IndexScript.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/html/page/IndexScript.scala
@@ -8,7 +8,6 @@ package scala.tools.nsc.doc.html.page
import scala.tools.nsc.doc
import scala.tools.nsc.doc.model.{Package, DocTemplateEntity}
import scala.tools.nsc.doc.html.{Page, HtmlFactory}
-import java.nio.channels.Channels
import scala.util.parsing.json.{JSONObject, JSONArray}
class IndexScript(universe: doc.Universe, index: doc.Index) extends Page {
diff --git a/src/compiler/scala/tools/nsc/doc/html/page/ReferenceIndex.scala b/src/scaladoc/scala/tools/nsc/doc/html/page/ReferenceIndex.scala
index a74c2eedbd..84ee82f994 100755
--- a/src/compiler/scala/tools/nsc/doc/html/page/ReferenceIndex.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/html/page/ReferenceIndex.scala
@@ -3,10 +3,13 @@
* @author Pedro Furlanetto
*/
-package scala.tools.nsc
+package scala
+package tools
+package nsc
package doc
package html
package page
+
import doc.model._
class ReferenceIndex(letter: Char, index: doc.Index, universe: Universe) extends HtmlPage {
diff --git a/src/compiler/scala/tools/nsc/doc/html/page/Source.scala b/src/scaladoc/scala/tools/nsc/doc/html/page/Source.scala
index 68289b7474..37145756d9 100644
--- a/src/compiler/scala/tools/nsc/doc/html/page/Source.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/html/page/Source.scala
@@ -8,8 +8,7 @@ package doc
package html
package page
-import model._
-import scala.xml.{NodeSeq, Unparsed}
+import scala.xml.NodeSeq
import java.io.File
class Source(sourceFile: File) extends HtmlPage {
diff --git a/src/compiler/scala/tools/nsc/doc/html/page/Template.scala b/src/scaladoc/scala/tools/nsc/doc/html/page/Template.scala
index 63c77e7bb3..119d4e0143 100644
--- a/src/compiler/scala/tools/nsc/doc/html/page/Template.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/html/page/Template.scala
@@ -3,7 +3,9 @@
* @author David Bernard, Manohar Jonnalagedda
*/
-package scala.tools.nsc
+package scala
+package tools
+package nsc
package doc
package html
package page
@@ -13,10 +15,9 @@ import base.comment._
import model._
import model.diagram._
-import diagram._
-
import scala.xml.{ NodeSeq, Text, UnprefixedAttribute }
import scala.language.postfixOps
+import scala.collection.mutable. { Set, HashSet }
import model._
import model.diagram._
@@ -40,6 +41,14 @@ class Template(universe: doc.Universe, generator: DiagramGenerator, tpl: DocTemp
<xml:group>
<link href={ relativeLinkTo{List("template.css", "lib")} } media="screen" type="text/css" rel="stylesheet"/>
<link href={ relativeLinkTo{List("diagrams.css", "lib")} } media="screen" type="text/css" rel="stylesheet" id="diagrams-css" />
+ <script type="text/javascript" src={ relativeLinkTo{List("jquery.js", "lib")} } id="jquery-js"></script>
+ <script type="text/javascript" src={ relativeLinkTo{List("jquery-ui.js", "lib")} }></script>
+ <script type="text/javascript" src={ relativeLinkTo{List("template.js", "lib")} }></script>
+ <script type="text/javascript" src={ relativeLinkTo{List("tools.tooltip.js", "lib")} }></script>
+ { if (universe.settings.docDiagrams.value) {
+ <script type="text/javascript" src={ relativeLinkTo{List("modernizr.custom.js", "lib")} }></script>
+ <script type="text/javascript" src={ relativeLinkTo{List("diagrams.js", "lib")} } id="diagrams-js"></script>
+ } else NodeSeq.Empty }
<script type="text/javascript">
if(top === self) {{
var url = '{ val p = templateToPath(tpl); "../" * (p.size - 1) + "index.html" }';
@@ -53,22 +62,6 @@ class Template(universe: doc.Universe, generator: DiagramGenerator, tpl: DocTemp
</script>
</xml:group>
- private val scripts = {
- val sources = {
- val default = List("jquery.js", "jquery-ui.js", "tools.tooltip.js", "template.js")
- val forDiagrams = List("modernizr.custom.js", "diagrams.js")
-
- (default ++ (if (universe.settings.docDiagrams.value) forDiagrams else Nil)) map {
- x => x.replace('.', '-') -> relativeLinkTo(List(x, "lib"))
- }
- }
-
- sources map {
- case (id, src) =>
- <script defer="defer" type="text/javascript" id={id} src={src}></script>
- }
- }
-
val valueMembers =
tpl.methods ++ tpl.values ++ tpl.templates.filter(x => x.isObject || x.isPackage) sorted
@@ -120,8 +113,8 @@ class Template(universe: doc.Universe, generator: DiagramGenerator, tpl: DocTemp
<h1>{ displayName }</h1>
</div>
- { signature(tpl, true) }
- { memberToCommentHtml(tpl, tpl.inTemplate, true) }
+ { signature(tpl, isSelf = true) }
+ { memberToCommentHtml(tpl, tpl.inTemplate, isSelf = true) }
<div id="mbrsel">
<div id='textfilter'><span class='pre'/><span class='input'><input id='mbrsel-input' type='text' accesskey='/'/></span><span class='post'/></div>
@@ -252,7 +245,7 @@ class Template(universe: doc.Universe, generator: DiagramGenerator, tpl: DocTemp
NodeSeq fromSeq (for (conversion <- (tpl.conversions)) yield
<div class="conversion" name={ conversion.conversionQualifiedName }>
<h3>Inherited by implicit conversion { conversion.conversionShortName } from
- { typeToHtml(tpl.resultType, true) } to { typeToHtml(conversion.targetType, true) }
+ { typeToHtml(tpl.resultType, hasLinks = true) } to { typeToHtml(conversion.targetType, hasLinks = true) }
</h3>
</div>
)
@@ -288,19 +281,20 @@ class Template(universe: doc.Universe, generator: DiagramGenerator, tpl: DocTemp
else
<div id="footer"> { tpl.universe.settings.docfooter.value } </div>
}
- { scripts }
+
+
</body>
}
def memberToHtml(mbr: MemberEntity, inTpl: DocTemplateEntity): NodeSeq = {
- val memberComment = memberToCommentHtml(mbr, inTpl, false)
+ val memberComment = memberToCommentHtml(mbr, inTpl, isSelf = false)
<li name={ mbr.definitionName } visbl={ if (mbr.visibility.isProtected) "prt" else "pub" }
data-isabs={ mbr.isAbstract.toString }
fullComment={ if(memberComment.filter(_.label=="div").isEmpty) "no" else "yes" }
group={ mbr.group }>
<a id={ mbr.signature }/>
<a id={ mbr.signatureCompat }/>
- { signature(mbr, false) }
+ { signature(mbr, isSelf = false) }
{ memberComment }
</li>
}
@@ -407,7 +401,7 @@ class Template(universe: doc.Universe, generator: DiagramGenerator, tpl: DocTemp
case Some(conv) =>
<dt class="implicit">Implicit information</dt> ++
{
- val targetType = typeToHtml(conv.targetType, true)
+ val targetType = typeToHtml(conv.targetType, hasLinks = true)
val conversionMethod = conv.convertorMethod match {
case Left(member) => Text(member.name)
case Right(name) => Text(name)
@@ -433,7 +427,7 @@ class Template(universe: doc.Universe, generator: DiagramGenerator, tpl: DocTemp
}
<dd>
- This member is added by an implicit conversion from { typeToHtml(inTpl.resultType, true) } to
+ This member is added by an implicit conversion from { typeToHtml(inTpl.resultType, hasLinks = true) } to
{ targetType } performed by method { conversionMethod } in { conversionOwner }.
{ constraintText }
</dd>
@@ -495,7 +489,7 @@ class Template(universe: doc.Universe, generator: DiagramGenerator, tpl: DocTemp
case nte: NonTemplateMemberEntity if nte.isUseCase =>
<div class="full-signature-block toggleContainer">
<span class="toggle">Full Signature</span>
- <div class="hiddenContent full-signature-usecase">{ signature(nte.useCaseOf.get,true) }</div>
+ <div class="hiddenContent full-signature-usecase">{ signature(nte.useCaseOf.get,isSelf = true) }</div>
</div>
case _ => NodeSeq.Empty
}
@@ -532,7 +526,7 @@ class Template(universe: doc.Universe, generator: DiagramGenerator, tpl: DocTemp
val sourceLink: NodeSeq = mbr match {
case dtpl: DocTemplateEntity if (isSelf && dtpl.sourceUrl.isDefined && dtpl.inSource.isDefined && !isReduced) =>
- val (absFile, line) = dtpl.inSource.get
+ val (absFile, _) = dtpl.inSource.get
<dt>Source</dt>
<dd>{ <a href={ dtpl.sourceUrl.get.toString } target="_blank">{ Text(absFile.file.getName) }</a> }</dd>
case _ => NodeSeq.Empty
@@ -645,13 +639,23 @@ class Template(universe: doc.Universe, generator: DiagramGenerator, tpl: DocTemp
}
val subclasses = mbr match {
- case dtpl: DocTemplateEntity if isSelf && !isReduced && dtpl.allSubClasses.nonEmpty =>
- <div class="toggleContainer block">
- <span class="toggle">Known Subclasses</span>
- <div class="subClasses hiddenContent">{
- templatesToHtml(dtpl.allSubClasses.sortBy(_.name), scala.xml.Text(", "))
- }</div>
- </div>
+ case dtpl: DocTemplateEntity if isSelf && !isReduced =>
+ val subs: Set[DocTemplateEntity] = HashSet.empty
+ def transitive(dtpl: DocTemplateEntity) {
+ for (sub <- dtpl.directSubClasses if !(subs contains sub)) {
+ subs add sub
+ transitive(sub)
+ }
+ }
+ transitive(dtpl)
+ if (subs.nonEmpty)
+ <div class="toggleContainer block">
+ <span class="toggle">Known Subclasses</span>
+ <div class="subClasses hiddenContent">{
+ templatesToHtml(subs.toList.sortBy(_.name), scala.xml.Text(", "))
+ }</div>
+ </div>
+ else NodeSeq.Empty
case _ => NodeSeq.Empty
}
@@ -660,7 +664,6 @@ class Template(universe: doc.Universe, generator: DiagramGenerator, tpl: DocTemp
case dtpl: DocTemplateEntity if isSelf && !isReduced =>
val diagram = f(dtpl)
if (diagram.isDefined) {
- val s = universe.settings
val diagramSvg = generator.generate(diagram.get, tpl, this)
if (diagramSvg != NodeSeq.Empty) {
<div class="toggleContainer block diagram-container" id={ id + "-container"}>
@@ -770,7 +773,7 @@ class Template(universe: doc.Universe, generator: DiagramGenerator, tpl: DocTemp
if (isReduced) NodeSeq.Empty else {
def paramsToHtml(vlsss: List[List[ValueParam]]): NodeSeq = {
def param0(vl: ValueParam): NodeSeq =
- // notice the }{ in the next lines, they are necessary to avoid a undesired withspace in output
+ // notice the }{ in the next lines, they are necessary to avoid undesired whitespace in output
<span name={ vl.name }>{
Text(vl.name)
}{ Text(": ") ++ typeToHtml(vl.resultType, hasLinks) }{
@@ -933,13 +936,13 @@ class Template(universe: doc.Universe, generator: DiagramGenerator, tpl: DocTemp
if (tpl.universe.settings.useStupidTypes.value)
superTpl match {
case dtpl: DocTemplateEntity =>
- val sig = signature(dtpl, false, true) \ "_"
+ val sig = signature(dtpl, isSelf = false, isReduced = true) \ "_"
sig
case tpl: TemplateEntity =>
Text(tpl.name)
}
else
- typeToHtml(superType, true)
+ typeToHtml(superType, hasLinks = true)
private def constraintToHtml(constraint: Constraint): NodeSeq = constraint match {
case ktcc: KnownTypeClassConstraint =>
@@ -951,21 +954,21 @@ class Template(universe: doc.Universe, generator: DiagramGenerator, tpl: DocTemp
context-bounded</a> ++ scala.xml.Text(" by " + tcc.typeClassEntity.qualifiedName + " (" + tcc.typeParamName + ": ") ++
templateToHtml(tcc.typeClassEntity) ++ scala.xml.Text(")")
case impl: ImplicitInScopeConstraint =>
- scala.xml.Text("an implicit value of type ") ++ typeToHtml(impl.implicitType, true) ++ scala.xml.Text(" is in scope")
+ scala.xml.Text("an implicit value of type ") ++ typeToHtml(impl.implicitType, hasLinks = true) ++ scala.xml.Text(" is in scope")
case eq: EqualTypeParamConstraint =>
scala.xml.Text(eq.typeParamName + " is " + eq.rhs.name + " (" + eq.typeParamName + " =:= ") ++
- typeToHtml(eq.rhs, true) ++ scala.xml.Text(")")
+ typeToHtml(eq.rhs, hasLinks = true) ++ scala.xml.Text(")")
case bt: BoundedTypeParamConstraint =>
scala.xml.Text(bt.typeParamName + " is a superclass of " + bt.lowerBound.name + " and a subclass of " +
bt.upperBound.name + " (" + bt.typeParamName + " >: ") ++
- typeToHtml(bt.lowerBound, true) ++ scala.xml.Text(" <: ") ++
- typeToHtml(bt.upperBound, true) ++ scala.xml.Text(")")
+ typeToHtml(bt.lowerBound, hasLinks = true) ++ scala.xml.Text(" <: ") ++
+ typeToHtml(bt.upperBound, hasLinks = true) ++ scala.xml.Text(")")
case lb: LowerBoundedTypeParamConstraint =>
scala.xml.Text(lb.typeParamName + " is a superclass of " + lb.lowerBound.name + " (" + lb.typeParamName + " >: ") ++
- typeToHtml(lb.lowerBound, true) ++ scala.xml.Text(")")
+ typeToHtml(lb.lowerBound, hasLinks = true) ++ scala.xml.Text(")")
case ub: UpperBoundedTypeParamConstraint =>
scala.xml.Text(ub.typeParamName + " is a subclass of " + ub.upperBound.name + " (" + ub.typeParamName + " <: ") ++
- typeToHtml(ub.upperBound, true) ++ scala.xml.Text(")")
+ typeToHtml(ub.upperBound, hasLinks = true) ++ scala.xml.Text(")")
}
}
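
The Known Subclasses hunk above computes the transitive closure of directSubClasses with a recursive walk and a mutable HashSet. The same idea as a small standalone worklist helper (a sketch, not part of the patch); closure(dtpl)(_.directSubClasses) would reproduce the subs set built above.

    import scala.collection.mutable

    // Generic worklist closure: expand a frontier through `next` until nothing new appears.
    def closure[A](start: A)(next: A => Iterable[A]): Set[A] = {
      val seen = mutable.Set.empty[A]
      val todo = mutable.Queue(next(start).toSeq: _*)
      while (todo.nonEmpty) {
        val x = todo.dequeue()
        if (seen add x) todo ++= next(x)   // add returns true only the first time we see x
      }
      seen.toSet
    }
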
diff --git a/src/compiler/scala/tools/nsc/doc/html/page/diagram/DiagramGenerator.scala b/src/scaladoc/scala/tools/nsc/doc/html/page/diagram/DiagramGenerator.scala
index 61c1819d11..61c1819d11 100644
--- a/src/compiler/scala/tools/nsc/doc/html/page/diagram/DiagramGenerator.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/html/page/diagram/DiagramGenerator.scala
diff --git a/src/compiler/scala/tools/nsc/doc/html/page/diagram/DiagramStats.scala b/src/scaladoc/scala/tools/nsc/doc/html/page/diagram/DiagramStats.scala
index ec00cace75..ab8e9e2756 100644
--- a/src/compiler/scala/tools/nsc/doc/html/page/diagram/DiagramStats.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/html/page/diagram/DiagramStats.scala
@@ -42,7 +42,7 @@ object DiagramStats {
private[this] var fixedImages = 0
def printStats(settings: Settings) = {
- if (settings.docDiagramsDebug.value) {
+ if (settings.docDiagramsDebug) {
settings.printMsg("\nDiagram generation running time breakdown:\n")
filterTrack.printStats(settings.printMsg)
modelTrack.printStats(settings.printMsg)
@@ -63,4 +63,4 @@ object DiagramStats {
def addBrokenImage(): Unit = brokenImages += 1
def addFixedImage(): Unit = fixedImages += 1
-} \ No newline at end of file
+}
diff --git a/src/compiler/scala/tools/nsc/doc/html/page/diagram/DotDiagramGenerator.scala b/src/scaladoc/scala/tools/nsc/doc/html/page/diagram/DotDiagramGenerator.scala
index 847367838c..4ff436bdc6 100644
--- a/src/compiler/scala/tools/nsc/doc/html/page/diagram/DotDiagramGenerator.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/html/page/diagram/DotDiagramGenerator.scala
@@ -2,7 +2,9 @@
* @author Damien Obrist
* @author Vlad Ureche
*/
-package scala.tools.nsc
+package scala
+package tools
+package nsc
package doc
package html
package page
@@ -10,7 +12,6 @@ package diagram
import scala.xml.{NodeSeq, XML, PrefixedAttribute, Elem, MetaData, Null, UnprefixedAttribute}
import scala.collection.immutable._
-import javax.xml.parsers.SAXParser
import model._
import model.diagram._
@@ -22,8 +23,6 @@ class DotDiagramGenerator(settings: doc.Settings) extends DiagramGenerator {
private var pathToLib: String = null
// maps nodes to unique indices
private var node2Index: Map[Node, Int] = null
- // maps an index to its corresponding node
- private var index2Node: Map[Int, Node] = null
// true if the current diagram is a class diagram
private var isInheritanceDiagram = false
// incoming implicit nodes (needed for determining the CSS class of a node)
@@ -34,7 +33,7 @@ class DotDiagramGenerator(settings: doc.Settings) extends DiagramGenerator {
private var counter = 0
def generate(diagram: Diagram, template: DocTemplateEntity, page: HtmlPage):NodeSeq = {
- counter = counter + 1;
+ counter = counter + 1
this.page = page
pathToLib = "../" * (page.templateToPath(template).size - 1) + "lib/"
val dot = generateDot(diagram)
@@ -42,7 +41,6 @@ class DotDiagramGenerator(settings: doc.Settings) extends DiagramGenerator {
// clean things up a bit, so we don't leave garbage on the heap
this.page = null
node2Index = null
- index2Node = null
incomingImplicitNodes = List()
result
}
@@ -116,7 +114,6 @@ class DotDiagramGenerator(settings: doc.Settings) extends DiagramGenerator {
node2Index = d.nodes.zipWithIndex.toMap
incomingImplicitNodes = List()
}
- index2Node = node2Index map {_.swap}
val implicitsDot = {
if (!isInheritanceDiagram) ""
@@ -212,10 +209,10 @@ class DotDiagramGenerator(settings: doc.Settings) extends DiagramGenerator {
private def node2Dot(node: Node) = {
// escape HTML characters in node names
- def escape(name: String) = name.replace("&", "&amp;").replace("<", "&lt;").replace(">", "&gt;");
+ def escape(name: String) = name.replace("&", "&amp;").replace("<", "&lt;").replace(">", "&gt;")
// assemble node attribues in a map
- var attr = scala.collection.mutable.Map[String, String]()
+ val attr = scala.collection.mutable.Map[String, String]()
// link
node.doctpl match {
@@ -320,13 +317,13 @@ class DotDiagramGenerator(settings: doc.Settings) extends DiagramGenerator {
* Calls dot with a given dot string and returns the SVG output.
*/
private def generateSVG(dotInput: String, template: DocTemplateEntity) = {
- val dotOutput = DiagramGenerator.getDotRunner.feedToDot(dotInput, template)
+ val dotOutput = DiagramGenerator.getDotRunner().feedToDot(dotInput, template)
var tSVG = -System.currentTimeMillis
val result = if (dotOutput != null) {
- val src = scala.io.Source.fromString(dotOutput);
+ val src = scala.io.Source.fromString(dotOutput)
try {
- val cpa = scala.xml.parsing.ConstructingParser.fromSource(src, false)
+ val cpa = scala.xml.parsing.ConstructingParser.fromSource(src, preserveWS = false)
val doc = cpa.document()
if (doc != null)
transform(doc.docElem)
@@ -334,7 +331,7 @@ class DotDiagramGenerator(settings: doc.Settings) extends DiagramGenerator {
NodeSeq.Empty
} catch {
case exc: Exception =>
- if (settings.docDiagramsDebug.value) {
+ if (settings.docDiagramsDebug) {
settings.printMsg("\n\n**********************************************************************")
settings.printMsg("Encountered an error while generating page for " + template.qualifiedName)
settings.printMsg(dotInput.toString.split("\n").mkString("\nDot input:\n\t","\n\t",""))
diff --git a/src/compiler/scala/tools/nsc/doc/html/page/diagram/DotRunner.scala b/src/scaladoc/scala/tools/nsc/doc/html/page/diagram/DotRunner.scala
index 5cdd5c74a4..4bed106f43 100644
--- a/src/compiler/scala/tools/nsc/doc/html/page/diagram/DotRunner.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/html/page/diagram/DotRunner.scala
@@ -10,12 +10,10 @@ import java.io.InputStreamReader
import java.io.OutputStreamWriter
import java.io.BufferedWriter
import java.io.BufferedReader
-import java.io.IOException
import scala.sys.process._
import scala.concurrent.SyncVar
import model._
-import model.diagram._
/** This class takes care of running the graphviz dot utility */
class DotRunner(settings: doc.Settings) {
@@ -100,7 +98,7 @@ class DotProcess(settings: doc.Settings) {
assert(!inputString.isSet)
assert(!outputString.isSet)
inputString.put(input)
- var result = outputString.take(settings.docDiagramsDotTimeout.value * 1000)
+ var result = outputString.take(settings.docDiagramsDotTimeout.value * 1000L)
if (error) result = null
result
@@ -183,7 +181,7 @@ class DotProcess(settings: doc.Settings) {
private[this] def outputFn(stdOut: InputStream): Unit = {
val reader = new BufferedReader(new InputStreamReader(stdOut))
- var buffer: StringBuilder = new StringBuilder()
+ val buffer: StringBuilder = new StringBuilder()
try {
var line = reader.readLine
while (!error && line != null) {
@@ -209,7 +207,6 @@ class DotProcess(settings: doc.Settings) {
private[this] def errorFn(stdErr: InputStream): Unit = {
val reader = new BufferedReader(new InputStreamReader(stdErr))
- var buffer: StringBuilder = new StringBuilder()
try {
var line = reader.readLine
while (line != null) {
@@ -225,4 +222,4 @@ class DotProcess(settings: doc.Settings) {
errorBuffer.append(" Error thread in " + templateName + ": Exception: " + exc + "\n")
}
}
-}
\ No newline at end of file
+}
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/arrow-down.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/arrow-down.png
index 7229603ae5..7229603ae5 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/arrow-down.png
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/arrow-down.png
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/arrow-right.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/arrow-right.png
index b2f2935dc9..b2f2935dc9 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/arrow-right.png
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/arrow-right.png
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/class.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/class.png
index 97edbd49db..97edbd49db 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/class.png
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/class.png
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/class_big.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/class_big.png
index cb1f638a58..cb1f638a58 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/class_big.png
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/class_big.png
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/class_diagram.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/class_diagram.png
index 9d7aec792b..9d7aec792b 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/class_diagram.png
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/class_diagram.png
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/class_to_object_big.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/class_to_object_big.png
index 5dd6e38d2e..5dd6e38d2e 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/class_to_object_big.png
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/class_to_object_big.png
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/constructorsbg.gif b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/constructorsbg.gif
index 2e3f5ea530..2e3f5ea530 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/constructorsbg.gif
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/constructorsbg.gif
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/conversionbg.gif b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/conversionbg.gif
index 4be145d0af..4be145d0af 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/conversionbg.gif
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/conversionbg.gif
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/defbg-blue.gif b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/defbg-blue.gif
index 69038337a7..69038337a7 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/defbg-blue.gif
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/defbg-blue.gif
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/defbg-green.gif b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/defbg-green.gif
index 36c43be3a2..36c43be3a2 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/defbg-green.gif
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/defbg-green.gif
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/diagrams.css b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/diagrams.css
index 5fe33f72f5..5fe33f72f5 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/diagrams.css
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/diagrams.css
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/diagrams.js b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/diagrams.js
index 478f2e38ac..478f2e38ac 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/diagrams.js
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/diagrams.js
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/filter_box_left.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/filter_box_left.png
index 0e8c893315..0e8c893315 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/filter_box_left.png
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/filter_box_left.png
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/filter_box_left.psd b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/filter_box_left.psd
index 4d740f3b17..4d740f3b17 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/filter_box_left.psd
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/filter_box_left.psd
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/filter_box_left2.gif b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/filter_box_left2.gif
index b9b49076a6..b9b49076a6 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/filter_box_left2.gif
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/filter_box_left2.gif
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/filter_box_right.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/filter_box_right.png
index f127e35b48..f127e35b48 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/filter_box_right.png
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/filter_box_right.png
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/filter_box_right.psd b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/filter_box_right.psd
index 63a1ae8349..63a1ae8349 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/filter_box_right.psd
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/filter_box_right.psd
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/filterbg.gif b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/filterbg.gif
index 542ba4aa5a..542ba4aa5a 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/filterbg.gif
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/filterbg.gif
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/filterboxbarbg.gif b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/filterboxbarbg.gif
index b5075c16cd..b5075c16cd 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/filterboxbarbg.gif
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/filterboxbarbg.gif
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/filterboxbarbg.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/filterboxbarbg.png
index d613cf5633..d613cf5633 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/filterboxbarbg.png
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/filterboxbarbg.png
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/filterboxbg.gif b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/filterboxbg.gif
index ae2f85823b..ae2f85823b 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/filterboxbg.gif
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/filterboxbg.gif
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/fullcommenttopbg.gif b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/fullcommenttopbg.gif
index a0d93f4844..a0d93f4844 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/fullcommenttopbg.gif
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/fullcommenttopbg.gif
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/index.css b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/index.css
index 55fb370a41..55fb370a41 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/index.css
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/index.css
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/index.js b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/index.js
index 96689ae701..96689ae701 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/index.js
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/index.js
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/jquery-ui.js b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/jquery-ui.js
index faab0cf1a3..faab0cf1a3 100755
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/jquery-ui.js
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/jquery-ui.js
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/jquery.js b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/jquery.js
index bc3fbc81b2..bc3fbc81b2 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/jquery.js
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/jquery.js
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/jquery.layout.js b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/jquery.layout.js
index 4dd48675b7..4dd48675b7 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/jquery.layout.js
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/jquery.layout.js
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/modernizr.custom.js b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/modernizr.custom.js
index 4688d633fe..4688d633fe 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/modernizr.custom.js
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/modernizr.custom.js
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/navigation-li-a.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/navigation-li-a.png
index 9b32288e04..9b32288e04 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/navigation-li-a.png
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/navigation-li-a.png
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/navigation-li.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/navigation-li.png
index fd0ad06e81..fd0ad06e81 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/navigation-li.png
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/navigation-li.png
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/object.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/object.png
index ad312793ea..ad312793ea 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/object.png
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/object.png
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/object_big.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/object_big.png
index 67ffca79de..67ffca79de 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/object_big.png
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/object_big.png
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/object_diagram.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/object_diagram.png
index 6e9f2f743f..6e9f2f743f 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/object_diagram.png
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/object_diagram.png
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/object_to_class_big.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/object_to_class_big.png
index 7502942eb6..7502942eb6 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/object_to_class_big.png
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/object_to_class_big.png
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/object_to_trait_big.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/object_to_trait_big.png
index c777bfce8d..c777bfce8d 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/object_to_trait_big.png
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/object_to_trait_big.png
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/object_to_type_big.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/object_to_type_big.png
index 7502942eb6..7502942eb6 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/object_to_type_big.png
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/object_to_type_big.png
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/ownderbg2.gif b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/ownderbg2.gif
index 848dd5963a..848dd5963a 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/ownderbg2.gif
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/ownderbg2.gif
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/ownerbg.gif b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/ownerbg.gif
index 34a04249ee..34a04249ee 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/ownerbg.gif
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/ownerbg.gif
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/ownerbg2.gif b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/ownerbg2.gif
index 2ed33b0aa4..2ed33b0aa4 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/ownerbg2.gif
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/ownerbg2.gif
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/package.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/package.png
index 6ea17ac320..6ea17ac320 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/package.png
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/package.png
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/package_big.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/package_big.png
index 529aa93188..529aa93188 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/package_big.png
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/package_big.png
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/packagesbg.gif b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/packagesbg.gif
index 00c3378a2a..00c3378a2a 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/packagesbg.gif
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/packagesbg.gif
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/raphael-min.js b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/raphael-min.js
index d30dbad858..d30dbad858 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/raphael-min.js
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/raphael-min.js
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/ref-index.css b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/ref-index.css
index 7d64b9c5c5..7d64b9c5c5 100755
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/ref-index.css
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/ref-index.css
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/remove.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/remove.png
index 4625f9df74..4625f9df74 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/remove.png
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/remove.png
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/remove.psd b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/remove.psd
index 3764f82ccb..3764f82ccb 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/remove.psd
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/remove.psd
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/scheduler.js b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/scheduler.js
index 4417f5b438..4417f5b438 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/scheduler.js
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/scheduler.js
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/selected-implicits.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/selected-implicits.png
index bc29efb3e6..bc29efb3e6 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/selected-implicits.png
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/selected-implicits.png
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/selected-right-implicits.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/selected-right-implicits.png
index 8313f4975b..8313f4975b 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/selected-right-implicits.png
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/selected-right-implicits.png
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/selected-right.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/selected-right.png
index 04eda2f307..04eda2f307 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/selected-right.png
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/selected-right.png
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/selected.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/selected.png
index c89765239e..c89765239e 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/selected.png
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/selected.png
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/selected2-right.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/selected2-right.png
index bf984ef0ba..bf984ef0ba 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/selected2-right.png
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/selected2-right.png
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/selected2.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/selected2.png
index a790bb1169..a790bb1169 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/selected2.png
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/selected2.png
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/signaturebg.gif b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/signaturebg.gif
index b6ac4415e4..b6ac4415e4 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/signaturebg.gif
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/signaturebg.gif
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/signaturebg2.gif b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/signaturebg2.gif
index 9aae5ba0aa..9aae5ba0aa 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/signaturebg2.gif
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/signaturebg2.gif
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/template.css b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/template.css
index b066027f04..b066027f04 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/template.css
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/template.css
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/template.js b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/template.js
index 6d1caf6d50..6d1caf6d50 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/template.js
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/template.js
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/tools.tooltip.js b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/tools.tooltip.js
index 0af34eca4c..0af34eca4c 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/tools.tooltip.js
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/tools.tooltip.js
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/trait.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/trait.png
index fb961a2eda..fb961a2eda 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/trait.png
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/trait.png
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/trait_big.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/trait_big.png
index 625d9251cb..625d9251cb 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/trait_big.png
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/trait_big.png
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/trait_diagram.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/trait_diagram.png
index 88983254ce..88983254ce 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/trait_diagram.png
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/trait_diagram.png
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/trait_to_object_big.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/trait_to_object_big.png
index d0cd7fd512..d0cd7fd512 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/trait_to_object_big.png
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/trait_to_object_big.png
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/type.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/type.png
index 6c6e1fe2f5..6c6e1fe2f5 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/type.png
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/type.png
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/type_big.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/type_big.png
index 04c8794e92..04c8794e92 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/type_big.png
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/type_big.png
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/type_diagram.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/type_diagram.png
index d8152529fd..d8152529fd 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/type_diagram.png
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/type_diagram.png
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/type_tags.ai b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/type_tags.ai
index 3b5c47c9e3..3b5c47c9e3 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/type_tags.ai
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/type_tags.ai
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/type_to_object_big.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/type_to_object_big.png
index ef2615bacc..ef2615bacc 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/type_to_object_big.png
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/type_to_object_big.png
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/typebg.gif b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/typebg.gif
index 2fcc77b2e8..2fcc77b2e8 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/typebg.gif
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/typebg.gif
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/unselected.png b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/unselected.png
index d5ac639405..d5ac639405 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/unselected.png
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/unselected.png
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/valuemembersbg.gif b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/valuemembersbg.gif
index 2a949311d7..2a949311d7 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/valuemembersbg.gif
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/valuemembersbg.gif
Binary files differ
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/versions.txt b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/versions.txt
index 17d1caeb66..17d1caeb66 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/versions.txt
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/versions.txt
diff --git a/src/compiler/scala/tools/nsc/doc/model/CommentFactory.scala b/src/scaladoc/scala/tools/nsc/doc/model/CommentFactory.scala
index 9ba89146c0..4e06e5bd16 100644
--- a/src/compiler/scala/tools/nsc/doc/model/CommentFactory.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/model/CommentFactory.scala
@@ -18,39 +18,26 @@ import scala.language.postfixOps
* Call `parse` to run the parser. Note that the parser is stateless and
* should only be built once for a given Scaladoc run.
*
- * @param reporter The reporter on which user messages (error, warnings) should be printed.
- *
* @author Manohar Jonnalagedda
* @author Gilles Dubochet */
trait CommentFactory extends base.CommentFactoryBase {
thisFactory: ModelFactory with CommentFactory with MemberLookup =>
val global: Global
- import global.{ reporter, definitions, Symbol }
-
- protected val commentCache = mutable.HashMap.empty[(Symbol, TemplateImpl), Comment]
+ import global.{ reporter, definitions, Symbol, NoSymbol }
- def addCommentBody(sym: Symbol, inTpl: TemplateImpl, docStr: String, docPos: global.Position): Symbol = {
- commentCache += (sym, inTpl) -> parse(docStr, docStr, docPos, None)
- sym
- }
+ protected val commentCache = mutable.HashMap.empty[(Symbol, DocTemplateImpl), Option[Comment]]
- def comment(sym: Symbol, currentTpl: Option[DocTemplateImpl], inTpl: DocTemplateImpl): Option[Comment] = {
- val key = (sym, inTpl)
- if (commentCache isDefinedAt key)
- Some(commentCache(key))
- else {
- val c = defineComment(sym, currentTpl, inTpl)
- if (c isDefined) commentCache += (sym, inTpl) -> c.get
- c
- }
- }
+ def comment(sym: Symbol, linkTarget: DocTemplateImpl, inTpl: DocTemplateImpl): Option[Comment] =
+ commentCache.getOrElseUpdate((sym, inTpl), {
+ defineComment(sym, linkTarget, inTpl)
+ })
/** A comment is usually created by the parser, however for some special
* cases we have to give some `inTpl` comments (parent class for example)
* to the comment of the symbol.
* This function manages some of those cases : Param accessor and Primary constructor */
- def defineComment(sym: Symbol, currentTpl: Option[DocTemplateImpl], inTpl: DocTemplateImpl):Option[Comment] = {
+ def defineComment(sym: Symbol, linkTarget: DocTemplateImpl, inTpl: DocTemplateImpl):Option[Comment] = {
//param accessor case
// We just need the @param argument, we put it into the body
@@ -87,8 +74,7 @@ trait CommentFactory extends base.CommentFactoryBase {
else {
val rawComment = global.expandedDocComment(sym, inTpl.sym).trim
if (rawComment != "") {
- val tplOpt = if (currentTpl.isDefined) currentTpl else Some(inTpl)
- val c = parse(rawComment, global.rawDocComment(sym), global.docCommentPos(sym), tplOpt)
+ val c = parse(rawComment, global.rawDocComment(sym), global.docCommentPos(sym), linkTarget)
Some(c)
}
else None
@@ -96,9 +82,9 @@ trait CommentFactory extends base.CommentFactoryBase {
}
- protected def parse(comment: String, src: String, pos: Position, inTplOpt: Option[DocTemplateImpl] = None): Comment = {
- assert(!inTplOpt.isDefined || inTplOpt.get != null)
- parseAtSymbol(comment, src, pos, inTplOpt map (_.sym))
+ protected def parse(comment: String, src: String, pos: Position, linkTarget: DocTemplateImpl): Comment = {
+ val sym = if (linkTarget eq null) NoSymbol else linkTarget.sym
+ parseAtSymbol(comment, src, pos, sym)
}
/** Parses a string containing wiki syntax into a `Comment` object.
@@ -107,8 +93,8 @@ trait CommentFactory extends base.CommentFactoryBase {
* - Removed start-of-line star and one whitespace afterwards (if present).
* - Removed all end-of-line whitespace.
* - Only `endOfLine` is used to mark line endings. */
- def parseWiki(string: String, pos: Position, inTplOpt: Option[DocTemplateImpl]): Body = {
- assert(!inTplOpt.isDefined || inTplOpt.get != null)
- parseWikiAtSymbol(string,pos, inTplOpt map (_.sym))
+ def parseWiki(string: String, pos: Position, inTpl: DocTemplateImpl): Body = {
+ val sym = if (inTpl eq null) NoSymbol else inTpl.sym
+ parseWikiAtSymbol(string,pos, sym)
}
}
diff --git a/src/compiler/scala/tools/nsc/doc/model/Entity.scala b/src/scaladoc/scala/tools/nsc/doc/model/Entity.scala
index cbc1a23d44..6932f01e9a 100644
--- a/src/compiler/scala/tools/nsc/doc/model/Entity.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/model/Entity.scala
@@ -23,10 +23,6 @@ import diagram._
* - type and value parameters;
* - annotations. */
trait Entity {
-
- /** Similar to symbols, so we can track entities */
- def id: Int
-
/** The name of the entity. Note that the name does not qualify this entity uniquely; use its `qualifiedName`
* instead. */
def name : String
@@ -59,9 +55,6 @@ trait Entity {
/** Indicates whether this entity lives in the types namespace (classes, traits, abstract/alias types) */
def isType: Boolean
-
- /** Indicates whether this entity lives in the terms namespace (objects, packages, methods, values) */
- def isTerm: Boolean
}
object Entity {
@@ -97,9 +90,6 @@ trait TemplateEntity extends Entity {
/** Whether documentation is available for this template. */
def isDocTemplate: Boolean
- /** Whether documentation is available for this template. */
- def isNoDocMemberTemplate: Boolean
-
/** Whether this template is a case class. */
def isCaseClass: Boolean
@@ -149,9 +139,6 @@ trait MemberEntity extends Entity {
/** Some migration warning if this member has a migration annotation, or none otherwise. */
def migration: Option[Body]
- @deprecated("Use `inDefinitionTemplates` instead", "2.9.0")
- def inheritedFrom: List[TemplateEntity]
-
/** For members representing values: the type of the value returned by this member; for members
* representing types: the type itself. */
def resultType: TypeEntity
@@ -177,12 +164,6 @@ trait MemberEntity extends Entity {
/** Whether this member is an abstract type. */
def isAbstractType: Boolean
- /** Whether this member is a template. */
- def isTemplate: Boolean
-
- /** Whether this member is implicit. */
- def isImplicit: Boolean
-
/** Whether this member is abstract. */
def isAbstract: Boolean
@@ -277,10 +258,6 @@ trait DocTemplateEntity extends MemberTemplateEntity {
* This template's linearization contains all of its direct and indirect super-types. */
def linearizationTypes: List[TypeEntity]
- /** All class, trait and object templates for which this template is a direct or indirect super-class or super-trait.
- * Only templates for which documentation is available in the universe (`DocTemplateEntity`) are listed. */
- def allSubClasses: List[DocTemplateEntity]
-
/** All class, trait and object templates for which this template is a *direct* super-class or super-trait.
* Only templates for which documentation is available in the universe (`DocTemplateEntity`) are listed. */
def directSubClasses: List[DocTemplateEntity]
@@ -384,14 +361,9 @@ trait RootPackage extends Package
/** A non-template member (method, value, lazy value, variable, constructor, alias type, and abstract type). */
trait NonTemplateMemberEntity extends MemberEntity {
-
/** Whether this member is a use case. A use case is a member which does not exist in the documented code.
* It corresponds to a real member, and provides a simplified, yet compatible signature for that member. */
def isUseCase: Boolean
-
- /** Whether this member is a bridge member. A bridge member does only exist for binary compatibility reasons
- * and should not appear in ScalaDoc. */
- def isBridge: Boolean
}
@@ -506,12 +478,6 @@ trait ImplicitConversion {
/** The result type after the conversion */
def targetType: TypeEntity
- /** The result type after the conversion
- * Note: not all targetTypes have a corresponding template. Examples include conversions resulting in refinement
- * types. Need to check it's not option!
- */
- def targetTemplate: Option[TemplateEntity]
-
/** The components of the implicit conversion type parents */
def targetTypeComponents: List[(TemplateEntity, TypeEntity)]
diff --git a/src/compiler/scala/tools/nsc/doc/model/IndexModelFactory.scala b/src/scaladoc/scala/tools/nsc/doc/model/IndexModelFactory.scala
index 4ee6daf73e..53410fd4ad 100755
--- a/src/compiler/scala/tools/nsc/doc/model/IndexModelFactory.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/model/IndexModelFactory.scala
@@ -3,7 +3,8 @@
* @author Pedro Furlanetto
*/
-package scala.tools.nsc
+package scala
+package tools.nsc
package doc
package model
@@ -17,8 +18,6 @@ object IndexModelFactory {
object result extends mutable.HashMap[Char,SymbolMap] {
- /* Owner template ordering */
- implicit def orderingSet = math.Ordering.String.on { x: MemberEntity => x.name.toLowerCase }
/* symbol name ordering */
implicit def orderingMap = math.Ordering.String
diff --git a/src/compiler/scala/tools/nsc/doc/model/MemberLookup.scala b/src/scaladoc/scala/tools/nsc/doc/model/MemberLookup.scala
index 23259a4ae8..339129bdbc 100644
--- a/src/compiler/scala/tools/nsc/doc/model/MemberLookup.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/model/MemberLookup.scala
@@ -49,13 +49,6 @@ trait MemberLookup extends base.MemberLookupBase {
settings.extUrlMapping get path map { url =>
LinkToExternal(name, url + "#" + name)
}
- } orElse {
- // Deprecated option.
- settings.extUrlPackageMapping find {
- case (pkg, _) => name startsWith pkg
- } map {
- case (_, url) => LinkToExternal(name, url + "#" + name)
- }
}
}
diff --git a/src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala b/src/scaladoc/scala/tools/nsc/doc/model/ModelFactory.scala
index d9b173bc43..c4e3c115be 100644
--- a/src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/model/ModelFactory.scala
@@ -1,20 +1,18 @@
/* NSC -- new Scala compiler -- Copyright 2007-2013 LAMP/EPFL */
-package scala.tools.nsc
+package scala
+package tools.nsc
package doc
package model
-import base._
import base.comment._
import diagram._
import scala.collection._
import scala.util.matching.Regex
-
import symtab.Flags
import io._
-
import model.{ RootPackage => RootPackageEntity }
/** This trait extracts all required information for documentation from compilation units */
@@ -43,20 +41,6 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
def modelFinished: Boolean = _modelFinished
private var universe: Universe = null
- private def dbg(msg: String) = if (sys.props contains "scala.scaladoc.debug") println(msg)
- protected def closestPackage(sym: Symbol) = {
- if (sym.isPackage || sym.isPackageClass) sym
- else sym.enclosingPackage
- }
-
- private def printWithoutPrefix(memberSym: Symbol, templateSym: Symbol) = {
- dbg(
- "memberSym " + memberSym + " templateSym " + templateSym + " encls = " +
- closestPackage(memberSym) + ", " + closestPackage(templateSym)
- )
- memberSym.isOmittablePrefix || (closestPackage(memberSym) == closestPackage(templateSym))
- }
-
def makeModel: Option[Universe] = {
val universe = new Universe { thisUniverse =>
thisFactory.universe = thisUniverse
@@ -65,7 +49,7 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
}
_modelFinished = true
// complete the links between model entities, everything that couldn't have been done before
- universe.rootPackage.completeModel
+ universe.rootPackage.completeModel()
Some(universe) filter (_.rootPackage != null)
}
@@ -86,7 +70,6 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
/* ============== IMPLEMENTATION PROVIDING ENTITY TYPES ============== */
abstract class EntityImpl(val sym: Symbol, val inTpl: TemplateImpl) extends Entity {
- val id = { ids += 1; ids }
val name = optimize(sym.nameString)
val universe = thisFactory.universe
@@ -100,7 +83,6 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
def annotations = sym.annotations.map(makeAnnotation)
def inPackageObject: Boolean = sym.owner.isModuleClass && sym.owner.sourceModule.isPackageObject
def isType = sym.name.isTypeName
- def isTerm = sym.name.isTermName
}
trait TemplateImpl extends EntityImpl with TemplateEntity {
@@ -112,33 +94,27 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
def isObject = sym.isModule && !sym.isPackage
def isCaseClass = sym.isCaseClass
def isRootPackage = false
- def isNoDocMemberTemplate = false
def selfType = if (sym.thisSym eq sym) None else Some(makeType(sym.thisSym.typeOfThis, this))
}
abstract class MemberImpl(sym: Symbol, inTpl: DocTemplateImpl) extends EntityImpl(sym, inTpl) with MemberEntity {
+ // If the current tpl is a DocTemplate, we consider itself as the root for resolving link targets (instead of the
+ // package the class is in) -- so people can refer to methods directly [[foo]], instead of using [[MyClass.foo]]
+ // in the doc comment of MyClass
+ def linkTarget: DocTemplateImpl = inTpl
+
lazy val comment = {
- // If the current tpl is a DocTemplate, we consider itself as the root for resolving link targets (instead of the
- // package the class is in) -- so people can refer to methods directly [[foo]], instead of using [[MyClass.foo]]
- // in the doc comment of MyClass
- val thisTpl = this match {
- case d: DocTemplateImpl => Some(d)
- case _ => None
- }
- if (inTpl != null) thisFactory.comment(sym, thisTpl, inTpl) else None
+ val documented = if (sym.hasAccessorFlag) sym.accessed else sym
+ thisFactory.comment(documented, linkTarget, inTpl)
}
- def group = if (comment.isDefined) comment.get.group.getOrElse(defaultGroup) else defaultGroup
+ def group = comment flatMap (_.group) getOrElse defaultGroup
override def inTemplate = inTpl
override def toRoot: List[MemberImpl] = this :: inTpl.toRoot
- def inDefinitionTemplates = this match {
- case mb: NonTemplateMemberEntity if (mb.useCaseOf.isDefined) =>
- mb.useCaseOf.get.inDefinitionTemplates
- case _ =>
- if (inTpl == null)
- List(makeRootPackage)
- else
- makeTemplate(sym.owner)::(sym.allOverriddenSymbols map { inhSym => makeTemplate(inhSym.owner) })
- }
+ def inDefinitionTemplates =
+ if (inTpl == null)
+ docTemplatesCache(RootPackage) :: Nil
+ else
+ makeTemplate(sym.owner)::(sym.allOverriddenSymbols map { inhSym => makeTemplate(inhSym.owner) })
def visibility = {
if (sym.isPrivateLocal) PrivateInInstance()
else if (sym.isProtectedLocal) ProtectedInInstance()
@@ -149,8 +125,10 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
else None
if (sym.isPrivate) PrivateInTemplate(inTpl)
else if (sym.isProtected) ProtectedInTemplate(qual getOrElse inTpl)
- else if (qual.isDefined) PrivateInTemplate(qual.get)
- else Public()
+ else qual match {
+ case Some(q) => PrivateInTemplate(q)
+ case None => Public()
+ }
}
}
def flags = {
@@ -172,9 +150,9 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
def deprecation =
if (sym.isDeprecated)
Some((sym.deprecationMessage, sym.deprecationVersion) match {
- case (Some(msg), Some(ver)) => parseWiki("''(Since version " + ver + ")'' " + msg, NoPosition, Some(inTpl))
- case (Some(msg), None) => parseWiki(msg, NoPosition, Some(inTpl))
- case (None, Some(ver)) => parseWiki("''(Since version " + ver + ")''", NoPosition, Some(inTpl))
+ case (Some(msg), Some(ver)) => parseWiki("''(Since version " + ver + ")'' " + msg, NoPosition, inTpl)
+ case (Some(msg), None) => parseWiki(msg, NoPosition, inTpl)
+ case (None, Some(ver)) => parseWiki("''(Since version " + ver + ")''", NoPosition, inTpl)
case (None, None) => Body(Nil)
})
else
@@ -182,16 +160,14 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
def migration =
if(sym.hasMigrationAnnotation)
Some((sym.migrationMessage, sym.migrationVersion) match {
- case (Some(msg), Some(ver)) => parseWiki("''(Changed in version " + ver + ")'' " + msg, NoPosition, Some(inTpl))
- case (Some(msg), None) => parseWiki(msg, NoPosition, Some(inTpl))
- case (None, Some(ver)) => parseWiki("''(Changed in version " + ver + ")''", NoPosition, Some(inTpl))
+ case (Some(msg), Some(ver)) => parseWiki("''(Changed in version " + ver + ")'' " + msg, NoPosition, inTpl)
+ case (Some(msg), None) => parseWiki(msg, NoPosition, inTpl)
+ case (None, Some(ver)) => parseWiki("''(Changed in version " + ver + ")''", NoPosition, inTpl)
case (None, None) => Body(Nil)
})
else
None
- def inheritedFrom =
- if (inTemplate.sym == this.sym.owner || inTemplate.sym.isPackage) Nil else
- makeTemplate(this.sym.owner) :: (sym.allOverriddenSymbols map { os => makeTemplate(os.owner) })
+
def resultType = {
def resultTpe(tpe: Type): Type = tpe match { // similar to finalResultType, except that it leaves singleton types alone
case PolyType(_, res) => resultTpe(res)
@@ -199,14 +175,13 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
case NullaryMethodType(res) => resultTpe(res)
case _ => tpe
}
- val tpe = if (!isImplicitlyInherited) sym.tpe else byConversion.get.toType memberInfo sym
+ val tpe = byConversion.fold(sym.tpe) (_.toType memberInfo sym)
makeTypeInTemplateContext(resultTpe(tpe), inTemplate, sym)
}
def isDef = false
def isVal = false
def isLazyVal = false
def isVar = false
- def isImplicit = sym.isImplicit
def isConstructor = false
def isAliasType = false
def isAbstractType = false
@@ -214,7 +189,7 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
// for the explanation of conversion == null see comment on flags
((!sym.isTrait && ((sym hasFlag Flags.ABSTRACT) || (sym hasFlag Flags.DEFERRED)) && (!isImplicitlyInherited)) ||
sym.isAbstractClass || sym.isAbstractType) && !sym.isSynthetic
- def isTemplate = false
+
def signature = externalSignature(sym)
lazy val signatureCompat = {
@@ -243,7 +218,7 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
(name + tParams(this) + defParams(this) +":"+ resultType.name).replaceAll("\\s","") // no spaces allowed, they break links
}
// these only apply for NonTemplateMemberEntities
- def useCaseOf: Option[MemberEntity] = None
+ def useCaseOf: Option[MemberImpl] = None
def byConversion: Option[ImplicitConversionImpl] = None
def isImplicitlyInherited = false
def isShadowedImplicit = false
@@ -255,8 +230,8 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
* exists, but should not be documented (either it's not included in the source or it's not visible)
*/
class NoDocTemplateImpl(sym: Symbol, inTpl: TemplateImpl) extends EntityImpl(sym, inTpl) with TemplateImpl with HigherKindedImpl with NoDocTemplate {
- assert(modelFinished)
- assert(!(noDocTemplatesCache isDefinedAt sym))
+ assert(modelFinished, this)
+ assert(!(noDocTemplatesCache isDefinedAt sym), (sym, noDocTemplatesCache(sym)))
noDocTemplatesCache += (sym -> this)
def isDocTemplate = false
}
@@ -268,25 +243,10 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
*/
abstract class MemberTemplateImpl(sym: Symbol, inTpl: DocTemplateImpl) extends MemberImpl(sym, inTpl) with TemplateImpl with HigherKindedImpl with MemberTemplateEntity {
// no templates cache for this class, each owner gets its own instance
- override def isTemplate = true
def isDocTemplate = false
- override def isNoDocMemberTemplate = true
lazy val definitionName = optimize(inDefinitionTemplates.head.qualifiedName + "." + name)
def valueParams: List[List[ValueParam]] = Nil /** TODO, these are now only computed for DocTemplates */
- // Seems unused
- // def parentTemplates =
- // if (sym.isPackage || sym == AnyClass)
- // List()
- // else
- // sym.tpe.parents.flatMap { tpe: Type =>
- // val tSym = tpe.typeSymbol
- // if (tSym != NoSymbol)
- // List(makeTemplate(tSym))
- // else
- // List()
- // } filter (_.isInstanceOf[DocTemplateEntity])
-
def parentTypes =
if (sym.isPackage || sym == AnyClass) List() else {
val tps = (this match {
@@ -306,23 +266,24 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
* All ancestors of the template and all non-package members.
*/
abstract class DocTemplateImpl(sym: Symbol, inTpl: DocTemplateImpl) extends MemberTemplateImpl(sym, inTpl) with DocTemplateEntity {
- assert(!modelFinished)
+ assert(!modelFinished, (sym, inTpl))
assert(!(docTemplatesCache isDefinedAt sym), sym)
docTemplatesCache += (sym -> this)
- if (settings.verbose.value)
+ if (settings.verbose)
inform("Creating doc template for " + sym)
+ override def linkTarget: DocTemplateImpl = this
override def toRoot: List[DocTemplateImpl] = this :: inTpl.toRoot
- protected def inSourceFromSymbol(symbol: Symbol) =
- if (symbol.sourceFile != null && ! symbol.isSynthetic)
- Some((symbol.sourceFile, symbol.pos.line))
+ protected def reprSymbol: Symbol = sym
+
+ def inSource =
+ if (reprSymbol.sourceFile != null && ! reprSymbol.isSynthetic)
+ Some((reprSymbol.sourceFile, reprSymbol.pos.line))
else
None
- def inSource = inSourceFromSymbol(sym)
-
def sourceUrl = {
def fixPath(s: String) = s.replaceAll("\\" + java.io.File.separator, "/")
val assumedSourceRoot = fixPath(settings.sourcepath.value) stripSuffix "/"
@@ -344,20 +305,10 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
else None
}
- protected def linearizationFromSymbol(symbol: Symbol): List[(TemplateEntity, TypeEntity)] = {
- symbol.ancestors map { ancestor =>
- val typeEntity = makeType(symbol.info.baseType(ancestor), this)
- val tmplEntity = makeTemplate(ancestor) match {
- case tmpl: DocTemplateImpl => tmpl registerSubClass this ; tmpl
- case tmpl => tmpl
- }
- (tmplEntity, typeEntity)
- }
- }
-
- lazy val linearization = linearizationFromSymbol(sym)
- def linearizationTemplates = linearization map { _._1 }
- def linearizationTypes = linearization map { _._2 }
+ lazy val (linearizationTemplates, linearizationTypes) =
+ reprSymbol.ancestors map { ancestor =>
+ (makeTemplate(ancestor), makeType(reprSymbol.info.baseType(ancestor), this))
+ } unzip
/* Subclass cache */
private lazy val subClassesCache = (
@@ -368,8 +319,7 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
if (subClassesCache != null)
subClassesCache += sc
}
- def allSubClasses = if (subClassesCache == null) Nil else subClassesCache.toList
- def directSubClasses = allSubClasses.filter(_.parentTypes.map(_._1).contains(this))
+ def directSubClasses = if (subClassesCache == null) Nil else subClassesCache.toList
/* Implicitly convertible class cache */
private var implicitlyConvertibleClassesCache: mutable.ListBuffer[(DocTemplateImpl, ImplicitConversionImpl)] = null
@@ -388,15 +338,15 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
// the implicit conversions are generated eagerly, but the members generated by implicit conversions are added
// lazily, on completeModel
val conversions: List[ImplicitConversionImpl] =
- if (settings.docImplicits.value) makeImplicitConversions(sym, this) else Nil
+ if (settings.docImplicits) makeImplicitConversions(sym, this) else Nil
// members as given by the compiler
lazy val memberSyms = sym.info.members.filter(s => membersShouldDocument(s, this)).toList
// the inherited templates (classes, traits or objects)
- var memberSymsLazy = memberSyms.filter(t => templateShouldDocument(t, this) && !inOriginalOwner(t, this))
+ val memberSymsLazy = memberSyms.filter(t => templateShouldDocument(t, this) && !inOriginalOwner(t, this))
// the direct members (methods, values, vars, types and directly contained templates)
- var memberSymsEager = memberSyms.filter(!memberSymsLazy.contains(_))
+ val memberSymsEager = memberSyms.filter(!memberSymsLazy.contains(_))
// the members generated by the symbols in memberSymsEager
val ownMembers = (memberSymsEager.flatMap(makeMember(_, None, this)))
@@ -419,18 +369,18 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
if (!sym.isAliasType && !sym.isAbstractType)
for (member <- members)
member match {
- case d: DocTemplateImpl => d.completeModel
+ case d: DocTemplateImpl => d.completeModel()
case _ =>
}
members :::= memberSymsLazy.map(modelCreation.createLazyTemplateMember(_, this))
- // compute linearization to register subclasses
- linearization
outgoingImplicitlyConvertedClasses
+ for (pt <- sym.info.parents; parentTemplate <- findTemplateMaybe(pt.typeSymbol)) parentTemplate registerSubClass this
+
// the members generated by the symbols in memberSymsEager PLUS the members from the usecases
- val allMembers = ownMembers ::: ownMembers.flatMap(_.useCaseOf.map(_.asInstanceOf[MemberImpl])).distinct
+ val allMembers = ownMembers ::: ownMembers.flatMap(_.useCaseOf).distinct
implicitsShadowing = makeShadowingTable(allMembers, conversions, this)
// finally, add the members generated by implicit conversions
members :::= conversions.flatMap(_.memberImpls)
@@ -442,17 +392,16 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
conversions flatMap (conv =>
if (!implicitExcluded(conv.conversionQualifiedName))
conv.targetTypeComponents map {
- case pair@(template, tpe) =>
+ case (template, tpe) =>
template match {
case d: DocTemplateImpl if (d != this) => d.registerImplicitlyConvertibleClass(this, conv)
case _ => // nothing
}
- (pair._1, pair._2, conv)
+ (template, tpe, conv)
}
else List()
)
- override def isTemplate = true
override def isDocTemplate = true
private[this] lazy val companionSymbol =
if (sym.isAliasType || sym.isAbstractType) {
@@ -511,47 +460,39 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
abstract class PackageImpl(sym: Symbol, inTpl: PackageImpl) extends DocTemplateImpl(sym, inTpl) with Package {
override def inTemplate = inTpl
override def toRoot: List[PackageImpl] = this :: inTpl.toRoot
- override lazy val (inSource, linearization) = {
- val representive = sym.info.members.find {
- s => s.isPackageObject
- } getOrElse sym
- (inSourceFromSymbol(representive), linearizationFromSymbol(representive))
- }
+ override def reprSymbol = sym.info.members.find (_.isPackageObject) getOrElse sym
+
def packages = members collect { case p: PackageImpl if !(droppedPackages contains p) => p }
}
abstract class RootPackageImpl(sym: Symbol) extends PackageImpl(sym, null) with RootPackageEntity
abstract class NonTemplateMemberImpl(sym: Symbol, conversion: Option[ImplicitConversionImpl],
- override val useCaseOf: Option[MemberEntity], inTpl: DocTemplateImpl)
+ override val useCaseOf: Option[MemberImpl], inTpl: DocTemplateImpl)
extends MemberImpl(sym, inTpl) with NonTemplateMemberEntity {
override lazy val comment = {
- val inRealTpl =
- /* Variable precendence order for implicitly added members: Take the variable defifinitions from ...
- * 1. the target of the implicit conversion
- * 2. the definition template (owner)
- * 3. the current template
- */
- if (conversion.isDefined) findTemplateMaybe(conversion.get.toType.typeSymbol) match {
- case Some(d) if d != makeRootPackage => d //in case of NoSymbol, it will give us the root package
- case _ => findTemplateMaybe(sym.owner) match {
- case Some(d) if d != makeRootPackage => d //in case of NoSymbol, it will give us the root package
- case _ => inTpl
- }
- } else inTpl
- if (inRealTpl != null) thisFactory.comment(sym, None, inRealTpl) else None
+ def nonRootTemplate(sym: Symbol): Option[DocTemplateImpl] =
+ if (sym eq RootPackage) None else findTemplateMaybe(sym)
+      /* Variable precedence order for implicitly added members: Take the variable definitions from ...
+ * 1. the target of the implicit conversion
+ * 2. the definition template (owner)
+ * 3. the current template
+ */
+ val inRealTpl = conversion.flatMap { conv =>
+ nonRootTemplate(conv.toType.typeSymbol)
+ } orElse nonRootTemplate(sym.owner) orElse Option(inTpl)
+ inRealTpl flatMap { tpl =>
+ thisFactory.comment(sym, tpl, tpl)
+ }
}
+ override def inDefinitionTemplates = useCaseOf.fold(super.inDefinitionTemplates)(_.inDefinitionTemplates)
+
override def qualifiedName = optimize(inTemplate.qualifiedName + "#" + name)
lazy val definitionName = {
- // this contrived name is here just to satisfy some older tests -- if you decide to remove it, be my guest, and
- // also remove property("package object") from test/scaladoc/scalacheck/HtmlFactoryTest.scala so you don't break
- // the test suite...
- val packageObject = if (inPackageObject) ".package" else ""
- if (!conversion.isDefined) optimize(inDefinitionTemplates.head.qualifiedName + packageObject + "#" + name)
- else optimize(conversion.get.conversionQualifiedName + packageObject + "#" + name)
+ val qualifiedName = conversion.fold(inDefinitionTemplates.head.qualifiedName)(_.conversionQualifiedName)
+ optimize(qualifiedName + "#" + name)
}
- def isBridge = sym.isBridge
def isUseCase = useCaseOf.isDefined
override def byConversion: Option[ImplicitConversionImpl] = conversion
override def isImplicitlyInherited = { assert(modelFinished); conversion.isDefined }
@@ -561,10 +502,10 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
}
abstract class NonTemplateParamMemberImpl(sym: Symbol, conversion: Option[ImplicitConversionImpl],
- useCaseOf: Option[MemberEntity], inTpl: DocTemplateImpl)
+ useCaseOf: Option[MemberImpl], inTpl: DocTemplateImpl)
extends NonTemplateMemberImpl(sym, conversion, useCaseOf, inTpl) {
def valueParams = {
- val info = if (!isImplicitlyInherited) sym.info else conversion.get.toType memberInfo sym
+ val info = conversion.fold(sym.info)(_.toType memberInfo sym)
info.paramss map { ps => (ps.zipWithIndex) map { case (p, i) =>
if (p.nameString contains "$") makeValueParam(p, inTpl, optimize("arg" + i)) else makeValueParam(p, inTpl)
}}
@@ -666,7 +607,7 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
*/
def createTemplate(aSym: Symbol, inTpl: DocTemplateImpl): Option[MemberImpl] = {
// don't call this after the model finished!
- assert(!modelFinished)
+ assert(!modelFinished, (aSym, inTpl))
def createRootPackageComment: Option[Comment] =
if(settings.docRootContent.isDefault) None
@@ -674,7 +615,7 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
import Streamable._
Path(settings.docRootContent.value) match {
case f : File => {
- val rootComment = closing(f.inputStream)(is => parse(slurp(is), "", NoPosition, Option(inTpl)))
+ val rootComment = closing(f.inputStream())(is => parse(slurp(is), "", NoPosition, inTpl))
Some(rootComment)
}
case _ => None
@@ -682,7 +623,7 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
}
def createDocTemplate(bSym: Symbol, inTpl: DocTemplateImpl): DocTemplateImpl = {
- assert(!modelFinished) // only created BEFORE the model is finished
+ assert(!modelFinished, (bSym, inTpl)) // only created BEFORE the model is finished
if (bSym.isAliasType && bSym != AnyRefClass)
new DocTemplateImpl(bSym, inTpl) with AliasImpl with AliasType { override def isAliasType = true }
else if (bSym.isAbstractType)
@@ -713,7 +654,6 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
override def inTemplate = this
override def toRoot = this :: Nil
override def qualifiedName = "_root_"
- override def inheritedFrom = Nil
override def isRootPackage = true
override lazy val memberSyms =
(bSym.info.members ++ EmptyPackage.info.members).toList filter { s =>
@@ -783,17 +723,12 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
}
}
- /** Get the root package */
- def makeRootPackage: PackageImpl = docTemplatesCache(RootPackage).asInstanceOf[PackageImpl]
-
// TODO: Should be able to override the type
def makeMember(aSym: Symbol, conversion: Option[ImplicitConversionImpl], inTpl: DocTemplateImpl): List[MemberImpl] = {
def makeMember0(bSym: Symbol, useCaseOf: Option[MemberImpl]): Option[MemberImpl] = {
if (bSym.isGetter && bSym.isLazy)
Some(new NonTemplateMemberImpl(bSym, conversion, useCaseOf, inTpl) with Val {
- override lazy val comment = // The analyser does not duplicate the lazy val's DocDef when it introduces its accessor.
- thisFactory.comment(bSym.accessed, None, inTpl.asInstanceOf[DocTemplateImpl]) // This hack should be removed after analyser is fixed.
override def isLazyVal = true
})
else if (bSym.isGetter && bSym.accessed.isMutable)
@@ -860,16 +795,10 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
}
def findMember(aSym: Symbol, inTpl: DocTemplateImpl): Option[MemberImpl] = {
- val tplSym = normalizeTemplate(aSym.owner)
+ normalizeTemplate(aSym.owner)
inTpl.members.find(_.sym == aSym)
}
- @deprecated("Use `findLinkTarget` instead.", "2.10.0")
- def findTemplate(query: String): Option[DocTemplateImpl] = {
- assert(modelFinished)
- docTemplatesCache.values find { (tpl: DocTemplateImpl) => tpl.qualifiedName == query && !packageDropped(tpl) && !tpl.isObject }
- }
-
def findTemplateMaybe(aSym: Symbol): Option[DocTemplateImpl] = {
assert(modelFinished)
docTemplatesCache.get(normalizeTemplate(aSym)).filterNot(packageDropped(_))
@@ -880,20 +809,12 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
def makeTemplate(aSym: Symbol, inTpl: Option[TemplateImpl]): TemplateImpl = {
assert(modelFinished)
- def makeNoDocTemplate(aSym: Symbol, inTpl: TemplateImpl): NoDocTemplateImpl = {
- val bSym = normalizeTemplate(aSym)
- noDocTemplatesCache.get(bSym) match {
- case Some(noDocTpl) => noDocTpl
- case None => new NoDocTemplateImpl(bSym, inTpl)
- }
- }
+ def makeNoDocTemplate(aSym: Symbol, inTpl: TemplateImpl): NoDocTemplateImpl =
+ noDocTemplatesCache getOrElse (aSym, new NoDocTemplateImpl(aSym, inTpl))
- findTemplateMaybe(aSym) match {
- case Some(dtpl) =>
- dtpl
- case None =>
- val bSym = normalizeTemplate(aSym)
- makeNoDocTemplate(bSym, if (inTpl.isDefined) inTpl.get else makeTemplate(bSym.owner))
+ findTemplateMaybe(aSym) getOrElse {
+ val bSym = normalizeTemplate(aSym)
+ makeNoDocTemplate(bSym, inTpl getOrElse makeTemplate(bSym.owner))
}
}
@@ -983,7 +904,7 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
def ownerTpl(sym: Symbol): Symbol =
if (sym.isClass || sym.isModule || sym == NoSymbol) sym else ownerTpl(sym.owner)
val tpe =
- if (thisFactory.settings.useStupidTypes.value) aType else {
+ if (thisFactory.settings.useStupidTypes) aType else {
def ownerTpl(sym: Symbol): Symbol =
if (sym.isClass || sym.isModule || sym == NoSymbol) sym else ownerTpl(sym.owner)
val fixedSym = if (inTpl.sym.isModule) inTpl.sym.moduleClass else inTpl.sym
@@ -998,10 +919,10 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
val ignoreParents = Set[Symbol](AnyClass, AnyRefClass, ObjectClass)
val filtParents =
// we don't want to expose too many links to AnyRef, that will just be redundant information
- if (tpl.isDefined && { val sym = tpl.get.sym; (!sym.isModule && parents.length < 2) || (sym == AnyValClass) || (sym == AnyRefClass) || (sym == AnyClass) })
- parents
- else
- parents.filterNot((p: Type) => ignoreParents(p.typeSymbol))
+ tpl match {
+ case Some(tpl) if (!tpl.sym.isModule && parents.length < 2) || (tpl.sym == AnyValClass) || (tpl.sym == AnyRefClass) || (tpl.sym == AnyClass) => parents
+ case _ => parents.filterNot((p: Type) => ignoreParents(p.typeSymbol))
+ }
/** Returns:
* - a DocTemplate if the type's symbol is documented
@@ -1032,9 +953,9 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
}
def makeQualifiedName(sym: Symbol, relativeTo: Option[Symbol] = None): String = {
- val stop = if (relativeTo.isDefined) relativeTo.get.ownerChain.toSet else Set[Symbol]()
+ val stop = relativeTo map (_.ownerChain.toSet) getOrElse Set[Symbol]()
var sym1 = sym
- var path = new StringBuilder()
+ val path = new StringBuilder()
// var path = List[Symbol]()
while ((sym1 != NoSymbol) && (path.isEmpty || !stop(sym1))) {
@@ -1095,9 +1016,8 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
// whether or not to create a page for an {abstract,alias} type
def typeShouldDocument(bSym: Symbol, inTpl: DocTemplateImpl) =
- (settings.docExpandAllTypes.value && (bSym.sourceFile != null)) ||
+ (settings.docExpandAllTypes && (bSym.sourceFile != null)) ||
(bSym.isAliasType || bSym.isAbstractType) &&
{ val rawComment = global.expandedDocComment(bSym, inTpl.sym)
rawComment.contains("@template") || rawComment.contains("@documentable") }
}
-
diff --git a/src/compiler/scala/tools/nsc/doc/model/ModelFactoryImplicitSupport.scala b/src/scaladoc/scala/tools/nsc/doc/model/ModelFactoryImplicitSupport.scala
index f88251b22e..f984b4579f 100644
--- a/src/compiler/scala/tools/nsc/doc/model/ModelFactoryImplicitSupport.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/model/ModelFactoryImplicitSupport.scala
@@ -11,12 +11,7 @@ package doc
package model
import scala.collection._
-import scala.util.matching.Regex
-
import symtab.Flags
-import io._
-
-import model.{ RootPackage => RootPackageEntity }
/**
* This trait finds implicit conversions for a class in the default scope and creates scaladoc entries for each of them.
@@ -56,7 +51,6 @@ trait ModelFactoryImplicitSupport {
import global._
import global.analyzer._
import global.definitions._
- import rootMirror.{RootPackage, RootClass, EmptyPackage, EmptyPackageClass}
import settings.hardcoded
// debugging:
@@ -71,7 +65,7 @@ trait ModelFactoryImplicitSupport {
* class A[T]
* class B extends A[Int]
* class C extends A[String]
- * implicit def pimpA[T: Numeric](a: A[T]): D
+ * implicit def enrichA[T: Numeric](a: A[T]): D
* }}}
* For B, no constraints are generated as Numeric[Int] is already in the default scope. On the other hand, for the
* conversion from C to D, depending on -implicits-show-all, the conversion can:
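For reference, a compilable sketch of the scenario described in the comment above (the definition of D is assumed, since the comment leaves it abstract):

    object EnrichExample {
      import scala.language.implicitConversions
      class A[T]
      class B extends A[Int]
      class C extends A[String]
      class D
      // For B, the Numeric[Int] evidence is found in the default scope, so no constraint is shown.
      // For C, Numeric[String] is not available, so the conversion is dropped or annotated with
      // the missing constraint, depending on -implicits-show-all.
      implicit def enrichA[T: Numeric](a: A[T]): D = new D
    }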
@@ -94,9 +88,9 @@ trait ModelFactoryImplicitSupport {
// But we don't want that, so we'll simply refuse to find implicit conversions on for Nothing and Null
if (!(sym.isClass || sym.isTrait || sym == AnyRefClass) || sym == NothingClass || sym == NullClass) Nil
else {
- var context: global.analyzer.Context = global.analyzer.rootContext(NoCompilationUnit)
+ val context: global.analyzer.Context = global.analyzer.rootContext(NoCompilationUnit)
- val results = global.analyzer.allViewsFrom(sym.tpe, context, sym.typeParams)
+ val results = global.analyzer.allViewsFrom(sym.tpe_*, context, sym.typeParams)
var conversions = results.flatMap(result => makeImplicitConversion(sym, result._1, result._2, context, inTpl))
// also keep empty conversions, so they appear in diagrams
// conversions = conversions.filter(!_.members.isEmpty)
@@ -107,7 +101,7 @@ trait ModelFactoryImplicitSupport {
hardcoded.arraySkipConversions.contains(conv.conversionQualifiedName))
// Filter out non-sensical conversions from value types
- if (isPrimitiveValueType(sym.tpe))
+ if (isPrimitiveValueType(sym.tpe_*))
conversions = conversions.filter((ic: ImplicitConversionImpl) =>
hardcoded.valueClassFilter(sym.nameString, ic.conversionQualifiedName))
@@ -127,13 +121,13 @@ trait ModelFactoryImplicitSupport {
* What? in details:
* - say we start from a class A[T1, T2, T3, T4]
* - we have an implicit function (view) in scope:
- * def pimpA[T3 <: Long, T4](a: A[Int, Foo[Bar[X]], T3, T4])(implicit ev1: TypeTag[T4], ev2: Numeric[T4]): PimpedA
- * - A is converted to PimpedA ONLY if a couple of constraints are satisfied:
+ * def enrichA[T3 <: Long, T4](a: A[Int, Foo[Bar[X]], T3, T4])(implicit ev1: TypeTag[T4], ev2: Numeric[T4]): EnrichedA
+ * - A is converted to EnrichedA ONLY if a couple of constraints are satisfied:
* * T1 must be equal to Int
* * T2 must be equal to Foo[Bar[X]]
* * T3 must be upper bounded by Long
* * there must be evidence of Numeric[T4] and a TypeTag[T4] within scope
- * - the final type is PimpedA and A therefore inherits a couple of members from pimpedA
+ * - the final type is EnrichedA and A therefore inherits a couple of members from enrichA
*
* How?
* some notes:
@@ -176,7 +170,7 @@ trait ModelFactoryImplicitSupport {
val newContext = context.makeImplicit(context.ambiguousErrors)
newContext.macrosEnabled = false
val newTyper = global.analyzer.newTyper(newContext)
- newTyper.silent(_.typed(appliedTree, global.analyzer.EXPRmode, WildcardType), false) match {
+ newTyper.silent(_.typed(appliedTree), reportAmbiguousErrors = false) match {
case global.analyzer.SilentResultValue(t: Tree) => t
case global.analyzer.SilentTypeError(err) =>
@@ -232,12 +226,9 @@ trait ModelFactoryImplicitSupport {
// look for type variables in the type. If there are none, we can decide if the implicit is there or not
if (implType.isTrivial) {
try {
- context.flushBuffer() /* any errors here should not prevent future findings */
- // TODO: Not sure this is the right thing to do -- seems similar to what scalac should be doing
- val context2 = context.make(context.unit, context.tree, sym.owner, context.scope, context.imports)
- val search = inferImplicit(EmptyTree, tpe, false, false, context2, false)
- context.flushBuffer() /* any errors here should not prevent future findings */
-
+ // TODO: Not sure if `owner = sym.owner` is the right thing to do -- seems similar to what scalac should be doing
+ val silentContext = context.make(owner = sym.owner).makeSilent(reportAmbiguousErrors = false)
+ val search = inferImplicit(EmptyTree, tpe, false, false, silentContext, false)
available = Some(search.tree != EmptyTree)
} catch {
case _: TypeError =>
@@ -247,7 +238,7 @@ trait ModelFactoryImplicitSupport {
available match {
case Some(true) =>
Nil
- case Some(false) if (!settings.docImplicitsShowAll.value) =>
+ case Some(false) if !settings.docImplicitsShowAll =>
// if -implicits-show-all is not set, we get rid of impossible conversions (such as Numeric[String])
throw new ImplicitNotFound(implType)
case _ =>
@@ -294,7 +285,7 @@ trait ModelFactoryImplicitSupport {
(tparams zip constrs) flatMap {
case (tparam, constr) => {
uniteConstraints(constr) match {
- case (loBounds, upBounds) => (loBounds filter (_ != NothingClass.tpe), upBounds filter (_ != AnyClass.tpe)) match {
+ case (loBounds, upBounds) => (loBounds filter (_ != NothingTpe), upBounds filter (_ != AnyTpe)) match {
case (Nil, Nil) =>
Nil
case (List(lo), List(up)) if (lo == up) =>
@@ -320,7 +311,7 @@ trait ModelFactoryImplicitSupport {
})
case other =>
// this is likely an error on the lub/glb side
- error("Scaladoc implicits: Error computing lub/glb for: " + (tparam, constr) + ":\n" + other)
+ error("Scaladoc implicits: Error computing lub/glb for: " + ((tparam, constr)) + ":\n" + other)
Nil
}
}
@@ -341,21 +332,11 @@ trait ModelFactoryImplicitSupport {
def targetType: TypeEntity = makeType(toType, inTpl)
- def convertorOwner: TemplateEntity =
- if (convSym != NoSymbol)
- makeTemplate(convSym.owner)
- else {
+ def convertorOwner: TemplateEntity = {
+ if (convSym eq NoSymbol)
error("Scaladoc implicits: " + toString + " = NoSymbol!")
- makeRootPackage
- }
- def targetTemplate: Option[TemplateEntity] = toType match {
- // @Vlad: I'm being extra conservative in template creation -- I don't want to create templates for complex types
- // such as refinement types because the template can't represent the type corectly (a template corresponds to a
- // package, class, trait or object)
- case t: TypeRef => Some(makeTemplate(t.sym))
- case RefinedType(parents, decls) => None
- case _ => error("Scaladoc implicits: Could not create template for: " + toType + " of type " + toType.getClass); None
+ makeTemplate(convSym.owner)
}
def targetTypeComponents: List[(TemplateEntity, TypeEntity)] = makeParentTypes(toType, None, inTpl)
@@ -385,7 +366,6 @@ trait ModelFactoryImplicitSupport {
lazy val memberImpls: List[MemberImpl] = {
// Obtain the members inherited by the implicit conversion
val memberSyms = toType.members.filter(implicitShouldDocument(_)).toList
- val existingSyms = sym.info.members
// Debugging part :)
debug(sym.nameString + "\n" + "=" * sym.nameString.length())
@@ -422,66 +402,52 @@ trait ModelFactoryImplicitSupport {
/* ========================= HELPER METHODS ========================== */
/**
* Computes the shadowing table for all the members in the implicit conversions
- * @param mbrs All template's members, including usecases and full signature members
+ * @param members All template's members, including usecases and full signature members
* @param convs All the conversions the template takes part in
- * @param inTpl the ususal :)
+ * @param inTpl the usual :)
*/
- def makeShadowingTable(mbrs: List[MemberImpl],
+ def makeShadowingTable(members: List[MemberImpl],
convs: List[ImplicitConversionImpl],
inTpl: DocTemplateImpl): Map[MemberEntity, ImplicitMemberShadowing] = {
assert(modelFinished)
- var shadowingTable = Map[MemberEntity, ImplicitMemberShadowing]()
+ val shadowingTable = mutable.Map[MemberEntity, ImplicitMemberShadowing]()
+ val membersByName: Map[Name, List[MemberImpl]] = members.groupBy(_.sym.name)
+ val convsByMember = (Map.empty[MemberImpl, ImplicitConversionImpl] /: convs) {
+ case (map, conv) => map ++ conv.memberImpls.map (_ -> conv)
+ }
for (conv <- convs) {
- val otherConvs = convs.filterNot(_ == conv)
+ val otherConvMembers: Map[Name, List[MemberImpl]] = convs filterNot (_ == conv) flatMap (_.memberImpls) groupBy (_.sym.name)
for (member <- conv.memberImpls) {
- // for each member in our list
val sym1 = member.sym
val tpe1 = conv.toType.memberInfo(sym1)
- // check if it's shadowed by a member in the original class
- var shadowedBySyms: List[Symbol] = List()
- for (mbr <- mbrs) {
- val sym2 = mbr.sym
- if (sym1.name == sym2.name) {
- val shadowed = !settings.docImplicitsSoundShadowing.value || {
- val tpe2 = inTpl.sym.info.memberInfo(sym2)
- !isDistinguishableFrom(tpe1, tpe2)
- }
- if (shadowed)
- shadowedBySyms ::= sym2
- }
+ // check if it's shadowed by a member in the original class.
+ val shadowed = membersByName.get(sym1.name).toList.flatten filter { other =>
+ !settings.docImplicitsSoundShadowing.value || !isDistinguishableFrom(tpe1, inTpl.sym.info.memberInfo(other.sym))
}
- val shadowedByMembers = mbrs.filter((mb: MemberImpl) => shadowedBySyms.contains(mb.sym))
-
- // check if it's shadowed by another member
- var ambiguousByMembers: List[MemberEntity] = List()
- for (conv <- otherConvs)
- for (member2 <- conv.memberImpls) {
- val sym2 = member2.sym
- if (sym1.name == sym2.name) {
- val tpe2 = conv.toType.memberInfo(sym2)
- // Ambiguity should be an equivalence relation
- val ambiguated = !isDistinguishableFrom(tpe1, tpe2) || !isDistinguishableFrom(tpe2, tpe1)
- if (ambiguated)
- ambiguousByMembers ::= member2
- }
- }
+ // check if it's shadowed by another conversion.
+ val ambiguous = otherConvMembers.get(sym1.name).toList.flatten filter { other =>
+ val tpe2 = convsByMember(other).toType.memberInfo(other.sym)
+ !isDistinguishableFrom(tpe1, tpe2) || !isDistinguishableFrom(tpe2, tpe1)
+ }
// we finally have the shadowing info
- val shadowing = new ImplicitMemberShadowing {
- def shadowingMembers: List[MemberEntity] = shadowedByMembers
- def ambiguatingMembers: List[MemberEntity] = ambiguousByMembers
- }
+ if (!shadowed.isEmpty || !ambiguous.isEmpty) {
+ val shadowing = new ImplicitMemberShadowing {
+ def shadowingMembers: List[MemberEntity] = shadowed
+ def ambiguatingMembers: List[MemberEntity] = ambiguous
+ }
- shadowingTable += (member -> shadowing)
+ shadowingTable += (member -> shadowing)
+ }
}
}
- shadowingTable
+ shadowingTable.toMap
}
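A minimal illustration (not part of the patch; names are hypothetical) of the two cases the shadowing table records: a conversion-provided member hidden by a member of the original class, and a member made ambiguous by a second conversion:

    object ShadowingExample {
      import scala.language.implicitConversions
      class Wrapped { def size: Int = 0 }
      class RichA(w: Wrapped) { def size: Int = 1 }        // shadowed by Wrapped.size
      class RichB(w: Wrapped) { def pretty: String = "b" }
      class RichC(w: Wrapped) { def pretty: String = "c" } // pretty is ambiguous between RichB and RichC
      implicit def toRichA(w: Wrapped): RichA = new RichA(w)
      implicit def toRichB(w: Wrapped): RichB = new RichB(w)
      implicit def toRichC(w: Wrapped): RichC = new RichC(w)
    }

In the documentation for Wrapped, size coming from RichA would carry shadowing info, while pretty coming from RichB and RichC would carry ambiguation info.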
@@ -511,25 +477,25 @@ trait ModelFactoryImplicitSupport {
/**
* Make implicits explicit - Not used currently
*/
- object implicitToExplicit extends TypeMap {
- def apply(tp: Type): Type = mapOver(tp) match {
- case MethodType(params, resultType) =>
- MethodType(params.map(param => if (param.isImplicit) param.cloneSymbol.resetFlag(Flags.IMPLICIT) else param), resultType)
- case other =>
- other
- }
- }
+ // object implicitToExplicit extends TypeMap {
+ // def apply(tp: Type): Type = mapOver(tp) match {
+ // case MethodType(params, resultType) =>
+ // MethodType(params.map(param => if (param.isImplicit) param.cloneSymbol.resetFlag(Flags.IMPLICIT) else param), resultType)
+ // case other =>
+ // other
+ // }
+ // }
/**
* removeImplicitParameters transforms implicit parameters from the view result type into constraints and
* returns the simplified type of the view
*
* for the example view:
- * implicit def pimpMyClass[T](a: MyClass[T])(implicit ev: Numeric[T]): PimpedMyClass[T]
+ * implicit def enrichMyClass[T](a: MyClass[T])(implicit ev: Numeric[T]): EnrichedMyClass[T]
* the implicit view result type is:
- * (a: MyClass[T])(implicit ev: Numeric[T]): PimpedMyClass[T]
+ * (a: MyClass[T])(implicit ev: Numeric[T]): EnrichedMyClass[T]
* and the simplified type will be:
- * MyClass[T] => PimpedMyClass[T]
+ * MyClass[T] => EnrichedMyClass[T]
*/
def removeImplicitParameters(viewType: Type): (Type, List[Type]) = {
@@ -566,7 +532,7 @@ trait ModelFactoryImplicitSupport {
object wildcardToNothing extends TypeMap {
def apply(tp: Type): Type = mapOver(tp) match {
case WildcardType =>
- NothingClass.tpe
+ NothingTpe
case other =>
other
}
diff --git a/src/compiler/scala/tools/nsc/doc/model/ModelFactoryTypeSupport.scala b/src/scaladoc/scala/tools/nsc/doc/model/ModelFactoryTypeSupport.scala
index 844a509b7e..d5048dcfa3 100644
--- a/src/compiler/scala/tools/nsc/doc/model/ModelFactoryTypeSupport.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/model/ModelFactoryTypeSupport.scala
@@ -8,13 +8,6 @@ import base._
import diagram._
import scala.collection._
-import scala.util.matching.Regex
-
-import symtab.Flags
-
-import io._
-
-import model.{ RootPackage => RootPackageEntity }
/** This trait extracts all required information for documentation from compilation units */
trait ModelFactoryTypeSupport {
@@ -28,14 +21,11 @@ trait ModelFactoryTypeSupport {
import global._
import definitions.{ ObjectClass, NothingClass, AnyClass, AnyValClass, AnyRefClass }
- import rootMirror.{ RootPackage, RootClass, EmptyPackage }
protected val typeCache = new mutable.LinkedHashMap[Type, TypeEntity]
/** */
def makeType(aType: Type, inTpl: TemplateImpl): TypeEntity = {
- def templatePackage = closestPackage(inTpl.sym)
-
def createTypeEntity = new TypeEntity {
private var nameBuffer = new StringBuilder
private var refBuffer = new immutable.TreeMap[Int, (LinkTo, Int)]
@@ -150,7 +140,7 @@ trait ModelFactoryTypeSupport {
}
val prefix =
- if (!settings.docNoPrefixes.value && needsPrefix && (bSym != AnyRefClass /* which we normalize */)) {
+ if (!settings.docNoPrefixes && needsPrefix && (bSym != AnyRefClass /* which we normalize */)) {
if (!owner.isRefinementClass) {
val qName = makeQualifiedName(owner, Some(inTpl.sym))
if (qName != "") qName + "." else ""
@@ -234,7 +224,6 @@ trait ModelFactoryTypeSupport {
def appendClauses = {
nameBuffer append " forSome {"
var first = true
- val qset = quantified.toSet
for (sym <- quantified) {
if (!first) { nameBuffer append ", " } else first = false
if (sym.isSingletonExistential) {
@@ -319,7 +308,7 @@ trait ModelFactoryTypeSupport {
// SI-4360: Entity caching depends on both the type AND the template it's in, as the prefixes might change for the
// same type based on the template the type is shown in.
- if (settings.docNoPrefixes.value)
+ if (settings.docNoPrefixes)
typeCache.getOrElseUpdate(aType, createTypeEntity)
else createTypeEntity
}
diff --git a/src/compiler/scala/tools/nsc/doc/model/TreeEntity.scala b/src/scaladoc/scala/tools/nsc/doc/model/TreeEntity.scala
index 5b4ec4a40b..5b4ec4a40b 100644
--- a/src/compiler/scala/tools/nsc/doc/model/TreeEntity.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/model/TreeEntity.scala
diff --git a/src/compiler/scala/tools/nsc/doc/model/TreeFactory.scala b/src/scaladoc/scala/tools/nsc/doc/model/TreeFactory.scala
index fdad84d0bc..b381176b17 100755
--- a/src/compiler/scala/tools/nsc/doc/model/TreeFactory.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/model/TreeFactory.scala
@@ -21,7 +21,7 @@ trait TreeFactory { thisTreeFactory: ModelFactory with TreeFactory =>
def makeTree(rhs: Tree): TreeEntity = {
- var expr = new StringBuilder
+ val expr = new StringBuilder
var refs = new immutable.TreeMap[Int, (Entity, Int)] // start, (Entity to be linked to , end)
rhs.pos match {
@@ -39,7 +39,7 @@ trait TreeFactory { thisTreeFactory: ModelFactory with TreeFactory =>
* stores it in tree.refs with its position
*/
def makeLink(rhs: Tree){
- var start = pos.startOrPoint - firstIndex
+ val start = pos.startOrPoint - firstIndex
val end = pos.endOrPoint - firstIndex
if(start != end) {
var asym = rhs.symbol
diff --git a/src/compiler/scala/tools/nsc/doc/model/TypeEntity.scala b/src/scaladoc/scala/tools/nsc/doc/model/TypeEntity.scala
index cf5c1fb3fb..cf5c1fb3fb 100644
--- a/src/compiler/scala/tools/nsc/doc/model/TypeEntity.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/model/TypeEntity.scala
diff --git a/src/compiler/scala/tools/nsc/doc/model/ValueArgument.scala b/src/scaladoc/scala/tools/nsc/doc/model/ValueArgument.scala
index f712869a4b..f712869a4b 100644
--- a/src/compiler/scala/tools/nsc/doc/model/ValueArgument.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/model/ValueArgument.scala
diff --git a/src/compiler/scala/tools/nsc/doc/model/Visibility.scala b/src/scaladoc/scala/tools/nsc/doc/model/Visibility.scala
index 22580805aa..22580805aa 100644
--- a/src/compiler/scala/tools/nsc/doc/model/Visibility.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/model/Visibility.scala
diff --git a/src/compiler/scala/tools/nsc/doc/model/diagram/Diagram.scala b/src/scaladoc/scala/tools/nsc/doc/model/diagram/Diagram.scala
index c2aa1f17f3..150b293b81 100644
--- a/src/compiler/scala/tools/nsc/doc/model/diagram/Diagram.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/model/diagram/Diagram.scala
@@ -36,20 +36,12 @@ case class InheritanceDiagram(thisNode: ThisNode,
override def isInheritanceDiagram = true
lazy val depthInfo = new DepthInfo {
def maxDepth = 3
- def nodeDepth(node: Node) =
- if (node == thisNode) 1
- else if (superClasses.contains(node)) 0
- else if (subClasses.contains(node)) 2
- else if (incomingImplicits.contains(node) || outgoingImplicits.contains(node)) 1
- else -1
}
}
trait DepthInfo {
/** Gives the maximum depth */
def maxDepth: Int
- /** Gives the depth of any node in the diagram or -1 if the node is not in the diagram */
- def nodeDepth(node: Node): Int
}
abstract class Node {
@@ -142,5 +134,4 @@ class ContentDiagramDepth(pack: ContentDiagram) extends DepthInfo {
}
val maxDepth = _maxDepth
- def nodeDepth(node: Node) = _nodeDepth.getOrElse(node, -1)
-} \ No newline at end of file
+}
diff --git a/src/compiler/scala/tools/nsc/doc/model/diagram/DiagramDirectiveParser.scala b/src/scaladoc/scala/tools/nsc/doc/model/diagram/DiagramDirectiveParser.scala
index cd60865ce7..44d8886e4e 100644
--- a/src/compiler/scala/tools/nsc/doc/model/diagram/DiagramDirectiveParser.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/model/diagram/DiagramDirectiveParser.scala
@@ -6,9 +6,6 @@ import model._
import java.util.regex.{Pattern, Matcher}
import scala.util.matching.Regex
-// statistics
-import html.page.diagram.DiagramStats
-
/**
* This trait takes care of parsing @{inheritance, content}Diagram annotations
*
@@ -66,7 +63,7 @@ trait DiagramDirectiveParser {
NoDiagramAtAll
if (template.comment.isDefined)
- makeDiagramFilter(template, template.comment.get.inheritDiagram, defaultFilter, true)
+ makeDiagramFilter(template, template.comment.get.inheritDiagram, defaultFilter, isInheritanceDiagram = true)
else
defaultFilter
}
@@ -75,7 +72,7 @@ trait DiagramDirectiveParser {
def makeContentDiagramFilter(template: DocTemplateImpl): DiagramFilter = {
val defaultFilter = if (template.isPackage || template.isObject) FullDiagram else NoDiagramAtAll
if (template.comment.isDefined)
- makeDiagramFilter(template, template.comment.get.contentDiagram, defaultFilter, false)
+ makeDiagramFilter(template, template.comment.get.contentDiagram, defaultFilter, isInheritanceDiagram = false)
else
defaultFilter
}
@@ -153,7 +150,6 @@ trait DiagramDirectiveParser {
private val NodeSpecRegex = "\\\"[A-Za-z\\*][A-Za-z\\.\\*]*\\\""
private val NodeSpecPattern = Pattern.compile(NodeSpecRegex)
private val EdgeSpecRegex = "\\(" + NodeSpecRegex + "\\s*\\->\\s*" + NodeSpecRegex + "\\)"
- private val EdgeSpecPattern = Pattern.compile(NodeSpecRegex)
// And the composed regexes:
private val HideNodesRegex = new Regex("^hideNodes(\\s*" + NodeSpecRegex + ")+$")
private val HideEdgesRegex = new Regex("^hideEdges(\\s*" + EdgeSpecRegex + ")+$")
@@ -182,7 +178,7 @@ trait DiagramDirectiveParser {
def warning(message: String) = {
// we need the position from the package object (well, ideally its comment, but yeah ...)
val sym = if (template.sym.isPackage) template.sym.info.member(global.nme.PACKAGE) else template.sym
- assert((sym != global.NoSymbol) || (sym == global.definitions.RootPackage))
+ assert((sym != global.NoSymbol) || (sym == global.rootMirror.RootPackage))
global.reporter.warning(sym.pos, message)
}
@@ -252,7 +248,7 @@ trait DiagramDirectiveParser {
hideNodesFilter = hideNodesFilter0,
hideEdgesFilter = hideEdgesFilter0)
- if (settings.docDiagramsDebug.value && result != NoDiagramAtAll && result != FullDiagram)
+ if (settings.docDiagramsDebug && result != NoDiagramAtAll && result != FullDiagram)
settings.printMsg(template.kind + " " + template.qualifiedName + " filter: " + result)
tFilter += System.currentTimeMillis
diff --git a/src/compiler/scala/tools/nsc/doc/model/diagram/DiagramFactory.scala b/src/scaladoc/scala/tools/nsc/doc/model/diagram/DiagramFactory.scala
index cb54a739bf..87d7ece8f2 100644
--- a/src/compiler/scala/tools/nsc/doc/model/diagram/DiagramFactory.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/model/diagram/DiagramFactory.scala
@@ -3,7 +3,6 @@ package model
package diagram
import model._
-import scala.collection.mutable
// statistics
import html.page.diagram.DiagramStats
@@ -47,7 +46,7 @@ trait DiagramFactory extends DiagramDirectiveParser {
val thisNode = ThisNode(tpl.resultType, Some(tpl))(Some(tpl.qualifiedName + " (this " + tpl.kind + ")"))
// superclasses
- var superclasses: List[Node] =
+ val superclasses: List[Node] =
tpl.parentTypes.collect {
case p: (TemplateEntity, TypeEntity) if !classExcluded(p._1) => NormalNode(p._2, Some(p._1))()
}.reverse
diff --git a/src/partest/scala/tools/partest/ScaladocModelTest.scala b/src/scaladoc/scala/tools/partest/ScaladocModelTest.scala
index b9abff69d8..f0a9caac15 100644
--- a/src/partest/scala/tools/partest/ScaladocModelTest.scala
+++ b/src/scaladoc/scala/tools/partest/ScaladocModelTest.scala
@@ -5,11 +5,10 @@
package scala.tools.partest
-import scala.tools.partest._
-import java.io._
+import scala.tools.nsc
import scala.tools.nsc._
import scala.tools.nsc.util.CommandLineParser
-import scala.tools.nsc.doc.{Settings, DocFactory, Universe}
+import scala.tools.nsc.doc.{ DocFactory, Universe }
import scala.tools.nsc.doc.model._
import scala.tools.nsc.doc.model.diagram._
import scala.tools.nsc.doc.base.comment._
@@ -80,14 +79,14 @@ abstract class ScaladocModelTest extends DirectTest {
System.setErr(prevErr)
}
- private[this] var settings: Settings = null
+ private[this] var settings: doc.Settings = null
// create a new scaladoc compiler
private[this] def newDocFactory: DocFactory = {
- settings = new Settings(_ => ())
+ settings = new doc.Settings(_ => ())
settings.scaladocQuietRun = true // yaay, no more "model contains X documentable templates"!
val args = extraSettings + " " + scaladocSettings
- val command = new ScalaDoc.Command((CommandLineParser tokenize (args)), settings)
+ new ScalaDoc.Command((CommandLineParser tokenize (args)), settings) // side-effecting, I think
val docFact = new DocFactory(new ConsoleReporter(settings), settings)
docFact
}
@@ -98,7 +97,6 @@ abstract class ScaladocModelTest extends DirectTest {
// so we don't get the newSettings warning
override def isDebug = false
-
// finally, enable easy navigation inside the entities
object access {
diff --git a/src/scalap/decoder.properties b/src/scalap/decoder.properties
index 5fcbfaf0f1..961c60f48c 100644
--- a/src/scalap/decoder.properties
+++ b/src/scalap/decoder.properties
@@ -1,2 +1,2 @@
version.number=2.0.1
-copyright.string=(c) 2002-2011 LAMP/EPFL
+copyright.string=(c) 2002-2013 LAMP/EPFL
diff --git a/src/scalap/scala/tools/scalap/Arguments.scala b/src/scalap/scala/tools/scalap/Arguments.scala
index a151e3067e..9f139cb5ea 100644
--- a/src/scalap/scala/tools/scalap/Arguments.scala
+++ b/src/scalap/scala/tools/scalap/Arguments.scala
@@ -87,7 +87,7 @@ object Arguments {
i += 2
}
} else {
- var iter = prefixes.iterator
+ val iter = prefixes.iterator
val j = i
while ((i == j) && iter.hasNext) {
val prefix = iter.next
diff --git a/src/scalap/scala/tools/scalap/ByteArrayReader.scala b/src/scalap/scala/tools/scalap/ByteArrayReader.scala
index bb001623a8..9c72bdbf1e 100644
--- a/src/scalap/scala/tools/scalap/ByteArrayReader.scala
+++ b/src/scalap/scala/tools/scalap/ByteArrayReader.scala
@@ -6,7 +6,8 @@
*/
-package scala.tools.scalap
+package scala
+package tools.scalap
class ByteArrayReader(content: Array[Byte]) {
diff --git a/src/scalap/scala/tools/scalap/Classfile.scala b/src/scalap/scala/tools/scalap/Classfile.scala
index 8082b6befe..f62df285f9 100644
--- a/src/scalap/scala/tools/scalap/Classfile.scala
+++ b/src/scalap/scala/tools/scalap/Classfile.scala
@@ -32,7 +32,7 @@ class Classfile(in: ByteArrayReader) {
var attribs: List[Attribute] = Nil
var i = 0
while (i < n) {
- attribs = Attribute(in.nextChar, in.nextBytes(in.nextInt)) :: attribs
+ attribs = Attribute(in.nextChar.toInt, in.nextBytes(in.nextInt)) :: attribs
i = i + 1
}
attribs
@@ -43,7 +43,7 @@ class Classfile(in: ByteArrayReader) {
var members: List[Member] = Nil
var i = 0
while (i < n) {
- members = Member(field, in.nextChar, in.nextChar, in.nextChar, readAttribs) :: members
+ members = Member(field, in.nextChar.toInt, in.nextChar.toInt, in.nextChar.toInt, readAttribs) :: members
i = i + 1
}
members
@@ -54,7 +54,7 @@ class Classfile(in: ByteArrayReader) {
var intfs: List[Int] = Nil
var i = 0
while (i < n) {
- intfs = in.nextChar :: intfs
+ intfs = in.nextChar.toInt :: intfs
i = i + 1
}
intfs
@@ -81,7 +81,7 @@ class Classfile(in: ByteArrayReader) {
case object Empty extends PoolEntry(0) { }
val entries = {
- val pool = new Array[PoolEntry](in.nextChar)
+ val pool = new Array[PoolEntry](in.nextChar.toInt)
var i = 1
while (i < pool.length) {
val tag = in.nextByte
@@ -92,7 +92,7 @@ class Classfile(in: ByteArrayReader) {
pool(i) = Empty
}
else pool(i) = tag match {
- case CONSTANT_UTF8 => UTF8(in.nextUTF8(in.nextChar))
+ case CONSTANT_UTF8 => UTF8(in.nextUTF8(in.nextChar.toInt))
case CONSTANT_UNICODE => in.skip(in.nextChar) ; Empty
case CONSTANT_CLASS => ClassRef(in.nextChar)
case CONSTANT_STRING => StringConst(in.nextChar)
diff --git a/src/scalap/scala/tools/scalap/CodeWriter.scala b/src/scalap/scala/tools/scalap/CodeWriter.scala
index 8254c2dfce..168050096d 100644
--- a/src/scalap/scala/tools/scalap/CodeWriter.scala
+++ b/src/scalap/scala/tools/scalap/CodeWriter.scala
@@ -6,7 +6,8 @@
*/
-package scala.tools.scalap
+package scala
+package tools.scalap
import java.io._
@@ -97,9 +98,9 @@ class CodeWriter(writer: Writer) {
def print(value: Boolean): CodeWriter = print(String.valueOf(value))
- def print(value: Byte): CodeWriter = print(String.valueOf(value))
+ def print(value: Byte): CodeWriter = print(String.valueOf(value.toInt))
- def print(value: Short): CodeWriter = print(String.valueOf(value))
+ def print(value: Short): CodeWriter = print(String.valueOf(value.toInt))
def print(value: Char): CodeWriter = print(String.valueOf(value))
diff --git a/src/scalap/scala/tools/scalap/JavaWriter.scala b/src/scalap/scala/tools/scalap/JavaWriter.scala
index d64c54a0ea..772cf6eacd 100644
--- a/src/scalap/scala/tools/scalap/JavaWriter.scala
+++ b/src/scalap/scala/tools/scalap/JavaWriter.scala
@@ -17,16 +17,15 @@ class JavaWriter(classfile: Classfile, writer: Writer) extends CodeWriter(writer
def flagsToStr(clazz: Boolean, flags: Int): String = {
val buffer = new StringBuffer()
- var x: StringBuffer = buffer
if (((flags & 0x0007) == 0) &&
((flags & 0x0002) != 0))
- x = buffer.append("private ")
+ buffer.append("private ")
if ((flags & 0x0004) != 0)
- x = buffer.append("protected ")
+ buffer.append("protected ")
if ((flags & 0x0010) != 0)
- x = buffer.append("final ")
+ buffer.append("final ")
if ((flags & 0x0400) != 0)
- x = if (clazz) buffer.append("abstract ")
+ if (clazz) buffer.append("abstract ")
else buffer.append("/*deferred*/ ")
buffer.toString()
}
diff --git a/src/scalap/scala/tools/scalap/Main.scala b/src/scalap/scala/tools/scalap/Main.scala
index 90f8cb8d71..5da4227e53 100644
--- a/src/scalap/scala/tools/scalap/Main.scala
+++ b/src/scalap/scala/tools/scalap/Main.scala
@@ -5,7 +5,8 @@
**
*/
-package scala.tools.scalap
+package scala
+package tools.scalap
import java.io.{ PrintStream, OutputStreamWriter, ByteArrayOutputStream }
import scala.reflect.NameTransformer
diff --git a/src/scalap/scala/tools/scalap/MetaParser.scala b/src/scalap/scala/tools/scalap/MetaParser.scala
index 00678ab504..8b4ffb3efd 100644
--- a/src/scalap/scala/tools/scalap/MetaParser.scala
+++ b/src/scalap/scala/tools/scalap/MetaParser.scala
@@ -6,7 +6,8 @@
*/
-package scala.tools.scalap
+package scala
+package tools.scalap
import java.io._
import java.util._
diff --git a/src/scalap/scala/tools/scalap/scalax/rules/Rule.scala b/src/scalap/scala/tools/scalap/scalax/rules/Rule.scala
index 1500b81050..489a05ecd0 100644
--- a/src/scalap/scala/tools/scalap/scalax/rules/Rule.scala
+++ b/src/scalap/scala/tools/scalap/scalax/rules/Rule.scala
@@ -50,7 +50,7 @@ trait Rule[-In, +Out, +A, +X] extends (In => Result[Out, A, X]) {
lazy val choices = Rule.this :: other :: Nil
}
- def orError[In2 <: In] = this orElse(error[In2])
+ def orError[In2 <: In] = this orElse error[Any]
def |[In2 <: In, Out2 >: Out, A2 >: A, X2 >: X](other : => Rule[In2, Out2, A2, X2]) = orElse(other)
diff --git a/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ScalaSig.scala b/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ScalaSig.scala
index aa5acbb06d..fd70e0de35 100644
--- a/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ScalaSig.scala
+++ b/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ScalaSig.scala
@@ -167,57 +167,10 @@ object ScalaSigEntryParsers extends RulesWithState with MemoisableRules {
val symbolInfo = nameRef ~ symbolRef ~ nat ~ (symbolRef?) ~ ref ~ get ^~~~~~^ SymbolInfo
- def symHeader(key: Int) = (key -~ none | (key + 64) -~ nat)
+ def symHeader(key: Int): EntryParser[Any] = (key -~ none | (key + 64) -~ nat)
def symbolEntry(key : Int) = symHeader(key) -~ symbolInfo
- /***************************************************
- * Symbol table attribute format:
- * Symtab = nentries_Nat {Entry}
- * Entry = 1 TERMNAME len_Nat NameInfo
- * | 2 TYPENAME len_Nat NameInfo
- * | 3 NONEsym len_Nat
- * | 4 TYPEsym len_Nat SymbolInfo
- * | 5 ALIASsym len_Nat SymbolInfo
- * | 6 CLASSsym len_Nat SymbolInfo [thistype_Ref]
- * | 7 MODULEsym len_Nat SymbolInfo
- * | 8 VALsym len_Nat [defaultGetter_Ref /* no longer needed*/] SymbolInfo [alias_Ref]
- * | 9 EXTref len_Nat name_Ref [owner_Ref]
- * | 10 EXTMODCLASSref len_Nat name_Ref [owner_Ref]
- * | 11 NOtpe len_Nat
- * | 12 NOPREFIXtpe len_Nat
- * | 13 THIStpe len_Nat sym_Ref
- * | 14 SINGLEtpe len_Nat type_Ref sym_Ref
- * | 15 CONSTANTtpe len_Nat constant_Ref
- * | 16 TYPEREFtpe len_Nat type_Ref sym_Ref {targ_Ref}
- * | 17 TYPEBOUNDStpe len_Nat tpe_Ref tpe_Ref
- * | 18 REFINEDtpe len_Nat classsym_Ref {tpe_Ref}
- * | 19 CLASSINFOtpe len_Nat classsym_Ref {tpe_Ref}
- * | 20 METHODtpe len_Nat tpe_Ref {sym_Ref}
- * | 21 POLYTtpe len_Nat tpe_Ref {sym_Ref}
- * | 22 IMPLICITMETHODtpe len_Nat tpe_Ref {sym_Ref} /* no longer needed */
- * | 52 SUPERtpe len_Nat tpe_Ref tpe_Ref
- * | 24 LITERALunit len_Nat
- * | 25 LITERALboolean len_Nat value_Long
- * | 26 LITERALbyte len_Nat value_Long
- * | 27 LITERALshort len_Nat value_Long
- * | 28 LITERALchar len_Nat value_Long
- * | 29 LITERALint len_Nat value_Long
- * | 30 LITERALlong len_Nat value_Long
- * | 31 LITERALfloat len_Nat value_Long
- * | 32 LITERALdouble len_Nat value_Long
- * | 33 LITERALstring len_Nat name_Ref
- * | 34 LITERALnull len_Nat
- * | 35 LITERALclass len_Nat tpe_Ref
- * | 36 LITERALenum len_Nat sym_Ref
- * | 40 SYMANNOT len_Nat sym_Ref AnnotInfoBody
- * | 41 CHILDREN len_Nat sym_Ref {sym_Ref}
- * | 42 ANNOTATEDtpe len_Nat [sym_Ref /* no longer needed */] tpe_Ref {annotinfo_Ref}
- * | 43 ANNOTINFO len_Nat AnnotInfoBody
- * | 44 ANNOTARGARRAY len_Nat {constAnnotArg_Ref}
- * | 47 DEBRUIJNINDEXtpe len_Nat level_Nat index_Nat
- * | 48 EXISTENTIALtpe len_Nat type_Ref {symbol_Ref}
- */
val noSymbol = 3 -^ NoSymbol
val typeSymbol = symbolEntry(4) ^^ TypeSymbol as "typeSymbol"
val aliasSymbol = symbolEntry(5) ^^ AliasSymbol as "alias"
@@ -260,10 +213,9 @@ object ScalaSigEntryParsers extends RulesWithState with MemoisableRules {
22 -~ typeRef ~ (symbolRef*) ^~^ MethodType,
42 -~ typeRef ~ (attribTreeRef*) ^~^ AnnotatedType,
51 -~ typeRef ~ symbolRef ~ (attribTreeRef*) ^~~^ AnnotatedWithSelfType,
- 47 -~ typeLevel ~ typeIndex ^~^ DeBruijnIndexType,
48 -~ typeRef ~ (symbolRef*) ^~^ ExistentialType) as "type"
- lazy val literal = oneOf(
+ lazy val literal: EntryParser[Any] = oneOf(
24 -^ (()),
25 -~ longValue ^^ (_ != 0L),
26 -~ longValue ^^ (_.toByte),
diff --git a/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ScalaSigPrinter.scala b/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ScalaSigPrinter.scala
index cfe615a6d5..e5a4ff649e 100644
--- a/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ScalaSigPrinter.scala
+++ b/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ScalaSigPrinter.scala
@@ -70,7 +70,7 @@ class ScalaSigPrinter(stream: PrintStream, printPrivates: Boolean) {
}
def isCaseClassObject(o: ObjectSymbol): Boolean = {
- val TypeRefType(prefix, classSymbol: ClassSymbol, typeArgs) = o.infoType
+ val TypeRefType(_, classSymbol: ClassSymbol, _) = o.infoType
o.isFinal && (classSymbol.children.find(x => x.isCase && x.isInstanceOf[MethodSymbol]) match {
case Some(_) => true
case None => false
@@ -167,7 +167,7 @@ class ScalaSigPrinter(stream: PrintStream, printPrivates: Boolean) {
print("object ")
val poName = o.symbolInfo.owner.name
print(processName(poName))
- val TypeRefType(prefix, classSymbol: ClassSymbol, typeArgs) = o.infoType
+ val TypeRefType(_, classSymbol: ClassSymbol, _) = o.infoType
printType(classSymbol)
print(" {\n")
printChildren(level, classSymbol)
@@ -179,7 +179,7 @@ class ScalaSigPrinter(stream: PrintStream, printPrivates: Boolean) {
printModifiers(o)
print("object ")
print(processName(o.name))
- val TypeRefType(prefix, classSymbol: ClassSymbol, typeArgs) = o.infoType
+ val TypeRefType(_, classSymbol: ClassSymbol, _) = o.infoType
printType(classSymbol)
print(" {\n")
printChildren(level, classSymbol)
@@ -191,7 +191,7 @@ class ScalaSigPrinter(stream: PrintStream, printPrivates: Boolean) {
val j = str.indexOf("[")
if (j > 0) str = str.substring(0, j)
str = StringUtil.trimStart(str, "=> ")
- var i = str.lastIndexOf(".")
+ val i = str.lastIndexOf(".")
val res = if (i > 0) str.substring(i + 1) else str
if (res.length > 1) StringUtil.decapitalize(res.substring(0, 1)) else res.toLowerCase
})
@@ -381,7 +381,6 @@ class ScalaSigPrinter(stream: PrintStream, printPrivates: Boolean) {
toString(typeRef, sep)
}
case AnnotatedWithSelfType(typeRef, symbol, attribTreeRefs) => toString(typeRef, sep)
- //case DeBruijnIndexType(typeLevel, typeIndex) =>
case ExistentialType(typeRef, symbols) => {
val refs = symbols.map(toString _).filter(!_.startsWith("_")).map("type " + _)
toString(typeRef, sep) + (if (refs.size > 0) refs.mkString(" forSome {", "; ", "}") else "")
diff --git a/src/scalap/scala/tools/scalap/scalax/rules/scalasig/Type.scala b/src/scalap/scala/tools/scalap/scalax/rules/scalasig/Type.scala
index 543ddbe186..0444e701f2 100644
--- a/src/scalap/scala/tools/scalap/scalax/rules/scalasig/Type.scala
+++ b/src/scalap/scala/tools/scalap/scalax/rules/scalasig/Type.scala
@@ -22,5 +22,4 @@ case class PolyType(typeRef : Type, symbols : Seq[TypeSymbol]) extends Type
case class PolyTypeWithCons(typeRef : Type, symbols : Seq[TypeSymbol], cons: String) extends Type
case class AnnotatedType(typeRef : Type, attribTreeRefs : List[Int]) extends Type
case class AnnotatedWithSelfType(typeRef : Type, symbol : Symbol, attribTreeRefs : List[Int]) extends Type
-case class DeBruijnIndexType(typeLevel : Int, typeIndex : Int) extends Type
case class ExistentialType(typeRef : Type, symbols : Seq[Symbol]) extends Type
diff --git a/src/swing/scala/swing/Action.scala b/src/swing/scala/swing/Action.scala
index 8740f63e98..98b9fee2ee 100644
--- a/src/swing/scala/swing/Action.scala
+++ b/src/swing/scala/swing/Action.scala
@@ -35,7 +35,12 @@ object Action {
// TODO: we need an action cache
private var _action: Action = Action.NoAction
def action: Action = _action
- def action_=(a: Action) { _action = a; peer.setAction(a.peer) }
+ def action_=(a: Action) {
+ _action = a;
+
+ import scala.language.reflectiveCalls
+ peer.setAction(a.peer)
+ }
//1.6: def hideActionText: Boolean = peer.getHideActionText
//def hideActionText_=(b: Boolean) = peer.setHideActionText(b)
diff --git a/src/swing/scala/swing/Button.scala b/src/swing/scala/swing/Button.scala
index f10d49d804..0170727e3b 100644
--- a/src/swing/scala/swing/Button.scala
+++ b/src/swing/scala/swing/Button.scala
@@ -6,11 +6,8 @@
** |/ **
\* */
-
-
package scala.swing
-import event._
import javax.swing._
object Button {
diff --git a/src/swing/scala/swing/ButtonGroup.scala b/src/swing/scala/swing/ButtonGroup.scala
index 2075df7c92..0b04d20837 100644
--- a/src/swing/scala/swing/ButtonGroup.scala
+++ b/src/swing/scala/swing/ButtonGroup.scala
@@ -8,9 +8,7 @@
package scala.swing
-import event._
-import javax.swing.{AbstractButton => JAbstractButton,Icon}
-import scala.collection.{ mutable, immutable }
+import scala.collection.mutable
/**
* A button mutex. At most one of its associated buttons is selected
diff --git a/src/swing/scala/swing/ColorChooser.scala b/src/swing/scala/swing/ColorChooser.scala
new file mode 100644
index 0000000000..591e652f1c
--- /dev/null
+++ b/src/swing/scala/swing/ColorChooser.scala
@@ -0,0 +1,45 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2007-2013, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+
+
+package scala.swing
+
+import javax.swing.JColorChooser
+import event._
+
+/**
+ * Wrapper for JColorChooser. Publishes `ColorChanged` events when the color selection changes.
+ *
+ * @author andy@hicks.net
+ * @author Ingo Maier
+ * @see javax.swing.JColorChooser
+ */
+object ColorChooser {
+ def showDialog(parent: Component, title: String, color: Color): scala.Option[Color] = {
+ toOption[Color](javax.swing.JColorChooser.showDialog(parent.peer, title, color))
+ }
+}
+
+class ColorChooser(color0: Color) extends Component {
+ def this() = this(java.awt.Color.white)
+
+ override lazy val peer: JColorChooser = new JColorChooser(color0) with SuperMixin
+
+ peer.getSelectionModel.addChangeListener(new javax.swing.event.ChangeListener {
+ def stateChanged(e: javax.swing.event.ChangeEvent) {
+ publish(ColorChanged(ColorChooser.this, peer.getColor))
+ }
+ })
+
+ def color: Color = peer.getColor
+ def color_=(c: Color) = peer.setColor(c)
+
+ def dragEnabled: Boolean = peer.getDragEnabled
+ def dragEnabled_=(b: Boolean) = peer.setDragEnabled(b)
+} \ No newline at end of file
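A minimal usage sketch for the new wrapper (not part of the patch; it assumes the ColorChanged event added elsewhere in this change set):

    import scala.swing._
    import scala.swing.event.ColorChanged

    object ColorChooserDemo extends SimpleSwingApplication {
      def top = new MainFrame {
        title = "ColorChooser demo"
        val chooser = new ColorChooser(java.awt.Color.red)
        contents = chooser
        listenTo(chooser)
        reactions += {
          // The chooser publishes ColorChanged whenever its selection model changes.
          case _: ColorChanged => println("color is now " + chooser.color)
        }
      }
    }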
diff --git a/src/swing/scala/swing/ComboBox.scala b/src/swing/scala/swing/ComboBox.scala
index 5b70f6fda9..a075d22a7e 100644
--- a/src/swing/scala/swing/ComboBox.scala
+++ b/src/swing/scala/swing/ComboBox.scala
@@ -11,6 +11,7 @@ package scala.swing
import event._
import javax.swing.{JList, JComponent, JComboBox, JTextField, ComboBoxModel, AbstractListModel, ListCellRenderer}
import java.awt.event.ActionListener
+import scala.language.implicitConversions
object ComboBox {
/**
@@ -182,7 +183,7 @@ class ComboBox[A](items: Seq[A]) extends Component with Publisher {
* of the component to its own defaults _after_ the renderer has been
* configured. That's Swing's principle of most surprise.
*/
- def renderer: ListView.Renderer[A] = ListView.Renderer.wrap(peer.getRenderer)
+ def renderer: ListView.Renderer[A] = ListView.Renderer.wrap[A](peer.getRenderer)
def renderer_=(r: ListView.Renderer[A]) { peer.setRenderer(r.peer) }
/* XXX: currently not safe to expose:
diff --git a/src/swing/scala/swing/Component.scala b/src/swing/scala/swing/Component.scala
index b7dd856d09..3a47784562 100644
--- a/src/swing/scala/swing/Component.scala
+++ b/src/swing/scala/swing/Component.scala
@@ -83,9 +83,9 @@ abstract class Component extends UIElement {
* Used by certain layout managers, e.g., BoxLayout or OverlayLayout to
* align components relative to each other.
*/
- def xLayoutAlignment: Double = peer.getAlignmentX
+ def xLayoutAlignment: Double = peer.getAlignmentX.toDouble
def xLayoutAlignment_=(x: Double) = peer.setAlignmentX(x.toFloat)
- def yLayoutAlignment: Double = peer.getAlignmentY
+ def yLayoutAlignment: Double = peer.getAlignmentY.toDouble
def yLayoutAlignment_=(y: Double) = peer.setAlignmentY(y.toFloat)
def border: Border = peer.getBorder
diff --git a/src/swing/scala/swing/EditorPane.scala b/src/swing/scala/swing/EditorPane.scala
index b8c506daf0..9b1aab7874 100644
--- a/src/swing/scala/swing/EditorPane.scala
+++ b/src/swing/scala/swing/EditorPane.scala
@@ -6,13 +6,10 @@
** |/ **
\* */
-
package scala.swing
-import event._
import javax.swing._
import javax.swing.text._
-import java.awt.event._
/**
* A text component that allows multiline text input and display.
diff --git a/src/swing/scala/swing/Font.scala.disabled b/src/swing/scala/swing/Font.scala.disabled
deleted file mode 100644
index 9e21eb859c..0000000000
--- a/src/swing/scala/swing/Font.scala.disabled
+++ /dev/null
@@ -1,70 +0,0 @@
-package scala.swing
-
-/*object Font {
- def apply(fontFormat: Int, fontFile: java.io.File) = java.awt.Font.createFont(fontFormat, fontFile)
- def apply(fontFormat: Int, fontStream: java.io.InputStream) = java.awt.Font.createFont(fontFormat, fontStream)
- def decode(str: String) = java.awt.Font.decode(str)
-
- /* TODO: finish implementation
- /**
- * See [java.awt.Font.getFont].
- */
- def get(attributes: Map[_ <: java.text.AttributedCharacterIterator.Attribute, _]) =
- java.awt.Font.getFont(ImmutableMapWrapper(attributes))
-
- import java.{util => ju}
- private case class ImmutableMapWrapper[A, B](underlying : Map[A, B])(t : ClassTag[A]) extends ju.AbstractMap[A, B] {
- self =>
- override def size = underlying.size
-
- override def put(k : A, v : B) =
- throw new UnsupportedOperationException("This is a wrapper that does not support mutation")
- override def remove(k : AnyRef) =
- throw new UnsupportedOperationException("This is a wrapper that does not support mutation")
-
- override def entrySet : ju.Set[ju.Map.Entry[A, B]] = new ju.AbstractSet[ju.Map.Entry[A, B]] {
- def size = self.size
-
- def iterator = new ju.Iterator[ju.Map.Entry[A, B]] {
- val ui = underlying.iterator
- var prev : Option[A] = None
-
- def hasNext = ui.hasNext
-
- def next = {
- val (k, v) = ui.next
- prev = Some(k)
- new ju.Map.Entry[A, B] {
- def getKey = k
- def getValue = v
- def setValue(v1 : B) = self.put(k, v1)
- override def equals(other : Any) = other match {
- case e : ju.Map.Entry[_, _] => k == e.getKey && v == e.getValue
- case _ => false
- }
- }
- }
-
- def remove = prev match {
- case Some(k) => val v = self.remove(k.asInstanceOf[AnyRef]) ; prev = None ; v
- case _ => throw new IllegalStateException("next must be called at least once before remove")
- }
- }
- }
- }
- */
-
- /**
- * See [java.awt.Font.getFont].
- */
- def get(nm: String) = java.awt.Font.getFont(nm)
- /**
- * See [java.awt.Font.getFont].
- */
- def get(nm: String, font: Font) = java.awt.Font.getFont(nm, font)
-
- def Insets(x: Int, y: Int, width: Int, height: Int) = new Insets(x, y, width, height)
- def Rectangle(x: Int, y: Int, width: Int, height: Int) = new Insets(x, y, width, height)
- def Point(x: Int, y: Int) = new Point(x, y)
- def Dimension(x: Int, y: Int) = new Dimension(x, y)
-}*/ \ No newline at end of file
diff --git a/src/swing/scala/swing/FormattedTextField.scala b/src/swing/scala/swing/FormattedTextField.scala
index 311ff42d0a..b08075850c 100644
--- a/src/swing/scala/swing/FormattedTextField.scala
+++ b/src/swing/scala/swing/FormattedTextField.scala
@@ -6,13 +6,9 @@
** |/ **
\* */
-
-
package scala.swing
-import event._
import javax.swing._
-import java.awt.event._
object FormattedTextField {
/**
diff --git a/src/swing/scala/swing/GridBagPanel.scala b/src/swing/scala/swing/GridBagPanel.scala
index 7d181af4d8..c58d398737 100644
--- a/src/swing/scala/swing/GridBagPanel.scala
+++ b/src/swing/scala/swing/GridBagPanel.scala
@@ -9,6 +9,7 @@
package scala.swing
import java.awt.{GridBagConstraints, GridBagLayout}
+import scala.language.implicitConversions
object GridBagPanel {
object Fill extends Enumeration {
diff --git a/src/swing/scala/swing/ListView.scala b/src/swing/scala/swing/ListView.scala
index 40639aa9e2..d0c4e45190 100644
--- a/src/swing/scala/swing/ListView.scala
+++ b/src/swing/scala/swing/ListView.scala
@@ -216,7 +216,7 @@ class ListView[A] extends Component {
def adjusting = peer.getSelectionModel.getValueIsAdjusting
}
- def renderer: ListView.Renderer[A] = ListView.Renderer.wrap(peer.getCellRenderer)
+ def renderer: ListView.Renderer[A] = ListView.Renderer.wrap[A](peer.getCellRenderer)
def renderer_=(r: ListView.Renderer[A]) { peer.setCellRenderer(r.peer) }
def fixedCellWidth = peer.getFixedCellWidth
diff --git a/src/swing/scala/swing/MainFrame.scala b/src/swing/scala/swing/MainFrame.scala
index 85ce0755ac..1dfc155f9c 100644
--- a/src/swing/scala/swing/MainFrame.scala
+++ b/src/swing/scala/swing/MainFrame.scala
@@ -6,12 +6,8 @@
** |/ **
\* */
-
-
package scala.swing
-import event._
-
/**
* A frame that can be used for main application windows. Shuts down the
* framework and quits the application when closed.
diff --git a/src/swing/scala/swing/Orientable.scala b/src/swing/scala/swing/Orientable.scala
index a73bafb9d3..db7cf09f27 100644
--- a/src/swing/scala/swing/Orientable.scala
+++ b/src/swing/scala/swing/Orientable.scala
@@ -12,7 +12,10 @@ package scala.swing
object Orientable {
trait Wrapper extends Oriented.Wrapper with Orientable {
- def orientation_=(o: Orientation.Value) { peer.setOrientation(o.id) }
+ def orientation_=(o: Orientation.Value) {
+ import scala.language.reflectiveCalls
+ peer.setOrientation(o.id)
+ }
}
}
diff --git a/src/swing/scala/swing/Oriented.scala b/src/swing/scala/swing/Oriented.scala
index 7996d21898..72d7d574e2 100644
--- a/src/swing/scala/swing/Oriented.scala
+++ b/src/swing/scala/swing/Oriented.scala
@@ -22,6 +22,7 @@ object Oriented {
def getOrientation(): Int
def setOrientation(n: Int)
}
+ import scala.language.reflectiveCalls
def orientation: Orientation.Value = Orientation(peer.getOrientation)
}
}
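The new import is needed because peer here is a structural type, and calling a method on a structural type goes through reflection; a small standalone sketch of the same pattern:

    import scala.language.reflectiveCalls

    object StructuralCallSketch {
      type HasOrientation = { def getOrientation(): Int }
      // This call compiles to a reflective invocation, which is why the
      // reflectiveCalls feature import (or the corresponding -language flag) is required.
      def orientationOf(peer: HasOrientation): Int = peer.getOrientation()
    }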
diff --git a/src/swing/scala/swing/PasswordField.scala b/src/swing/scala/swing/PasswordField.scala
index d2fdd0d38a..fd0b586a0f 100644
--- a/src/swing/scala/swing/PasswordField.scala
+++ b/src/swing/scala/swing/PasswordField.scala
@@ -6,13 +6,9 @@
** |/ **
\* */
-
-
package scala.swing
-import event._
import javax.swing._
-import java.awt.event._
/**
* A password field, that displays a replacement character for each character in the password.
diff --git a/src/swing/scala/swing/PopupMenu.scala b/src/swing/scala/swing/PopupMenu.scala
new file mode 100644
index 0000000000..e82c3a1b28
--- /dev/null
+++ b/src/swing/scala/swing/PopupMenu.scala
@@ -0,0 +1,65 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2007-2013, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+
+
+package scala.swing
+
+import javax.swing.JPopupMenu
+import javax.swing.event.{PopupMenuListener, PopupMenuEvent}
+import event._
+
+/**
+ * A popup menu.
+ *
+ * Example usage:
+ *
+ * {{{
+ * val popupMenu = new PopupMenu {
+ * contents += new Menu("menu 1") {
+ * contents += new RadioMenuItem("radio 1.1")
+ * contents += new RadioMenuItem("radio 1.2")
+ * }
+ * contents += new Menu("menu 2") {
+ * contents += new RadioMenuItem("radio 2.1")
+ * contents += new RadioMenuItem("radio 2.2")
+ * }
+ * }
+ * val button = new Button("Show Popup Menu")
+ * reactions += {
+ * case e: ButtonClicked => popupMenu.show(button, 0, button.bounds.height)
+ * }
+ * listenTo(button)
+ * }}}
+ *
+ * @author John Sullivan
+ * @author Ingo Maier
+ * @see javax.swing.JPopupMenu
+ */
+class PopupMenu extends Component with SequentialContainer.Wrapper with Publisher {
+ override lazy val peer: JPopupMenu = new JPopupMenu with SuperMixin
+
+ peer.addPopupMenuListener(new PopupMenuListener {
+ def popupMenuCanceled(e: PopupMenuEvent) {
+ publish(PopupMenuCanceled(PopupMenu.this))
+ }
+ def popupMenuWillBecomeInvisible(e: PopupMenuEvent) {
+ publish(PopupMenuWillBecomeInvisible(PopupMenu.this))
+ }
+ def popupMenuWillBecomeVisible(e: PopupMenuEvent) {
+ publish(PopupMenuWillBecomeVisible(PopupMenu.this))
+ }
+ })
+
+ def show(invoker: Component, x: Int, y: Int): Unit = peer.show(invoker.peer, x, y)
+
+ def margin: Insets = peer.getMargin
+ def label: String = peer.getLabel
+ def label_=(s: String) { peer.setLabel(s) }
+}
+
diff --git a/src/swing/scala/swing/ProgressBar.scala b/src/swing/scala/swing/ProgressBar.scala
index 33dd716524..81e2989c3e 100644
--- a/src/swing/scala/swing/ProgressBar.scala
+++ b/src/swing/scala/swing/ProgressBar.scala
@@ -6,12 +6,8 @@
** |/ **
\* */
-
-
package scala.swing
-import event._
-
/**
* A bar indicating progress of some action. Can be in indeterminate mode,
* in which it indicates that the action is in progress (usually by some
diff --git a/src/swing/scala/swing/Reactions.scala b/src/swing/scala/swing/Reactions.scala
index d8a62aa99d..c32212cf3a 100644
--- a/src/swing/scala/swing/Reactions.scala
+++ b/src/swing/scala/swing/Reactions.scala
@@ -14,8 +14,6 @@ import event.Event
import scala.collection.mutable.{Buffer, ListBuffer}
object Reactions {
- import scala.ref._
-
class Impl extends Reactions {
private val parts: Buffer[Reaction] = new ListBuffer[Reaction]
def isDefinedAt(e: Event) = parts.exists(_ isDefinedAt e)
diff --git a/src/swing/scala/swing/Slider.scala b/src/swing/scala/swing/Slider.scala
index e329c31a01..241876587b 100644
--- a/src/swing/scala/swing/Slider.scala
+++ b/src/swing/scala/swing/Slider.scala
@@ -53,7 +53,7 @@ class Slider extends Component with Orientable.Wrapper with Publisher {
def labels: scala.collection.Map[Int, Label] = {
val labelTable = peer.getLabelTable.asInstanceOf[java.util.Hashtable[Int, JLabel]]
- new scala.collection.JavaConversions.JMapWrapper(labelTable)
+ new scala.collection.convert.Wrappers.JMapWrapper(labelTable)
.mapValues(v => UIElement.cachedWrapper[Label](v))
}
def labels_=(l: scala.collection.Map[Int, Label]) {
diff --git a/src/swing/scala/swing/SplitPane.scala b/src/swing/scala/swing/SplitPane.scala
index dd4f2908d5..f61dfedbf4 100644
--- a/src/swing/scala/swing/SplitPane.scala
+++ b/src/swing/scala/swing/SplitPane.scala
@@ -6,11 +6,8 @@
** |/ **
\* */
-
-
package scala.swing
-import event._
import Swing._
/**
diff --git a/src/swing/scala/swing/Swing.scala b/src/swing/scala/swing/Swing.scala
index cd5bbf2c4f..e2c8479354 100644
--- a/src/swing/scala/swing/Swing.scala
+++ b/src/swing/scala/swing/Swing.scala
@@ -14,6 +14,7 @@ import java.awt.event._
import javax.swing.event._
import javax.swing.border._
import javax.swing.{JComponent, Icon, BorderFactory, SwingUtilities}
+import scala.language.implicitConversions
/**
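Note: Swing.scala defines implicit conversions, and with the package-wide `implicit lazy val implicitConversions` removed from package.scala at the end of this diff, each file now has to enable the SIP-18 feature itself. A minimal illustration with a hypothetical conversion of its own:

    import scala.language.implicitConversions

    object ConversionSketch {
      // Without the language import above, defining an implicit conversion
      // produces a feature warning (an error under -Xfatal-warnings).
      implicit def sideToDimension(side: Int): java.awt.Dimension =
        new java.awt.Dimension(side, side)
    }
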
diff --git a/src/swing/scala/swing/SwingWorker.scala b/src/swing/scala/swing/SwingWorker.scala
deleted file mode 100644
index f4eeb5824a..0000000000
--- a/src/swing/scala/swing/SwingWorker.scala
+++ /dev/null
@@ -1,23 +0,0 @@
-package scala.swing
-
-import scala.actors._
-
-@deprecated("Will be removed in 2.11.0", "2.10.1")
-object SwingWorker {
-
-}
-
-@deprecated("Depends on the deprecated package scala.actors. Will be removed in 2.11.0", "2.10.1")
-abstract class SwingWorker extends Actor {
- def queue() {
-
- }
-
- def done() {
-
- }
-
- private var _cancelled = false
- def cancelled: Boolean = _cancelled
- def cancelled_=(b: Boolean) { _cancelled = b }
-}
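Note: the actors-based SwingWorker is deleted here without a scala.swing replacement, so code that depended on it has to move elsewhere. One option, not provided by this commit, is the standard javax.swing.SwingWorker:

    import javax.swing.SwingWorker

    object BackgroundSketch {
      def main(args: Array[String]): Unit = {
        val worker = new SwingWorker[Int, Unit] {
          // runs on a background worker thread
          override def doInBackground(): Int = (1 to 100).sum
          // runs on the event dispatch thread after doInBackground completes
          override def done(): Unit = println("result: " + get())
        }
        worker.execute()
      }
    }
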
diff --git a/src/swing/scala/swing/TabbedPane.scala b/src/swing/scala/swing/TabbedPane.scala
index 338050515a..6e46256f86 100644
--- a/src/swing/scala/swing/TabbedPane.scala
+++ b/src/swing/scala/swing/TabbedPane.scala
@@ -112,9 +112,6 @@ class TabbedPane extends Component with Publisher {
*/
def tabPlacement_=(b: Alignment.Value) { peer.setTabPlacement(b.id) }
- @deprecated("Use tabPlacement_=() instead.", "2.9.1")
- def tabPlacement(b: Alignment.Value) { peer.setTabPlacement(b.id) }
-
/**
* The current page selection
*/
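Note: with the deprecated overload removed, tab placement is only set through the property syntax. A small sketch (the pane and its pages are made up for illustration):

    import scala.swing._

    object TabPlacementSketch {
      val pane = new TabbedPane {
        pages += new TabbedPane.Page("First", new Label("first tab"))
        pages += new TabbedPane.Page("Second", new Label("second tab"))
        tabPlacement = Alignment.Bottom  // property assignment; the removed tabPlacement(...) overload is gone
      }
    }
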
diff --git a/src/swing/scala/swing/TextArea.scala b/src/swing/scala/swing/TextArea.scala
index 01bf115d28..2f6bdca119 100644
--- a/src/swing/scala/swing/TextArea.scala
+++ b/src/swing/scala/swing/TextArea.scala
@@ -6,13 +6,9 @@
** |/ **
\* */
-
-
package scala.swing
-import event._
import javax.swing._
-import java.awt.event._
/**
* A text component that allows multiline text input and display.
diff --git a/src/swing/scala/swing/TextComponent.scala b/src/swing/scala/swing/TextComponent.scala
index 48c03a5f54..4d23399737 100644
--- a/src/swing/scala/swing/TextComponent.scala
+++ b/src/swing/scala/swing/TextComponent.scala
@@ -6,12 +6,9 @@
** |/ **
\* */
-
-
package scala.swing
import event._
-import javax.swing._
import javax.swing.text._
import javax.swing.event._
diff --git a/src/swing/scala/swing/ToggleButton.scala b/src/swing/scala/swing/ToggleButton.scala
index 3d3d0b957f..8f210d00d8 100644
--- a/src/swing/scala/swing/ToggleButton.scala
+++ b/src/swing/scala/swing/ToggleButton.scala
@@ -6,11 +6,8 @@
** |/ **
\* */
-
-
package scala.swing
-import event._
import javax.swing._
/**
diff --git a/src/swing/scala/swing/Window.scala b/src/swing/scala/swing/Window.scala
index 5bdb50e959..a9f4ae7538 100644
--- a/src/swing/scala/swing/Window.scala
+++ b/src/swing/scala/swing/Window.scala
@@ -6,13 +6,10 @@
** |/ **
\* */
-
-
package scala.swing
import java.awt.{Window => AWTWindow}
import event._
-import javax.swing._
/**
* A window with decoration such as a title, border, and action buttons.
diff --git a/src/swing/scala/swing/SwingActor.scala b/src/swing/scala/swing/event/ColorChanged.scala
index 035e979bee..6cb35e7755 100644
--- a/src/swing/scala/swing/SwingActor.scala
+++ b/src/swing/scala/swing/event/ColorChanged.scala
@@ -9,7 +9,6 @@
package scala.swing
+package event
-// Dummy to keep ant from recompiling on every run.
-@deprecated("Will be removed in 2.11.0", "2.10.1")
-trait SwingActor { }
+case class ColorChanged(source: Component, c: Color) extends ComponentEvent with SelectionEvent
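Note: ColorChanged replaces the dummy SwingActor file with a real event carrying the source component and the new java.awt.Color. A hedged sketch of publishing and reacting to it (the picker component is hypothetical):

    import java.awt.Color
    import scala.swing._
    import scala.swing.event.{ButtonClicked, ColorChanged}

    // Hypothetical publisher: fires ColorChanged whenever its button is clicked.
    class RedPicker extends Button("Pick red") {
      listenTo(this)
      reactions += {
        case ButtonClicked(_) => publish(ColorChanged(this, Color.RED))
      }
    }

    object ColorSketch extends SimpleSwingApplication {
      val picker = new RedPicker
      def top = new MainFrame {
        contents = picker
        listenTo(picker)
        reactions += {
          case ColorChanged(_, c) => println("new color: " + c)
        }
      }
    }
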
diff --git a/src/library/scala/SpecializableCompanion.scala b/src/swing/scala/swing/event/PopupMenuEvent.scala
index 1a9ce71d2a..ad39eb351a 100644
--- a/src/library/scala/SpecializableCompanion.scala
+++ b/src/swing/scala/swing/event/PopupMenuEvent.scala
@@ -1,14 +1,18 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2007-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-package scala
-/** A common supertype for companion classes which specialization takes into account.
- */
-@deprecated("Use Specializable instead", "2.10.0")
-private[scala] trait SpecializableCompanion
+
+package scala.swing
+package event
+
+abstract class PopupMenuEvent extends ComponentEvent
+
+case class PopupMenuCanceled(source: PopupMenu) extends PopupMenuEvent
+case class PopupMenuWillBecomeInvisible(source: PopupMenu) extends PopupMenuEvent
+case class PopupMenuWillBecomeVisible(source: PopupMenu) extends PopupMenuEvent
\ No newline at end of file
diff --git a/src/swing/scala/swing/package.scala b/src/swing/scala/swing/package.scala
index 45497665d7..d5095f021b 100644
--- a/src/swing/scala/swing/package.scala
+++ b/src/swing/scala/swing/package.scala
@@ -14,9 +14,6 @@ package object swing {
type Image = java.awt.Image
type Font = java.awt.Font
- implicit lazy val reflectiveCalls = scala.language.reflectiveCalls
- implicit lazy val implicitConversions = scala.language.implicitConversions
-
private[swing] def ifNull[A](o: Object, a: A): A = if(o eq null) a else o.asInstanceOf[A]
private[swing] def toOption[A](o: Object): Option[A] = if(o eq null) None else Some(o.asInstanceOf[A])
private[swing] def toAnyRef(x: Any): AnyRef = x.asInstanceOf[AnyRef]
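Note: the two language-feature vals dropped here are what the per-file import added to Swing.scala above compensates for; the null-handling helpers stay. For reference, a standalone restatement of what those helpers do (the demo object is made up; the real ones are private[swing]):

    object NullHelpersSketch {
      // Same logic as the private[swing] helpers kept in package.scala above.
      def ifNull[A](o: Object, a: A): A = if (o eq null) a else o.asInstanceOf[A]
      def toOption[A](o: Object): Option[A] = if (o eq null) None else Some(o.asInstanceOf[A])

      def main(args: Array[String]): Unit = {
        println(ifNull[String](null, "fallback"))  // prints "fallback"
        println(toOption[String]("hello"))         // prints Some(hello)
        println(toOption[String](null))            // prints None
      }
    }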